repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
rene-anderes/edu | dbunitburner-maven-plugin/src/main/java/org/anderes/plugin/MojoJsonDataFileLoader.java | 1126 | package org.anderes.plugin;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.anderes.edu.dbunitburner.JsonDataFileLoader;
import org.dbunit.DatabaseUnitRuntimeException;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.DefaultDataSet;
import org.dbunit.dataset.IDataSet;
/**
 * Data file loader that resolves a DbUnit JSON data set from a local
 * file-system path (rather than from the classpath).
 */
public class MojoJsonDataFileLoader extends JsonDataFileLoader {

    /**
     * Loads the JSON data set at {@code filename} and applies replacement
     * tokens to it.
     *
     * @param filename file-system path of the JSON data set file
     * @return the loaded, token-processed data set
     * @throws DatabaseUnitRuntimeException if the file cannot be read or parsed
     */
    @Override
    public IDataSet load(String filename) throws DatabaseUnitRuntimeException {
        // Note: the original pre-initialized a throwaway DefaultDataSet that
        // was always either overwritten or abandoned; removed as dead code.
        final Path jsonFile = Paths.get(filename);
        try {
            final URL url = jsonFile.toUri().toURL();
            final IDataSet dataSet = loadDataSet(url);
            return processReplacementTokens(dataSet);
        } catch (DataSetException | IOException e) {
            final String msg = String.format("DataSetException occurred loading data set file name='%s', msg='%s'", filename, e.getLocalizedMessage());
            throw new DatabaseUnitRuntimeException(msg, e);
        }
    }
}
| apache-2.0 |
dexter/elianto | src/main/java/it/cnr/isti/hpc/dexter/annotate/bean/EntityCandidates.java | 2917 | /**
* Copyright 2014 Diego Ceccarelli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Copyright 2014 Diego Ceccarelli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.cnr.isti.hpc.dexter.annotate.bean;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* @author Diego Ceccarelli <diego.ceccarelli@isti.cnr.it>
*
* Created on Jan 2, 2014
*/
public class EntityCandidates implements Serializable {
public List<EntityAnnotation> candidates = new ArrayList<EntityAnnotation>();
public EntityCandidates() {
}
public void add(int entity, int score) {
EntityAnnotation ea = new EntityAnnotation(entity, score);
if (candidates.contains(ea)) {
candidates.remove(ea);
}
candidates.add(ea);
}
public List<EntityAnnotation> getCandidates() {
return candidates;
}
public void setCandidates(List<EntityAnnotation> candidates) {
this.candidates = candidates;
}
public static class EntityAnnotation implements Serializable {
int entity;
int score;
public EntityAnnotation(int entity, int score) {
this.entity = entity;
this.score = score;
}
@Override
public String toString() {
return "EntityAnnotation [entity=" + entity + ", score=" + score
+ "]";
}
public int getEntity() {
return entity;
}
public void setEntity(int entity) {
this.entity = entity;
}
public int getScore() {
return score;
}
public void setScore(int score) {
this.score = score;
}
@Override
public int hashCode() {
return entity;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
EntityAnnotation other = (EntityAnnotation) obj;
if (entity != other.entity)
return false;
return true;
}
}
}
| apache-2.0 |
andreipetrov/Java | mantis-tests/src/test/java/lect8/stqa/pft/mantis/appmanager/RegistrationHelper.java | 771 | package lect8.stqa.pft.mantis.appmanager;
import org.openqa.selenium.By;
/**
 * Page-object helper for the Mantis user sign-up flow: submits the signup
 * form, then completes registration via the emailed confirmation link.
 *
 * Created by andre on 16.04.2016.
 */
public class RegistrationHelper extends HelperBase {

    public RegistrationHelper(ApplicationManager app) {
        super(app);
        // reuse the WebDriver instance managed by the application manager
        wd = app.getDriver();
    }

    /**
     * Opens the signup page and submits the registration form.
     *
     * @param username login name for the new account
     * @param email address the confirmation link will be sent to
     */
    public void start(String username, String email) {
        wd.get(app.getProperty("web.baseUrl") + "signup_page.php");
        type(By.name("username"), username);
        type(By.name("email"), email);
        click(By.cssSelector("input[value='Signup']"));
    }

    /**
     * Follows the emailed confirmation link and sets the account password,
     * completing the registration.
     *
     * @param confirmationLink URL extracted from the confirmation e-mail
     * @param password password to set (entered twice, as the form requires)
     */
    public void finish(String confirmationLink, String password) {
        wd.get(confirmationLink);
        type(By.name("password"), password);
        type(By.name("password_confirm"), password);
        click(By.cssSelector("input[value='Update User']"));
    }
}
| apache-2.0 |
gitblit/iciql | src/main/java/com/iciql/QueryJoin.java | 2180 | /*
* Copyright 2004-2011 H2 Group.
* Copyright 2011 James Moger.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iciql;
/**
 * This class represents a query with a join. The {@code on(...)} overloads
 * start the join condition for the joined table.
 *
 * @param <T> the type of the object being queried
 */
public class QueryJoin<T> {

    // both references are fixed for the lifetime of this join clause
    private final Query<T> query;
    private final SelectTable<T> join;

    QueryJoin(Query<T> query, SelectTable<T> join) {
        this.query = query;
        this.join = join;
    }

    /** Starts a join condition on a primitive boolean field alias. */
    public QueryJoinCondition<T, Boolean> on(boolean x) {
        // let the alias definition reject models where a boolean value cannot
        // be mapped back to a unique field (see checkMultipleBooleans)
        query.getFrom().getAliasDefinition().checkMultipleBooleans();
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive byte field alias. */
    public QueryJoinCondition<T, Byte> on(byte x) {
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive short field alias. */
    public QueryJoinCondition<T, Short> on(short x) {
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive int field alias. */
    public QueryJoinCondition<T, Integer> on(int x) {
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive long field alias. */
    public QueryJoinCondition<T, Long> on(long x) {
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive float field alias. */
    public QueryJoinCondition<T, Float> on(float x) {
        return addPrimitive(x);
    }

    /** Starts a join condition on a primitive double field alias. */
    public QueryJoinCondition<T, Double> on(double x) {
        return addPrimitive(x);
    }

    /**
     * Maps a (boxed) primitive value back to its registered alias; if no
     * alias is registered the raw value is used, which later surfaces as an
     * unmapped field exception.
     */
    private <A> QueryJoinCondition<T, A> addPrimitive(A x) {
        A alias = query.getPrimitiveAliasByValue(x);
        if (alias == null) {
            // this will result in an unmapped field exception
            return new QueryJoinCondition<T, A>(query, join, x);
        }
        return new QueryJoinCondition<T, A>(query, join, alias);
    }

    /** Starts a join condition on an object field alias. */
    public <A> QueryJoinCondition<T, A> on(A x) {
        return new QueryJoinCondition<T, A>(query, join, x);
    }
}
| apache-2.0 |
idealo/logback-redis | src/main/java/de/idealo/logback/appender/jediswriter/BufferedJedisRPusher.java | 771 | package de.idealo.logback.appender.jediswriter;
import java.util.function.Function;
import de.idealo.logback.appender.jedisclient.JedisClient;
import ch.qos.logback.core.spi.DeferredProcessingAware;
import redis.clients.jedis.Pipeline;
/**
 * Buffered writer that appends formatted log events to a Redis list with
 * RPUSH, via a pipelined Jedis client.
 */
public class BufferedJedisRPusher extends AbstractBufferedJedisWriter {

    /**
     * @param client Jedis client used to talk to Redis
     * @param messageCreator converts a logging event into the string to push
     * @param redisKey key of the Redis list to append to
     * @param maxBufferItems flush once this many items are buffered
     * @param flushBufferIntervalMillis flush at least this often
     */
    BufferedJedisRPusher(JedisClient client,
            Function<DeferredProcessingAware, String> messageCreator,
            String redisKey,
            int maxBufferItems,
            long flushBufferIntervalMillis) {
        super(client, messageCreator, redisKey, maxBufferItems, flushBufferIntervalMillis);
    }

    /** Appends all buffered values to the list at the configured key. */
    @Override
    void addValuesToPipeline(Pipeline pipeline, String... values) {
        pipeline.rpush(getRedisKey(), values);
    }
}
| apache-2.0 |
Muhamedali/crazy_text | src/main/java/com/input/text/crazy/client/widget/textbox/command/MoveCursorStartForceCommand.java | 996 | package com.input.text.crazy.client.widget.textbox.command;
import com.google.web.bindery.event.shared.Event;
import com.input.text.crazy.client.widget.textbox.DrawTextBox;
import com.input.text.crazy.client.widget.textbox.Text;
import javax.annotation.Nullable;
/**
 * Command that unconditionally moves the caret to the position before the
 * first character of the text.
 */
public class MoveCursorStartForceCommand extends SimpleCommand {

    // instance initializer: tag the command type before any constructor runs
    { type = CommandType.MOVE_CURSOR_START_FORCE_COMMAND; }

    public MoveCursorStartForceCommand() {}

    public MoveCursorStartForceCommand(DrawTextBox textBox, @Nullable Event event) throws Exception {
        super(textBox, event);
    }

    /** Always executable: moving to the start needs no preconditions. */
    @Override
    public boolean isExecutable() {
        return true;
    }

    /**
     * Moves the caret to {@code Text.BEFORE_TEXT_POSITION}.
     *
     * @return true, indicating the command took effect
     */
    @Override
    public boolean execute() throws Exception {
        caret.setCursorPosition(Text.BEFORE_TEXT_POSITION);
        return true;
    }

    /** Prototype factory: a fresh instance bound to the given text box/event. */
    @Override
    public Command prototype(final DrawTextBox textBox, @Nullable final Event event) throws Exception {
        return new MoveCursorStartForceCommand(textBox, event);
    }
}
| apache-2.0 |
mread/buck | test/com/facebook/buck/cxx/CxxLibraryDescriptionTest.java | 8948 | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.HasBuildTarget;
import com.facebook.buck.python.PythonPackageComponents;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleParamsFactory;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleSourcePath;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.TestSourcePath;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.shell.GenruleBuilder;
import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Tests for {@code CxxLibraryDescription}: builds a cxx_library rule from a
 * description arg (with both static and genrule-generated headers/sources)
 * and verifies the resulting preprocessor input, the archive rule's deps,
 * and the per-source compile rules' deps.
 */
public class CxxLibraryDescriptionTest {

  /** Builds a fake rule for the given target string, depending on {@code deps}. */
  private static FakeBuildRule createFakeBuildRule(
      String target,
      BuildRule... deps) {
    return new FakeBuildRule(
        new FakeBuildRuleParamsBuilder(BuildTargetFactory.newInstance(target))
            .setDeps(ImmutableSortedSet.copyOf(deps))
            .build());
  }

  @Test
  @SuppressWarnings("PMD.UseAssertTrueInsteadOfAssertEquals")
  public void createBuildRule() {
    BuildRuleResolver resolver = new BuildRuleResolver();

    // Set up a genrule that generates a header we'll list.
    String genHeaderName = "test/foo.h";
    BuildTarget genHeaderTarget = BuildTargetFactory.newInstance("//:genHeader");
    Genrule genHeader = (Genrule) GenruleBuilder
        .newGenruleBuilder(genHeaderTarget)
        .setOut(genHeaderName)
        .build(resolver);

    // Set up a genrule that generates a source we'll list.
    String genSourceName = "test/foo.cpp";
    BuildTarget genSourceTarget = BuildTargetFactory.newInstance("//:genSource");
    Genrule genSource = (Genrule) GenruleBuilder
        .newGenruleBuilder(genSourceTarget)
        .setOut(genSourceName)
        .build(resolver);

    // Set up a C/C++ library that we'll depend on from the C/C++ binary description.
    final BuildRule header = createFakeBuildRule("//:header");
    final BuildRule headerSymlinkTree = createFakeBuildRule("//:symlink");
    final Path headerSymlinkTreeRoot = Paths.get("symlink/tree/root");
    final BuildRule archive = createFakeBuildRule("//:archive");
    final Path archiveOutput = Paths.get("output/path/lib.a");
    BuildTarget depTarget = BuildTargetFactory.newInstance("//:dep");
    BuildRuleParams depParams = BuildRuleParamsFactory.createTrivialBuildRuleParams(depTarget);
    // Anonymous CxxLibrary stub that returns canned preprocessor/linker/python
    // inputs referencing the fake rules above.
    CxxLibrary dep = new CxxLibrary(depParams) {

      @Override
      public CxxPreprocessorInput getCxxPreprocessorInput() {
        return new CxxPreprocessorInput(
            ImmutableSet.of(
                header.getBuildTarget(),
                headerSymlinkTree.getBuildTarget()),
            ImmutableList.<String>of(),
            ImmutableList.<String>of(),
            ImmutableMap.<Path, SourcePath>of(),
            ImmutableList.of(headerSymlinkTreeRoot),
            ImmutableList.<Path>of());
      }

      @Override
      public NativeLinkableInput getNativeLinkableInput(Type type) {
        return new NativeLinkableInput(
            ImmutableList.<SourcePath>of(new BuildRuleSourcePath(archive)),
            ImmutableList.of(archiveOutput.toString()));
      }

      @Override
      public PythonPackageComponents getPythonPackageComponents() {
        return new PythonPackageComponents(
            ImmutableMap.<Path, SourcePath>of(),
            ImmutableMap.<Path, SourcePath>of(),
            ImmutableMap.<Path, SourcePath>of());
      }

    };
    resolver.addAllToIndex(ImmutableList.of(header, headerSymlinkTree, archive));

    // Set up the build params we'll pass to description when generating the build rules.
    BuildTarget target = BuildTargetFactory.newInstance("//:rule");
    BuildRuleParams params = new FakeBuildRuleParamsBuilder(target)
        .setDeps(ImmutableSortedSet.<BuildRule>of(dep))
        .build();

    // Create the description arg.
    CxxLibraryDescription.Arg arg = new CxxLibraryDescription.Arg();
    arg.deps = Optional.of(ImmutableSortedSet.of(dep.getBuildTarget()));
    arg.srcs = Optional.of(ImmutableList.<SourcePath>of(
        new TestSourcePath("test/bar.cpp"),
        new BuildRuleSourcePath(genSource)));
    String headerName = "test/bar.h";
    arg.headers = Optional.of(ImmutableList.<SourcePath>of(
        new TestSourcePath(headerName),
        new BuildRuleSourcePath(genHeader)));
    arg.compilerFlags = Optional.absent();
    arg.propagatedPpFlags = Optional.absent();
    arg.preprocessorFlags = Optional.absent();
    arg.linkWhole = Optional.absent();
    arg.lexSrcs = Optional.absent();
    arg.yaccSrcs = Optional.absent();

    // Instantiate a description and call its `createBuildRule` method.
    DefaultCxxPlatform cxxBuckConfig = new DefaultCxxPlatform(new FakeBuckConfig());
    CxxLibraryDescription description = new CxxLibraryDescription(cxxBuckConfig);
    CxxLibrary rule = description.createBuildRule(params, resolver, arg);

    // The rule's preprocessor input should map both the static and the
    // genrule-generated header through the rule's header symlink tree.
    assertEquals(
        new CxxPreprocessorInput(
            ImmutableSet.of(
                CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(target)),
            ImmutableList.<String>of(),
            ImmutableList.<String>of(),
            ImmutableMap.<Path, SourcePath>of(
                Paths.get(headerName), new TestSourcePath(headerName),
                Paths.get(genHeaderName), new BuildRuleSourcePath(genHeader)),
            ImmutableList.of(
                CxxDescriptionEnhancer.getHeaderSymlinkTreePath(target)),
            ImmutableList.<Path>of()),
        rule.getCxxPreprocessorInput());

    // Verify that the archive rule has the correct deps: the object files from our sources.
    BuildRule archiveRule = resolver.getRule(
        CxxDescriptionEnhancer.createStaticLibraryBuildTarget(target));
    assertNotNull(archiveRule);
    assertEquals(
        ImmutableSet.of(
            CxxCompilableEnhancer.createCompileBuildTarget(
                target,
                "test/bar.cpp",
                /* pic */ false),
            CxxCompilableEnhancer.createCompileBuildTarget(
                target,
                genSourceName,
                /* pic */ false)),
        FluentIterable.from(archiveRule.getDeps())
            .transform(HasBuildTarget.TO_TARGET)
            .toSet());

    // Verify that the compile rule for our user-provided source has correct deps setup
    // for the various header rules.
    BuildRule compileRule1 = resolver.getRule(
        CxxCompilableEnhancer.createCompileBuildTarget(
            target,
            "test/bar.cpp",
            /* pic */ false));
    assertNotNull(compileRule1);
    assertEquals(
        ImmutableSet.of(
            genHeaderTarget,
            headerSymlinkTree.getBuildTarget(),
            header.getBuildTarget(),
            CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(target)),
        FluentIterable.from(compileRule1.getDeps())
            .transform(HasBuildTarget.TO_TARGET)
            .toSet());

    // Verify that the compile rule for our genrule-generated source has correct deps setup
    // for the various header rules and the generating genrule.
    BuildRule compileRule2 = resolver.getRule(
        CxxCompilableEnhancer.createCompileBuildTarget(
            target,
            genSourceName,
            /* pic */ false));
    assertNotNull(compileRule2);
    assertEquals(
        ImmutableSet.of(
            genHeaderTarget,
            genSourceTarget,
            headerSymlinkTree.getBuildTarget(),
            header.getBuildTarget(),
            CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(target)),
        FluentIterable.from(compileRule2.getDeps())
            .transform(HasBuildTarget.TO_TARGET)
            .toSet());
  }
}
| apache-2.0 |
nus-ncl/service-web | src/main/java/sg/ncl/testbed_interface/StatefulExperiment.java | 1449 | package sg.ncl.testbed_interface;
import lombok.Getter;
import lombok.Setter;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
/**
 * View bean for a stateful experiment; epoch-second timestamps are exposed
 * as {@link ZonedDateTime} values rendered in GMT+8.
 *
 * Created by dcszwang on 11/14/2017.
 */
@Getter
@Setter
public class StatefulExperiment {

    /**
     * Zone used when converting epoch seconds to display timestamps.
     * Extracted so the zone is defined in exactly one place (it was
     * previously duplicated as a magic string in two setters).
     */
    private static final ZoneId DISPLAY_ZONE = ZoneId.of("GMT+08:00");

    private String teamId;
    private String teamName;
    private Long id;
    private String name;
    private String userId;
    private String description;
    private ZonedDateTime createdDate;
    private ZonedDateTime lastModifiedDate;
    private String state;
    private int nodes;
    private int minNodes;
    private int maxDuration;
    private Long idleHours;
    // node name -> (property name -> property value)
    private Map<String, Map<String, String>> nodesInfoMap;
    private int platform;
    // snake_case kept: Lombok-generated accessor names depend on the field name
    private String stack_id;

    public StatefulExperiment() {
        nodes = 0;
        maxDuration = 0;
        minNodes = 0;
        idleHours = 0L;
        nodesInfoMap = new HashMap<>();
        platform = 0;
    }

    /** Registers the info map for a node under the given node name. */
    public void addNodeInfo(String nodeName, Map<String, String> nodeInfo) {
        nodesInfoMap.put(nodeName, nodeInfo);
    }

    /** Sets the creation time from epoch seconds, rendered in GMT+8. */
    public void setCreatedDate(Long epoch) {
        this.createdDate = ZonedDateTime.ofInstant(Instant.ofEpochSecond(epoch), DISPLAY_ZONE);
    }

    /** Sets the last-modified time from epoch seconds, rendered in GMT+8. */
    public void setLastModifiedDate(Long epoch) {
        this.lastModifiedDate = ZonedDateTime.ofInstant(Instant.ofEpochSecond(epoch), DISPLAY_ZONE);
    }
}
| apache-2.0 |
FearTheDust/Worm_Part3 | src-provided/worms/gui/ErrorScreen.java | 1072 | package worms.gui;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.event.KeyEvent;
import java.util.StringTokenizer;
public class ErrorScreen extends Screen {
private final String message;
public ErrorScreen(WormsGUI gui, String message) {
super(gui);
this.message = message;
}
@Override
public void screenStarted() {
}
@Override
protected InputMode<ErrorScreen> createDefaultInputMode() {
return new InputMode<ErrorScreen>(this, null) {
@Override
public void keyReleased(KeyEvent e) {
if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
getGUI().exit();
}
}
};
}
@Override
protected void paintScreen(Graphics2D g) {
g.setColor(Color.RED);
GUIUtils.drawCenteredString((Graphics2D) g, "An error has occurred",
getScreenWidth(), 20);
StringTokenizer tok = new StringTokenizer(message, "\n");
int y = 50;
while (tok.hasMoreElements()) {
String line = tok.nextToken();
GUIUtils.drawCenteredString((Graphics2D) g, line, getScreenWidth(),
y);
y += (g.getFont().getSize() * 7) / 5;
}
}
}
| apache-2.0 |
JobHive/saki-monkey | src/main/java/com/jobhive/sakimonkey/data/response/DedicatedIpPool.java | 1110 | package com.jobhive.sakimonkey.data.response;
import java.util.Date;
/**
 * Response bean describing a dedicated IP pool.
 *
 * @author Hussachai
 *
 */
public class DedicatedIpPool {

    /**
     * this pool's name
     */
    private String name;

    /**
     * the date and time that this pool was created as
     * a UTC timestamp in YYYY-MM-DD HH:MM:SS format
     */
    private Date createdAt;

    /**
     * the dedicated IPs in this pool
     */
    private DedicatedIp[] ips;

    public String getName() {
        return name;
    }

    // NOTE(review): returns the internal mutable Date; callers should treat
    // it as read-only (consider a defensive copy if this bean is shared)
    public Date getCreatedAt() {
        return createdAt;
    }

    // NOTE(review): exposes the internal array; treat as read-only
    public DedicatedIp[] getIps() {
        return ips;
    }

    /** Result entry of a pool delete operation. */
    public static class PoolDeleteStatus {

        /**
         * the pool name (the original comment said "the ip address", but the
         * field clearly holds the pool identifier)
         */
        private String pool;

        /**
         * whether the pool was deleted
         */
        private Boolean deleted;

        public String getPool() {
            return pool;
        }

        public Boolean getDeleted() {
            return deleted;
        }
    }
}
| apache-2.0 |
arshvin/scripts | hdfs_log_cleaner/src/test/java/prived/medved/utils/SimpleDocsCandidatesFinderImplTest.java | 7271 | package prived.medved.utils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettyConfig;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.cloud.ZkTestServer;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.zookeeper.CreateMode;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.junit.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Tests {@code SimpleDocsCandidatesFinderImpl} against an embedded
 * ZooKeeper + Jetty-hosted Solr instance that is started once per class.
 */
public class SimpleDocsCandidatesFinderImplTest {

    private static String testCollection = "TestCollection";
    private static String searchField = "event_time";
    private static String dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";

    // Fixed: the logger was created for SimpleDocsCandidatesFinderImpl, so all
    // output of this test class was attributed to the wrong logger category.
    private static Logger log = LoggerFactory.getLogger(SimpleDocsCandidatesFinderImplTest.class);

    // (removed the unused "hostContext" constant, which also contained the
    // typo "/sorl"; the servlet context is configured inline in setup())

    private static File zkDir;
    private static File solrHome;
    private static ZkTestServer zkTestServer;
    private static JettySolrRunner jetty;

    @AfterClass
    public static void down() throws Exception {
        jetty.stop();
        zkTestServer.shutdown();
        FileUtils.deleteDirectory(solrHome);
        FileUtils.deleteDirectory(zkDir);
    }

    @BeforeClass
    public static void setup() throws Exception {
        /* We will not use the MiniCloudSolrCluster overall because it does not
         * provide setup jetty in the socket. However we will use it for parts. */
        zkDir = Files.createTempDirectory("zkDir").toFile();
        solrHome = Files.createTempDirectory("solrHome").toFile();
        zkDir.deleteOnExit();
        solrHome.deleteOnExit();
        log.debug("Creating zkDir in '{}'", zkDir);
        log.debug("Creating solrHome in '{}'", solrHome);

        // Read solr.xml from the test resources; try-with-resources closes the
        // stream, which the original version leaked.
        final String solrXml;
        try (InputStream is = new FileInputStream(
                new File(SimpleDocsCandidatesFinderImplTest.class.getClassLoader().getResource("solr.xml").toURI()))) {
            solrXml = IOUtils.readLines(is).stream().collect(Collectors.joining());
        }

        zkTestServer = new ZkTestServer(zkDir.getAbsolutePath(), 2181);
        zkTestServer.run();
        SolrZkClient zkClient = new SolrZkClient(zkTestServer.getZkHost(),
                AbstractZkTestCase.TIMEOUT, 45000, null);
        zkClient.makePath("/solr", false, true);
        zkClient.create("/solr/solr.xml", solrXml.getBytes(), CreateMode.PERSISTENT, true);
        log.debug("ZkTestServer was run");

        JettyConfig jettyConfig = JettyConfig.builder()
                .setContext("/solr")
                .setPort(8989)
                .build();

        // tell solr to look in zookeeper for solr.xml
        System.setProperty("solr.solrxml.location", "zookeeper");
        System.setProperty("zkHost", zkTestServer.getZkAddress());
        jetty = new JettySolrRunner(
                solrHome.getAbsolutePath(),
                jettyConfig.context,
                jettyConfig.port);
        jetty.start();
        log.debug("Solr minicluster started");
    }

    @Before
    public void prepareFixture() throws IOException, SolrServerException, URISyntaxException {
        createTestCollection();
    }

    @After
    public void clearFixture() throws IOException, SolrServerException {
        deleteTestCollection();
    }

    /** Uploads the collection config to ZooKeeper and creates the test collection. */
    static void createTestCollection() throws IOException, SolrServerException, URISyntaxException {
        CloudSolrServer solrClient = new CloudSolrServer(zkTestServer.getZkAddress());
        Path collectionConfig = Paths.get(SimpleDocsCandidatesFinderImplTest.class.getClassLoader().getResource("test-collection-config").toURI());
        solrClient.uploadConfig(collectionConfig, testCollection);
        log.debug("Uploaded config files of '{}' collection to Zk", collectionConfig);
        final ModifiableSolrParams params = new ModifiableSolrParams();
        params.set(CoreAdminParams.ACTION, CollectionParams.CollectionAction.CREATE.name());
        params.set(CoreAdminParams.NAME, testCollection);
        params.set("numShards", 1);
        params.set("replicationFactor", 1);
        params.set("collection.configName", testCollection);
        sendCollectionAdminRequest(params);
        log.debug("The collection {} was created", testCollection);
        solrClient.setDefaultCollection(testCollection);
    }

    /**
     * Sends a collections-admin request with the given params.
     * (Renamed from {@code SolrRequest}, which clashed with the SolrJ class
     * name and violated Java method-naming conventions.)
     */
    private static void sendCollectionAdminRequest(ModifiableSolrParams params) throws IOException, SolrServerException {
        CloudSolrServer solrClient = new CloudSolrServer(zkTestServer.getZkAddress());
        final QueryRequest request = new QueryRequest(params);
        request.setPath("/admin/collections");
        solrClient.request(request);
    }

    /** Drops the test collection created by {@link #createTestCollection()}. */
    static void deleteTestCollection() throws IOException, SolrServerException {
        final ModifiableSolrParams params = new ModifiableSolrParams();
        params.set(CoreAdminParams.ACTION, CollectionParams.CollectionAction.DELETE.name());
        params.set(CoreAdminParams.NAME, testCollection);
        sendCollectionAdminRequest(params);
    }

    @Test
    public void testFindingdDocsAbility() throws IOException, SolrServerException {
        CloudSolrServer solrClient = new CloudSolrServer(zkTestServer.getZkAddress());
        solrClient.setDefaultCollection(testCollection);
        // Index ten documents whose event_time goes back 0..9 days.
        for (int i = 0; i < 10; i++) {
            SolrInputDocument document = new SolrInputDocument();
            document.addField("id", i);
            document.addField("msg", "some log message");
            document.addField("log_level", "some log message");
            document.addField(
                    searchField,
                    DateTimeFormat.forPattern(dateFormat).print(new DateTime().minusDays(i)));
            solrClient.add(document);
        }
        solrClient.commit();
        DocsCandidatesFinder finder = new SimpleDocsCandidatesFinderImpl(
                solrClient,
                testCollection,
                searchField,
                dateFormat);
        // For an argument of 3, seven of the ten documents are expected back.
        SolrDocumentList docs = finder.find(3);
        Assert.assertEquals(7, docs.getNumFound());
        // Fixed: use a typed List<String> (the original used a raw List,
        // which compiled only via an unchecked conversion).
        List<String> ids = docs.stream()
                .map(doc -> String.valueOf(doc.get("id")))
                .collect(Collectors.toList());
        solrClient.deleteById(ids);
        solrClient.commit();
        docs = solrClient.query(new SolrQuery("*")).getResults();
        Assert.assertEquals(3, docs.getNumFound());
    }
}
| apache-2.0 |
Sable/mclab-core | languages/Natlab/src/natlab/tame/tamerplus/utils/IntOk.java | 758 | package natlab.tame.tamerplus.utils;
import java.util.ArrayList;
/**
 * Mutable result triple: whether a value is known to be an integer, whether
 * the answer depends on other names, and which names it depends on.
 */
public class IntOk {

    private Boolean isInt;               // known-integer flag
    private Boolean depends;             // true when the result is conditional
    private ArrayList<String> dependsOn; // names the result depends on

    public IntOk(Boolean isint, Boolean depends, ArrayList<String> dependson) {
        this.isInt = isint;
        this.depends = depends;
        this.dependsOn = dependson;
    }

    public Boolean getIsInt() {
        return this.isInt;
    }

    public void setIsInt(Boolean isint) {
        this.isInt = isint;
    }

    public Boolean getDepends() {
        return this.depends;
    }

    public void setDepends(Boolean depends) {
        this.depends = depends;
    }

    public ArrayList<String> getDependsOn() {
        return this.dependsOn;
    }

    public void setDependsOn(ArrayList<String> dependson) {
        this.dependsOn = dependson;
    }
}
| apache-2.0 |
OpenUniversity/ovirt-engine | backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/host/NetworkDeviceHelperImpl.java | 14248 | package org.ovirt.engine.core.bll.network.host;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.HostDevice;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.network.HostNicVfsConfig;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dao.HostDeviceDao;
import org.ovirt.engine.core.dao.VdsDao;
import org.ovirt.engine.core.dao.network.HostNicVfsConfigDao;
import org.ovirt.engine.core.dao.network.InterfaceDao;
import org.ovirt.engine.core.utils.NetworkUtils;
@Singleton
class NetworkDeviceHelperImpl implements NetworkDeviceHelper {
private final InterfaceDao interfaceDao;
private final HostDeviceDao hostDeviceDao;
private final HostNicVfsConfigDao hostNicVfsConfigDao;
private final VdsDao vdsDao;
@Inject
NetworkDeviceHelperImpl(InterfaceDao interfaceDao,
HostDeviceDao hostDeviceDao,
HostNicVfsConfigDao hostNicVfsConfigDao,
VdsDao vdsDao) {
Objects.requireNonNull(interfaceDao, "interfaceDao cannot be null");
Objects.requireNonNull(hostDeviceDao, "hostDeviceDao cannot be null");
Objects.requireNonNull(hostNicVfsConfigDao, "hostNicVfsConfigDao cannot be null");
Objects.requireNonNull(vdsDao, "vdsDao cannot be null");
this.interfaceDao = interfaceDao;
this.hostDeviceDao = hostDeviceDao;
this.hostNicVfsConfigDao = hostNicVfsConfigDao;
this.vdsDao = vdsDao;
}
    /**
     * Resolves the host NIC backed by the given PCI device, loading the
     * host's device list and NICs from the DAOs.
     */
    @Override
    public VdsNetworkInterface getNicByPciDevice(final HostDevice pciDevice) {
        return getNicByPciDevice(pciDevice, null);
    }
    /**
     * Resolves the host NIC backed by the given PCI device, using the
     * supplied device list (or loading it when {@code devices} is null).
     */
    @Override
    public VdsNetworkInterface getNicByPciDevice(final HostDevice pciDevice, final Collection<HostDevice> devices) {
        return getNicByPciDevice(pciDevice, devices, null);
    }
private VdsNetworkInterface getNicByPciDevice(final HostDevice pciDevice,
final Collection<HostDevice> devices,
final Collection<VdsNetworkInterface> hostNics) {
final HostDevice netDevice = getFirstChildDevice(pciDevice, devices);
if (netDevice == null || !isNetworkDevice(netDevice)) {
return null;
}
final Collection<VdsNetworkInterface> hostInterfaces =
hostNics == null ? interfaceDao.getAllInterfacesForVds(netDevice.getHostId()) : hostNics;
return hostInterfaces.stream().filter(iface -> iface.getName().equals(netDevice.getNetworkInterfaceName()))
.findFirst().orElse(null);
}
private HostDevice getFirstChildDevice(final HostDevice pciDevice, final Collection<HostDevice> devices) {
Collection<HostDevice> hostDevices = devices == null ? getDevicesByHostId(pciDevice.getHostId()) : devices;
return hostDevices.stream().filter(device -> pciDevice.getDeviceName().equals(device.getParentDeviceName()))
.findFirst().orElse(null);
}
    /**
     * A device is considered SR-IOV capable when it reports a total number
     * of virtual functions.
     */
    @Override
    public boolean isSriovDevice(HostDevice device) {
        return device.getTotalVirtualFunctions() != null;
    }
    /** A host device is a network device when it exposes a network interface name. */
    @Override
    public boolean isNetworkDevice(HostDevice device) {
        return device.getNetworkInterfaceName() != null;
    }
    /**
     * Populates the given VFs config with the max/actual VF counts, resolved
     * from the config's NIC and that NIC's host device list.
     */
    @Override
    public void updateHostNicVfsConfigWithNumVfsData(HostNicVfsConfig hostNicVfsConfig) {
        VdsNetworkInterface nic = getNicById(hostNicVfsConfig.getNicId());
        updateVfsConfigWithNumOfVfsData(hostNicVfsConfig,
                nic,
                getDevicesByHostId(nic.getVdsId()));
    }
@Override
public List<HostNicVfsConfig> getHostNicVfsConfigsWithNumVfsDataByHostId(Guid hostId) {
List<HostNicVfsConfig> hostNicVfsConfigList = hostNicVfsConfigDao.getAllVfsConfigByHostId(hostId);
List<HostDevice> deviceList = getDevicesByHostId(hostId);
for (HostNicVfsConfig hostNicVfsConfig : hostNicVfsConfigList) {
updateVfsConfigWithNumOfVfsData(hostNicVfsConfig, null, deviceList);
}
return hostNicVfsConfigList;
}
    /**
     * Fills in maxNumOfVfs/numOfVfs on the given VFs config.
     *
     * @param nic the config's NIC, or null to load it by the config's NIC id
     * @param deviceList all host devices of the NIC's host
     */
    private void updateVfsConfigWithNumOfVfsData(HostNicVfsConfig hostNicVfsConfig,
            VdsNetworkInterface nic,
            List<HostDevice> deviceList) {
        if (nic == null) {
            nic = getNicById(hostNicVfsConfig.getNicId());
        }
        HostDevice pciDevice = getPciDeviceByNic(nic, deviceList);
        hostNicVfsConfig.setMaxNumOfVfs(getMaxNumOfVfs(pciDevice));
        hostNicVfsConfig.setNumOfVfs(getNumOfVfs(pciDevice, deviceList));
    }
    /** Convenience overload that builds the name-to-device map from {@code deviceList}. */
    private HostDevice getPciDeviceByNic(final VdsNetworkInterface nic, List<HostDevice> deviceList) {
        return getPciDeviceByNic(nic, deviceList, Entities.entitiesByName(deviceList));
    }
/**
 * Resolves the PCI device backing the given nic: finds the net device whose
 * interface name matches the nic, then looks up its parent device by name.
 *
 * @throws NullPointerException when the nic has no net device or the net device
 *         has no parent PCI device
 */
private HostDevice getPciDeviceByNic(final VdsNetworkInterface nic,
        List<HostDevice> deviceList,
        Map<String, HostDevice> devicesByName) {
    final String nicName = nic.getName();
    final HostDevice netDevice = deviceList.stream()
            .filter(device -> nicName.equals(device.getNetworkInterfaceName())).findFirst().orElse(null);
    // Supplier overload: the message is only formatted when the check fails,
    // instead of on every call as with the eager String.format variant.
    Objects.requireNonNull(netDevice,
            () -> String.format("Host \"%s\": nic \"%s\" doesn't have a net device", nic.getVdsName(), nicName));
    final String parentDeviceName = netDevice.getParentDeviceName();
    final HostDevice pciDevice = devicesByName.get(parentDeviceName);
    Objects.requireNonNull(pciDevice,
            () -> String.format("Host \"%s\": net device \"%s\" doesn't have a parent pci device \"%s\"",
                    nic.getVdsName(),
                    netDevice.getName(),
                    parentDeviceName));
    return pciDevice;
}
// Unboxes the total-VF count. Callers must ensure the device is SR-IOV capable
// (see isSriovDevice); otherwise getTotalVirtualFunctions() is null and this throws.
private int getMaxNumOfVfs(HostDevice pciDevice) {
    return pciDevice.getTotalVirtualFunctions();
}
// The current number of VFs is simply how many devices name this PF as their
// parent physical function.
private int getNumOfVfs(HostDevice pciDevice, List<HostDevice> deviceList) {
    return getVfs(pciDevice, deviceList).size();
}
// Loads a nic by id straight from the interface DAO.
private VdsNetworkInterface getNicById(Guid nicId) {
    return interfaceDao.get(nicId);
}
// Loads all devices of the given host from the host device DAO.
private List<HostDevice> getDevicesByHostId(Guid hostId) {
    return hostDeviceDao.getHostDevicesByHostId(hostId);
}
/**
 * Returns all virtual functions of the given physical function: the devices
 * that name the PF as their parent physical function.
 */
private List<HostDevice> getVfs(final HostDevice pciDevice, List<HostDevice> deviceList) {
    final String pfName = pciDevice.getDeviceName();
    return deviceList.stream()
            .filter(candidate -> pfName.equals(candidate.getParentPhysicalFunction()))
            .collect(Collectors.toList());
}
/** All VFs of the nic are free exactly when no non-free VF can be found. */
@Override
public boolean areAllVfsFree(VdsNetworkInterface nic) {
    return getVf(nic, false) == null;
}
/**
 * Returns whether the device is free from a networking point of view: a device
 * whose first child is not a network device is trivially free; otherwise its
 * nic usage is examined.
 */
@Override
public boolean isDeviceNetworkFree(HostDevice hostDevice) {
    final HostDevice firstChild = getFirstChildDevice(hostDevice, null);
    final boolean hasNetworkChild = firstChild != null && isNetworkDevice(firstChild);
    return !hasNetworkChild || isNetworkDeviceFree(hostDevice);
}
/**
 * A VF is free when it is neither passed through directly to a VM nor used by
 * the host's networking.
 */
private boolean isVfFree(HostDevice vf) {
    final boolean attachedToVm = vf.getVmId() != null;
    return !attachedToVm && isNetworkDeviceFree(vf);
}
/**
 * Returns whether the nic backed by the given PCI device is unused: it must be
 * reported by the host (see note below) and carry no network, no VLAN device
 * and not be part of a bond.
 */
private boolean isNetworkDeviceFree(HostDevice pciDevice) {
    // Check that there is no macvtap device on top of the VM-
    // nics with macvtap attached are not reported via the getVdsCaps
    VdsNetworkInterface vfNic = getNicByPciDevice(pciDevice);
    // A nic that is not reported at all (vfNic == null) is considered not free.
    return vfNic != null && !isNetworkAttached(vfNic) && !isVlanDeviceAttached(vfNic) && !vfNic.isPartOfBond();
}
/**
 * Returns a free VF of the given nic, skipping the device names in
 * {@code excludeVfs}, or {@code null} when none is available.
 */
@Override
public HostDevice getFreeVf(VdsNetworkInterface nic, List<String> excludeVfs) {
    return getVf(nic, true, excludeVfs);
}
/**
 * Finds the first VF of the given SR-IOV nic whose "free" state matches
 * {@code shouldBeFree}, optionally skipping device names in {@code excludeVfs}.
 *
 * @return the matching VF, or {@code null} when there is none
 * @throws NullPointerException          when the nic has no PCI device
 * @throws UnsupportedOperationException when the nic is not SR-IOV capable
 */
private HostDevice getVf(VdsNetworkInterface nic, final boolean shouldBeFree, final List<String> excludeVfs) {
    List<HostDevice> deviceList = getDevicesByHostId(nic.getVdsId());
    HostDevice pciDevice = getPciDeviceByNic(nic, deviceList);
    if (pciDevice == null) {
        throw new NullPointerException("nic doesn't have a pci device");
    }
    if (!isSriovDevice(pciDevice)) {
        throw new UnsupportedOperationException("'getVf' method should be called only for 'sriov' nics");
    }
    List<HostDevice> vfs = getVfs(pciDevice, deviceList);
    return vfs.stream()
            .filter(vf -> isVfFree(vf) == shouldBeFree && (excludeVfs == null || !excludeVfs.contains(vf.getDeviceName())))
            .findFirst()
            .orElse(null);
}
// Overload without an exclusion list.
private HostDevice getVf(VdsNetworkInterface nic, final boolean shouldBeFree) {
    return getVf(nic, shouldBeFree, null);
}
// A nic is attached to a network exactly when it carries a network name.
private boolean isNetworkAttached(VdsNetworkInterface vfNic) {
    final String networkName = vfNic.getNetworkName();
    return networkName != null;
}
// Returns whether a VLAN device sits on top of the given nic.
// Package-private — presumably to allow stubbing in tests; TODO confirm.
boolean isVlanDeviceAttached(VdsNetworkInterface vfNic) {
    return NetworkUtils.interfaceHasVlan(vfNic, interfaceDao.getAllInterfacesForVds(vfNic.getVdsId()));
}
/** Returns the device name of the PCI device backing the given nic. */
@Override
public String getPciDeviceNameByNic(VdsNetworkInterface nic) {
    final List<HostDevice> hostDevices = getDevicesByHostId(nic.getVdsId());
    final HostDevice pciDevice = getPciDeviceByNic(nic, hostDevices);
    return pciDevice.getDeviceName();
}
/**
 * Stores the given VM id on the named VFs of the host. Passing a {@code null}
 * vmId releases the VFs.
 *
 * @throws IllegalStateException when assigning (vmId != null) and one of the
 *         VFs is already attached to another VM
 */
@Override
public void setVmIdOnVfs(Guid hostId, Guid vmId, final Set<String> vfsNames) {
    List<HostDevice> hostDevices = hostDeviceDao.getHostDevicesByHostId(hostId);
    // Keep only actual VFs whose device name was requested.
    List<HostDevice> vfs = hostDevices.stream()
            .filter(device -> vfsNames.contains(device.getDeviceName()) && isVf(device))
            .collect(Collectors.toList());
    if (vmId != null) {
        HostDevice alreadyTakenVf = vfs.stream().filter(vf -> vf.getVmId() != null).findFirst().orElse(null);
        if (alreadyTakenVf != null) {
            throw new IllegalStateException(
                    String.format("VF %s is already taken by VM %s",
                            alreadyTakenVf.getName(),
                            alreadyTakenVf.getVmId()));
        }
    }
    setVmIdOnVfsDevices(vmId, new HashSet<>(vfs));
}
// Persists the (possibly null) VM id on every given VF.
private void setVmIdOnVfsDevices(Guid vmId, Set<HostDevice> vfs) {
    vfs.forEach(vf -> hostDeviceDao.setVmIdOnHostDevice(vf.getId(), vmId));
}
/**
 * Clears the given VM id from all VFs that reference it.
 *
 * @return the id of the host whose VFs were cleared, or {@code null} when the
 *         VM had no VFs assigned
 */
@Override
public Guid removeVmIdFromVfs(final Guid vmId) {
    List<HostDevice> hostDevices = hostDeviceDao.getAll();
    List<HostDevice> vfsUsedByVm = hostDevices.stream()
            .filter(device -> vmId.equals(device.getVmId()) && isVf(device)).collect(Collectors.toList());
    // The first entry's host is taken as "the" host — presumably all of a VM's
    // VFs live on a single host; TODO confirm.
    Guid hostId = vfsUsedByVm.isEmpty() ? null : vfsUsedByVm.get(0).getHostId();
    if (hostId != null) {
        setVmIdOnVfsDevices(null, new HashSet<>(vfsUsedByVm));
    }
    return hostId;
}
/**
 * Maps each VF nic id of the host to the id of its physical-function nic.
 * Returns an empty map when the host's cluster does not support SR-IOV.
 */
@Override
public Map<Guid, Guid> getVfMap(final Guid hostId) {
    final VDS host = vdsDao.get(hostId);
    if (!FeatureSupported.sriov(host.getClusterCompatibilityVersion())) {
        return Collections.emptyMap();
    }
    final List<VdsNetworkInterface> hostNics = interfaceDao.getAllInterfacesForVds(hostId);
    final List<HostDevice> hostDevices = hostDeviceDao.getHostDevicesByHostId(hostId);
    final Map<String, HostDevice> hostDevicesByName = Entities.entitiesByName(hostDevices);
    // NOTE(review): VfNicToPfNicMapper may return null, which Collectors.toMap
    // rejects with a NullPointerException — verify the PF nic always resolves.
    return hostNics.stream()
            .filter(new VfNicPredicate(hostDevices, hostDevicesByName))
            .collect(Collectors.toMap(VdsNetworkInterface::getId,
                    new VfNicToPfNicMapper(hostDevices, hostDevicesByName, hostNics)));
}
// A device is a VF exactly when it reports a parent physical function.
private boolean isVf(HostDevice device) {
    return StringUtils.isNotBlank(device.getParentPhysicalFunction());
}
/**
 * Maps a VF nic to the id of its physical-function (PF) nic by walking
 * VF nic -> VF PCI device -> PF PCI device -> PF nic.
 * Non-static on purpose: it calls the outer class's resolution helpers.
 */
private class VfNicToPfNicMapper implements Function<VdsNetworkInterface, Guid> {
    private final List<HostDevice> hostDevices;
    private final Map<String, HostDevice> hostDevicesByName;
    private final List<VdsNetworkInterface> hostNics;
    public VfNicToPfNicMapper(List<HostDevice> hostDevices,
            Map<String, HostDevice> hostDevicesByName,
            List<VdsNetworkInterface> hostNics) {
        this.hostDevices = hostDevices;
        this.hostDevicesByName = hostDevicesByName;
        this.hostNics = hostNics;
    }
    @Override
    public Guid apply(VdsNetworkInterface nic) {
        final HostDevice vfPciDevice =
                getPciDeviceByNic(nic, hostDevices, hostDevicesByName);
        final HostDevice pfPciDevice = hostDevicesByName.get(vfPciDevice.getParentPhysicalFunction());
        final VdsNetworkInterface pfNic = getNicByPciDevice(pfPciDevice, hostDevices, hostNics);
        // May be null when the PF has no nic reported.
        return pfNic == null ? null : pfNic.getId();
    }
}
/**
 * Tests whether a nic is a VF. Bonds and VLAN devices are never VFs; for the
 * rest, the nic's PCI device is resolved and checked.
 */
private class VfNicPredicate implements Predicate<VdsNetworkInterface> {
    private final List<HostDevice> hostDevices;
    private final Map<String, HostDevice> hostDevicesByName;
    public VfNicPredicate(List<HostDevice> hostDevices, Map<String, HostDevice> hostDevicesByName) {
        this.hostDevices = hostDevices;
        this.hostDevicesByName = hostDevicesByName;
    }
    @Override
    public boolean test(VdsNetworkInterface nic) {
        if (nic.isBond() || NetworkUtils.isVlan(nic)) {
            return false;
        }
        try {
            final HostDevice nicPciDevice =
                    getPciDeviceByNic(nic, hostDevices, hostDevicesByName);
            return isVf(nicPciDevice);
        } catch (Exception e) {
            // getPciDeviceByNic throws when the nic has no PCI device; such nics
            // are simply not VFs. NOTE(review): this broad catch also hides any
            // other unexpected error — consider narrowing.
            return false;
        }
    }
}
}
| apache-2.0 |
weiwenqiang/GitHub | SelectWidget/glide-master/library/src/main/java/com/bumptech/glide/request/target/DrawableThumbnailImageViewTarget.java | 495 | package com.bumptech.glide.request.target;
import android.graphics.drawable.Drawable;
import android.widget.ImageView;
/**
 * Efficiently displays multiple Drawables loaded serially into a single {@link android.view.View}.
 */
public class DrawableThumbnailImageViewTarget extends ThumbnailImageViewTarget<Drawable> {
    public DrawableThumbnailImageViewTarget(ImageView view) {
        super(view);
    }
    /** The loaded resource is already a {@link Drawable}, so it is returned unchanged. */
    @Override
    protected Drawable getDrawable(Drawable resource) {
        return resource;
    }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateSqlSumAggregateDescriptor.java | 2571 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.aggregates.serializable.std;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
/**
 * Descriptor of the serializable intermediate SQL-SUM aggregate function,
 * used between the local and global steps of a distributed SUM.
 */
public class SerializableIntermediateSqlSumAggregateDescriptor
        extends AbstractSerializableAggregateFunctionDynamicDescriptor {
    private static final long serialVersionUID = 1L;
    // Factory through which the function registry instantiates this descriptor.
    public static final IFunctionDescriptorFactory FACTORY = SerializableIntermediateSqlSumAggregateDescriptor::new;
    @Override
    public FunctionIdentifier getIdentifier() {
        return BuiltinFunctions.SERIAL_INTERMEDIATE_SQL_SUM;
    }
    /** Creates the factory producing per-task intermediate-SUM evaluators. */
    @Override
    public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
            final IScalarEvaluatorFactory[] args) {
        return new ISerializedAggregateEvaluatorFactory() {
            private static final long serialVersionUID = 1L;
            @Override
            public ISerializedAggregateEvaluator createAggregateEvaluator(IHyracksTaskContext ctx)
                    throws HyracksDataException {
                return new SerializableIntermediateSqlSumAggregateFunction(args, ctx, sourceLoc);
            }
        };
    }
}
| apache-2.0 |
LeFullStack/FullStack | src/main/java/me/zbl/fullstack/consts/SessionConstants.java | 294 | package me.zbl.fullstack.consts;
/**
 * Global constants for session attribute keys.
 *
 * @author James
 */
public class SessionConstants {

    /** Session key under which the currently logged-in user is stored. */
    public static final String SESSION_CURRENT_USER = "session_current_user";

    /** Session key under which the currently logged-in admin user is stored. */
    public static final String SESSION_ADMIN_CURRENT_USER = "session_current_admin_user";

    /** Constants holder — not meant to be instantiated. */
    private SessionConstants() {
    }
}
| apache-2.0 |
blackducksoftware/hub-detect | hub-detect/src/main/groovy/com/blackducksoftware/integration/hub/detect/workflow/file/DetectFileUtils.java | 2017 | /**
* hub-detect
*
* Copyright (C) 2019 Black Duck Software, Inc.
* http://www.blackducksoftware.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.blackducksoftware.integration.hub.detect.workflow.file;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** File helpers for detect output; static-only utility class. */
public class DetectFileUtils {
    // Modifier order fixed: "static final" per convention.
    private static final Logger logger = LoggerFactory.getLogger(DetectFileUtils.class);

    /** Utility class — not meant to be instantiated. */
    private DetectFileUtils() {
    }

    /**
     * Writes the given contents to the file as UTF-8, replacing an existing file.
     *
     * @return the file that was written, or {@code null} when {@code file} is null
     * @throws IOException when writing fails
     */
    public static File writeToFile(final File file, final String contents) throws IOException {
        return writeToFile(file, contents, true);
    }

    private static File writeToFile(final File file, final String contents, final boolean overwrite) throws IOException {
        if (file == null) {
            return null;
        }
        // The original ignored the result of delete(); a failed delete is now logged
        // (the file then survives and the "not being overwritten" branch applies).
        if (overwrite && file.exists() && !file.delete()) {
            logger.warn(String.format("Failed to delete %s before overwriting it", file.getAbsolutePath()));
        }
        if (file.exists()) {
            logger.info(String.format("%s exists and not being overwritten", file.getAbsolutePath()));
        } else {
            FileUtils.write(file, contents, StandardCharsets.UTF_8);
        }
        return file;
    }
}
| apache-2.0 |
dbracewell/mango | src/main/java/com/davidbracewell/function/SerializableDoubleUnaryOperator.java | 1178 | /*
* (c) 2005 David B. Bracewell
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.davidbracewell.function;
import java.io.Serializable;
import java.util.function.DoubleUnaryOperator;
/**
 * A {@link DoubleUnaryOperator} that is also {@link Serializable}, so lambdas
 * of this type can be serialized.
 */
@FunctionalInterface
public interface SerializableDoubleUnaryOperator extends DoubleUnaryOperator, Serializable {
}//END OF SerializableDoubleUnaryOperator
| apache-2.0 |
signed/intellij-community | python/testSrc/com/jetbrains/env/ut/PyUnitTestProcessRunner.java | 2665 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.env.ut;
import com.jetbrains.env.ProcessWithConsoleRunner;
import com.jetbrains.python.sdk.flavors.CPythonSdkFlavor;
import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import com.jetbrains.python.testing.universalTests.PyUniversalUnitTestConfiguration;
import com.jetbrains.python.testing.universalTests.PyUniversalUnitTestFactory;
import com.jetbrains.python.testing.universalTests.TestTargetType;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
/**
 * {@link ProcessWithConsoleRunner} to run unittest
 *
 * @author Ilya.Kazakevich
 */
public class PyUnitTestProcessRunner extends PyScriptTestProcessRunner<PyUniversalUnitTestConfiguration> {
    /**
     * Prefix marking the "script name" as a test pattern to match instead of a
     * path to a single script file (see configurationCreatedAndWillLaunch).
     */
    public static final String TEST_PATTERN_PREFIX = "pattern:";
    public PyUnitTestProcessRunner(@NotNull final String scriptName, final int timesToRerunFailedTests) {
        super(PyUniversalUnitTestFactory.INSTANCE,
              PyUniversalUnitTestConfiguration.class, scriptName, timesToRerunFailedTests);
    }
    @Override
    protected void configurationCreatedAndWillLaunch(@NotNull PyUniversalUnitTestConfiguration configuration) throws IOException {
        super.configurationCreatedAndWillLaunch(configuration);
        if (PythonSdkFlavor.getFlavor(configuration.getSdk()) instanceof CPythonSdkFlavor) {
            // -Werror checks we do not use deprecated API in runners, but only works for cpython (not iron nor jython)
            // and we can't use it for pytest/nose, since it is not our responsibility to check them for deprecation api usage
            // while unit is part of stdlib and does not use deprecated api, so only runners are checked
            configuration.setInterpreterOptions("-Werror");
        }
        // A "pattern:<glob>" script name switches the run target to the current
        // directory and selects tests by the given pattern.
        if (myScriptName.startsWith(TEST_PATTERN_PREFIX)) {
            configuration.getTarget().setTargetType(TestTargetType.PATH);
            configuration.getTarget().setTarget(".");
            configuration.setPattern(myScriptName.substring(TEST_PATTERN_PREFIX.length()));
        }
    }
}
}
| apache-2.0 |
LiuJiJiJi/Spring-Boot-example | java8/src/main/java/www/liujijiji/cn/java8/OptionTest.java | 1163 | package www.liujijiji.cn.java8;
import java.util.Optional;
/**
 * Small demo of {@link Optional}: ofNullable/of, isPresent, orElse and get.
 *
 * @author liuji
 * @date 2018 - 09 - 26 21:15
 */
public class OptionTest {
    public static void main(String args[]) {
        OptionTest java8Tester = new OptionTest();
        Integer value1 = null;
        // Fixed: the Integer(int) constructor is deprecated; valueOf reuses cached
        // instances for small values.
        Integer value2 = Integer.valueOf(10);
        // Optional.ofNullable - accepts a possibly-null argument
        Optional<Integer> a = Optional.ofNullable(value1);
        // Optional.of - throws NullPointerException when given null
        Optional<Integer> b = Optional.of(value2);
        System.out.println(java8Tester.sum(a, b));
    }

    /**
     * Adds the two optionals, treating a missing first value as 0.
     * The second optional must be present, otherwise {@code get()} throws
     * {@link java.util.NoSuchElementException}.
     */
    public Integer sum(Optional<Integer> a, Optional<Integer> b) {
        // Optional.isPresent - whether a value exists
        System.out.println("第一个参数值存在: " + a.isPresent());
        System.out.println("第二个参数值存在: " + b.isPresent());
        // Optional.orElse - return the value when present, otherwise the default
        Integer value1 = a.orElse(Integer.valueOf(0));
        // Optional.get - returns the value; it must be present
        Integer value2 = b.get();
        return value1 + value2;
    }
}
| apache-2.0 |
Gianotti/dashboard | src/main/java/eu/hansolo/tilesfx/Tile.java | 194994 | /*
* Copyright (c) 2016 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.tilesfx;
import eu.hansolo.tilesfx.chart.RadarChart;
import eu.hansolo.tilesfx.chart.RadarChart.Mode;
import eu.hansolo.tilesfx.events.AlarmEvent;
import eu.hansolo.tilesfx.events.AlarmEventListener;
import eu.hansolo.tilesfx.events.SwitchEvent;
import eu.hansolo.tilesfx.events.TileEvent;
import eu.hansolo.tilesfx.events.TileEvent.EventType;
import eu.hansolo.tilesfx.events.TileEventListener;
import eu.hansolo.tilesfx.events.TimeEvent;
import eu.hansolo.tilesfx.events.TimeEvent.TimeEventType;
import eu.hansolo.tilesfx.events.TimeEventListener;
import eu.hansolo.tilesfx.fonts.Fonts;
import eu.hansolo.tilesfx.skins.*;
import eu.hansolo.tilesfx.chart.ChartData;
import eu.hansolo.tilesfx.tools.TimeData;
import eu.hansolo.tilesfx.tools.Helper;
import eu.hansolo.tilesfx.tools.Location;
import eu.hansolo.tilesfx.tools.MovingAverage;
import eu.hansolo.tilesfx.tools.SectionComparator;
import eu.hansolo.tilesfx.tools.TimeSectionComparator;
import eu.hansolo.tilesfx.weather.DarkSky;
import javafx.animation.Animation.Status;
import javafx.animation.Interpolator;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.Timeline;
import javafx.application.Platform;
import javafx.beans.property.*;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.EventHandler;
import javafx.geometry.NodeOrientation;
import javafx.geometry.Orientation;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.chart.Axis;
import javafx.scene.chart.CategoryAxis;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart.Series;
import javafx.scene.control.Control;
import javafx.scene.control.Skin;
import javafx.scene.control.Tooltip;
import javafx.scene.paint.Color;
import javafx.scene.paint.Stop;
import javafx.scene.text.Font;
import javafx.scene.text.TextAlignment;
import javafx.util.Duration;
import java.io.IOException;
import java.io.InputStream;
import java.text.NumberFormat;
import java.time.Instant;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import static eu.hansolo.tilesfx.tools.Helper.clamp;
import static eu.hansolo.tilesfx.tools.MovingAverage.MAX_PERIOD;
/**
* Created by hansolo on 19.12.16.
*/
public class Tile extends Control {
/**
 * Available tile skins; each constant carries the simple class name of the skin
 * that renders it.
 */
public enum SkinType {
    AREA_CHART("AreaChartTileSkin"),
    BAR_CHART("BarChartTileSkin"),
    LINE_CHART("LineChartTileSkin"),
    CLOCK("ClockTileSkin"),
    GAUGE("GaugeTileSkin"),
    // Fixed: the name contained a stray ')' ("HighLowTileSkin)"), inconsistent
    // with every other skin class name.
    HIGH_LOW("HighLowTileSkin"),
    PERCENTAGE("PercentageTileSkin"),
    PLUS_MINUS("PlusMinusTileSkin"),
    SLIDER("SliderTileSkin"),
    SPARK_LINE("SparkLineTileSkin"),
    SWITCH("SwitchTileSkin"),
    WORLDMAP("WorldMapTileSkin"),
    TIMER_CONTROL("TimerControlTileSkin"),
    NUMBER("NumberTileSkin"),
    TEXT("TextTileSkin"),
    WEATHER("WeatherTileSkin"),
    TIME("TimeTileSkin"),
    CUSTOM("CustomTileSkin"),
    LEADER_BOARD("LeaderBoardTileSkin"),
    MAP("MapTileSkin"),
    RADIAL_CHART("RadialChart"),
    DONUT_CHART("DonutChart"),
    CIRCULAR_PROGRESS("CircularProgress"),
    STOCK("Stock"),
    GAUGE_SPARK_LINE("GaugeSparkLine"),
    SMOOTH_AREA_CHART("SmoothAreaChartTileSkin"),
    RADAR_CHART("RadarChart"),
    COUNTRY("Country");

    /** Simple name of the skin class implementing this skin type. */
    public final String CLASS_NAME;

    SkinType(final String CLASS_NAME) {
        this.CLASS_NAME = CLASS_NAME;
    }
}
/**
 * Relative text sizes used by tile skins; {@code factor} is the fraction of the
 * tile size used as font size.
 */
public enum TextSize {
    NORMAL(0.06),
    BIGGER(0.08);

    /** Size factor relative to the tile dimensions. */
    public final double factor;

    TextSize(final double sizeFactor) {
        factor = sizeFactor;
    }
}
/** Predefined color palette for tiles. */
public enum TileColor {
    GRAY(Color.rgb(139,144,146), "GRAY"),
    RED(Color.rgb(229, 80, 76), "RED"),
    LIGHT_RED(Color.rgb(255, 84, 56), "LIGHT_RED"),
    GREEN(Color.rgb(143, 198, 94), "GREEN"),
    LIGHT_GREEN(Color.rgb(132, 228, 50), "LIGHT_GREEN"),
    BLUE(Color.rgb(55, 179, 252), "BLUE"),
    DARK_BLUE(Color.rgb(55, 94, 252), "DARK_BLUE"),
    ORANGE(Color.rgb(237, 162, 57), "ORANGE"),
    YELLOW_ORANGE(Color.rgb(229, 198, 76), "YELLOW_ORANGE"),
    YELLOW(Color.rgb(229, 229, 76), "YELLOW"),
    MAGENTA(Color.rgb(198, 75, 232), "MAGENTA");

    // The actual paint value of this palette entry.
    public final Color color;
    // Identifier of the palette entry — presumably used to build CSS style class
    // names; TODO confirm against the skins.
    public final String styleName;

    TileColor(final Color COLOR, final String STYLE_NAME) {
        color = COLOR;
        styleName = STYLE_NAME;
    }
}
/** Map tile styles selectable for map based skins. */
public enum MapProvider {
    BW("blackwhite"),
    STREET("street"),
    BRIGHT("bright"),
    DARK("dark"),
    SAT("sat"),
    TOPO("topo");

    /** Identifier of the map style. */
    public final String name;

    MapProvider(final String styleName) {
        name = styleName;
    }
}
public static final Color BACKGROUND = Color.rgb(42, 42, 42);
public static final Color FOREGROUND = Color.rgb(223, 223, 223);
public static final Color GRAY = TileColor.GRAY.color;
public static final Color RED = TileColor.RED.color;
public static final Color LIGHT_RED = TileColor.LIGHT_RED.color;
public static final Color GREEN = TileColor.GREEN.color;
public static final Color LIGHT_GREEN = TileColor.LIGHT_GREEN.color;
public static final Color BLUE = TileColor.BLUE.color;
public static final Color DARK_BLUE = TileColor.DARK_BLUE.color;
public static final Color ORANGE = TileColor.ORANGE.color;
public static final Color YELLOW_ORANGE = TileColor.YELLOW_ORANGE.color;
public static final Color YELLOW = TileColor.YELLOW.color;
public static final Color MAGENTA = TileColor.MAGENTA.color;
public static final int SHORT_INTERVAL = 20;
public static final int LONG_INTERVAL = 1000;
private static final int MAX_NO_OF_DECIMALS = 3;
private static final String COUNTRY_PROPERTIES = "eu/hansolo/tilesfx/lowres.properties";
private final TileEvent EXCEEDED_EVENT = new TileEvent(EventType.THRESHOLD_EXCEEDED);
private final TileEvent UNDERRUN_EVENT = new TileEvent(EventType.THRESHOLD_UNDERRUN);
private final TileEvent MAX_VALUE_EXCEEDED = new TileEvent(EventType.MAX_VALUE_EXCEEDED);
private final TileEvent MIN_VALUE_UNDERRUN = new TileEvent(EventType.MIN_VALUE_UNDERRUN);
private final TileEvent VALUE_IN_RANGE = new TileEvent(EventType.VALUE_IN_RANGE);
private final TileEvent RECALC_EVENT = new TileEvent(EventType.RECALC);
private final TileEvent REDRAW_EVENT = new TileEvent(EventType.REDRAW);
private final TileEvent RESIZE_EVENT = new TileEvent(EventType.RESIZE);
private final TileEvent VISIBILITY_EVENT = new TileEvent(EventType.VISIBILITY);
private final TileEvent SECTION_EVENT = new TileEvent(EventType.SECTION);
private final TileEvent SERIES_EVENT = new TileEvent(EventType.SERIES);
private final TileEvent DATA_EVENT = new TileEvent(EventType.DATA);
private final TileEvent ALERT_EVENT = new TileEvent(EventType.ALERT);
private final TileEvent VALUE_EVENT = new TileEvent(EventType.VALUE);
private final TileEvent FINISHED_EVENT = new TileEvent(EventType.FINISHED);
private final TileEvent GRAPHIC_EVENT = new TileEvent(EventType.GRAPHIC);
private final TileEvent AVERAGING_EVENT = new TileEvent(EventType.AVERAGING);
private final TileEvent LOCATION_EVENT = new TileEvent(EventType.LOCATION);
private final TileEvent TRACK_EVENT = new TileEvent(EventType.TRACK);
private final TileEvent MAP_PROVIDER_EVENT = new TileEvent(EventType.MAP_PROVIDER);
// Tile events
private List<TileEventListener> listenerList = new CopyOnWriteArrayList<>();
private List<AlarmEventListener> alarmListenerList = new CopyOnWriteArrayList<>();
private List<TimeEventListener> timeEventListenerList = new CopyOnWriteArrayList<>();
// Data related
private DoubleProperty value;
private DoubleProperty oldValue; // last value
private DoubleProperty currentValue;
private DoubleProperty formerValue; // last current value
private double _minValue;
private DoubleProperty minValue;
private double _maxValue;
private DoubleProperty maxValue;
private double _range;
private DoubleProperty range;
private double _threshold;
private DoubleProperty threshold;
private double _referenceValue;
private DoubleProperty referenceValue;
private boolean _autoReferenceValue;
private BooleanProperty autoReferenceValue;
private String _title;
private StringProperty title;
private TextAlignment _titleAlignment;
private ObjectProperty<TextAlignment> titleAlignment;
private String _description;
private StringProperty description;
private Pos _descriptionAlignment;
private ObjectProperty<Pos> descriptionAlignment;
private String _unit;
private StringProperty unit;
private String _text;
private StringProperty text;
private TextAlignment _textAlignment;
private ObjectProperty<TextAlignment> textAlignment;
private boolean _selected;
private BooleanProperty selected;
private boolean _averagingEnabled;
private BooleanProperty averagingEnabled;
private int _averagingPeriod;
private IntegerProperty averagingPeriod;
private MovingAverage movingAverage;
private ObservableList<Section> sections;
private ObservableList<Series<String, Number>> series;
private Properties countryProperties;
private Map<String, List<CountryPath>> countryPaths;
private List<Stop> gradientStops;
private ObjectProperty<ZonedDateTime> time;
private LongProperty currentTime;
private ZoneId zoneId;
private int updateInterval;
private ObservableList<TimeSection> timeSections;
private LocalTime _duration;
private ObjectProperty<LocalTime> duration;
private ObservableList<BarChartItem> barChartItems;
private List<LeaderBoardItem> leaderBoardItems;
private ObjectProperty<Node> graphic;
private Location _currentLocation;
private ObjectProperty<Location> currentLocation;
private ObservableList<Location> poiList;
private ObservableList<ChartData> chartDataList;
private List<Location> track;
private TileColor _trackColor;
private ObjectProperty<TileColor> trackColor;
private MapProvider _mapProvider;
private ObjectProperty<MapProvider> mapProvider;
// UI related
private SkinType skinType;
private TextSize _textSize;
private ObjectProperty<TextSize> textSize;
private boolean _roundedCorners;
private BooleanProperty roundedCorners;
private boolean _startFromZero;
private BooleanProperty startFromZero;
private boolean _returnToZero;
private BooleanProperty returnToZero;
private double _minMeasuredValue;
private DoubleProperty minMeasuredValue;
private double _maxMeasuredValue;
private DoubleProperty maxMeasuredValue;
private boolean _minMeasuredValueVisible;
private BooleanProperty minMeasuredValueVisible;
private boolean _maxMeasuredValueVisible;
private BooleanProperty maxMeasuredValueVisible;
private boolean _oldValueVisible;
private BooleanProperty oldValueVisible;
private boolean _valueVisible;
private BooleanProperty valueVisible;
private Color _foregroundColor;
private ObjectProperty<Color> foregroundColor;
private Color _backgroundColor;
private ObjectProperty<Color> backgroundColor;
private Color _borderColor;
private ObjectProperty<Color> borderColor;
private double _borderWidth;
private DoubleProperty borderWidth;
private Color _activeColor;
private ObjectProperty<Color> activeColor;
private Color _knobColor;
private ObjectProperty<Color> knobColor;
private boolean _animated;
private BooleanProperty animated;
private long animationDuration;
private double _startAngle;
private DoubleProperty startAngle;
private double _angleRange;
private DoubleProperty angleRange;
private double _angleStep;
private DoubleProperty angleStep;
private boolean _autoScale;
private BooleanProperty autoScale;
private boolean _shadowsEnabled;
private BooleanProperty shadowsEnabled;
private Locale _locale;
private ObjectProperty<Locale> locale;
    // Number formatting / precision (backing field + lazily created property pattern
    // is used for every pair below: "_x" holds the value until "x" is created).
    private NumberFormat _numberFormat;
    private ObjectProperty<NumberFormat> numberFormat;
    private int _decimals;
    private IntegerProperty decimals;
    private int _tickLabelDecimals;
    private IntegerProperty tickLabelDecimals;
    // Colors of the individual tile elements
    private Color _needleColor;
    private ObjectProperty<Color> needleColor;
    private Color _barColor;
    private ObjectProperty<Color> barColor;
    private Color _barBackgroundColor;
    private ObjectProperty<Color> barBackgroundColor;
    private Color _titleColor;
    private ObjectProperty<Color> titleColor;
    private Color _descriptionColor;
    private ObjectProperty<Color> descriptionColor;
    private Color _unitColor;
    private ObjectProperty<Color> unitColor;
    private Color _valueColor;
    private ObjectProperty<Color> valueColor;
    private Color _thresholdColor;
    private ObjectProperty<Color> thresholdColor;
    // Behavior / visibility flags
    private boolean _checkSectionsForValue;
    private BooleanProperty checkSectionsForValue;
    private boolean _checkThreshold;
    private BooleanProperty checkThreshold;
    private boolean _innerShadowEnabled;
    private BooleanProperty innerShadowEnabled;
    private boolean _thresholdVisible;
    private BooleanProperty thresholdVisible;
    private boolean _averageVisible;
    private BooleanProperty averageVisible;
    private boolean _sectionsVisible;
    private BooleanProperty sectionsVisible;
    private boolean _sectionsAlwaysVisible;
    private BooleanProperty sectionsAlwaysVisible;
    private boolean _sectionTextVisible;
    private BooleanProperty sectionTextVisible;
    private boolean _sectionIconsVisible;
    private BooleanProperty sectionIconsVisible;
    private boolean _highlightSections;
    private BooleanProperty highlightSections;
    private Orientation _orientation;
    private ObjectProperty<Orientation> orientation;
    private boolean _keepAspect;
    private BooleanProperty keepAspect;
    private boolean _customFontEnabled;
    private BooleanProperty customFontEnabled;
    private Font _customFont;
    private ObjectProperty<Font> customFont;
    private boolean _alert;
    private BooleanProperty alert;
    private String _alertMessage;
    private StringProperty alertMessage;
    private boolean _smoothing;
    private BooleanProperty smoothing;
    // Animation bookkeeping (originals remember the values before auto-scaling/clamping)
    private double increment;
    private double originalMinValue;
    private double originalMaxValue;
    private double originalThreshold;
    private Timeline timeline;
    private Instant lastCall;
    private boolean withinSpeedLimit;
    // Clock related state (used by time based skins)
    private boolean _discreteSeconds;
    private BooleanProperty discreteSeconds;
    private boolean _discreteMinutes;
    private BooleanProperty discreteMinutes;
    private boolean _discreteHours;
    private BooleanProperty discreteHours;
    private boolean _secondsVisible;
    private BooleanProperty secondsVisible;
    private boolean _textVisible;
    private BooleanProperty textVisible;
    private boolean _dateVisible;
    private BooleanProperty dateVisible;
    private boolean _running;
    private BooleanProperty running;
    private Color _textColor;
    private ObjectProperty<Color> textColor;
    private Color _dateColor;
    private ObjectProperty<Color> dateColor;
    private Color _hourTickMarkColor;
    private ObjectProperty<Color> hourTickMarkColor;
    private Color _minuteTickMarkColor;
    private ObjectProperty<Color> minuteTickMarkColor;
    private Color _alarmColor;
    private ObjectProperty<Color> alarmColor;
    private boolean _hourTickMarksVisible;
    private BooleanProperty hourTickMarksVisible;
    private boolean _minuteTickMarksVisible;
    private BooleanProperty minuteTickMarksVisible;
    private Color _hourColor;
    private ObjectProperty<Color> hourColor;
    private Color _minuteColor;
    private ObjectProperty<Color> minuteColor;
    private Color _secondColor;
    private ObjectProperty<Color> secondColor;
    // Alarms (clock skins)
    private boolean _alarmsEnabled;
    private BooleanProperty alarmsEnabled;
    private boolean _alarmsVisible;
    private BooleanProperty alarmsVisible;
    private ObservableList<Alarm> alarms;
    private List<Alarm> alarmsToRemove;
    // Chart / misc skin support
    private boolean _strokeWithGradient;
    private BooleanProperty strokeWithGradient;
    private DarkSky darkSky;
    private String _tooltipText;
    private StringProperty tooltipText;
    private Tooltip tooltip;
    private Axis _xAxis;
    private ObjectProperty<Axis> xAxis;
    private Axis _yAxis;
    private ObjectProperty<Axis> yAxis;
    private RadarChart.Mode _radarChartMode;
    private ObjectProperty<RadarChart.Mode> radarChartMode;
    private Color _chartGridColor;
    private ObjectProperty<Color> chartGridColor;
    private Country _country;
    private ObjectProperty<Country> country;
    // Periodic tick infrastructure shared by all running clock tiles
    private volatile ScheduledFuture<?> periodicTickTask;
    private static ScheduledExecutorService periodicTickExecutorService;
    // ******************** Constructors **************************************
    /** Creates a Tile with the GAUGE skin, initialized to the current time. */
    public Tile() {
        this(SkinType.GAUGE, ZonedDateTime.now());
    }
    /**
     * Creates a Tile with the given skin, initialized to the current time.
     *
     * @param SKIN the skin type the tile should use
     */
    public Tile(final SkinType SKIN) {
        this(SKIN, ZonedDateTime.now());
    }
    /**
     * Creates a Tile with the given skin and initial time.
     *
     * @param SKIN_TYPE the skin type the tile should use
     * @param TIME      the initial time used by time based skins
     */
    public Tile(final SkinType SKIN_TYPE, final ZonedDateTime TIME) {
        setNodeOrientation(NodeOrientation.LEFT_TO_RIGHT);
        skinType = SKIN_TYPE;
        getStyleClass().add("tile");
        init(TIME);
        registerListeners();
    }
    // ******************** Initialization ************************************
    /**
     * Initializes all backing fields to their defaults and wires up the
     * animated value/time properties. Called once from the constructor.
     *
     * @param TIME the initial time for the time property
     */
    private void init(final ZonedDateTime TIME) {
        _minValue = 0;
        _maxValue = 100;
        // The target value; when animated, currentValue is interpolated towards it.
        value = new DoublePropertyBase(_minValue) {
            private void update() {
                final double VALUE = get();
                // Only animate if the previous animation had time to finish.
                withinSpeedLimit = !(Instant.now().minusMillis(getAnimationDuration()).isBefore(lastCall));
                lastCall = Instant.now();
                if (isAnimated() && withinSpeedLimit) {
                    long animationDuration = isReturnToZero() ? (long) (0.2 * getAnimationDuration()) : getAnimationDuration();
                    timeline.stop();
                    final KeyValue KEY_VALUE = new KeyValue(currentValue, VALUE, Interpolator.SPLINE(0.5, 0.4, 0.4, 1.0));
                    final KeyFrame KEY_FRAME = new KeyFrame(Duration.millis(animationDuration), KEY_VALUE);
                    timeline.getKeyFrames().setAll(KEY_FRAME);
                    timeline.play();
                } else {
                    currentValue.set(VALUE);
                    fireTileEvent(FINISHED_EVENT);
                }
                if (isAveragingEnabled()) { movingAverage.addData(new TimeData(VALUE)); }
            }
            @Override protected void invalidated() { update(); }
            @Override public void set(final double VALUE) {
                // ATTENTION There is an optimization in the properties so that properties
                // only get invalid if the new value is different from the old value.
                // Calling update() here keeps the animation running even when the
                // same value is set twice in a row.
                if (Helper.equals(VALUE, getFormerValue())) { update(); }
                super.set(VALUE);
                fireTileEvent(VALUE_EVENT);
            }
            @Override public Object getBean() { return Tile.this; }
            @Override public String getName() { return "value"; }
        };
        oldValue = new SimpleDoubleProperty(Tile.this, "oldValue", value.get());
        // The animated value; fires threshold and min/max-measured bookkeeping on change.
        currentValue = new DoublePropertyBase(value.get()) {
            @Override protected void invalidated() {
                final double VALUE = get();
                if (isCheckThreshold()) {
                    double thrshld = getThreshold();
                    if (formerValue.get() < thrshld && VALUE > thrshld) {
                        fireTileEvent(EXCEEDED_EVENT);
                    } else if (formerValue.get() > thrshld && VALUE < thrshld) {
                        fireTileEvent(UNDERRUN_EVENT);
                    }
                }
                if (VALUE < getMinMeasuredValue()) {
                    setMinMeasuredValue(VALUE);
                } else if (VALUE > getMaxMeasuredValue()) {
                    setMaxMeasuredValue(VALUE);
                }
                formerValue.set(VALUE);
            }
            @Override public void set(final double VALUE) { super.set(VALUE); }
            @Override public Object getBean() { return Tile.this; }
            @Override public String getName() { return "currentValue";}
        };
        formerValue = new SimpleDoubleProperty(Tile.this, "formerValue", value.get());
        _range = _maxValue - _minValue;
        _threshold = _maxValue;
        _referenceValue = _minValue;
        _autoReferenceValue = true;
        // Time property for clock skins; animates currentTime unless the clock runs.
        time = new ObjectPropertyBase<ZonedDateTime>(TIME) {
            @Override protected void invalidated() {
                if (!isRunning() && isAnimated()) {
                    long animationDuration = getAnimationDuration();
                    timeline.stop();
                    // NOTE(review): this uses the captured constructor argument TIME rather
                    // than get(), so the animation target is always the initial time —
                    // verify against upstream whether get() was intended here.
                    final KeyValue KEY_VALUE = new KeyValue(currentTime, TIME.toEpochSecond());
                    final KeyFrame KEY_FRAME = new KeyFrame(javafx.util.Duration.millis(animationDuration), KEY_VALUE);
                    timeline.getKeyFrames().setAll(KEY_FRAME);
                    timeline.setOnFinished(e -> fireTileEvent(FINISHED_EVENT));
                    timeline.play();
                } else {
                    currentTime.set(TIME.toEpochSecond());
                    fireTileEvent(FINISHED_EVENT);
                }
            }
            @Override public Object getBean() { return Tile.this; }
            @Override public String getName() { return "time"; }
        };
        currentTime = new LongPropertyBase(time.get().toEpochSecond()) {
            @Override protected void invalidated() {}
            @Override public Object getBean() { return Tile.this; }
            @Override public String getName() { return "currentTime"; }
        };
        zoneId = time.get().getZone();
        // Text related defaults
        _title = "";
        _titleAlignment = TextAlignment.LEFT;
        _description = "";
        _descriptionAlignment = Pos.TOP_RIGHT;
        _unit = "";
        _selected = false;
        _text = "";
        _textAlignment = TextAlignment.LEFT;
        _averagingEnabled = false;
        _averagingPeriod = 10;
        _duration = LocalTime.of(1, 0);
        _currentLocation = new Location(0, 0);
        // Collections used by the various skins
        poiList = FXCollections.observableArrayList();
        chartDataList = FXCollections.observableArrayList();
        movingAverage = new MovingAverage(_averagingPeriod);
        sections = FXCollections.observableArrayList();
        series = FXCollections.observableArrayList();
        timeSections = FXCollections.observableArrayList();
        alarms = FXCollections.observableArrayList();
        alarmsToRemove = new ArrayList<>();
        barChartItems = FXCollections.observableArrayList();
        track = new ArrayList<>();
        _trackColor = TileColor.BLUE;
        _mapProvider = MapProvider.BW;
        leaderBoardItems = new ArrayList<>();
        gradientStops = new ArrayList<>(4);
        _textSize = TextSize.NORMAL;
        _roundedCorners = true;
        // Scale / measurement defaults (min/max measured start inverted so the
        // first real value initializes them)
        _startFromZero = false;
        _returnToZero = false;
        _minMeasuredValue = _maxValue;
        _maxMeasuredValue = _minValue;
        _minMeasuredValueVisible = false;
        _maxMeasuredValueVisible = false;
        _oldValueVisible = false;
        _valueVisible = true;
        // Color defaults
        _foregroundColor = FOREGROUND;
        _backgroundColor = BACKGROUND;
        _borderColor = Color.TRANSPARENT;
        _borderWidth = 1;
        _knobColor = FOREGROUND;
        _activeColor = BLUE;
        _animated = false;
        animationDuration = 800;
        _startAngle = 0;
        _angleRange = 180;
        _angleStep = _angleRange / _range;
        _autoScale = true;
        _shadowsEnabled = false;
        _locale = Locale.US;
        _numberFormat = NumberFormat.getInstance(_locale);
        _decimals = 1;
        _tickLabelDecimals = 1;
        _needleColor = FOREGROUND;
        _hourColor = FOREGROUND;
        _minuteColor = FOREGROUND;
        _secondColor = FOREGROUND;
        _barColor = BLUE;
        _barBackgroundColor = BACKGROUND;
        _titleColor = FOREGROUND;
        _descriptionColor = FOREGROUND;
        _unitColor = FOREGROUND;
        _valueColor = FOREGROUND;
        _textColor = FOREGROUND;
        _dateColor = FOREGROUND;
        _hourTickMarkColor = FOREGROUND;
        _minuteTickMarkColor = FOREGROUND;
        _alarmColor = FOREGROUND;
        _thresholdColor = RED;
        // Behavior flag defaults
        _checkSectionsForValue = false;
        _checkThreshold = false;
        _innerShadowEnabled = false;
        _thresholdVisible = false;
        _averageVisible = false;
        _sectionsVisible = false;
        _sectionsAlwaysVisible = false;
        _sectionTextVisible = false;
        _sectionIconsVisible = false;
        _highlightSections = false;
        _orientation = Orientation.HORIZONTAL;
        _keepAspect = true;
        _customFontEnabled = false;
        _customFont = Fonts.latoRegular(12);
        _alert = false;
        _alertMessage = "";
        _smoothing = false;
        // Clock defaults
        _secondsVisible = false;
        _discreteSeconds = true;
        _discreteMinutes = true;
        _discreteHours = false;
        _textVisible = true;
        _dateVisible = false;
        _running = false;
        _hourTickMarksVisible = true;
        _minuteTickMarksVisible = true;
        _alarmsEnabled = false;
        _alarmsVisible = false;
        _strokeWithGradient = false;
        tooltip = new Tooltip(null);
        _xAxis = new CategoryAxis();
        _yAxis = new NumberAxis();
        _radarChartMode = Mode.POLYGON;
        _chartGridColor = Tile.GRAY;
        _country = Country.DE;
        updateInterval = LONG_INTERVAL;
        increment = 1;
        // "Originals" act as sentinels until the user sets explicit limits.
        originalMinValue = -Double.MAX_VALUE;
        originalMaxValue = Double.MAX_VALUE;
        originalThreshold = Double.MAX_VALUE;
        lastCall = Instant.now();
        timeline = new Timeline();
        timeline.setOnFinished(e -> {
            // In returnToZero mode, animate back to 0 after reaching the target.
            if (isReturnToZero() && !Helper.equals(currentValue.get(), 0.0)) {
                final KeyValue KEY_VALUE2 = new KeyValue(value, 0, Interpolator.SPLINE(0.5, 0.4, 0.4, 1.0));
                final KeyFrame KEY_FRAME2 = new KeyFrame(Duration.millis((long) (0.8 * getAnimationDuration())), KEY_VALUE2);
                timeline.getKeyFrames().setAll(KEY_FRAME2);
                timeline.play();
            }
            fireTileEvent(FINISHED_EVENT);
        });
    }
private void registerListeners() {
disabledProperty().addListener(o -> setOpacity(isDisabled() ? 0.4 : 1));
valueProperty().addListener((o, ov, nv) -> oldValue.set(ov.doubleValue()));
currentValueProperty().addListener(o -> {
double currentValue = getCurrentValue();
if (currentValue > getMaxValue()) {
fireTileEvent(MAX_VALUE_EXCEEDED);
} else if (currentValue < getMinValue()) {
fireTileEvent(MIN_VALUE_UNDERRUN);
} else {
fireTileEvent(VALUE_IN_RANGE);
}
});
}
    // ******************** Data related methods ******************************
    /**
     * Returns the value of the Tile. If animated == true this value represents
     * the value at the end of the animation. Where currentValue represents the
     * current value during the animation.
     *
     * @return the value of the gauge
     */
    public double getValue() { return value.get(); }
    /**
     * Sets the value of the Tile to the given double. If animated == true this
     * value will be the end value after the animation is finished.
     *
     * @param VALUE the new target value
     */
    public void setValue(final double VALUE) { value.set(VALUE); }
    /** @return the (writable) value property */
    public DoubleProperty valueProperty() { return value; }
    /**
     * Returns the current value of the Tile. If animated == true this value
     * represents the current value during the animation. Otherwise it's returns
     * the same value as the getValue() method.
     *
     * @return the current value of the gauge
     */
    public double getCurrentValue() { return currentValue.get(); }
    /** @return the read-only current value property */
    public ReadOnlyDoubleProperty currentValueProperty() { return currentValue; }
    /**
     * Returns the last value of the Tile. This will not be the last value during
     * an animation but the final last value after the animation was finished.
     * If you need to get the last value during an animation you should use
     * formerValue instead.
     *
     * @return the last value of the gauge
     */
    public double getOldValue() { return oldValue.get(); }
    /** @return the read-only old value property */
    public ReadOnlyDoubleProperty oldValueProperty() { return oldValue; }
    /**
     * Returns the last value of the Tile. This will be the last value during
     * an animation.
     * If you need to get the last value after the animation is finished or if
     * you don't use animation at all (when using real values) you should use
     * oldValue instead.
     *
     * @return the last value of the gauge during an animation
     */
    public double getFormerValue() { return formerValue.get(); }
    /** @return the read-only former value property */
    public ReadOnlyDoubleProperty formerValueProperty() { return formerValue; }
    /**
     * Returns the minimum value of the scale. This value represents the lower
     * limit of the visible gauge values.
     *
     * @return the minimum value of the gauge scale
     */
    public double getMinValue() { return null == minValue ? _minValue : minValue.get(); }
    /**
     * Sets the minimum value of the gauge scale to the given value.
     * Side effects: may raise maxValue, recalculates the range, clamps the
     * current value and threshold, and fires a RECALC_EVENT.
     *
     * @param VALUE the new minimum value
     */
    public void setMinValue(final double VALUE) {
        // Finish any running animation first so the clamp below sees final values.
        if (Status.RUNNING == timeline.getStatus()) { timeline.jumpTo(Duration.ONE); }
        if (null == minValue) {
            if (VALUE > getMaxValue()) { setMaxValue(VALUE); }
            _minValue = clamp(-Double.MAX_VALUE, getMaxValue(), VALUE);
            setRange(getMaxValue() - _minValue);
            // Remember the first explicitly set minimum (sentinel check).
            if (Helper.equals(originalMinValue, -Double.MAX_VALUE)) originalMinValue = _minValue;
            if (isStartFromZero() && _minValue < 0) setValue(0);
            // NOTE(review): this branch uses Helper.equals while the property variant
            // below (and the maxValue counterpart) use lessThan/biggerThan — verify
            // which comparison is intended.
            if (Helper.equals(originalThreshold, getThreshold())) { setThreshold(clamp(_minValue, getMaxValue(), originalThreshold)); }
            fireTileEvent(RECALC_EVENT);
            if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
        } else {
            minValue.set(VALUE);
        }
    }
    /** @return the lazily created minValue property */
    public DoubleProperty minValueProperty() {
        if (null == minValue) {
            minValue = new DoublePropertyBase(_minValue) {
                @Override protected void invalidated() {
                    final double VALUE = get();
                    if (VALUE > getMaxValue()) setMaxValue(VALUE);
                    setRange(getMaxValue() - VALUE);
                    if (Helper.equals(originalMinValue, -Double.MAX_VALUE)) originalMinValue = VALUE;
                    // NOTE(review): reads the stale backing field _minValue instead of
                    // VALUE here — once the property exists _minValue is never updated;
                    // confirm whether VALUE was intended.
                    if (isStartFromZero() && _minValue < 0) Tile.this.setValue(0);
                    if (Helper.lessThan(originalThreshold, getThreshold())) { setThreshold(clamp(VALUE, getMaxValue(), originalThreshold)); }
                    fireTileEvent(RECALC_EVENT);
                    if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "minValue";}
            };
        }
        return minValue;
    }
    /**
     * Returns the maximum value of the scale. This value represents the upper limit
     * of the visible gauge values.
     *
     * @return the maximum value of the gauge scale
     */
    public double getMaxValue() { return null == maxValue ? _maxValue : maxValue.get(); }
    /**
     * Sets the maximum value of the gauge scale to the given value.
     * Side effects: may lower minValue, recalculates the range, clamps the
     * current value and threshold, and fires a RECALC_EVENT.
     *
     * @param VALUE the new maximum value
     */
    public void setMaxValue(final double VALUE) {
        // Finish any running animation first so the clamp below sees final values.
        if (Status.RUNNING == timeline.getStatus()) { timeline.jumpTo(Duration.ONE); }
        if (null == maxValue) {
            if (VALUE < getMinValue()) { setMinValue(VALUE); }
            _maxValue = clamp(getMinValue(), Double.MAX_VALUE, VALUE);
            setRange(_maxValue - getMinValue());
            // Remember the first explicitly set maximum (sentinel check).
            if (Helper.equals(originalMaxValue, Double.MAX_VALUE)) originalMaxValue = _maxValue;
            if (Helper.biggerThan(originalThreshold, getThreshold())) { setThreshold(clamp(getMinValue(), _maxValue, originalThreshold)); }
            fireTileEvent(RECALC_EVENT);
            if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
        } else {
            maxValue.set(VALUE);
        }
    }
    /** @return the lazily created maxValue property */
    public DoubleProperty maxValueProperty() {
        if (null == maxValue) {
            maxValue = new DoublePropertyBase(_maxValue) {
                @Override protected void invalidated() {
                    final double VALUE = get();
                    if (VALUE < getMinValue()) setMinValue(VALUE);
                    setRange(VALUE - getMinValue());
                    if (Helper.equals(originalMaxValue, Double.MAX_VALUE)) originalMaxValue = VALUE;
                    if (Helper.biggerThan(originalThreshold, getThreshold())) { setThreshold(clamp(getMinValue(), VALUE, originalThreshold)); }
                    fireTileEvent(RECALC_EVENT);
                    if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "maxValue"; }
            };
        }
        return maxValue;
    }
    /**
     * Always returns the range of the gauge scale (maxValue - minValue).
     * This value will be automatically calculated each time
     * the min- or maxValue will change.
     *
     * @return the range of the gauge scale
     */
    public double getRange() { return null == range ? _range : range.get(); }
    /**
     * This is a private method that sets the range to the given value
     * which is always (maxValue - minValue). Also derives the angleStep
     * used by radial skins from the new range.
     *
     * @param RANGE the new range (maxValue - minValue)
     */
    private void setRange(final double RANGE) {
        if (null == range) {
            _range = RANGE;
            setAngleStep(getAngleRange() / RANGE);
        } else {
            range.set(RANGE);
        }
    }
    /** @return the lazily created, read-only range property */
    public ReadOnlyDoubleProperty rangeProperty() {
        if (null == range) {
            range = new DoublePropertyBase((getMaxValue() - getMinValue())) {
                @Override protected void invalidated() { setAngleStep(getAngleRange() / get()); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "range"; }
            };
        }
        return range;
    }
/**
* Returns the threshold value that can be used to visualize a
* threshold value on the scale. There are also events that will
* be fired if the threshold was exceeded or underrun.
* The value will be clamped to range of the gauge.
*
* @return the threshold value of the gauge
*/
public double getThreshold() { return null == threshold ? _threshold : threshold.get(); }
/**
* Sets the threshold of the gauge to the given value. The value
* will be clamped to the range of the gauge.
*
* @param THRESHOLD
*/
public void setThreshold(final double THRESHOLD) {
originalThreshold = THRESHOLD;
if (null == threshold) {
_threshold = clamp(getMinValue(), getMaxValue(), THRESHOLD);
fireTileEvent(RESIZE_EVENT);
} else {
threshold.set(THRESHOLD);
}
}
public DoubleProperty tresholdProperty() {
if (null == threshold) {
threshold = new DoublePropertyBase(_threshold) {
@Override protected void invalidated() {
final double THRESHOLD = get();
if (THRESHOLD < getMinValue() || THRESHOLD > getMaxValue()) set(clamp(getMinValue(), getMaxValue(), THRESHOLD));
fireTileEvent(RESIZE_EVENT);
}
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "threshold"; }
};
}
return threshold;
}
    /**
     * Returns the reference value that will be used in the HighLowTileSkin
     * to visualize the increase or decrease of the current value compared
     * to the reference value;
     * @return the reference value that will be used in the HighLowTileSkin
     */
    public double getReferenceValue() { return null == referenceValue ? _referenceValue : referenceValue.get(); }
    /**
     * Defines the reference value that will be used in the HighLowTileSkin.
     * Fires a REDRAW_EVENT.
     * @param VALUE the new reference value
     */
    public void setReferenceValue(final double VALUE) {
        if (null == referenceValue) {
            _referenceValue = VALUE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            referenceValue.set(VALUE);
        }
    }
    /** @return the lazily created referenceValue property */
    public DoubleProperty referenceValueProperty() {
        if (null == referenceValue) {
            referenceValue = new DoublePropertyBase(_referenceValue) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "referenceValue"; }
            };
        }
        return referenceValue;
    }
    /**
     * Returns true if the reference value for the StockTileSkin will be calculated automatically
     * @return true if the reference value for the StockTileSkin will be calculated automatically
     */
    public boolean isAutoReferenceValue() { return null == autoReferenceValue ? _autoReferenceValue : autoReferenceValue.get(); }
    /**
     * Defines if the reference value for the StockTileSkin should be calculated automatically
     * @param AUTO_REFERENCE_VALUE true to calculate the reference value automatically
     */
    public void setAutoReferenceValue(final boolean AUTO_REFERENCE_VALUE) {
        if (null == autoReferenceValue) {
            _autoReferenceValue = AUTO_REFERENCE_VALUE;
        } else {
            autoReferenceValue.set(AUTO_REFERENCE_VALUE);
        }
    }
    /** @return the lazily created autoReferenceValue property */
    public BooleanProperty autoReferenceValueProperty() {
        if (null == autoReferenceValue) {
            autoReferenceValue = new BooleanPropertyBase(_autoReferenceValue) {
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "autoReferenceValue"; }
            };
        }
        return autoReferenceValue;
    }
    /**
     * Returns the title of the gauge. This title will usually
     * only be visible if it is not empty.
     *
     * @return the title of the gauge
     */
    public String getTitle() { return null == title ? _title : title.get(); }
    /**
     * Sets the title of the gauge. This title will only be visible
     * if it is not empty. Fires a VISIBILITY_EVENT.
     *
     * @param TITLE the new title text
     */
    public void setTitle(final String TITLE) {
        if (null == title) {
            _title = TITLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            title.set(TITLE);
        }
    }
    /** @return the lazily created title property */
    public StringProperty titleProperty() {
        if (null == title) {
            title = new StringPropertyBase(_title) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "title"; }
            };
            // Backing field is no longer the source of truth once the property exists.
            _title = null;
        }
        return title;
    }
    /**
     * Returns the alignment that will be used to align the title
     * in the Tile. Keep in mind that this property will not be used
     * by every skin
     * @return the alignment of the title
     */
    public TextAlignment getTitleAlignment() { return null == titleAlignment ? _titleAlignment : titleAlignment.get(); }
    /**
     * Defines the alignment that will be used to align the title
     * in the Tile. Keep in mind that this property will not be used
     * by every skin. Fires a RESIZE_EVENT.
     * @param ALIGNMENT the new title alignment
     */
    public void setTitleAlignment(final TextAlignment ALIGNMENT) {
        if (null == titleAlignment) {
            _titleAlignment = ALIGNMENT;
            fireTileEvent(RESIZE_EVENT);
        } else {
            titleAlignment.set(ALIGNMENT);
        }
    }
    /** @return the lazily created titleAlignment property */
    public ObjectProperty<TextAlignment> titleAlignmentProperty() {
        if (null == titleAlignment) {
            titleAlignment = new ObjectPropertyBase<TextAlignment>(_titleAlignment) {
                @Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "titleAlignment"; }
            };
            _titleAlignment = null;
        }
        return titleAlignment;
    }
    /**
     * Returns the description text of the gauge. This description text will usually
     * only be visible if it is not empty.
     *
     * @return the description text of the gauge
     */
    public String getDescription() { return null == description ? _description : description.get(); }
    /**
     * Sets the description text of the gauge. This description text will usually
     * only be visible if it is not empty. Fires a VISIBILITY_EVENT.
     *
     * @param DESCRIPTION the new description text
     */
    public void setDescription(final String DESCRIPTION) {
        if (null == description) {
            _description = DESCRIPTION;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            description.set(DESCRIPTION);
        }
    }
    /** @return the lazily created description property */
    public StringProperty descriptionProperty() {
        if (null == description) {
            description = new StringPropertyBase(_description) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "description"; }
            };
            _description = null;
        }
        return description;
    }
    /**
     * Returns the current alignment of the description text (esp. in TextTileSkin)
     * @return the current alignment of the description text (esp. in TextTileSkin)
     */
    public Pos getDescriptionAlignment() { return null == descriptionAlignment ? _descriptionAlignment : descriptionAlignment.get(); }
    /**
     * Defines the alignment of the description text (esp. for the TextTileSkin).
     * Valid values are TOP_LEFT and TOP_RIGHT. Fires a REDRAW_EVENT.
     * @param ALIGNMENT the new description alignment
     */
    public void setDescriptionAlignment(final Pos ALIGNMENT) {
        if (null == descriptionAlignment) {
            _descriptionAlignment = ALIGNMENT;
            fireTileEvent(REDRAW_EVENT);
        } else {
            descriptionAlignment.set(ALIGNMENT);
        }
    }
    /** @return the lazily created descriptionAlignment property */
    public ObjectProperty<Pos> descriptionAlignmentProperty() {
        if (null == descriptionAlignment) {
            descriptionAlignment = new ObjectPropertyBase<Pos>(_descriptionAlignment) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "descriptionAlignment"; }
            };
            _descriptionAlignment = null;
        }
        return descriptionAlignment;
    }
    /**
     * Returns the unit of the gauge. This unit will usually only
     * be visible if it is not empty.
     *
     * @return the unit of the gauge
     */
    public String getUnit() { return null == unit ? _unit : unit.get(); }
    /**
     * Sets the unit of the gauge. This unit will usually only be
     * visible if it is not empty.
     *
     * @param UNIT the new unit text
     */
    public void setUnit(final String UNIT) {
        if (null == unit) {
            _unit = UNIT;
            // NOTE(review): this path fires both VISIBILITY_EVENT and REDRAW_EVENT,
            // while the property's invalidated() below fires only VISIBILITY_EVENT —
            // verify whether the extra REDRAW_EVENT is intentional.
            fireTileEvent(VISIBILITY_EVENT);
            fireTileEvent(REDRAW_EVENT);
        } else {
            unit.set(UNIT);
        }
    }
    /** @return the lazily created unit property */
    public StringProperty unitProperty() {
        if (null == unit) {
            unit = new StringPropertyBase(_unit) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "unit"; }
            };
            _unit = null;
        }
        return unit;
    }
    /**
     * Returns true if the switch in the SwitchTileSkin is selected
     * @return true if the switch in the SwitchTileSkin is selected
     */
    public boolean isSelected() { return null == selected ? _selected : selected.get(); }
    /**
     * Defines if the switch in the SwitchTileSkin is selected.
     * Fires a REDRAW_EVENT.
     * @param SELECTED the new selected state
     */
    public void setSelected(final boolean SELECTED) {
        if (null == selected) {
            _selected = SELECTED;
            fireTileEvent(REDRAW_EVENT);
        } else {
            selected.set(SELECTED);
        }
    }
    /** @return the lazily created selected property */
    public BooleanProperty selectedProperty() {
        if (null == selected) {
            selected = new BooleanPropertyBase(_selected) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "selected"; }
            };
        }
        return selected;
    }
    /**
     * Returns the moving average object
     * @return the moving average object
     */
    public MovingAverage getMovingAverage() { return movingAverage; }
    /**
     * Returns true if the averaging functionality is enabled.
     * @return true if the averaging functionality is enabled
     */
    public boolean isAveragingEnabled() { return null == averagingEnabled ? _averagingEnabled : averagingEnabled.get(); }
    /**
     * Defines if the averaging functionality will be enabled.
     * Fires a REDRAW_EVENT.
     * @param ENABLED true to enable averaging
     */
    public void setAveragingEnabled(final boolean ENABLED) {
        if (null == averagingEnabled) {
            _averagingEnabled = ENABLED;
            fireTileEvent(REDRAW_EVENT);
        } else {
            averagingEnabled.set(ENABLED);
        }
    }
    /** @return the lazily created averagingEnabled property */
    public BooleanProperty averagingEnabledProperty() {
        if (null == averagingEnabled) {
            averagingEnabled = new BooleanPropertyBase(_averagingEnabled) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "averagingEnabled"; }
            };
        }
        return averagingEnabled;
    }
    /**
     * Returns the number of values that should be used for
     * the averaging of values. The value must be in the
     * range of 1 - 1000.
     * @return the number of values used for averaging
     */
    public int getAveragingPeriod() { return null == averagingPeriod ? _averagingPeriod : averagingPeriod.get(); }
    /**
     * Defines the number values that should be used for
     * the averaging of values. The value must be in the
     * range of 1 - 1000.
     * NOTE(review): the code below clamps with a lower bound of 0, not 1 as the
     * Javadoc states — verify which is intended (a period of 0 looks dubious).
     * @param PERIOD the new averaging period
     */
    public void setAveragingPeriod(final int PERIOD) {
        if (null == averagingPeriod) {
            _averagingPeriod = Helper.clamp(0, MAX_PERIOD, PERIOD);
            movingAverage.setPeriod(_averagingPeriod); // MAX 1000 values
            fireTileEvent(AVERAGING_EVENT);
        } else {
            averagingPeriod.set(Helper.clamp(0, MAX_PERIOD, PERIOD));
        }
    }
    /** @return the lazily created averagingPeriod property */
    public IntegerProperty averagingPeriodProperty() {
        if (null == averagingPeriod) {
            averagingPeriod = new IntegerPropertyBase(_averagingPeriod) {
                @Override protected void invalidated() {
                    movingAverage.setPeriod(get());
                    fireTileEvent(AVERAGING_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "averagingPeriod"; }
            };
        }
        return averagingPeriod;
    }
    /**
     * Returns a deep copy of the current list of Data objects that will
     * be used to calculate the moving average.
     * @return the current list of Data objects used for the moving average
     */
    public Queue<TimeData> getAveragingWindow() { return movingAverage.getWindow(); }
    /**
     * Returns the moving average over the number of values
     * defined by averagingPeriod.
     * @return the moving the average over the number of values defined by averagingPeriod
     */
    public double getAverage() { return movingAverage.getAverage(); }
    /**
     * Returns the moving average over the given duration.
     * @param DURATION the duration to average over
     * @return the moving average over the given duration
     */
    public double getTimeBasedAverageOf(final java.time.Duration DURATION) { return movingAverage.getTimeBasedAverageOf(DURATION); }
    /**
     * Returns a duration that will be used in the TimeTileSkin
     * @return a duration that will be used in the TimeTileSkin
     */
    public LocalTime getDuration() { return null == duration ? _duration : duration.get(); }
    /**
     * Defines a duration that is used in the TimeTileSkin.
     * Fires a REDRAW_EVENT.
     * @param DURATION the new duration
     */
    public void setDuration(final LocalTime DURATION) {
        if (null == duration) {
            _duration = DURATION;
            fireTileEvent(REDRAW_EVENT);
        } else {
            duration.set(DURATION);
        }
    }
    /**
     * Returns the lazily created duration property.
     * NOTE(review): unlike setDuration(), setting through this property fires no
     * REDRAW_EVENT (no invalidated() override) — verify whether that is intended.
     * @return the duration property
     */
    public ObjectProperty<LocalTime> durationProperty() {
        if (null == duration) {
            duration = new ObjectPropertyBase<LocalTime>(_duration) {
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "duration"; }
            };
            _duration = null;
        }
        return duration;
    }
    /**
     * Returns an observable list of Section objects. The sections
     * will be used to colorize areas with a special meaning such
     * as the red area in a rpm gauge. Sections in the Medusa library
     * usually are less eye-catching than Areas.
     *
     * @return an observable list of Section objects
     */
    public ObservableList<Section> getSections() { return sections; }
    /**
     * Sets the sections to the given list of Section objects. The
     * sections will be used to colorize areas with a special
     * meaning such as the red area in a rpm gauge.
     * Sections in the Medusa library
     * usually are less eye-catching than Areas.
     * The list is sorted and a SECTION_EVENT is fired.
     *
     * @param SECTIONS the new sections
     */
    public void setSections(final List<Section> SECTIONS) {
        sections.setAll(SECTIONS);
        sections.sort(new SectionComparator());
        fireTileEvent(SECTION_EVENT);
    }
    /**
     * Sets the sections to the given array of Section objects. The
     * sections will be used to colorize areas with a special
     * meaning such as the red area in a rpm gauge.
     *
     * @param SECTIONS the new sections
     */
    public void setSections(final Section... SECTIONS) { setSections(Arrays.asList(SECTIONS)); }
    /**
     * Adds the given Section to the list of sections.
     * Sections in the Medusa library
     * usually are less eye-catching than Areas.
     * Null sections are ignored; the list is re-sorted afterwards.
     *
     * @param SECTION the section to add
     */
    public void addSection(final Section SECTION) {
        if (null == SECTION) return;
        sections.add(SECTION);
        sections.sort(new SectionComparator());
        fireTileEvent(SECTION_EVENT);
    }
    /**
     * Removes the given Section from the list of sections.
     * Sections in the Medusa library
     * usually are less eye-catching than Areas.
     * Null sections are ignored; the list is re-sorted afterwards.
     *
     * @param SECTION the section to remove
     */
    public void removeSection(final Section SECTION) {
        if (null == SECTION) return;
        sections.remove(SECTION);
        sections.sort(new SectionComparator());
        fireTileEvent(SECTION_EVENT);
    }
    /**
     * Clears the list of sections and fires a SECTION_EVENT.
     */
    public void clearSections() {
        sections.clear();
        fireTileEvent(SECTION_EVENT);
    }
    // NOTE(review): returns the internal observable list directly — callers can
    // mutate it without a SERIES_EVENT being fired; consider an unmodifiable view.
    public Collection<Series<String, Number>> getSeries() { return series; }
    /** Replaces all chart series and fires a SERIES_EVENT. */
    public void setSeries(final List<Series<String, Number>> SERIES) {
        series.setAll(SERIES);
        fireTileEvent(SERIES_EVENT);
    }
    /** Replaces all chart series and fires a SERIES_EVENT. */
    public void setSeries(final Series<String, Number>... SERIES) { setSeries(Arrays.asList(SERIES)); }
    /** Adds the given series (null is ignored) and fires a SERIES_EVENT. */
    public void addSeries(final Series<String, Number> SERIES) {
        if (null == SERIES) return;
        series.add(SERIES);
        fireTileEvent(SERIES_EVENT);
    }
    /** Removes the given series (null is ignored) and fires a SERIES_EVENT. */
    public void removeSeries(final Series<String, Number> SERIES) {
        if (null == SERIES) return;
        series.remove(SERIES);
        fireTileEvent(SERIES_EVENT);
    }
    /** Removes all chart series and fires a SERIES_EVENT. */
    public void clearSeries() {
        series.clear();
        fireTileEvent(SERIES_EVENT);
    }
    /** @return the observable list of items used by the bar chart skin */
    public ObservableList<BarChartItem> getBarChartItems() { return barChartItems; }
    /**
     * Replaces all bar chart items with the given list and notifies listeners.
     *
     * @param ITEMS
     */
    public void setBarChartItems(final List<BarChartItem> ITEMS) {
        barChartItems.setAll(ITEMS);
        fireTileEvent(DATA_EVENT);
    }
    /**
     * Replaces all bar chart items with the given ones.
     *
     * @param ITEMS
     */
    public void setBarChartItems(final BarChartItem... ITEMS) { setBarChartItems(Arrays.asList(ITEMS)); }
    /**
     * Adds the given bar chart item; ignored if null.
     *
     * @param ITEM
     */
    public void addBarChartItem(final BarChartItem ITEM) {
        if (null == ITEM) return;
        barChartItems.add(ITEM);
        fireTileEvent(DATA_EVENT);
    }
    /**
     * Removes the given bar chart item; ignored if null.
     *
     * @param ITEM
     */
    public void removeBarChartItem(final BarChartItem ITEM) {
        if (null == ITEM) return;
        barChartItems.remove(ITEM);
        fireTileEvent(DATA_EVENT);
    }
    /** Removes all bar chart items and notifies listeners. */
    public void clearBarChartItems() {
        barChartItems.clear();
        fireTileEvent(DATA_EVENT);
    }
    /** @return the list of items used by the leader board skin */
    public List<LeaderBoardItem> getLeaderBoardItems() { return leaderBoardItems; }
    /**
     * Replaces all leader board items with the given list and notifies listeners.
     * The list is exposed as a plain List (not ObservableList), hence clear()+addAll()
     * instead of setAll().
     *
     * @param ITEMS
     */
    public void setLeaderBoardItems(final List<LeaderBoardItem> ITEMS) {
        leaderBoardItems.clear();
        leaderBoardItems.addAll(ITEMS);
        fireTileEvent(DATA_EVENT);
    }
    /**
     * Replaces all leader board items with the given ones.
     *
     * @param ITEMS
     */
    public void setLeaderBoardItems(final LeaderBoardItem... ITEMS) { setLeaderBoardItems(Arrays.asList(ITEMS)); }
    /**
     * Adds the given leader board item; ignored if null.
     *
     * @param ITEM
     */
    public void addLeaderBoardItem(final LeaderBoardItem ITEM) {
        if (null == ITEM) return;
        leaderBoardItems.add(ITEM);
        fireTileEvent(DATA_EVENT);
    }
    /**
     * Removes the given leader board item; ignored if null.
     *
     * @param ITEM
     */
    public void removeLeaderBoardItem(final LeaderBoardItem ITEM) {
        if (null == ITEM) return;
        leaderBoardItems.remove(ITEM);
        fireTileEvent(DATA_EVENT);
    }
    /** Removes all leader board items and notifies listeners. */
    public void clearLeaderBoardItems() {
        leaderBoardItems.clear();
        fireTileEvent(DATA_EVENT);
    }
    /** @return the gradient stops used by skins that colorize by value */
    public List<Stop> getGradientStops() { return gradientStops; }
    /**
     * Replaces all gradient stops with the given ones.
     *
     * @param STOPS
     */
    public void setGradientStops(final Stop... STOPS) {
        setGradientStops(Arrays.asList(STOPS));
    }
    /**
     * Replaces all gradient stops with the given list and requests a redraw.
     *
     * @param STOPS
     */
    public void setGradientStops(final List<Stop> STOPS) {
        gradientStops.clear();
        gradientStops.addAll(STOPS);
        fireTileEvent(REDRAW_EVENT);
    }
    /**
     * Returns an optional node that can be used in combination with the
     * CustomTileSkin
     * @return an optional node that can be used in combination with the CustomTileSkin
     */
    public Node getGraphic() { return null == graphic ? null : graphic.get(); }
    /**
     * Defines an optional node that can be used in combination with the
     * CustomTileSkin.
     * @param GRAPHIC
     */
    public void setGraphic(final Node GRAPHIC) { graphicProperty().set(GRAPHIC); }
    /** @return the graphic property (lazily created; defaults to null) */
    public ObjectProperty<Node> graphicProperty() {
        if (null == graphic) {
            graphic = new ObjectPropertyBase<Node>() {
                @Override protected void invalidated() {
                    // A new graphic changes both the painted content and the layout,
                    // so fire both events.
                    fireTileEvent(GRAPHIC_EVENT);
                    fireTileEvent(RESIZE_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "graphic"; }
            };
        }
        return graphic;
    }
    /** @return the current location used by map based skins */
    public Location getCurrentLocation() { return null == currentLocation ? _currentLocation : currentLocation.get(); }
    /**
     * Defines the current location and fires a location event.
     *
     * @param LOCATION
     */
    public void setCurrentLocation(final Location LOCATION) {
        if (null == currentLocation) {
            _currentLocation = LOCATION;
            fireTileEvent(LOCATION_EVENT);
        } else {
            currentLocation.set(LOCATION);
        }
    }
    /** @return the currentLocation property (lazily created) */
    public ObjectProperty<Location> currentLocationProperty() {
        if (null == currentLocation) {
            currentLocation = new ObjectPropertyBase<Location>(_currentLocation) {
                @Override protected void invalidated() { fireTileEvent(LOCATION_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "currentLocation"; }
            };
            _currentLocation = null;
        }
        return currentLocation;
    }
    /**
     * Updates the current location in place with the given coordinates.
     * This mutates the existing Location instance, so the property's
     * invalidated() would not trigger; the event is therefore fired explicitly.
     *
     * @param LATITUDE
     * @param LONGITUDE
     */
    public void updateLocation(final double LATITUDE, final double LONGITUDE) {
        getCurrentLocation().set(LATITUDE, LONGITUDE);
        fireTileEvent(LOCATION_EVENT);
    }
    /** @return the observable list of points of interest used by map based skins */
    public ObservableList<Location> getPoiList() { return poiList; }
    /**
     * Replaces all points of interest with the given list and fires a location event.
     *
     * @param POI_LIST
     */
    public void setPoiList(final List<Location> POI_LIST) {
        poiList.clear();
        poiList.addAll(POI_LIST);
        fireTileEvent(LOCATION_EVENT);
    }
    /**
     * Replaces all points of interest with the given locations.
     *
     * @param LOCATIONS
     */
    public void setPoiLocations(final Location... LOCATIONS) { setPoiList(Arrays.asList(LOCATIONS)); }
    /**
     * Adds the given point of interest; ignored if null.
     *
     * @param LOCATION
     */
    public void addPoiLocation(final Location LOCATION) {
        if (null == LOCATION) return;
        poiList.add(LOCATION);
        fireTileEvent(LOCATION_EVENT);
    }
    /**
     * Removes the given point of interest; ignored if null.
     *
     * @param LOCATION
     */
    public void removePoiLocation(final Location LOCATION) {
        if (null == LOCATION) return;
        poiList.remove(LOCATION);
        fireTileEvent(LOCATION_EVENT);
    }
public void clearPoiLocations() {
poiList.clear();
fireTileEvent(DATA_EVENT);
}
    /** @return the list of locations that make up the track shown by map based skins */
    public List<Location> getTrack() { return track; }
    /**
     * Replaces the track with the given locations.
     *
     * @param LOCATIONS
     */
    public void setTrack(final Location... LOCATIONS) {
        setTrack(Arrays.asList(LOCATIONS));
    }
    /**
     * Replaces the track with the given list and fires a track event.
     *
     * @param LOCATIONS
     */
    public void setTrack(final List<Location> LOCATIONS) {
        track.clear();
        track.addAll(LOCATIONS);
        fireTileEvent(TRACK_EVENT);
    }
    /** Removes all locations from the track and notifies listeners. */
    public void clearTrack() {
        track.clear();
        fireTileEvent(TRACK_EVENT);
    }
    /** @return the color used to draw the track in map based skins */
    public TileColor getTrackColor() { return null == trackColor ? _trackColor : trackColor.get(); }
    /**
     * Defines the color used to draw the track and requests a redraw.
     *
     * @param COLOR
     */
    public void setTrackColor(final TileColor COLOR) {
        if (null == trackColor) {
            _trackColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            trackColor.set(COLOR);
        }
    }
    /** @return the trackColor property (lazily created; the eager backing field is released once created) */
    public ObjectProperty<TileColor> trackColorProperty() {
        if (null == trackColor) {
            trackColor = new ObjectPropertyBase<TileColor>(_trackColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "trackColor"; }
            };
            _trackColor = null;
        }
        return trackColor;
    }
    /** @return the tile/map provider used by map based skins */
    public MapProvider getMapProvider() { return null == mapProvider ? _mapProvider : mapProvider.get(); }
    /**
     * Defines the map provider and fires a map provider event.
     *
     * @param PROVIDER
     */
    public void setMapProvider(final MapProvider PROVIDER) {
        if (null == mapProvider) {
            _mapProvider = PROVIDER;
            fireTileEvent(MAP_PROVIDER_EVENT);
        } else {
            mapProvider.set(PROVIDER);
        }
    }
    /** @return the mapProvider property (lazily created) */
    public ObjectProperty<MapProvider> mapProviderProperty() {
        if (null == mapProvider) {
            mapProvider = new ObjectPropertyBase<MapProvider>(_mapProvider) {
                @Override protected void invalidated() { fireTileEvent(MAP_PROVIDER_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "mapProvider"; }
            };
            _mapProvider = null;
        }
        return mapProvider;
    }
    /** @return the observable list of chart data entries */
    public ObservableList<ChartData> getChartData() { return chartDataList; }
    /** Appends the given chart data entries. @param DATA */
    public void addChartData(final ChartData... DATA) { chartDataList.addAll(DATA); }
    /** Appends the given chart data entries. @param DATA */
    public void addChartData(final List<ChartData> DATA) { chartDataList.addAll(DATA); }
    /** Replaces all chart data entries. @param DATA */
    public void setChartData(final ChartData... DATA) { chartDataList.setAll(DATA); }
    /** Replaces all chart data entries. @param DATA */
    public void setChartData(final List<ChartData> DATA) { chartDataList.setAll(DATA); }
    /** Removes the given chart data entry. @param DATA */
    public void removeChartData(final ChartData DATA) { chartDataList.remove(DATA); }
    /** Removes all chart data entries. */
    public void clearChartData() { chartDataList.clear(); }
    /**
     * A convenient method to set the color of foreground elements like
     * title, description, unit, value, tickLabel and tickMark to the given
     * Color.
     *
     * @param COLOR
     */
    public void setForegroundBaseColor(final Color COLOR) {
        // Each color may still be in its eager-field state (property not yet created),
        // so write whichever representation is currently active.
        if (null == titleColor) { _titleColor = COLOR; } else { titleColor.set(COLOR); }
        if (null == descriptionColor) { _descriptionColor = COLOR; } else { descriptionColor.set(COLOR); }
        if (null == unitColor) { _unitColor = COLOR; } else { unitColor.set(COLOR); }
        if (null == valueColor) { _valueColor = COLOR; } else { valueColor.set(COLOR); }
        if (null == textColor) { _textColor = COLOR; } else { textColor.set(COLOR); }
        if (null == foregroundColor) { _foregroundColor = COLOR; } else { foregroundColor.set(COLOR); }
        fireTileEvent(REDRAW_EVENT);
    }
    /**
     * Returns the text size that will be used for the title,
     * subtitle and text in the different skins.
     * The factor in the text size will be used to calculate the
     * height of the font.
     * @return the text size that will be used for the title, subtitle and text
     */
    public TextSize getTextSize() { return null == textSize ? _textSize : textSize.get(); }
    /**
     * Defines the text size that will be used for the title,
     * subtitle and text in the different skins.
     * @param SIZE
     */
    public void setTextSize(final TextSize SIZE) {
        if (null == textSize) {
            _textSize = SIZE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            textSize.set(SIZE);
        }
    }
    /** @return the textSize property (lazily created) */
    public ObjectProperty<TextSize> textSizeProperty() {
        if (null == textSize) {
            textSize = new ObjectPropertyBase<TextSize>(_textSize) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT);}
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "textSize"; }
            };
            // NOTE(review): unlike sibling object properties, _textSize is not
            // nulled after the property is created — harmless but inconsistent; confirm.
        }
        return textSize;
    }
    /**
     * Returns true if the corners of the Tiles are rounded
     * @return true if the corners of the Tiles are rounded
     */
    public boolean getRoundedCorners() { return null == roundedCorners ? _roundedCorners : roundedCorners.get(); }
    /**
     * Switches the corners of the Tiles between rounded and rectangular
     * @param ROUNDED
     */
    public void setRoundedCorners(final boolean ROUNDED) {
        if (null == roundedCorners) {
            _roundedCorners = ROUNDED;
            fireTileEvent(REDRAW_EVENT);
        } else {
            roundedCorners.set(ROUNDED);
        }
    }
    /** @return the roundedCorners property (lazily created) */
    public BooleanProperty roundedCornersProperty() {
        if (null == roundedCorners) {
            roundedCorners = new BooleanPropertyBase(_roundedCorners) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "roundedCorners"; }
            };
        }
        return roundedCorners;
    }
    /**
     * Returns true if the visualization of the value should start from 0. This
     * is especially useful when you work for example with a gauge that has a
     * range with a negative minValue
     *
     * @return true if the visualization of the value should start from 0
     */
    public boolean isStartFromZero() { return null == startFromZero ? _startFromZero : startFromZero.get(); }
    /**
     * Defines the behavior of the visualization where the needle/bar should
     * start from 0 instead of the minValue. This is especially useful when
     * working with a gauge that has a range with a negative minValue
     *
     * @param IS_TRUE
     */
    public void setStartFromZero(final boolean IS_TRUE) {
        if (null == startFromZero) {
            _startFromZero = IS_TRUE;
            // Snap the current value to the new baseline (0 if the range includes
            // negative values, otherwise minValue).
            setValue(IS_TRUE && getMinValue() < 0 ? 0 : getMinValue());
            fireTileEvent(REDRAW_EVENT);
        } else {
            startFromZero.set(IS_TRUE);
        }
    }
    /** @return the startFromZero property (lazily created) */
    public BooleanProperty startFromZeroProperty() {
        if (null == startFromZero) {
            startFromZero = new BooleanPropertyBase(_startFromZero) {
                @Override protected void invalidated() {
                    // Same baseline snap as the eager branch in setStartFromZero.
                    Tile.this.setValue((get() && getMinValue() < 0) ? 0 : getMinValue());
                    fireTileEvent(REDRAW_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "startFromZero"; }
            };
        }
        return startFromZero;
    }
    /**
     * Returns true if the needle/bar should always return to zero. This setting
     * only makes sense if animated == true and the data rate is not too high.
     * Set to false when using real measured live data.
     *
     * @return true if the needle/bar should always return to zero.
     */
    public boolean isReturnToZero() { return null == returnToZero ? _returnToZero : returnToZero.get(); }
    /**
     * Defines the behavior of the visualization where the needle/bar should
     * always return to 0 after it reached the final value. This setting only makes
     * sense if animated == true and the data rate is not too high.
     * Set to false when using real measured live data.
     *
     * @param IS_TRUE
     */
    public void setReturnToZero(final boolean IS_TRUE) {
        if (null == returnToZero) {
            // Only allowed when 0 is inside the range (minValue <= 0).
            _returnToZero = Double.compare(getMinValue(), 0.0) <= 0 && IS_TRUE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            returnToZero.set(IS_TRUE);
        }
    }
    /** @return the returnToZero property (lazily created) */
    public BooleanProperty returnToZeroProperty() {
        if (null == returnToZero) {
            returnToZero = new BooleanPropertyBase(_returnToZero) {
                @Override protected void invalidated() {
                    // NOTE(review): this branch rejects the flag when maxValue > 0 while
                    // the eager branch in setReturnToZero guards on minValue <= 0 —
                    // the two guards disagree; confirm which condition is intended.
                    if (Helper.biggerThan(getMaxValue(), 0.0)) set(false);
                    fireTileEvent(REDRAW_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "returnToZero"; }
            };
        }
        return returnToZero;
    }
    /**
     * Returns the smallest value that was measured after the last reset.
     * The default value is the maxValue of the gauge.
     *
     * @return the smallest value that was measured after the last reset
     */
    public double getMinMeasuredValue() { return null == minMeasuredValue ? _minMeasuredValue : minMeasuredValue.get(); }
    /**
     * Sets the minMeasuredValue to the given value.
     *
     * @param MIN_MEASURED_VALUE
     */
    public void setMinMeasuredValue(final double MIN_MEASURED_VALUE) {
        if (null == minMeasuredValue) {
            _minMeasuredValue = MIN_MEASURED_VALUE;
        } else {
            minMeasuredValue.set(MIN_MEASURED_VALUE);
        }
    }
    /** @return the minMeasuredValue property (exposed read-only; lazily created) */
    public ReadOnlyDoubleProperty minMeasuredValueProperty() {
        if (null == minMeasuredValue) { minMeasuredValue = new SimpleDoubleProperty(this, "minMeasuredValue", _minMeasuredValue); }
        return minMeasuredValue;
    }
    /**
     * Returns the biggest value that was measured after the last reset.
     * The default value is the minValue of the gauge.
     *
     * @return the biggest value that was measured after the last reset
     */
    public double getMaxMeasuredValue() {
        return null == maxMeasuredValue ? _maxMeasuredValue : maxMeasuredValue.get();
    }
    /**
     * Sets the maxMeasuredValue to the given value.
     *
     * @param MAX_MEASURED_VALUE
     */
    public void setMaxMeasuredValue(final double MAX_MEASURED_VALUE) {
        if (null == maxMeasuredValue) {
            _maxMeasuredValue = MAX_MEASURED_VALUE;
        } else {
            maxMeasuredValue.set(MAX_MEASURED_VALUE);
        }
    }
    /** @return the maxMeasuredValue property (exposed read-only; lazily created) */
    public ReadOnlyDoubleProperty maxMeasuredValueProperty() {
        if (null == maxMeasuredValue) { maxMeasuredValue = new SimpleDoubleProperty(this, "maxMeasuredValue", _maxMeasuredValue); }
        return maxMeasuredValue;
    }
/**
* Resets the min- and maxMeasuredValue to the value of the gauge.
*/
public void resetMeasuredValues() {
setMinMeasuredValue(getValue());
setMaxMeasuredValue(getValue());
}
    /**
     * Returns true if the indicator of the minMeasuredValue is visible.
     *
     * @return true if the indicator of the minMeasuredValue is visible
     */
    public boolean isMinMeasuredValueVisible() { return null == minMeasuredValueVisible ? _minMeasuredValueVisible : minMeasuredValueVisible.get(); }
    /**
     * Defines if the indicator of the minMeasuredValue should be visible.
     *
     * @param VISIBLE
     */
    public void setMinMeasuredValueVisible(final boolean VISIBLE) {
        if (null == minMeasuredValueVisible) {
            _minMeasuredValueVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            minMeasuredValueVisible.set(VISIBLE);
        }
    }
    /** @return the minMeasuredValueVisible property (lazily created) */
    public BooleanProperty minMeasuredValueVisibleProperty() {
        if (null == minMeasuredValueVisible) {
            minMeasuredValueVisible = new BooleanPropertyBase(_minMeasuredValueVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "minMeasuredValueVisible"; }
            };
        }
        return minMeasuredValueVisible;
    }
    /**
     * Returns true if the indicator of the maxMeasuredValue is visible.
     *
     * @return true if the indicator of the maxMeasuredValue is visible
     */
    public boolean isMaxMeasuredValueVisible() { return null == maxMeasuredValueVisible ? _maxMeasuredValueVisible : maxMeasuredValueVisible.get(); }
    /**
     * Defines if the indicator of the maxMeasuredValue should be visible.
     *
     * @param VISIBLE
     */
    public void setMaxMeasuredValueVisible(final boolean VISIBLE) {
        if (null == maxMeasuredValueVisible) {
            _maxMeasuredValueVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            maxMeasuredValueVisible.set(VISIBLE);
        }
    }
    /** @return the maxMeasuredValueVisible property (lazily created) */
    public BooleanProperty maxMeasuredValueVisibleProperty() {
        if (null == maxMeasuredValueVisible) {
            maxMeasuredValueVisible = new BooleanPropertyBase(_maxMeasuredValueVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "maxMeasuredValueVisible"; }
            };
        }
        return maxMeasuredValueVisible;
    }
    /**
     * Returns true if the old value of the gauge is visible (not implemented)
     *
     * @return true if the old value of the gauge is visible (not implemented)
     */
    public boolean isOldValueVisible() { return null == oldValueVisible ? _oldValueVisible : oldValueVisible.get(); }
    /**
     * Defines if the old value of the gauge should be visible (not implemented)
     *
     * @param VISIBLE
     */
    public void setOldValueVisible(final boolean VISIBLE) {
        if (null == oldValueVisible) {
            _oldValueVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            oldValueVisible.set(VISIBLE);
        }
    }
    /** @return the oldValueVisible property (lazily created) */
    public BooleanProperty oldValueVisibleProperty() {
        if (null == oldValueVisible) {
            oldValueVisible = new BooleanPropertyBase(_oldValueVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "oldValueVisible"; }
            };
        }
        return oldValueVisible;
    }
    /**
     * Returns true if the visualization of the gauge value is visible.
     * Usually this is a Label or Text node.
     *
     * @return true if the visualization of the gauge value is visible
     */
    public boolean isValueVisible() { return null == valueVisible ? _valueVisible : valueVisible.get(); }
    /**
     * Defines if the visualization of the gauge value should be visible.
     *
     * @param VISIBLE
     */
    public void setValueVisible(final boolean VISIBLE) {
        if (null == valueVisible) {
            _valueVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            valueVisible.set(VISIBLE);
        }
    }
    /** @return the valueVisible property (lazily created) */
    public BooleanProperty valueVisibleProperty() {
        if (null == valueVisible) {
            valueVisible = new BooleanPropertyBase(_valueVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "valueVisible"; }
            };
        }
        return valueVisible;
    }
    /**
     * Returns the Color that will be used to fill the gauge foreground.
     *
     * @return the Color that will be used to fill the gauge foreground
     */
    public Color getForegroundColor() { return null == foregroundColor ? _foregroundColor : foregroundColor.get(); }
    /**
     * Defines the Color that will be used to fill the gauge foreground.
     *
     * @param COLOR
     */
    public void setForegroundColor(final Color COLOR) {
        if (null == foregroundColor) {
            _foregroundColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            foregroundColor.set(COLOR);
        }
    }
    /** @return the foregroundColor property (lazily created; the eager backing field is released once created) */
    public ObjectProperty<Color> foregroundColorProperty() {
        if (null == foregroundColor) {
            foregroundColor = new ObjectPropertyBase<Color>(_foregroundColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "foregroundColor"; }
            };
            _foregroundColor = null;
        }
        return foregroundColor;
    }
    /**
     * Returns the Color that will be used to fill the gauge background.
     *
     * @return the Color that will be used to fill the gauge background
     */
    public Color getBackgroundColor() { return null == backgroundColor ? _backgroundColor : backgroundColor.get(); }
    /**
     * Defines the Color that will be used to fill the gauge background.
     *
     * @param COLOR
     */
    public void setBackgroundColor(final Color COLOR) {
        if (null == backgroundColor) {
            _backgroundColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            backgroundColor.set(COLOR);
        }
    }
    /** @return the backgroundColor property (lazily created; the eager backing field is released once created) */
    public ObjectProperty<Color> backgroundColorProperty() {
        if (null == backgroundColor) {
            backgroundColor = new ObjectPropertyBase<Color>(_backgroundColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "backgroundColor"; }
            };
            _backgroundColor = null;
        }
        return backgroundColor;
    }
/**
* Returns the Paint object that will be used to draw the border of the gauge.
* Usually this is a Color object.
*
* @return the Paint object that will be used to draw the border of the gauge
*/
public Color getBorderColor() { return null == borderColor ? _borderColor : borderColor.get(); }
/**
* Defines the Paint object that will be used to draw the border of the gauge.
*
* @param PAINT
*/
public void setBorderColor(final Color PAINT) {
if (null == borderColor) {
_borderColor = PAINT;
fireTileEvent(REDRAW_EVENT);
} else {
borderColor.set(PAINT);
}
}
public ObjectProperty<Color> borderColorProperty() {
if (null == borderColor) {
borderColor = new ObjectPropertyBase<Color>(_borderColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "borderColor"; }
};
_borderColor = null;
}
return borderColor;
}
    /**
     * Returns the width in pixels that will be used to draw the border of the gauge.
     * The value will be clamped between 0 and 50 pixels.
     *
     * @return the width in pixels that will be used to draw the border of the gauge
     */
    public double getBorderWidth() { return null == borderWidth ? _borderWidth : borderWidth.get(); }
    /**
     * Defines the width in pixels that will be used to draw the border of the gauge.
     * The value will be clamped between 0 and 50 pixels.
     *
     * @param WIDTH
     */
    public void setBorderWidth(final double WIDTH) {
        if (null == borderWidth) {
            _borderWidth = clamp(0.0, 50.0, WIDTH);
            fireTileEvent(REDRAW_EVENT);
        } else {
            borderWidth.set(WIDTH);
        }
    }
    /** @return the borderWidth property (lazily created; clamps its value to [0, 50]) */
    public DoubleProperty borderWidthProperty() {
        if (null == borderWidth) {
            borderWidth = new DoublePropertyBase(_borderWidth) {
                @Override protected void invalidated() {
                    // Clamp out-of-range widths inside the property so both eager
                    // and property-based writes obey the 0-50 px limit.
                    final double WIDTH = get();
                    if (WIDTH < 0 || WIDTH > 50) set(clamp(0.0, 50.0, WIDTH));
                    fireTileEvent(REDRAW_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "borderWidth"; }
            };
        }
        return borderWidth;
    }
    /**
     * Returns the color that will be used to colorize the knob of
     * the radial gauges.
     *
     * @return the color that will be used to colorize the knob of the radial gauges
     */
    public Color getKnobColor() { return null == knobColor ? _knobColor : knobColor.get(); }
    /**
     * Defines the color that will be used to colorize the knob of
     * the radial gauges.
     *
     * @param COLOR
     */
    public void setKnobColor(final Color COLOR) {
        if (null == knobColor) {
            _knobColor = COLOR;
            // NOTE(review): fires RESIZE_EVENT while sibling color setters fire
            // REDRAW_EVENT — confirm this is intentional.
            fireTileEvent(RESIZE_EVENT);
        } else {
            knobColor.set(COLOR);
        }
    }
    /** @return the knobColor property (lazily created; the eager backing field is released once created) */
    public ObjectProperty<Color> knobColorProperty() {
        if (null == knobColor) {
            knobColor = new ObjectPropertyBase<Color>(_knobColor) {
                @Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "knobColor"; }
            };
            _knobColor = null;
        }
        return knobColor;
    }
    /** @return the color used for active elements in the skins */
    public Color getActiveColor() { return null == activeColor ? _activeColor : activeColor.get(); }
    /**
     * Defines the color used for active elements and requests a redraw.
     *
     * @param COLOR
     */
    public void setActiveColor(final Color COLOR) {
        if (null == activeColor) {
            _activeColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            activeColor.set(COLOR);
        }
    }
    /** @return the activeColor property (lazily created; the eager backing field is released once created) */
    public ObjectProperty<Color> activeColorProperty() {
        if (null == activeColor) {
            activeColor = new ObjectPropertyBase<Color>(_activeColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "activeColor"; }
            };
            _activeColor = null;
        }
        return activeColor;
    }
    /**
     * Returns true if setting the value of the gauge will be animated
     * using the duration defined in animationDuration [ms].
     * Keep in mind that it only makes sense to animate the setting if
     * the data rate is low (more than 1 value per second). If you use real
     * live measured data you should set animated to false.
     *
     * @return true if setting the value of the gauge will be animated
     */
    public boolean isAnimated() { return null == animated ? _animated : animated.get(); }
    /**
     * Defines if setting the value of the gauge should be animated using
     * the duration defined in animationDuration [ms].
     * Keep in mind that it only makes sense to animate the setting if
     * the data rate is low (more than 1 value per second). If you use real
     * live measured data you should set animated to false.
     *
     * @param ANIMATED
     */
    public void setAnimated(final boolean ANIMATED) {
        if (null == animated) {
            _animated = ANIMATED;
        } else {
            animated.set(ANIMATED);
        }
    }
    /** @return the animated property (lazily created) */
    public BooleanProperty animatedProperty() {
        if (null == animated) { animated = new SimpleBooleanProperty(Tile.this, "animated", _animated); }
        return animated;
    }
    /**
     * Returns the duration in milliseconds that will be used to animate
     * the needle/bar of the gauge from the last value to the next value.
     * This will only be used if animated == true. This value will be
     * clamped in the range of 10ms - 10s.
     *
     * @return the duration in ms that will be used to animate the needle/bar
     */
    public long getAnimationDuration() { return animationDuration; }
    /**
     * Defines the duration in milliseconds that will be used to animate
     * the needle/bar of the gauge from the last value to the next value.
     * This will only be used if animated == true. This value will be
     * clamped in the range of 10ms - 10s.
     *
     * @param ANIMATION_DURATION
     */
    public void setAnimationDuration(final long ANIMATION_DURATION) { animationDuration = clamp(10, 10000, ANIMATION_DURATION); }
/**
* Returns the angle in degree that defines the start of the scale with
* it's minValue in a radial gauge. If set to 0 the scale will start at
* the bottom center and the direction of counting is mathematical correct
* counter-clockwise.
* Means if you would like to start the scale on the left side in the
* middle of the gauge height the startAngle should be set to 270 degrees.
*
* @return the angle in degree that defines the start of the scale
*/
public double getStartAngle() { return null == startAngle ? _startAngle : startAngle.get(); }
/**
* Defines the angle in degree that defines the start of the scale with
* it's minValue in a radial gauge. If set to 0 the scale will start at
* the bottom center and the direction of counting is mathematical correct
* counter-clockwise.
* Means if you would like to start the scale on the left side in the
* middle of the gauge height the startAngle should be set to 270 degrees.
*
* @param ANGLE
*/
public void setStartAngle(final double ANGLE) {
if (null == startAngle) {
_startAngle = clamp(0.0, 360.0, ANGLE);
fireTileEvent(RECALC_EVENT);
} else {
startAngle.set(ANGLE);
}
}
public DoubleProperty startAngleProperty() {
if (null == startAngle) {
startAngle = new DoublePropertyBase(_startAngle) {
@Override protected void invalidated() {
final double ANGLE = get();
if (ANGLE < 0 || ANGLE > 360 ) set(clamp(0.0, 360.0, ANGLE));
fireTileEvent(RECALC_EVENT);
}
@Override public Object getBean() { return this; }
@Override public String getName() { return "startAngle"; }
};
}
return startAngle;
}
/**
* Returns the angle range in degree that will be used to draw the scale
* of the radial gauge. The given range will be clamped in the range of
* 0 - 360 degrees and will be drawn in the direction dependent on the
* scaleDirection.
*
* @return the angle range in degree that will be used to draw the scale
*/
public double getAngleRange() { return null == angleRange ? _angleRange : angleRange.get(); }
/**
* Defines the angle range in degree that will be used to draw the scale
* of the radial gauge. The given range will be clamped in the range of
* 0 - 360 degrees. The range will start at the startAngle and will be
* drawn in the direction dependent on the scaleDirection.
*
* @param RANGE
*/
public void setAngleRange(final double RANGE) {
double tmpAngleRange = clamp(0.0, 360.0, RANGE);
if (null == angleRange) {
_angleRange = tmpAngleRange;
setAngleStep(tmpAngleRange / getRange());
if (isAutoScale()) { calcAutoScale(); }
fireTileEvent(RECALC_EVENT);
} else {
angleRange.set(tmpAngleRange);
}
}
public DoubleProperty angleRangeProperty() {
if (null == angleRange) {
angleRange = new DoublePropertyBase(_angleRange) {
@Override protected void invalidated() {
final double ANGLE_RANGE = get();
if (ANGLE_RANGE < 0 || ANGLE_RANGE > 360) set(clamp(0.0, 360.0, ANGLE_RANGE));
setAngleStep(get() / getRange());
if (isAutoScale()) { calcAutoScale(); }
fireTileEvent(RECALC_EVENT);
}
@Override public Object getBean() { return this; }
@Override public String getName() { return "angleRange"; }
};
}
return angleRange;
}
    /**
     * Returns the value that is calculated by dividing the angleRange
     * by the range. The angleStep will always be recalculated when changing
     * the min-, maxValue or angleRange.
     * E.g. angleRange = 180 degrees, range = 0 - 100 will lead to angleStep = 180/100 = 1.8
     *
     * @return the value that is calculated by dividing the angleRange by the range
     */
    public double getAngleStep() { return null == angleStep ? _angleStep : angleStep.get(); }
    /**
     * Private method that will be used to set the angleStep
     *
     * @param STEP
     */
    private void setAngleStep(final double STEP) {
        if (null == angleStep) {
            _angleStep = STEP;
        } else {
            angleStep.set(STEP);
        }
    }
    /** @return the angleStep property (derived value, exposed read-only; lazily created) */
    public ReadOnlyDoubleProperty angleStepProperty() {
        if (null == angleStep) { angleStep = new SimpleDoubleProperty(Tile.this, "angleStep", _angleStep); }
        return angleStep;
    }
/**
* Returns true if the scale will be calculated automatically based
* on the defined values for min- and maxValue.
* The autoscaling is on per default because otherwise you will
* run into problems when having very large or very small scales like
* 0 - 10000 or 0 - 1.
*
* @return true if the scale will be calculated automatically
*/
public boolean isAutoScale() { return null == autoScale ? _autoScale : autoScale.get(); }
/**
* Defines if the scale should be calculated automatically based on
* the defined values for min- and maxValue.
* The autoscaling is on per default because otherwise you will
* run into problems when having very large or very small scales like
* 0 - 10000 or 0 - 1.
*
* @param AUTO_SCALE
*/
public void setAutoScale(final boolean AUTO_SCALE) {
if (null == autoScale) {
_autoScale = AUTO_SCALE;
if (_autoScale) {
originalMinValue = getMinValue();
originalMaxValue = getMaxValue();
calcAutoScale();
} else {
setMinValue(Helper.equals(-Double.MAX_VALUE, originalMinValue) ? getMinValue() : originalMinValue);
setMaxValue(Helper.equals(Double.MAX_VALUE, originalMaxValue) ? getMaxValue() : originalMaxValue);
}
fireTileEvent(RECALC_EVENT);
} else {
autoScale.set(AUTO_SCALE);
}
}
public BooleanProperty autoScaleProperty() {
if (null == autoScale) {
autoScale = new BooleanPropertyBase(_autoScale) {
@Override protected void invalidated() {
if (get()) {
calcAutoScale();
} else {
setMinValue(originalMinValue);
setMaxValue(originalMaxValue);
}
fireTileEvent(RECALC_EVENT);
}
@Override public Object getBean() { return this; }
@Override public String getName() { return "autoScale"; }
};
}
return autoScale;
}
    /**
     * Returns true if effects like shadows will be drawn.
     * In some gauges inner- and dropshadows will be used which will be
     * switched on/off by setting the shadowsEnabled property.
     *
     * @return true if effects like shadows will be drawn
     */
    public boolean isShadowsEnabled() { return null == shadowsEnabled ? _shadowsEnabled : shadowsEnabled.get(); }
    /**
     * Defines if effects like shadows should be drawn.
     * In some gauges inner- and dropshadows will be used which will be
     * switched on/off by setting the shadowsEnabled property.
     *
     * @param ENABLED
     */
    public void setShadowsEnabled(final boolean ENABLED) {
        if (null == shadowsEnabled) {
            _shadowsEnabled = ENABLED;
            fireTileEvent(REDRAW_EVENT);
        } else {
            shadowsEnabled.set(ENABLED);
        }
    }
    /** @return the shadowsEnabled property (lazily created) */
    public BooleanProperty shadowsEnabledProperty() {
        if (null == shadowsEnabled) {
            shadowsEnabled = new BooleanPropertyBase(_shadowsEnabled) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "shadowsEnabled"; }
            };
        }
        return shadowsEnabled;
    }
    /** @return the locale used to format numbers and texts in the skins */
    public Locale getLocale() { return null == locale ? _locale : locale.get(); }
    /**
     * Defines the locale used to format numbers and texts.
     * A null argument falls back to Locale.US.
     *
     * @param LOCALE
     */
    public void setLocale(final Locale LOCALE) {
        if (null == locale) {
            _locale = null == LOCALE ? Locale.US : LOCALE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            locale.set(LOCALE);
        }
    }
    /** @return the locale property (lazily created; null values are replaced by Locale.US) */
    public ObjectProperty<Locale> localeProperty() {
        if (null == locale) {
            locale = new ObjectPropertyBase<Locale>(_locale) {
                @Override protected void invalidated() {
                    if (null == get()) set(Locale.US);
                    fireTileEvent(REDRAW_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "locale"; }
            };
            _locale = null;
        }
        return locale;
    }
    /**
     * Returns the number format that will be used to format the value
     * in the gauge (NOT USED AT THE MOMENT)
     *
     * @return the number format that will be used to format the value
     */
    public NumberFormat getNumberFormat() { return null == numberFormat ? _numberFormat : numberFormat.get(); }
    /**
     * Defines the number format that will be used to format the value
     * in the gauge (NOT USED AT THE MOMENT)
     * A null argument falls back to NumberFormat.getInstance(getLocale()).
     *
     * @param FORMAT
     */
    public void setNumberFormat(final NumberFormat FORMAT) {
        if (null == numberFormat) {
            _numberFormat = null == FORMAT ? NumberFormat.getInstance(getLocale()) : FORMAT;
            fireTileEvent(RESIZE_EVENT);
        } else {
            numberFormat.set(FORMAT);
        }
    }
    /** @return the numberFormat property (lazily created; null values fall back to the locale default) */
    public ObjectProperty<NumberFormat> numberFormatProperty() {
        if (null == numberFormat) {
            numberFormat = new ObjectPropertyBase<NumberFormat>(_numberFormat) {
                @Override protected void invalidated() {
                    if (null == get()) set(NumberFormat.getInstance(getLocale()));
                    fireTileEvent(RESIZE_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "numberFormat"; }
            };
            _numberFormat = null;
        }
        return numberFormat;
    }
/**
* Returns the number of decimals that will be used to format the
* value of the gauge. The number of decimals will be clamped to
* a value between 0-3.
*
* @return the number of decimals that will be used to format the value
*/
public int getDecimals() { return null == decimals ? _decimals : decimals.get(); }
/**
* Defines the number of decimals that will be used to format the
* value of the gauge. The number of decimals will be clamped to
* a value between 0-3.
*
* @param DECIMALS
*/
public void setDecimals(final int DECIMALS) {
if (null == decimals) {
_decimals = clamp(0, MAX_NO_OF_DECIMALS, DECIMALS);
fireTileEvent(REDRAW_EVENT);
} else {
decimals.set(DECIMALS);
}
}
    public IntegerProperty decimalsProperty() {
        // Lazily creates the JavaFX property; until then the backing field _decimals is used.
        if (null == decimals) {
            decimals = new IntegerPropertyBase(_decimals) {
                @Override protected void invalidated() {
                    // Clamp out-of-range values to [0, MAX_NO_OF_DECIMALS]; the nested set()
                    // re-triggers invalidated(), after which the range check passes.
                    final int VALUE = get();
                    if (VALUE < 0 || VALUE > MAX_NO_OF_DECIMALS) set(clamp(0, MAX_NO_OF_DECIMALS, VALUE));
                    fireTileEvent(REDRAW_EVENT);
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "decimals"; }
            };
        }
        return decimals;
    }
/**
* Returns the number of tickLabelDecimals that will be used to format the
* ticklabels of the gauge. The number of tickLabelDecimals will be clamped to
* a value between 0-3.
*
* @return the number of tickLabelDecimals that will be used to format the ticklabels
*/
public int getTickLabelDecimals() { return null == tickLabelDecimals ? _tickLabelDecimals : tickLabelDecimals.get(); }
/**
* Defines the number of tickLabelDecimals that will be used to format the
* ticklabels of the gauge. The number of tickLabelDecimals will be clamped to
* a value between 0-3.
*
* @param DECIMALS
*/
public void setTickLabelDecimals(final int DECIMALS) {
if (null == tickLabelDecimals) {
_tickLabelDecimals = clamp(0, MAX_NO_OF_DECIMALS, DECIMALS);
fireTileEvent(REDRAW_EVENT);
} else {
tickLabelDecimals.set(DECIMALS);
}
}
public IntegerProperty tickLabelDecimalsProperty() {
if (null == tickLabelDecimals) {
tickLabelDecimals = new IntegerPropertyBase(_tickLabelDecimals) {
@Override protected void invalidated() {
final int VALUE = get();
if (VALUE < 0 || VALUE > MAX_NO_OF_DECIMALS) set(clamp(0, MAX_NO_OF_DECIMALS, VALUE));
fireTileEvent(REDRAW_EVENT);
}
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "tickLabelDecimals"; }
};
}
return tickLabelDecimals;
}
/**
* Returns the color that will be used to colorize the needle of
* the radial gauges.
*
     * @return the color that will be used to colorize the needle
*/
public Color getNeedleColor() { return null == needleColor ? _needleColor : needleColor.get(); }
/**
* Defines the color that will be used to colorize the needle of
* the radial gauges.
*
* @param COLOR
*/
public void setNeedleColor(final Color COLOR) {
if (null == needleColor) {
_needleColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
needleColor.set(COLOR);
}
}
public ObjectProperty<Color> needleColorProperty() {
if (null == needleColor) {
needleColor = new ObjectPropertyBase<Color>(_needleColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "needleColor"; }
};
_needleColor = null;
}
return needleColor;
}
/**
* Returns the color that will be used to colorize the bar of
* the gauge (if it has a bar).
*
     * @return the color that will be used to colorize the bar (if available)
*/
public Color getBarColor() { return null == barColor ? _barColor : barColor.get(); }
/**
* Defines the color that will be used to colorize the bar of
* the gauge (if it has a bar).
*
* @param COLOR
*/
public void setBarColor(final Color COLOR) {
if (null == barColor) {
_barColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
barColor.set(COLOR);
}
}
public ObjectProperty<Color> barColorProperty() {
if (null == barColor) {
barColor = new ObjectPropertyBase<Color>(_barColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "barColor"; }
};
_barColor = null;
}
return barColor;
}
/**
* Returns the color that will be used to colorize the bar background of
* the gauge (if it has a bar).
*
* @return the color that will be used to colorize the bar background
*/
public Color getBarBackgroundColor() { return null == barBackgroundColor ? _barBackgroundColor : barBackgroundColor.get(); }
/**
     * Defines the color that will be used to colorize the bar background of
* the gauge (if it has a bar).
*
* @param COLOR
*/
public void setBarBackgroundColor(final Color COLOR) {
if (null == barBackgroundColor) {
_barBackgroundColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
barBackgroundColor.set(COLOR);
}
}
public ObjectProperty<Color> barBackgroundColorProperty() {
if (null == barBackgroundColor) {
barBackgroundColor = new ObjectPropertyBase<Color>(_barBackgroundColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "barBackgroundColor"; }
};
_barBackgroundColor = null;
}
return barBackgroundColor;
}
/**
* Returns the color that will be used to colorize the title
* of the gauge.
*
* @return the color that will be used to colorize the title
*/
public Color getTitleColor() { return null == titleColor ? _titleColor : titleColor.get(); }
/**
* Defines the color that will be used to colorize the title
* of the gauge.
*
* @param COLOR
*/
public void setTitleColor(final Color COLOR) {
if (null == titleColor) {
_titleColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
titleColor.set(COLOR);
}
}
public ObjectProperty<Color> titleColorProperty() {
if (null == titleColor) {
titleColor = new ObjectPropertyBase<Color>(_titleColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "titleColor"; }
};
_titleColor = null;
}
return titleColor;
}
/**
* Returns the color that will be used to colorize the description text
* of the gauge.
*
* @return the color that will be used to colorize the description
*/
public Color getDescriptionColor() { return null == descriptionColor ? _descriptionColor : descriptionColor.get(); }
/**
* Defines the color that will be used to colorize the description text
* of the gauge.
*
* @param COLOR
*/
public void setDescriptionColor(final Color COLOR) {
if (null == descriptionColor) {
_descriptionColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
descriptionColor.set(COLOR);
}
}
public ObjectProperty<Color> descriptionColorProperty() {
if (null == descriptionColor) {
descriptionColor = new ObjectPropertyBase<Color>(_descriptionColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "descriptionColor"; }
};
_descriptionColor = null;
}
return descriptionColor;
}
/**
* Returns the color that will be used to colorize the unit
* of the gauge.
*
* @return the color that will be used to colorize the unit
*/
public Color getUnitColor() { return null == unitColor ? _unitColor : unitColor.get(); }
/**
* Defines the color that will be used to colorize the unit
* of the gauge.
*
* @param COLOR
*/
public void setUnitColor(final Color COLOR) {
if (null == unitColor) {
_unitColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
unitColor.set(COLOR);
}
}
public ObjectProperty<Color> unitColorProperty() {
if (null == unitColor) {
unitColor = new ObjectPropertyBase<Color>(_unitColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "unitColor"; }
};
_unitColor = null;
}
return unitColor;
}
/**
* Returns the color that will be used to colorize the value
* of the gauge.
*
* @return the color that will be used to colorize the value
*/
public Color getValueColor() { return null == valueColor ? _valueColor : valueColor.get(); }
/**
* Defines the color that will be used to colorize the value
* of the gauge.
*
* @param COLOR
*/
public void setValueColor(final Color COLOR) {
if (null == valueColor) {
_valueColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
valueColor.set(COLOR);
}
}
public ObjectProperty<Color> valueColorProperty() {
if (null == valueColor) {
valueColor = new ObjectPropertyBase<Color>(_valueColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "valueColor"; }
};
_valueColor = null;
}
return valueColor;
}
/**
* Returns the color that will be used to colorize the threshold
* indicator of the gauge.
*
* @return the color that will be used to colorize the threshold indicator
*/
public Color getThresholdColor() { return null == thresholdColor ? _thresholdColor : thresholdColor.get(); }
/**
* Defines the color that will be used to colorize the threshold
* indicator of the gauge.
*
* @param COLOR
*/
public void setThresholdColor(final Color COLOR) {
if (null == thresholdColor) {
_thresholdColor = COLOR;
fireTileEvent(REDRAW_EVENT);
} else {
thresholdColor.set(COLOR);
}
}
public ObjectProperty<Color> thresholdColorProperty() {
if (null == thresholdColor) {
thresholdColor = new ObjectPropertyBase<Color>(_thresholdColor) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "thresholdColor"; }
};
_thresholdColor = null;
}
return thresholdColor;
}
/**
* Returns true if the value of the gauge should be checked against
* all sections (if sections not empty). If a value enters a section
* or leaves a section it will fire an event. The check will be performed
* after the animation is finished (if animated == true).
*
* @return true if the value of the gauge should be checked against all sections
*/
public boolean getCheckSectionsForValue() { return null == checkSectionsForValue ? _checkSectionsForValue : checkSectionsForValue.get(); }
/**
* Defines if the value of the gauge should be checked against
* all sections (if sections not empty). If a value enters a section
* or leaves a section it will fire an event. The check will be performed
* after the animation is finished (if animated == true).
*
* @param CHECK
*/
public void setCheckSectionsForValue(final boolean CHECK) {
if (null == checkSectionsForValue) { _checkSectionsForValue = CHECK; } else { checkSectionsForValue.set(CHECK); }
}
public BooleanProperty checkSectionsForValueProperty() {
if (null == checkSectionsForValue) { checkSectionsForValue = new SimpleBooleanProperty(Tile.this, "checkSectionsForValue", _checkSectionsForValue); }
return checkSectionsForValue;
}
/**
* Returns true if the value of the gauge should be checked against
* the threshold. If a value crosses the threshold it will fire an
     * event (EXCEEDED and UNDERRUN). The check will be performed
* after the animation is finished (if animated == true).
*
* @return true if the value of the gauge should be checked against the threshold
*/
public boolean isCheckThreshold() { return null == checkThreshold ? _checkThreshold : checkThreshold.get(); }
/**
* Defines if the value of the gauge should be checked against
* the threshold. If a value crosses the threshold it will fire an
     * event (EXCEEDED and UNDERRUN). The check will be performed
* after the animation is finished (if animated == true).
*
* @param CHECK
*/
public void setCheckThreshold(final boolean CHECK) {
if (null == checkThreshold) {
_checkThreshold = CHECK;
} else {
checkThreshold.set(CHECK);
}
}
public BooleanProperty checkThresholdProperty() {
if (null == checkThreshold) { checkThreshold = new SimpleBooleanProperty(Tile.this, "checkThreshold", _checkThreshold); }
return checkThreshold;
}
/**
* Returns true if an inner shadow should be drawn on the gauge
* background.
*
* @return true if an inner shadow should be drawn on the gauge background
*/
public boolean isInnerShadowEnabled() { return null == innerShadowEnabled ? _innerShadowEnabled : innerShadowEnabled.get(); }
/**
* Defines if an inner shadow should be drawn on the gauge
* background.
*
* @param ENABLED
*/
public void setInnerShadowEnabled(final boolean ENABLED) {
if (null == innerShadowEnabled) {
_innerShadowEnabled = ENABLED;
fireTileEvent(REDRAW_EVENT);
} else {
innerShadowEnabled.set(ENABLED);
}
}
public BooleanProperty innerShadowEnabledProperty() {
if (null == innerShadowEnabled) {
innerShadowEnabled = new BooleanPropertyBase(_innerShadowEnabled) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "innerShadowEnabled"; }
};
}
return innerShadowEnabled;
}
/**
* Returns true if the threshold indicator should be drawn.
*
* @return true if the threshold indicator should be drawn
*/
public boolean isThresholdVisible() { return null == thresholdVisible ? _thresholdVisible : thresholdVisible.get(); }
/**
* Defines if the threshold indicator should be drawn
*
* @param VISIBLE
*/
public void setThresholdVisible(final boolean VISIBLE) {
if (null == thresholdVisible) {
_thresholdVisible = VISIBLE;
fireTileEvent(VISIBILITY_EVENT);
} else {
thresholdVisible.set(VISIBLE);
}
}
public BooleanProperty thresholdVisibleProperty() {
if (null == thresholdVisible) {
thresholdVisible = new BooleanPropertyBase(_thresholdVisible) {
@Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "thresholdVisible"; }
};
}
return thresholdVisible;
}
/**
* Returns true if the average indicator should be drawn.
*
* @return true if the average indicator should be drawn
*/
public boolean isAverageVisible() { return null == averageVisible ? _averageVisible : averageVisible.get(); }
/**
* Defines if the average indicator should be drawn
*
* @param VISIBLE
*/
public void setAverageVisible(final boolean VISIBLE) {
if (null == averageVisible) {
_averageVisible = VISIBLE;
fireTileEvent(VISIBILITY_EVENT);
} else {
averageVisible.set(VISIBLE);
}
}
public BooleanProperty averageVisibleProperty() {
if (null == averageVisible) {
averageVisible = new BooleanPropertyBase() {
@Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "averageVisible"; }
};
}
return averageVisible;
}
/**
* Returns true if the sections will be drawn
*
* @return true if the sections will be drawn
*/
public boolean getSectionsVisible() { return null == sectionsVisible ? _sectionsVisible : sectionsVisible.get(); }
/**
* Defines if the sections will be drawn
*
* @param VISIBLE
*/
public void setSectionsVisible(final boolean VISIBLE) {
if (null == sectionsVisible) {
_sectionsVisible = VISIBLE;
fireTileEvent(REDRAW_EVENT);
} else {
sectionsVisible.set(VISIBLE);
}
}
public BooleanProperty sectionsVisibleProperty() {
if (null == sectionsVisible) {
sectionsVisible = new BooleanPropertyBase(_sectionsVisible) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "sectionsVisible"; }
};
}
return sectionsVisible;
}
    /**
     * Returns true if the sections in the IndicatorSkin
     * will always be visible
     * @return true if the sections in the IndicatorSkin will always be visible
     */
public boolean getSectionsAlwaysVisible() { return null == sectionsAlwaysVisible ? _sectionsAlwaysVisible : sectionsAlwaysVisible.get(); }
/**
* Defines if the sections will always be visible.
* This is currently only used in the IndicatorSkin
* @param VISIBLE
*/
public void setSectionsAlwaysVisible(final boolean VISIBLE) {
if (null == sectionsAlwaysVisible) {
_sectionsAlwaysVisible = VISIBLE;
fireTileEvent(REDRAW_EVENT);
} else {
sectionsAlwaysVisible.set(VISIBLE);
}
}
public BooleanProperty sectionsAlwaysVisibleProperty() {
if (null == sectionsAlwaysVisible) {
sectionsAlwaysVisible = new BooleanPropertyBase(_sectionsAlwaysVisible) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "sectionsAlwaysVisible"; }
};
}
return sectionsAlwaysVisible;
}
/**
* Returns true if the text of the sections should be drawn inside
* the sections. This is currently only used in the SimpleSkin.
*
* @return true if the text of the sections should be drawn
*/
public boolean isSectionTextVisible() { return null == sectionTextVisible ? _sectionTextVisible : sectionTextVisible.get(); }
/**
* Defines if the text of the sections should be drawn inside
* the sections. This is currently only used in the SimpleSkin.
*
* @param VISIBLE
*/
public void setSectionTextVisible(final boolean VISIBLE) {
if (null == sectionTextVisible) {
_sectionTextVisible = VISIBLE;
fireTileEvent(REDRAW_EVENT);
} else {
sectionTextVisible.set(VISIBLE);
}
}
public BooleanProperty sectionTextVisibleProperty() {
if (null == sectionTextVisible) {
sectionTextVisible = new BooleanPropertyBase(_sectionTextVisible) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "sectionTextVisible"; }
};
}
return sectionTextVisible;
}
/**
* Returns true if the icon of the sections should be drawn inside
* the sections. This is currently only used in the SimpleSkin.
*
* @return true if the icon of the sections should be drawn
*/
public boolean getSectionIconsVisible() { return null == sectionIconsVisible ? _sectionIconsVisible : sectionIconsVisible.get(); }
/**
* Defines if the icon of the sections should be drawn inside
* the sections. This is currently only used in the SimpleSkin.
*
* @param VISIBLE
*/
public void setSectionIconsVisible(final boolean VISIBLE) {
if (null == sectionIconsVisible) {
_sectionIconsVisible = VISIBLE;
fireTileEvent(REDRAW_EVENT);
} else {
sectionIconsVisible.set(VISIBLE);
}
}
public BooleanProperty sectionIconsVisibleProperty() {
if (null == sectionIconsVisible) {
sectionIconsVisible = new BooleanPropertyBase(_sectionIconsVisible) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "sectionIconsVisible"; }
};
}
return sectionIconsVisible;
}
/**
* Returns true if sections should be highlighted in case they
* contain the current value.
*
* @return true if sections should be highlighted
*/
public boolean isHighlightSections() { return null == highlightSections ? _highlightSections : highlightSections.get(); }
/**
* Defines if sections should be highlighted in case they
* contain the current value
*
* @param HIGHLIGHT
*/
public void setHighlightSections(final boolean HIGHLIGHT) {
if (null == highlightSections) {
_highlightSections = HIGHLIGHT;
fireTileEvent(REDRAW_EVENT);
} else {
highlightSections.set(HIGHLIGHT);
}
}
public BooleanProperty highlightSectionsProperty() {
if (null == highlightSections) {
highlightSections = new BooleanPropertyBase(_highlightSections) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "highlightSections"; }
};
}
return highlightSections;
}
/**
* Returns the orientation of the control. This feature
* will only be used in the BulletChartSkin and LinearSkin.
* Values are HORIZONTAL and VERTICAL
*
* @return the orientation of the control
*/
public Orientation getOrientation() { return null == orientation ? _orientation : orientation.get(); }
/**
* Defines the orientation of the control. This feature
* will only be used in the BulletChartSkin and LinearSkin.
* Values are HORIZONTAL and VERTICAL
*
* @param ORIENTATION
*/
public void setOrientation(final Orientation ORIENTATION) {
if (null == orientation) {
_orientation = ORIENTATION;
fireTileEvent(RESIZE_EVENT);
} else {
orientation.set(ORIENTATION);
}
}
public ObjectProperty<Orientation> orientationProperty() {
if (null == orientation) {
orientation = new ObjectPropertyBase<Orientation>(_orientation) {
@Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "orientation"; }
};
_orientation = null;
}
return orientation;
}
    /**
     * Returns true if the control should keep its aspect. This is
     * in principle only needed if the control has different width and
     * height.
     *
     * @return true if the control should keep its aspect
     */
public boolean isKeepAspect() { return null == keepAspect ? _keepAspect : keepAspect.get(); }
    /**
     * Defines if the control should keep its aspect. This is
     * in principle only needed if the control has different width and
     * height.
     *
     * @param KEEP
     */
public void setKeepAspect(final boolean KEEP) {
if (null == keepAspect) {
_keepAspect = KEEP;
} else {
keepAspect.set(KEEP);
}
}
public BooleanProperty keepAspectProperty() {
if (null == keepAspect) { keepAspect = new SimpleBooleanProperty(Tile.this, "keepAspect", _keepAspect); }
return keepAspect;
}
/**
* Returns true if the alert property was set.
* This property can be used to visualize an alert
* situation in a skin.
* @return true if the alert property was set
*/
public boolean isAlert() { return null == alert ? _alert : alert.get(); }
/**
* Defines if the alert property should be set. This
* property can be used to visualize an alert situation
* in the skin.
* @param ALERT
*/
public void setAlert(final boolean ALERT) {
if (null == alert) {
_alert = ALERT;
fireTileEvent(ALERT_EVENT);
} else {
alert.set(ALERT);
}
}
public BooleanProperty alertProperty() {
if (null == alert) {
alert = new BooleanPropertyBase(_alert) {
@Override protected void invalidated() { fireTileEvent(ALERT_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "alert"; }
};
}
return alert;
}
/**
* Returns the alert message text that could be used in a tooltip
* in case of an alert.
* @return the alert message text
*/
public String getAlertMessage() { return null == alertMessage ? _alertMessage : alertMessage.get(); }
/**
* Defines the text that could be used in a tooltip as an
* alert message.
* @param MESSAGE
*/
public void setAlertMessage(final String MESSAGE) {
if (null == alertMessage) {
_alertMessage = MESSAGE;
fireTileEvent(ALERT_EVENT);
} else {
alertMessage.set(MESSAGE);
}
}
public StringProperty alertMessageProperty() {
if (null == alertMessage) {
alertMessage = new StringPropertyBase(_alertMessage) {
@Override protected void invalidated() { fireTileEvent(ALERT_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "alertMessage"; }
};
_alertMessage = null;
}
return alertMessage;
}
/**
* Returns true when smoothing is enabled. This property is only used
* in the TileSparklineSkin to smooth the path. In a custom skin it
* could be also used for other things.
* @return true when smoothing is enabled
*/
public boolean isSmoothing() { return null == smoothing ? _smoothing : smoothing.get(); }
/**
* Defines if the smoothing property should be enabled/disabled.
* At the moment this is only used in the TileSparklineSkin.
* @param SMOOTHING
*/
public void setSmoothing(final boolean SMOOTHING) {
if (null == smoothing) {
_smoothing = SMOOTHING;
fireTileEvent(REDRAW_EVENT);
} else {
smoothing.set(SMOOTHING);
}
}
public BooleanProperty smoothingProperty() {
if (null == smoothing) {
smoothing = new BooleanPropertyBase(_smoothing) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "smoothing"; }
};
}
return smoothing;
}
/**
* Calling this method will lead to a recalculation of the scale
*/
public void calcAutoScale() {
double maxNoOfMajorTicks = 10;
//double maxNoOfMinorTicks = 10;
double niceRange = (Helper.calcNiceNumber(getRange(), false));
double majorTickSpace = Helper.calcNiceNumber(niceRange / (maxNoOfMajorTicks - 1), true);
double niceMinValue = (Math.floor(getMinValue() / majorTickSpace) * majorTickSpace);
double niceMaxValue = (Math.ceil(getMaxValue() / majorTickSpace) * majorTickSpace);
//double minorTickSpace = Helper.calcNiceNumber(majorTickSpace / (maxNoOfMinorTicks - 1), true);
setMinValue(niceMinValue);
setMaxValue(niceMaxValue);
}
/**
* Returns the current time of the clock.
* @return the current time of the clock
*/
public ZonedDateTime getTime() { return time.get(); }
/**
* Defines the current time of the clock.
* @param TIME
*/
public void setTime(final ZonedDateTime TIME) { time.set(TIME); }
public void setTime(final long EPOCH_SECONDS) {
time.set(ZonedDateTime.ofInstant(Instant.ofEpochSecond(EPOCH_SECONDS), getZoneId()));
}
public ObjectProperty<ZonedDateTime> timeProperty() { return time; }
/**
* Returns the current time in epoch seconds
* @return the current time in epoch seconds
*/
public long getCurrentTime() { return currentTime.get(); }
public ReadOnlyLongProperty currentTimeProperty() { return currentTime; }
public ZoneId getZoneId() { return zoneId; }
/**
* Returns the text that was defined for the clock.
* This text could be used for additional information.
* @return the text that was defined for the clock
*/
public String getText() { return null == text ? _text : text.get(); }
/**
* Define the text for the clock.
* This text could be used for additional information.
* @param TEXT
*/
public void setText(final String TEXT) {
if (null == text) {
_text = TEXT;
fireTileEvent(REDRAW_EVENT);
} else {
text.set(TEXT);
}
}
public StringProperty textProperty() {
if (null == text) {
text = new StringPropertyBase(_text) {
@Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "text"; }
};
_text = null;
}
return text;
}
/**
* Returns the alignment that will be used to align the text
* in the Tile. Keep in mind that this property will not be used
* by every skin
* @return the alignment of the text
*/
public TextAlignment getTextAlignment() { return null == textAlignment ? _textAlignment : textAlignment.get(); }
/**
* Defines the alignment that will be used to align the text
* in the Tile. Keep in mind that this property will not be used
* by every skin.
* @param ALIGNMENT
*/
public void setTextAlignment(final TextAlignment ALIGNMENT) {
if (null == textAlignment) {
_textAlignment = ALIGNMENT;
fireTileEvent(RESIZE_EVENT);
} else {
textAlignment.set(ALIGNMENT);
}
}
public ObjectProperty<TextAlignment> textAlignmentProperty() {
if (null == textAlignment) {
textAlignment = new ObjectPropertyBase<TextAlignment>(_textAlignment) {
@Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "textAlignment"; }
};
_textAlignment = null;
}
return textAlignment;
}
/**
* Returns an observable list of TimeSection objects. The sections
* will be used to colorize areas with a special meaning.
* TimeSections in the Medusa library usually are less eye-catching than
* Areas.
* @return an observable list of TimeSection objects
*/
public ObservableList<TimeSection> getTimeSections() { return timeSections; }
/**
* Sets the sections to the given list of TimeSection objects. The
* sections will be used to colorize areas with a special
* meaning. Sections in the Medusa library usually are less eye-catching
* than Areas.
* @param SECTIONS
*/
public void setTimeSections(final List<TimeSection> SECTIONS) {
timeSections.setAll(SECTIONS);
timeSections.sort(new TimeSectionComparator());
fireTileEvent(SECTION_EVENT);
}
/**
* Sets the sections to the given array of TimeSection objects. The
* sections will be used to colorize areas with a special
* meaning. Sections in the Medusa library usually are less eye-catching
* than Areas.
* @param SECTIONS
*/
public void setTimeSections(final TimeSection... SECTIONS) { setTimeSections(Arrays.asList(SECTIONS)); }
/**
* Adds the given TimeSection to the list of sections.
* Sections in the Medusa library usually are less eye-catching
* than Areas.
* @param SECTION
*/
public void addTimeSection(final TimeSection SECTION) {
if (null == SECTION) return;
timeSections.add(SECTION);
timeSections.sort(new TimeSectionComparator());
fireTileEvent(SECTION_EVENT);
}
/**
* Removes the given TimeSection from the list of sections.
* Sections in the Medusa library usually are less eye-catching
* than Areas.
* @param SECTION
*/
public void removeTimeSection(final TimeSection SECTION) {
if (null == SECTION) return;
timeSections.remove(SECTION);
timeSections.sort(new TimeSectionComparator());
fireTileEvent(SECTION_EVENT);
}
/**
* Clears the list of sections.
*/
public void clearTimeSections() {
sections.clear();
fireTileEvent(SECTION_EVENT);
}
/**
* Returns true if the second hand of the clock should move
* in discrete steps of 1 second. Otherwise it will move continuously like
* in an automatic clock.
* @return true if the second hand of the clock should move in discrete steps of 1 second
*/
public boolean isDiscreteSeconds() { return null == discreteSeconds ? _discreteSeconds : discreteSeconds.get(); }
/**
* Defines if the second hand of the clock should move in
* discrete steps of 1 second. Otherwise it will move continuously like
* in an automatic clock.
* @param DISCRETE
*/
public void setDiscreteSeconds(boolean DISCRETE) {
if (null == discreteSeconds) {
_discreteSeconds = DISCRETE;
stopTask(periodicTickTask);
if (isAnimated()) return;
scheduleTickTask();
} else {
discreteSeconds.set(DISCRETE);
}
}
public BooleanProperty discreteSecondsProperty() {
if (null == discreteSeconds) {
discreteSeconds = new BooleanPropertyBase() {
@Override protected void invalidated() {
stopTask(periodicTickTask);
if (isAnimated()) return;
scheduleTickTask();
}
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "discreteSeconds"; }
};
}
return discreteSeconds;
}
/**
* Returns true if the minute hand of the clock should move in
* discrete steps of 1 minute. Otherwise it will move continuously like
* in an automatic clock.
* @return true if the minute hand of the clock should move in discrete steps of 1 minute
*/
public boolean isDiscreteMinutes() { return null == discreteMinutes ? _discreteMinutes : discreteMinutes.get(); }
/**
* Defines if the minute hand of the clock should move in
* discrete steps of 1 minute. Otherwise it will move continuously like
* in an automatic clock.
* @param DISCRETE
*/
public void setDiscreteMinutes(boolean DISCRETE) {
if (null == discreteMinutes) {
_discreteMinutes = DISCRETE;
stopTask(periodicTickTask);
if (isAnimated()) return;
scheduleTickTask();
} else {
discreteMinutes.set(DISCRETE);
}
}
public BooleanProperty discreteMinutesProperty() {
if (null == discreteMinutes) {
discreteMinutes = new BooleanPropertyBase() {
@Override protected void invalidated() {
stopTask(periodicTickTask);
if (isAnimated()) return;
scheduleTickTask();
}
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "discreteMinutes"; }
};
}
return discreteMinutes;
}
/**
* Returns true if the hour hand of the clock should move in
* discrete steps of 1 hour. This behavior was more or less
* implemented to realize the clock of clocks and should usually
* be false.
* @return true if the hour hand of the clock should move in discrete steps of 1 hour
*/
public boolean isDiscreteHours() { return null == discreteHours ? _discreteHours : discreteHours.get(); }
/**
* Defines if the hour hand of the clock should move in
* discrete steps of 1 hour. This behavior was more or less
* implemented to realize the clock of clocks and should usually
* be false.
* @param DISCRETE
*/
public void setDiscreteHours(final boolean DISCRETE) {
if (null == discreteHours) {
_discreteHours = DISCRETE;
} else {
discreteHours.set(DISCRETE);
}
}
public BooleanProperty discreteHoursProperty() {
if (null == discreteHours) { discreteHours = new SimpleBooleanProperty(Tile.this, "discreteHours", _discreteHours); }
return discreteHours;
}
    /**
     * Returns true if the second hand of the clock will be drawn.
     * @return true if the second hand of the clock will be drawn.
     */
    public boolean isSecondsVisible() { return null == secondsVisible ? _secondsVisible : secondsVisible.get(); }
    /**
     * Defines if the second hand of the clock will be drawn.
     * @param VISIBLE true to draw the second hand
     */
    public void setSecondsVisible(boolean VISIBLE) {
        if (null == secondsVisible) {
            _secondsVisible = VISIBLE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            secondsVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created secondsVisible property, seeded from the backing field */
    public BooleanProperty secondsVisibleProperty() {
        if (null == secondsVisible) {
            secondsVisible = new BooleanPropertyBase(_secondsVisible) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "secondsVisible"; }
            };
        }
        return secondsVisible;
    }
    /**
     * Returns true if the text of the clock will be drawn.
     * @return true if the text of the clock will be drawn
     */
    public boolean isTextVisible() { return null == textVisible ? _textVisible : textVisible.get(); }
    /**
     * Defines if the text of the clock will be drawn.
     * @param VISIBLE true to draw the text
     */
    public void setTextVisible(final boolean VISIBLE) {
        if (null == textVisible) {
            _textVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            textVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created textVisible property, seeded from the backing field */
    public BooleanProperty textVisibleProperty() {
        if (null == textVisible) {
            textVisible = new BooleanPropertyBase(_textVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "textVisible"; }
            };
        }
        return textVisible;
    }
    /**
     * Returns true if the date of the clock will be drawn.
     * @return true if the date of the clock will be drawn
     */
    public boolean isDateVisible() { return null == dateVisible ? _dateVisible : dateVisible.get(); }
    /**
     * Defines if the date of the clock will be drawn.
     * @param VISIBLE true to draw the date
     */
    public void setDateVisible(final boolean VISIBLE) {
        if (null == dateVisible) {
            _dateVisible = VISIBLE;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            dateVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created dateVisible property, seeded from the backing field */
    public BooleanProperty dateVisibleProperty() {
        if (null == dateVisible) {
            dateVisible = new BooleanPropertyBase(_dateVisible) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "dateVisible"; }
            };
        }
        return dateVisible;
    }
    /**
     * Returns true if the clock is running and shows the current time.
     * The clock will only start running if animated == false.
     * @return true if the clock is running
     */
    public boolean isRunning() { return null == running ? _running : running.get(); }
    /**
     * Defines if the clock is running.
     * The clock will only start running if animated == false;
     * @param RUNNING true to start the periodic tick task (only effective when not animated)
     */
    public void setRunning(boolean RUNNING) {
        if (null == running) {
            _running = RUNNING;
            // Starting schedules the periodic tick task; stopping (or an animated tile) cancels it.
            if (RUNNING && !isAnimated()) { scheduleTickTask(); } else { stopTask(periodicTickTask); }
        } else {
            running.set(RUNNING);
        }
    }
    /** @return the lazily created running property, seeded from the backing field */
    public BooleanProperty runningProperty() {
        if (null == running) {
            running = new BooleanPropertyBase(_running) {
                @Override protected void invalidated() {
                    // Same start/stop behavior as the setter's direct path.
                    if (get() && !isAnimated()) { scheduleTickTask(); } else { stopTask(periodicTickTask); }
                }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "running"; }
            }; }
        return running;
    }
    /**
     * Returns the color that will be used to colorize the text of the clock.
     * @return the color that will be used to colorize the text of the clock
     */
    public Color getTextColor() { return null == textColor ? _textColor : textColor.get(); }
    /**
     * Defines the color that will be used to colorize the text of the clock.
     * @param COLOR the text color
     */
    public void setTextColor(final Color COLOR) {
        if (null == textColor) {
            _textColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            textColor.set(COLOR);
        }
    }
    /** @return the lazily created textColor property, seeded from the backing field */
    public ObjectProperty<Color> textColorProperty() {
        if (null == textColor) {
            textColor = new ObjectPropertyBase<Color>(_textColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "textColor"; }
            };
            _textColor = null; // release backing field; the property is now the single source of truth
        }
        return textColor;
    }
    /**
     * Returns the color that will be used to colorize the date of the clock.
     * @return the color that will be used to colorize the date of the clock
     */
    public Color getDateColor() { return null == dateColor ? _dateColor : dateColor.get(); }
    /**
     * Defines the color that will be used to colorize the date of the clock
     * @param COLOR the date color
     */
    public void setDateColor(final Color COLOR) {
        if (null == dateColor) {
            _dateColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            dateColor.set(COLOR);
        }
    }
    /** @return the lazily created dateColor property, seeded from the backing field */
    public ObjectProperty<Color> dateColorProperty() {
        if (null == dateColor) {
            dateColor = new ObjectPropertyBase<Color>(_dateColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "dateColor"; }
            };
            _dateColor = null; // release backing field; the property is now the single source of truth
        }
        return dateColor;
    }
    /**
     * Returns the color that will be used to colorize the hour tickmarks of the clock.
     * @return the color that will be used to colorize the hour tickmarks of the clock
     */
    public Color getHourTickMarkColor() { return null == hourTickMarkColor ? _hourTickMarkColor : hourTickMarkColor.get(); }
    /**
     * Defines the color that will be used to colorize the hour tickmarks of the clock.
     * @param COLOR the hour tickmark color
     */
    public void setHourTickMarkColor(final Color COLOR) {
        if (null == hourTickMarkColor) {
            _hourTickMarkColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            hourTickMarkColor.set(COLOR);
        }
    }
    /** @return the lazily created hourTickMarkColor property, seeded from the backing field */
    public ObjectProperty<Color> hourTickMarkColorProperty() {
        if (null == hourTickMarkColor) {
            hourTickMarkColor = new ObjectPropertyBase<Color>(_hourTickMarkColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "hourTickMarkColor"; }
            };
            _hourTickMarkColor = null; // release backing field; the property is now the single source of truth
        }
        return hourTickMarkColor;
    }
    /**
     * Returns the color that will be used to colorize the minute tickmarks of the clock.
     * @return the color that will be used to colorize the minute tickmarks of the clock
     */
    public Color getMinuteTickMarkColor() { return null == minuteTickMarkColor ? _minuteTickMarkColor : minuteTickMarkColor.get(); }
    /**
     * Defines the color that will be used to colorize the minute tickmarks of the clock.
     * @param COLOR the minute tickmark color
     */
    public void setMinuteTickMarkColor(final Color COLOR) {
        if (null == minuteTickMarkColor) {
            _minuteTickMarkColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            minuteTickMarkColor.set(COLOR);
        }
    }
    /** @return the lazily created minuteTickMarkColor property, seeded from the backing field */
    public ObjectProperty<Color> minuteTickMarkColorProperty() {
        if (null == minuteTickMarkColor) {
            minuteTickMarkColor = new ObjectPropertyBase<Color>(_minuteTickMarkColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "minuteTickMarkColor"; }
            };
            _minuteTickMarkColor = null; // release backing field; the property is now the single source of truth
        }
        return minuteTickMarkColor;
    }
    /**
     * Returns the color that will be used to colorize the alarm icon.
     * @return the color that will be used to colorize the alarm icon
     */
    public Color getAlarmColor() { return null == alarmColor ? _alarmColor : alarmColor.get(); }
    /**
     * Defines the color that will be used to colorize the alarm icon
     * @param COLOR the alarm icon color
     */
    public void setAlarmColor(final Color COLOR) {
        if (null == alarmColor) {
            _alarmColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            alarmColor.set(COLOR);
        }
    }
    /** @return the lazily created alarmColor property, seeded from the backing field */
    public ObjectProperty<Color> alarmColorProperty() {
        if (null == alarmColor) {
            alarmColor = new ObjectPropertyBase<Color>(_alarmColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "alarmColor"; }
            };
            _alarmColor = null; // release backing field; the property is now the single source of truth
        }
        return alarmColor;
    }
    /**
     * Returns true if the hour tickmarks will be drawn.
     * @return true if the hour tickmarks will be drawn
     */
    public boolean isHourTickMarksVisible() { return null == hourTickMarksVisible ? _hourTickMarksVisible : hourTickMarksVisible.get(); }
    /**
     * Defines if the hour tickmarks will be drawn.
     * @param VISIBLE true to draw the hour tickmarks
     */
    public void setHourTickMarksVisible(final boolean VISIBLE) {
        if (null == hourTickMarksVisible) {
            _hourTickMarksVisible = VISIBLE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            hourTickMarksVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created hourTickMarksVisible property, seeded from the backing field */
    public BooleanProperty hourTickMarksVisibleProperty() {
        if (null == hourTickMarksVisible) {
            hourTickMarksVisible = new BooleanPropertyBase(_hourTickMarksVisible) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "hourTickMarksVisible"; }
            };
        }
        return hourTickMarksVisible;
    }
    /**
     * Returns true if the minute tickmarks will be drawn.
     * @return true if the minute tickmarks will be drawn
     */
    public boolean isMinuteTickMarksVisible() { return null == minuteTickMarksVisible ? _minuteTickMarksVisible : minuteTickMarksVisible.get(); }
    /**
     * Defines if the minute tickmarks will be drawn.
     * @param VISIBLE true to draw the minute tickmarks
     */
    public void setMinuteTickMarksVisible(final boolean VISIBLE) {
        if (null == minuteTickMarksVisible) {
            _minuteTickMarksVisible = VISIBLE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            minuteTickMarksVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created minuteTickMarksVisible property, seeded from the backing field */
    public BooleanProperty minuteTickMarksVisibleProperty() {
        if (null == minuteTickMarksVisible) {
            minuteTickMarksVisible = new BooleanPropertyBase(_minuteTickMarksVisible) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "minuteTickMarksVisible"; }
            };
        }
        return minuteTickMarksVisible;
    }
    /**
     * Returns the color that will be used to colorize the hour hand of the clock.
     * @return the color that will be used to colorize the hour hand of the clock
     */
    public Color getHourColor() { return null == hourColor ? _hourColor : hourColor.get(); }
    /**
     * Defines the color that will be used to colorize the hour hand of the clock
     * @param COLOR the hour hand color
     */
    public void setHourColor(final Color COLOR) {
        if (null == hourColor) {
            _hourColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            hourColor.set(COLOR);
        }
    }
    /** @return the lazily created hourColor property, seeded from the backing field */
    public ObjectProperty<Color> hourColorProperty() {
        if (null == hourColor) {
            hourColor = new ObjectPropertyBase<Color>(_hourColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "hourColor"; }
            };
            _hourColor = null; // release backing field; the property is now the single source of truth
        }
        return hourColor;
    }
    /**
     * Returns the color that will be used to colorize the minute hand of the clock.
     * @return the color that will be used to colorize the minute hand of the clock
     */
    public Color getMinuteColor() { return null == minuteColor ? _minuteColor : minuteColor.get(); }
    /**
     * Defines the color that will be used to colorize the minute hand of the clock.
     * @param COLOR the minute hand color
     */
    public void setMinuteColor(final Color COLOR) {
        if (null == minuteColor) {
            _minuteColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            minuteColor.set(COLOR);
        }
    }
    /** @return the lazily created minuteColor property, seeded from the backing field */
    public ObjectProperty<Color> minuteColorProperty() {
        if (null == minuteColor) {
            minuteColor = new ObjectPropertyBase<Color>(_minuteColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "minuteColor"; }
            };
            _minuteColor = null; // release backing field; the property is now the single source of truth
        }
        return minuteColor;
    }
    /**
     * Returns the color that will be used to colorize the second hand of the clock.
     * @return the color that will be used to colorize the second hand of the clock
     */
    public Color getSecondColor() { return null == secondColor ? _secondColor : secondColor.get(); }
    /**
     * Defines the color that will be used to colorize the second hand of the clock
     * @param COLOR the second hand color
     */
    public void setSecondColor(final Color COLOR) {
        if (null == secondColor) {
            _secondColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            secondColor.set(COLOR);
        }
    }
    /** @return the lazily created secondColor property, seeded from the backing field */
    public ObjectProperty<Color> secondColorProperty() {
        if (null == secondColor) {
            secondColor = new ObjectPropertyBase<Color>(_secondColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "secondColor"; }
            };
            _secondColor = null; // release backing field; the property is now the single source of truth
        }
        return secondColor;
    }
    /**
     * Returns true if alarms are enabled.
     * If false then no alarms will be triggered
     * @return true if alarms are enabled
     */
    public boolean isAlarmsEnabled() { return null == alarmsEnabled ? _alarmsEnabled : alarmsEnabled.get(); }
    /**
     * Defines if alarms are enabled.
     * If false then no alarms will be triggered.
     * @param CHECK true to enable alarm checking
     */
    public void setAlarmsEnabled(final boolean CHECK) {
        if (null == alarmsEnabled) {
            _alarmsEnabled = CHECK;
            fireTileEvent(VISIBILITY_EVENT);
        } else {
            alarmsEnabled.set(CHECK);
        }
    }
    /** @return the lazily created alarmsEnabled property, seeded from the backing field */
    public BooleanProperty alarmsEnabledProperty() {
        if (null == alarmsEnabled) {
            alarmsEnabled = new BooleanPropertyBase(_alarmsEnabled) {
                @Override protected void invalidated() { fireTileEvent(VISIBILITY_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "alarmsEnabled"; }
            };
        }
        return alarmsEnabled;
    }
    /**
     * Returns true if alarm markers should be drawn.
     * @return true if alarm markers should be drawn
     */
    public boolean isAlarmsVisible() { return null == alarmsVisible ? _alarmsVisible : alarmsVisible.get(); }
    /**
     * Defines if alarm markers should be drawn.
     * @param VISIBLE true to draw alarm markers
     */
    public void setAlarmsVisible(final boolean VISIBLE) {
        if (null == alarmsVisible) {
            _alarmsVisible = VISIBLE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            alarmsVisible.set(VISIBLE);
        }
    }
    /** @return the lazily created alarmsVisible property, seeded from the backing field */
    public BooleanProperty alarmsVisibleProperty() {
        if (null == alarmsVisible) {
            alarmsVisible = new BooleanPropertyBase(_alarmsVisible) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "alarmsVisible"; }
            };
        }
        return alarmsVisible;
    }
/**
* Returns an observable list of Alarm objects.
* @return an observable list of Alarm objects
*/
public ObservableList<Alarm> getAlarms() { return alarms; }
/**
* Sets the alarms to the given list of Alarm objects.
* @param ALARMS
*/
public void setAlarms(final List<Alarm> ALARMS) { alarms.setAll(ALARMS); }
/**
* Sets the alarms to the given array of Alarm objects.
* @param ALARMS
*/
public void setAlarms(final Alarm... ALARMS) { setAlarms(Arrays.asList(ALARMS)); }
/**
* Adds the given Alarm object from the list of alarms.
* @param ALARM
*/
public void addAlarm(final Alarm ALARM) { if (!alarms.contains(ALARM)) alarms.add(ALARM); }
/**
* Removes the given Alarm object from the list of alarms.
* @param ALARM
*/
public void removeAlarm(final Alarm ALARM) { if (alarms.contains(ALARM)) alarms.remove(ALARM); }
/**
* Clears the list of alarms.
*/
public void clearAlarms() { alarms.clear(); }
/**
* Returns the text that will be shown in the Tile tooltip
* @return the text that will be shown in the Tile tooltip
*/
public String getTooltipText() { return null == tooltipText ? _tooltipText : tooltipText.get(); }
/**
* Defines the text that will be shown in the Tile tooltip
* @param TEXT
*/
public void setTooltipText(final String TEXT) {
if (null == tooltipText) {
tooltip.setText(TEXT);
if (null == TEXT || TEXT.isEmpty()) {
setTooltip(null);
} else {
setTooltip(tooltip);
}
} else {
tooltipText.set(TEXT);
}
}
public StringProperty tooltipTextProperty() {
if (null == tooltipText) {
tooltipText = new StringPropertyBase() {
@Override protected void invalidated() {
tooltip.setText(get());
if (null == get() || get().isEmpty()) {
setTooltip(null);
} else {
setTooltip(tooltip);
}
}
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "tooltipText"; }
};
_tooltipText = null;
}
return tooltipText;
}
    /** @return the x axis used by chart based skins */
    public Axis getXAxis() { return null == xAxis ? _xAxis : xAxis.get(); }
    /**
     * Defines the x axis used by chart based skins.
     * @param AXIS the x axis
     */
    public void setXAxis(final Axis AXIS) {
        if (null == xAxis) {
            _xAxis = AXIS;
            fireTileEvent(RESIZE_EVENT);
        } else {
            xAxis.set(AXIS);
        }
    }
    /** @return the lazily created xAxis property, seeded from the backing field */
    public ObjectProperty<Axis> xAxisProperty() {
        if (null == xAxis) {
            xAxis = new ObjectPropertyBase<Axis>(_xAxis) {
                @Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() {
                    return "xAxis";
                }
            };
            _xAxis = null; // release backing field; the property is now the single source of truth
        }
        return xAxis;
    }
    /** @return the y axis used by chart based skins */
    public Axis getYAxis() { return null == yAxis ? _yAxis : yAxis.get(); }
    /**
     * Defines the y axis used by chart based skins.
     * @param AXIS the y axis
     */
    public void setYAxis(final Axis AXIS) {
        if (null == yAxis) {
            _yAxis = AXIS;
            fireTileEvent(RESIZE_EVENT);
        } else {
            yAxis.set(AXIS);
        }
    }
    /** @return the lazily created yAxis property, seeded from the backing field */
    public ObjectProperty<Axis> yAxisProperty() {
        if (null == yAxis) {
            yAxis = new ObjectPropertyBase<Axis>(_yAxis) {
                @Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "yAxis"; }
            };
            _yAxis = null; // release backing field; the property is now the single source of truth
        }
        return yAxis;
    }
    /**
     * Returns the mode of the RadarChartTileSkin.
     * There are Mode.POLYGON and Mode.SECTOR.
     * @return the mode of the RadarChartTileSkin
     */
    public RadarChart.Mode getRadarChartMode() { return null == radarChartMode ? _radarChartMode : radarChartMode.get(); }
    /**
     * Defines the mode that is used in the RadarChartTileSkin
     * to visualize the data in the RadarChart.
     * There are Mode.POLYGON and Mode.SECTOR.
     * @param MODE the radar chart mode
     */
    public void setRadarChartMode(final RadarChart.Mode MODE) {
        if (null == radarChartMode) {
            _radarChartMode = MODE;
            fireTileEvent(RECALC_EVENT);
        } else {
            radarChartMode.set(MODE);
        }
    }
    /** @return the lazily created radarChartMode property, seeded from the backing field */
    public ObjectProperty<RadarChart.Mode> radarChartModeProperty() {
        if (null == radarChartMode) {
            radarChartMode = new ObjectPropertyBase<Mode>(_radarChartMode) {
                @Override protected void invalidated() { fireTileEvent(RECALC_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "radarChartMode"; }
            };
            _radarChartMode = null; // release backing field; the property is now the single source of truth
        }
        return radarChartMode;
    }
    /**
     * Returns the color that will be used to colorize lines in
     * charts e.g. the grid in the RadarChartTileSkin
     * @return the color that will be used to colorize lines in charts
     */
    public Color getChartGridColor() { return null == chartGridColor ? _chartGridColor : chartGridColor.get(); }
    /**
     * Defines the color that will be used to colorize lines in
     * charts e.g. the grid in the RadarChartTileSkin
     * @param COLOR the chart grid color
     */
    public void setChartGridColor(final Color COLOR) {
        if (null == chartGridColor) {
            _chartGridColor = COLOR;
            fireTileEvent(REDRAW_EVENT);
        } else {
            chartGridColor.set(COLOR);
        }
    }
    /** @return the lazily created chartGridColor property, seeded from the backing field */
    public ObjectProperty<Color> chartGridColorProperty() {
        if (null == chartGridColor) {
            chartGridColor = new ObjectPropertyBase<Color>(_chartGridColor) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "chartGridColor"; }
            };
            _chartGridColor = null; // release backing field; the property is now the single source of truth
        }
        return chartGridColor;
    }
    /**
     * Returns the Locale that will be used to visualize the country
     * in the CountryTileSkin
     * @return the Locale that will be used to visualize the country in the CountryTileSkin
     */
    public Country getCountry() { return null == country ? _country : country.get(); }
    /**
     * Defines the Locale that will be used to visualize the country
     * in the CountryTileSkin
     * @param COUNTRY the country to visualize
     */
    public void setCountry(final Country COUNTRY) {
        if (null == country) {
            _country = COUNTRY;
            fireTileEvent(RECALC_EVENT);
        } else {
            country.set(COUNTRY);
        }
    }
    /** @return the lazily created country property, seeded from the backing field */
    public ObjectProperty<Country> countryProperty() {
        if (null == country) {
            country = new ObjectPropertyBase<Country>(_country) {
                @Override protected void invalidated() { fireTileEvent(RECALC_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "country"; }
            };
            _country = null; // release backing field; the property is now the single source of truth
        }
        return country;
    }
    /** @return the increment step, e.g. used by the plus/minus skin */
    public double getIncrement() { return increment; }
    /** Defines the increment step; the value is clamped via clamp(0, 10, INCREMENT). @param INCREMENT the new increment */
    public void setIncrement(final double INCREMENT) { increment = clamp(0, 10, INCREMENT); }
    /**
     * Returns true if the control uses the given customFont to
     * render all text elements.
     * @return true if the control uses the given customFont
     */
    public boolean isCustomFontEnabled() { return null == customFontEnabled ? _customFontEnabled : customFontEnabled.get(); }
    /**
     * Defines if the control should use the given customFont
     * to render all text elements
     * @param ENABLED true to render text with the custom font
     */
    public void setCustomFontEnabled(final boolean ENABLED) {
        if (null == customFontEnabled) {
            _customFontEnabled = ENABLED;
            fireTileEvent(RESIZE_EVENT);
        } else {
            customFontEnabled.set(ENABLED);
        }
    }
    /** @return the lazily created customFontEnabled property, seeded from the backing field */
    public BooleanProperty customFontEnabledProperty() {
        if (null == customFontEnabled) {
            customFontEnabled = new BooleanPropertyBase(_customFontEnabled) {
                @Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "customFontEnabled"; }
            };
        }
        return customFontEnabled;
    }
/**
* Returns the given custom Font that can be used to render
* all text elements. To enable the custom font one has to set
* customFontEnabled = true
* @return the given custom Font
*/
public Font getCustomFont() { return null == customFont ? _customFont : customFont.get(); }
/**
* Defines the custom font that can be used to render all
* text elements. To enable the custom font one has to set
* customFontEnabled = true
* @param FONT
*/
public void setCustomFont(final Font FONT) {
if (null == customFont) {
_customFont = FONT;
fireTileEvent(RESIZE_EVENT);
} else {
customFont.set(FONT);
}
}
public ObjectProperty<Font> customFontProperty() {
if (null == customFont) {
customFont = new ObjectPropertyBase<Font>() {
@Override protected void invalidated() { fireTileEvent(RESIZE_EVENT); }
@Override public Object getBean() { return Tile.this; }
@Override public String getName() { return "customFont"; }
};
_customFont = null;
}
return customFont;
}
    /**
     * Returns a list of path elements that define the countries
     * @return a list of path elements that define the countries
     */
    public Map<String, List<CountryPath>> getCountryPaths() {
        // Lazily load the country SVG path definitions from the properties resource.
        if (null == countryProperties) { countryProperties = readProperties(COUNTRY_PROPERTIES); }
        if (null == countryPaths) {
            countryPaths = new HashMap<>();
            countryProperties.forEach((key, value) -> {
                String name = key.toString();
                List<CountryPath> pathList = new ArrayList<>();
                // Each property value holds several SVG paths separated by ';'.
                for (String path : value.toString().split(";")) { pathList.add(new CountryPath(name, path)); }
                countryPaths.put(name, pathList);
            });
        }
        return countryPaths;
    }
    /**
     * Returns true if a gradient defined by gradientStops will be
     * used to stroke the line in the SparklineTileSkin.
     * @return true if a gradient defined by gradientStops will be used to stroke the line in the SparklineTileSkin
     */
    public boolean isStrokeWithGradient() { return null == strokeWithGradient ? _strokeWithGradient : strokeWithGradient.get(); }
    /**
     * Defines the usage of a gradient defined by gradientStops to stroke the line
     * in the SparklineTileSkin
     * @param STROKE_WITH_GRADIENT true to stroke the sparkline with the gradient
     */
    public void setStrokeWithGradient(final boolean STROKE_WITH_GRADIENT) {
        if (null == strokeWithGradient) {
            _strokeWithGradient = STROKE_WITH_GRADIENT;
            fireTileEvent(REDRAW_EVENT);
        } else {
            strokeWithGradient.set(STROKE_WITH_GRADIENT);
        }
    }
    /** @return the lazily created strokeWithGradient property, seeded from the backing field */
    public BooleanProperty strokeWithGradientProperty() {
        if (null == strokeWithGradient) {
            strokeWithGradient = new BooleanPropertyBase(_strokeWithGradient) {
                @Override protected void invalidated() { fireTileEvent(REDRAW_EVENT); }
                @Override public Object getBean() { return Tile.this; }
                @Override public String getName() { return "strokeWithGradient"; }
            };
        }
        return strokeWithGradient;
    }
    /** @return the DarkSky weather data source used by the weather skin */
    public DarkSky getDarkSky() { return darkSky; }
    /**
     * Defines the DarkSky weather data source and triggers a redraw.
     * @param DARK_SKY the weather data source
     */
    public void setDarkSky(final DarkSky DARK_SKY) {
        darkSky = DARK_SKY;
        fireTileEvent(REDRAW_EVENT);
    }
    /**
     * Refreshes the weather data and redraws on success. No-op when no
     * DarkSky source is set.
     * @throws IllegalArgumentException if the update fails (presumably an invalid API key)
     */
    public void updateWeather() {
        if (null == darkSky) return;
        if (darkSky.update()) {
            fireTileEvent(REDRAW_EVENT);
        } else {
            //System.out.println("Wrong or missing DarkSky API key");
            throw new IllegalArgumentException("Do you use a valid DarkSKY API key?");
        }
    }
private Properties readProperties(final String FILE_NAME) {
final ClassLoader LOADER = Thread.currentThread().getContextClassLoader();
final Properties PROPERTIES = new Properties();
try(InputStream resourceStream = LOADER.getResourceAsStream(FILE_NAME)) {
PROPERTIES.load(resourceStream);
} catch (IOException exception) {
exception.printStackTrace();
}
return PROPERTIES;
}
    /**
     * Calling this method will check the current time against all Alarm
     * objects in alarms. The Alarm object will fire events in case the
     * time is after the alarm time.
     * @param TIME the current time to compare against each alarm's time
     */
    private void checkAlarms(final ZonedDateTime TIME) {
        // Collect ONCE alarms that fired so they can be removed after the
        // loop; removing inside the loop would break the iteration.
        alarmsToRemove.clear();
        for (Alarm alarm : alarms) {
            final ZonedDateTime ALARM_TIME = alarm.getTime();
            switch (alarm.getRepetition()) {
                case ONCE:
                    // Fires once as soon as TIME passes the alarm time, then is removed.
                    if (TIME.isAfter(ALARM_TIME)) {
                        if (alarm.isArmed()) {
                            fireAlarmEvent(new AlarmEvent(alarm));
                            alarm.executeCommand();
                        }
                        alarmsToRemove.add(alarm);
                    }
                    break;
                case HALF_HOURLY:
                    // Matches the alarm's minute and the same minute shifted by 30.
                    if ((ALARM_TIME.getMinute() == TIME.getMinute() ||
                         ALARM_TIME.plusMinutes(30).getMinute() == TIME.getMinute()) &&
                        ALARM_TIME.getSecond() == TIME.getSecond()) {
                        if (alarm.isArmed()) {
                            fireAlarmEvent(new AlarmEvent(alarm));
                            alarm.executeCommand();
                        }
                    }
                    break;
                case HOURLY:
                    // Fires whenever minute and second match, i.e. once per hour.
                    if (ALARM_TIME.getMinute() == TIME.getMinute() &&
                        ALARM_TIME.getSecond() == TIME.getSecond()) {
                        if (alarm.isArmed()) {
                            fireAlarmEvent(new AlarmEvent(alarm));
                            alarm.executeCommand();
                        }
                    }
                    break;
                case DAILY:
                    // Fires whenever hour, minute and second match, i.e. once per day.
                    if (ALARM_TIME.getHour()   == TIME.getHour() &&
                        ALARM_TIME.getMinute() == TIME.getMinute() &&
                        ALARM_TIME.getSecond() == TIME.getSecond()) {
                        if (alarm.isArmed()) {
                            fireAlarmEvent(new AlarmEvent(alarm));
                            alarm.executeCommand();
                        }
                    }
                    break;
                case WEEKLY:
                    // Fires whenever day of week, hour, minute and second match.
                    if (ALARM_TIME.getDayOfWeek() == TIME.getDayOfWeek() &&
                        ALARM_TIME.getHour()      == TIME.getHour() &&
                        ALARM_TIME.getMinute()    == TIME.getMinute() &&
                        ALARM_TIME.getSecond()    == TIME.getSecond()) {
                        if (alarm.isArmed()) {
                            fireAlarmEvent(new AlarmEvent(alarm));
                            alarm.executeCommand();
                        }
                    }
                    break;
            }
        }
        for (Alarm alarm : alarmsToRemove) {
            removeAlarm(alarm);
        }
    }
private void tick() { Platform.runLater(() -> {
if (isAnimated()) return;
ZonedDateTime oldTime = getTime();
setTime(getTime().plus(java.time.Duration.ofMillis(updateInterval)));
ZonedDateTime now = time.get();
if (isAlarmsEnabled()) checkAlarms(now);
if (getCheckSectionsForValue()) {
int listSize = timeSections.size();
for (TimeSection timeSection : timeSections) { timeSection.checkForTimeAndDate(now); }
}
if (timeEventListenerList.isEmpty()) return;
// Fire TimeEvents
if (oldTime.getSecond() != now.getSecond()) fireTimeEvent(new TimeEvent(Tile.this, now, TimeEventType.SECOND));
if (oldTime.getMinute() != now.getMinute()) fireTimeEvent(new TimeEvent(Tile.this, now, TimeEventType.MINUTE));
if (oldTime.getHour() != now.getHour()) fireTimeEvent(new TimeEvent(Tile.this, now, TimeEventType.HOUR));
}); }
    // ******************** Scheduled tasks ***********************************
    /** Lazily creates the single-threaded, daemon-backed tick executor. */
    private synchronized void enableTickExecutorService() {
        if (null == periodicTickExecutorService) {
            periodicTickExecutorService = new ScheduledThreadPoolExecutor(1, getThreadFactory("TileTick", true));
        }
    }
    /**
     * (Re)schedules the periodic tick task. The interval is long when both
     * seconds and minutes tick discretely, short otherwise for smooth motion.
     */
    private synchronized void scheduleTickTask() {
        enableTickExecutorService();
        stopTask(periodicTickTask); // cancel any previous schedule before creating a new one
        updateInterval = (isDiscreteMinutes() && isDiscreteSeconds()) ? LONG_INTERVAL : SHORT_INTERVAL;
        periodicTickTask = periodicTickExecutorService.scheduleAtFixedRate(() -> tick(), 0, updateInterval, TimeUnit.MILLISECONDS);
    }
private static ThreadFactory getThreadFactory(final String THREAD_NAME, final boolean IS_DAEMON) {
return runnable -> {
Thread thread = new Thread(runnable, THREAD_NAME);
thread.setDaemon(IS_DAEMON);
return thread;
};
}
private void stopTask(ScheduledFuture<?> task) {
if (null == task) return;
task.cancel(true);
task = null;
}
/**
* Calling this method will stop all threads. This is needed when using
* JavaFX on mobile devices when the device goes to sleep mode.
*/
public void stop() {
if (null != periodicTickTask) { stopTask(periodicTickTask); }
if (null != periodicTickExecutorService) { periodicTickExecutorService.shutdownNow(); }
}
private void createShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread(() -> stop())); }
// ******************** Event handling ************************************
public void setOnTileEvent(final TileEventListener LISTENER) { addTileEventListener(LISTENER); }
public void addTileEventListener(final TileEventListener LISTENER) { if (!listenerList.contains(LISTENER)) listenerList.add(LISTENER); }
public void removeTileEventListener(final TileEventListener LISTENER) { if (listenerList.contains(LISTENER)) listenerList.remove(LISTENER); }
public void fireTileEvent(final TileEvent EVENT) {
for (TileEventListener listener : listenerList) { listener.onTileEvent(EVENT); }
}
public void setOnAlarm(final AlarmEventListener LISTENER) { addAlarmEventListener(LISTENER); }
public void addAlarmEventListener(final AlarmEventListener LISTENER) { if (!alarmListenerList.contains(LISTENER)) alarmListenerList.add(LISTENER); }
public void removeAlarmEventListener(final AlarmEventListener LISTENER) { if (alarmListenerList.contains(LISTENER)) alarmListenerList.remove(LISTENER); }
public void fireAlarmEvent(final AlarmEvent EVENT) {
for (AlarmEventListener listener : alarmListenerList) { listener.onAlarmEvent(EVENT); }
}
public void setOnTimeEvent(final TimeEventListener LISTENER) { addTimeEventListener(LISTENER); }
public void addTimeEventListener(final TimeEventListener LISTENER) { if (!timeEventListenerList.contains(LISTENER)) timeEventListenerList.add(LISTENER); }
public void removeTimeEventListener(final TimeEventListener LISTENER) { if (timeEventListenerList.contains(LISTENER)) timeEventListenerList.remove(LISTENER); }
public void fireTimeEvent(final TimeEvent EVENT) {
for (TimeEventListener listener : timeEventListenerList) { listener.onTimeEvent(EVENT); }
}
public void setOnSwitchPressed(final EventHandler<SwitchEvent> HANDLER) { addEventHandler(SwitchEvent.SWITCH_PRESSED, HANDLER); }
public void removeOnSwitchPressed(final EventHandler<SwitchEvent> HANDLER) { removeEventHandler(SwitchEvent.SWITCH_PRESSED, HANDLER); }
public void setOnSwitchReleased(final EventHandler<SwitchEvent> HANDLER) { addEventHandler(SwitchEvent.SWITCH_RELEASED, HANDLER); }
public void removeOnSwitchReleased(final EventHandler<SwitchEvent> HANDLER) { removeEventHandler(SwitchEvent.SWITCH_RELEASED, HANDLER); }
// ******************** Style related *************************************
// Called by JavaFX when the control first needs a skin: returns the skin
// implementation matching the configured skinType, falling back to the
// generic TileSkin for any type without a dedicated skin.
@Override protected Skin createDefaultSkin() {
    switch (skinType) {
        case AREA_CHART : return new AreaChartTileSkin(Tile.this);
        case BAR_CHART : return new BarChartTileSkin(Tile.this);
        case LINE_CHART : return new LineChartTileSkin(Tile.this);
        case CLOCK : return new ClockTileSkin(Tile.this);
        case GAUGE : return new GaugeTileSkin(Tile.this);
        case HIGH_LOW : return new HighLowTileSkin(Tile.this);
        case PERCENTAGE : return new PercentageTileSkin(Tile.this);
        case PLUS_MINUS : return new PlusMinusTileSkin(Tile.this);
        case SLIDER : return new SliderTileSkin(Tile.this);
        case SPARK_LINE : return new SparkLineTileSkin(Tile.this);
        case SWITCH : return new SwitchTileSkin(Tile.this);
        case WORLDMAP : return new WorldMapTileSkin(Tile.this);
        case TIMER_CONTROL : return new TimerControlTileSkin(Tile.this);
        case NUMBER : return new NumberTileSkin(Tile.this);
        case TEXT : return new TextTileSkin(Tile.this);
        case WEATHER : return new WeatherTileSkin(Tile.this);
        case TIME : return new TimeTileSkin(Tile.this);
        case CUSTOM : return new CustomTileSkin(Tile.this);
        case LEADER_BOARD : return new LeaderBoardTileSkin(Tile.this);
        case MAP : return new MapTileSkin(Tile.this);
        case RADIAL_CHART : return new RadialChartTileSkin(Tile.this);
        case DONUT_CHART : return new DonutChartTileSkin(Tile.this);
        case CIRCULAR_PROGRESS: return new CircularProgressTileSkin(Tile.this);
        case STOCK : return new StockTileSkin(Tile.this);
        case GAUGE_SPARK_LINE : return new GaugeSparkLineTileSkin(Tile.this);
        case SMOOTH_AREA_CHART: return new SmoothAreaChartTileSkin(Tile.this);
        case RADAR_CHART : return new RadarChartTileSkin(Tile.this);
        case COUNTRY : return new CountryTileSkin(Tile.this);
        default : return new TileSkin(Tile.this);
    }
}
@Override public String getUserAgentStylesheet() { return getClass().getResource("tilesfx.css").toExternalForm(); }
/**
 * Applies the skin specific default settings right after a skin type has
 * been selected. Skin types that need no special defaults share the empty
 * default branch, which leaves the common tile defaults untouched.
 *
 * @param SKIN_TYPE the skin type whose presets should be applied
 */
private void presetTileParameters(final SkinType SKIN_TYPE) {
    switch (SKIN_TYPE) {
        case GAUGE:
            setAnimated(true);
            setTickLabelDecimals(0);
            setBarColor(FOREGROUND);
            setThresholdColor(Tile.BLUE);
            setThresholdVisible(true);
            break;
        case HIGH_LOW:
            setMaxValue(Double.MAX_VALUE);
            setDecimals(2);
            setTickLabelDecimals(1);
            break;
        case PERCENTAGE:
            setAnimated(true);
            setThresholdColor(GRAY);
            setTickLabelDecimals(0);
            break;
        case SLIDER:
            setBarBackgroundColor(Tile.FOREGROUND);
            break;
        case SPARK_LINE:
            setTextVisible(false);
            setAnimated(false);
            setAveragingEnabled(true);
            setAveragingPeriod(10);
            setDecimals(0);
            setTickLabelDecimals(0);
            break;
        case WORLDMAP:
            setPrefSize(380, 250);
            break;
        case TIMER_CONTROL:
            setSectionsVisible(true);
            setHighlightSections(true);
            setCheckSectionsForValue(true);
            break;
        case RADIAL_CHART:   // fall through: both chart types only enable animation
        case DONUT_CHART:
            setAnimated(true);
            break;
        case CIRCULAR_PROGRESS:
            setBarBackgroundColor(getBackgroundColor().brighter());
            setAnimated(true);
            break;
        case STOCK:
            setAnimated(false);
            setAveragingPeriod(720);
            setAveragingEnabled(true);
            setDecimals(2);
            setTickLabelDecimals(2);
            setThresholdColor(GRAY);
            setTextVisible(false);
            break;
        case GAUGE_SPARK_LINE:
            setBarColor(Tile.BLUE);
            setAngleRange(270);
            break;
        default:
            // AREA_CHART, BAR_CHART, LINE_CHART, CLOCK, PLUS_MINUS, SWITCH,
            // NUMBER, TEXT, WEATHER, TIME, CUSTOM, LEADER_BOARD, MAP,
            // SMOOTH_AREA_CHART, RADAR_CHART, COUNTRY: no preset adjustments.
            break;
    }
}
public SkinType getSkinType() { return skinType; }
/**
 * Switches this tile to the given skin type: installs the matching skin,
 * fires a resize event and finally applies the type specific preset
 * parameters via presetTileParameters().
 *
 * @param SKIN_TYPE the skin type to activate
 */
public void setSkinType(final SkinType SKIN_TYPE) {
    skinType = SKIN_TYPE;
    switch (SKIN_TYPE) {
        case AREA_CHART : setSkin(new AreaChartTileSkin(Tile.this)); break;
        case BAR_CHART : setSkin(new BarChartTileSkin(Tile.this)); break;
        case LINE_CHART : setSkin(new LineChartTileSkin(Tile.this)); break;
        case CLOCK : setSkin(new ClockTileSkin(Tile.this)); break;
        case GAUGE : setSkin(new GaugeTileSkin(Tile.this)); break;
        case HIGH_LOW : setSkin(new HighLowTileSkin(Tile.this)); break;
        case PERCENTAGE : setSkin(new PercentageTileSkin(Tile.this)); break;
        case PLUS_MINUS : setSkin(new PlusMinusTileSkin(Tile.this)); break;
        case SLIDER : setSkin(new SliderTileSkin(Tile.this)); break;
        case SPARK_LINE : setSkin(new SparkLineTileSkin(Tile.this)); break;
        case SWITCH : setSkin(new SwitchTileSkin(Tile.this)); break;
        case WORLDMAP : setSkin(new WorldMapTileSkin(Tile.this)); break;
        case TIMER_CONTROL : setSkin(new TimerControlTileSkin(Tile.this)); break;
        case NUMBER : setSkin(new NumberTileSkin(Tile.this)); break;
        case TEXT : setSkin(new TextTileSkin(Tile.this)); break;
        case WEATHER : setSkin(new WeatherTileSkin(Tile.this)); break;
        case TIME : setSkin(new TimeTileSkin(Tile.this)); break;
        case CUSTOM : setSkin(new CustomTileSkin(Tile.this)); break;
        case LEADER_BOARD : setSkin(new LeaderBoardTileSkin(Tile.this)); break;
        // FIX: the MAP case was missing here although createDefaultSkin()
        // supports it, so setSkinType(MAP) silently installed the generic
        // TileSkin instead of the map skin.
        case MAP : setSkin(new MapTileSkin(Tile.this)); break;
        case RADIAL_CHART : setSkin(new RadialChartTileSkin(Tile.this)); break;
        case DONUT_CHART : setSkin(new DonutChartTileSkin(Tile.this)); break;
        case CIRCULAR_PROGRESS: setSkin(new CircularProgressTileSkin(Tile.this)); break;
        case STOCK : setSkin(new StockTileSkin(Tile.this)); break;
        case GAUGE_SPARK_LINE : setSkin(new GaugeSparkLineTileSkin(Tile.this)); break;
        case SMOOTH_AREA_CHART: setSkin(new SmoothAreaChartTileSkin(Tile.this)); break;
        case RADAR_CHART : setSkin(new RadarChartTileSkin(Tile.this)); break;
        case COUNTRY : setSkin(new CountryTileSkin(Tile.this)); break;
        default : setSkin(new TileSkin(Tile.this)); break;
    }
    fireTileEvent(RESIZE_EVENT);
    presetTileParameters(SKIN_TYPE);
}
}
| apache-2.0 |
zhangkewei/sky-walking | apm-collector/apm-collector-storage/collector-storage-define/src/main/java/org/skywalking/apm/collector/storage/base/dao/IBatchDAO.java | 914 | /*
* Copyright 2017, OpenSkywalking Organization All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Project repository: https://github.com/OpenSkywalking/skywalking
*/
package org.skywalking.apm.collector.storage.base.dao;
import java.util.List;
/**
 * DAO contract for persisting a collection of records in one storage
 * round trip instead of one write per record.
 *
 * @author peng-yongsheng
 */
public interface IBatchDAO extends DAO {
    /**
     * Persists all entries of the given collection in a single batch
     * operation. Elements are storage specific insert/update objects.
     *
     * @param batchCollection the prepared batch entries to flush
     */
    void batchPersistence(List<?> batchCollection);
}
| apache-2.0 |
googleinterns/step100-2020 | capstone/src/test/java/com/google/sps/objects/OptionTest.java | 2686 | package com.google.sps.objects;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.sps.Objects.Option;
/**
* Unit tests for {@link Option}.
*
* @author lucyqu
*/
public class OptionTest {

    private final LocalServiceTestHelper helper =
        new LocalServiceTestHelper(
            new LocalDatastoreServiceTestConfig()
                .setDefaultHighRepJobPolicyUnappliedJobPercentage(0));

    private Option option;
    private final String OPTION_NAME = "Swim";

    @Before
    public void setUp() {
        helper.setUp();
        option = new Option(100, OPTION_NAME, new ArrayList<String>());
    }

    @After
    public void tearDown() {
        helper.tearDown();
    }

    // NOTE: JUnit's assertEquals takes (expected, actual). The original tests
    // passed the arguments swapped, which produced misleading failure
    // messages; all assertions below use the correct order.

    @Test
    public void getTextTest() {
        assertEquals(OPTION_NAME, option.getText());
    }

    @Test
    public void addVoteGetVotesTest() {
        option.addVote("1");
        option.addVote("2");
        assertEquals(2, option.getVotes().size());
        assertEquals("1", option.getVotes().get(0));
        assertEquals("2", option.getVotes().get(1));
    }

    @Test
    public void getIdTest() {
        assertEquals(100, option.getId());
    }

    @Test
    public void toEntityTest() {
        Entity entity = option.toEntity();
        DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
        datastore.put(entity);
        ArrayList<String> votes = (ArrayList<String>) entity.getProperty("votes");
        assertEquals(OPTION_NAME, entity.getProperty("text"));
        assertEquals(0, votes.size());
    }

    @Test
    public void fromEntityTest() {
        Entity entity = new Entity("Option");
        entity.setProperty("text", OPTION_NAME);
        List<String> votes = new ArrayList<String>();
        votes.add("1");
        votes.add("2");
        votes.add("3");
        entity.setProperty("votes", votes);
        DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
        datastore.put(entity);
        Option returnedOption = Option.fromEntity(entity);
        List<String> returnedVotes = (ArrayList<String>) entity.getProperty("votes");
        assertEquals(OPTION_NAME, returnedOption.getText());
        assertEquals(3, returnedVotes.size());
        assertEquals("1", returnedVotes.get(0));
        assertEquals("2", returnedVotes.get(1));
        assertEquals("3", returnedVotes.get(2));
    }
}
| apache-2.0 |
sheetanchor/mblog | mblog-wechat/src/main/java/mblog/wechat/service/CoreService.java | 3482 | package mblog.wechat.service;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import mblog.wechat.entity.autoReply.MessageResponse;
import mblog.wechat.utill.MessageUtil;
import mblog.wechat.utill.TulingApiProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CoreService {

    static final Logger logger = LoggerFactory.getLogger(CoreService.class);

    /**
     * Handles an incoming WeChat request: parses the XML payload, dispatches
     * on the message type (text, event, voice) and returns the XML reply
     * message, or null if the request could not be handled.
     *
     * @param request the servlet request carrying the WeChat XML payload
     * @return the XML response message, or null on failure / unhandled input
     */
    public static String processRequest(HttpServletRequest request) {
        String respMessage = null;
        try {
            // Default reply used when the request cannot be processed.
            String respContent = "请求处理异常,请稍候尝试!";
            // Parse the incoming XML message into a key/value map.
            Map<String, String> requestMap = MessageUtil.parseXml(request);
            // Sender account (open_id)
            String fromUserName = requestMap.get("FromUserName");
            // Official (public) account
            String toUserName = requestMap.get("ToUserName");
            // Message type
            String msgType = requestMap.get("MsgType");
            // Message content
            String content = requestMap.get("Content");
            logger.info("fromUserName is:" + fromUserName + " toUserName is:" + toUserName + " msgType is:" + msgType);
            if (msgType.equals(MessageUtil.REQ_MESSAGE_TYPE_TEXT)) {
                // Text message: forward the content to the Tuling chat bot.
                if (content != null) {
                    respContent = TulingApiProcess.getTulingResult(content);
                    // FIX: the original compared the String with == ("" check
                    // never matched) and discarded the fallback reply instead
                    // of returning it.
                    if (respContent == null || respContent.isEmpty()) {
                        return MessageResponse.getTextMessage(fromUserName, toUserName, "服务号暂时无法回复,请稍后再试!");
                    }
                    return MessageResponse.getTextMessage(fromUserName, toUserName, respContent);
                }
            } else if (msgType.equals(MessageUtil.REQ_MESSAGE_TYPE_EVENT)) { // event push
                String eventType = requestMap.get("Event"); // event type
                if (eventType.equals(MessageUtil.EVENT_TYPE_SUBSCRIBE)) { // subscribe
                    respContent = "大兄弟,上天不?!";
                    return MessageResponse.getTextMessage(fromUserName, toUserName, respContent);
                } else if (eventType.equals(MessageUtil.EVENT_TYPE_UNSUBSCRIBE)) {
                    // TODO after unsubscribing the user no longer receives
                    // messages from the account, so no reply is needed.
                } else if (eventType.equals(MessageUtil.EVENT_TYPE_CLICK)) { // custom menu click
                    // EventKey matches the KEY configured when the custom menu was created.
                    String eventKey = requestMap.get("EventKey");
                    logger.info("eventKey is:" + eventKey);
                    return MenuClickService.getClickResponse(eventKey, fromUserName, toUserName);
                }
            } else if (msgType.equals("voice")) { // voice recognition
                String recvMessage = requestMap.get("Recognition");
                if (recvMessage != null) {
                    respContent = TulingApiProcess.getTulingResult(recvMessage);
                } else {
                    respContent = "您说的太模糊了,能不能重新说下呢?";
                }
                return MessageResponse.getTextMessage(fromUserName, toUserName, respContent);
            } else {
                return MessageResponse.getTextMessage(fromUserName, toUserName, "返回为空");
            }
        } catch (Exception e) {
            // Log through the class logger instead of printStackTrace().
            logger.error("failed to process WeChat request", e);
        }
        return respMessage;
    }
}
| apache-2.0 |
pedrovgs/Kuronometer | kuronometer-android-consumer/kurono-android-lib-module/src/main/java/kuronometerandroidconsumer/pedrovgs/github/com/kurono_android_lib_module/KuronometerModule.java | 230 | package kuronometerandroidconsumer.pedrovgs.github.com.kurono_android_lib_module;
/**
* Created by pedro on 1/5/17.
*/
/**
 * Placeholder module class for the Kuronometer Android consumer sample.
 */
public class KuronometerModule {

    /** No-op placeholder method. */
    public void foo() {
        // Intentionally empty.
    }

    /** @return an empty string placeholder value */
    public String bar() {
        final String value = "";
        return value;
    }
}
| apache-2.0 |
liufeiit/swan | LibRaistCommon/src/main/java/org/raist/common/permutation/Permutation.java | 7352 | /*
* Copyright 2013 Lei CHEN (raistlic@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.raist.common.permutation;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
* This class is to fulfill the needs of getting permutations from a
* collection.
* <p/>
* It basically provides the functionalities of :
* 1 - enquiry the number of permutation count : p(m, n)
* 2 - given an ordinal number i, fetch the i-th permutation result
* as a read-only list.
* 3 - convenient for-each iteration of all the permutations
* <p/>
* This class is NOT thread safe.
* <p/>
* This class re-uses one array to fetch each enquiry, so if the user
* want to keep the i-th permutation result, make a copy.
*
* @author Lei.C
*/
public class Permutation<E> implements Iterable<List<E>> {

    /**
     * Strategy for enumerating the permutations of an element array.
     */
    public static interface Algorithm {

        /** Returns the largest number of elements this algorithm supports. */
        public int getMaxSupportedSize();

        /**
         * Returns the number of permutations (n!) of {@code numberOfElements}
         * elements.
         */
        public BigInteger getPermutationCount(int numberOfElements);

        /**
         * Rearranges {@code elements} in place into the permutation
         * identified by the given ordinal number.
         */
        public void fetchPermutation(Object[] elements, BigInteger ordinal);
    }

    /** Default permutation algorithm (factorial number system based). */
    public static final Algorithm DEFAULT_ALGORITHM = DefaultAlgorithm.INSTANCE;

    /** Permutations of all elements of the collection, default algorithms. */
    public static <E> Permutation<E> of(Collection<E> elements) {
        return of(elements, elements.size());
    }

    /** Permutations picking {@code numberToPick} elements, default algorithms. */
    public static <E> Permutation<E> of(Collection<E> elements,
                                        int numberToPick) {
        return of(elements, numberToPick, DEFAULT_ALGORITHM);
    }

    /** Permutations of all elements using the given permutation algorithm. */
    public static <E> Permutation<E> of(Collection<E> elements,
                                        Algorithm pAlgorithm) {
        return of(elements, elements.size(), pAlgorithm);
    }

    /** Permutations of {@code numberToPick} elements, default combination algorithm. */
    public static <E> Permutation<E> of(Collection<E> elements,
                                        int numberToPick,
                                        Algorithm pAlgorithm) {
        return of(elements, numberToPick, pAlgorithm, Combination.DEFAULT_ALGORITHM);
    }

    /**
     * Creates a Permutation over {@code elements}, picking
     * {@code numberToPick} of them, with explicit permutation and
     * combination algorithms.
     *
     * @throws NullPointerException if elements or pAlgorithm is null
     * @throws IllegalArgumentException if the collection is larger than the
     *         permutation algorithm supports
     */
    public static <E> Permutation<E> of(Collection<E> elements,
                                        int numberToPick,
                                        Permutation.Algorithm pAlgorithm,
                                        Combination.Algorithm cAlgorithm) {
        if (elements == null)
            throw new NullPointerException();
        if (pAlgorithm == null)
            throw new NullPointerException();
        if (elements.size() > pAlgorithm.getMaxSupportedSize())
            throw new IllegalArgumentException(
                "Element collection size not supported by the permutation algorithm.");
        return new Permutation<E>(elements, numberToPick, pAlgorithm, cAlgorithm);
    }

    // Snapshot of the source elements, and the reused buffer for results.
    private E[] elements, picked;
    private Algorithm pAlgorithm;
    private Combination.Algorithm cAlgorithm;
    // cCount = C(n, k), pCount = k!, count = cCount * pCount = P(n, k).
    private BigInteger cCount, pCount;
    private BigInteger count;
    private int numberToPick;

    @SuppressWarnings("unchecked")
    private Permutation(Collection<E> elements,
                        int numberToPick,
                        Algorithm pAlgorithm,
                        Combination.Algorithm cAlgorithm) {
        assert elements != null;
        assert pAlgorithm != null;
        assert cAlgorithm != null;
        assert numberToPick >= 0;
        assert numberToPick <= elements.size();
        this.elements = (E[]) elements.toArray();
        this.picked = (E[]) new Object[numberToPick];
        this.pAlgorithm = pAlgorithm;
        this.cAlgorithm = cAlgorithm;
        this.numberToPick = numberToPick;
        this.cCount = this.cAlgorithm.getCombinationCount(this.elements.length,
            this.numberToPick);
        this.pCount = this.pAlgorithm.getPermutationCount(this.numberToPick);
        this.count = this.cCount.multiply(this.pCount);
    }

    /** Returns the total number of permutations P(n, k). */
    public BigInteger getPermutationCount() {
        return count;
    }

    /**
     * Returns the {@code ordinal}-th permutation as a read-only list.
     * NOTE: the returned list is a view over an internal buffer that is
     * overwritten by the next call — copy it if you need to keep it.
     *
     * @throws NullPointerException if ordinal is null
     * @throws IllegalArgumentException if ordinal is outside [0, count)
     */
    public List<E> getPermutation(BigInteger ordinal) {
        if (ordinal == null)
            throw new NullPointerException();
        if (ordinal.compareTo(BigInteger.ZERO) < 0 || ordinal.compareTo(count) >= 0)
            throw new IllegalArgumentException(
                "Ordinal value out of range : " + ordinal);
        if (numberToPick == elements.length) {
            // Picking all elements: no combination step needed.
            System.arraycopy(elements, 0, picked, 0, elements.length);
        }
        else {
            // First select which elements participate (combination part),
            // then permute them with the remainder of the ordinal.
            cAlgorithm.fetchCombination(elements, picked, ordinal.divide(pCount));
            ordinal = ordinal.mod(pCount);
        }
        pAlgorithm.fetchPermutation(picked, ordinal);
        return Arrays.asList(picked);
    }

    @Override
    public Iterator<List<E>> iterator() {
        return this.new OrdinalIterator();
    }

    /** Iterates all permutations in ordinal order, 0 .. count-1. */
    private class OrdinalIterator implements Iterator<List<E>> {

        private BigInteger ordinal;

        private OrdinalIterator() {
            this.ordinal = BigInteger.ZERO;
        }

        @Override
        public boolean hasNext() {
            return ordinal.compareTo(count) < 0;
        }

        @Override
        public List<E> next() {
            List<E> result = getPermutation(ordinal); // throws IllegalArgumentException when exhausted
            ordinal = ordinal.add(BigInteger.ONE);
            return result;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /** Default algorithm: decodes the ordinal in the factorial number system. */
    private static enum DefaultAlgorithm implements Algorithm {

        INSTANCE;

        @Override
        public int getMaxSupportedSize() {
            return MAX_SUPPORT;
        }

        @Override
        public BigInteger getPermutationCount(int numberOfElements) {
            if (numberOfElements < 0)
                throw new IllegalArgumentException(
                    "Invalid number of elements : " + numberOfElements);
            if (numberOfElements > getMaxSupportedSize())
                throw new IllegalArgumentException(
                    "Number of elements out of range : " + numberOfElements);
            return Factorial.of(numberOfElements);
        }

        @Override
        public void fetchPermutation(Object[] elements, BigInteger ordinal) {
            if (elements == null)
                throw new NullPointerException("elements array is null.");
            if (ordinal == null)
                throw new NullPointerException("ordinal number is null.");
            if (ordinal.compareTo(BigInteger.ZERO) < 0)
                throw new IllegalArgumentException(
                    "ordinal number out of range: " + ordinal);
            if (ordinal.compareTo(getPermutationCount(elements.length)) >= 0)
                throw new IllegalArgumentException(
                    "ordinal number out of range: " +
                    ordinal + " / " + getPermutationCount(elements.length));
            // For each position, the factorial-base digit of the ordinal
            // selects which of the remaining elements comes next; the chosen
            // element is rotated to the front of the remaining range.
            for (int i = 0; i < elements.length - 1; i++) {
                int left = elements.length - i - 1;
                BigInteger leftCount = Factorial.of(left);
                int curr = ordinal.divide(leftCount).intValue();
                ordinal = ordinal.mod(leftCount);
                if (curr > 0) {
                    Object temp = elements[curr + i];
                    for (int j = curr + i; j > i; j--)
                        elements[j] = elements[j - 1];
                    elements[i] = temp;
                }
            }
        }

        private static final int MAX_SUPPORT = 1024;
    }
}
| apache-2.0 |
goru97/blueflood | blueflood-core/src/main/java/com/rackspacecloud/blueflood/io/Constants.java | 2782 | /*
* Copyright 2013 Rackspace
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspacecloud.blueflood.io;
import com.rackspacecloud.blueflood.utils.MetricHelper;
import com.rackspacecloud.blueflood.utils.TimeValue;
import org.jboss.netty.util.CharsetUtil;
import java.nio.charset.Charset;
import java.util.concurrent.TimeUnit;
public class Constants {

    // Offset of the serialization version byte within a serialized blob.
    public static final int VERSION_FIELD_OFFSET = 0;

    // Serialization version numbers for the various rollup types.
    public static final byte VERSION_1_FULL_RES = 0;
    public static final byte VERSION_1_ROLLUP = 0;
    public static final byte VERSION_1_TIMER = 0;
    public static final byte VERSION_2_TIMER = 1;
    public static final byte VERSION_1_COUNTER_ROLLUP = 0;
    public static final byte VERSION_1_SET_ROLLUP = VERSION_1_ROLLUP; // don't change this.
    public static final byte VERSION_1_ENUM_ROLLUP = 0;

    // Metric value type markers, mirrored from MetricHelper.Type.
    public static final int DOUBLE = (int) MetricHelper.Type.DOUBLE;
    public static final int I32 = (int) MetricHelper.Type.INT32;
    public static final int I64 = (int) MetricHelper.Type.INT64;
    public static final int STR = (int) MetricHelper.Type.STRING;

    // The same type markers as single bytes.
    public static final byte B_DOUBLE = (byte)DOUBLE;
    public static final byte B_I32 = (byte)I32;
    public static final byte B_I64 = (byte)I64;
    public static final byte B_STR = (byte)STR;

    // Identifiers of the basic rollup statistics.
    public static final byte AVERAGE = 0;
    public static final byte VARIANCE = 1;
    public static final byte MIN = 2;
    public static final byte MAX = 3;

    public static final TimeValue STRING_SAFETY_TTL = new TimeValue(365, TimeUnit.DAYS);
    public static final int NUMBER_OF_SHARDS = 128;
    public static final int DEFAULT_SAMPLE_INTERVAL = 30; // seconds.

    // Sanity check (for backwards compatibility with old unversioned
    // serializations): the full-res version byte must never collide with any
    // of the type marker values above.
    static {
        if (VERSION_1_FULL_RES == DOUBLE || VERSION_1_FULL_RES == I32 || VERSION_1_FULL_RES == I64 || VERSION_1_FULL_RES == STR)
            throw new RuntimeException("Invalid FullResSerializer.CUR_VERSION. Please increment until this exception does not happen.");
    }

    public static final Charset DEFAULT_CHARSET = CharsetUtil.UTF_8;

    // Non-instantiable constants holder.
    private Constants() {}
}
| apache-2.0 |
blusechen/venus | venus-backend/src/main/java/com/meidusa/venus/backend/RequestInfo.java | 1592 | /**
*
*/
package com.meidusa.venus.backend;
/**
 * Carries the per-request metadata of a backend invocation: the calling
 * client, its address, the wire protocol with its version, and the accepted
 * serialization type.
 */
public class RequestInfo {

    /** Transport protocols a request may arrive on. */
    public static enum Protocol {
        HTTP, SOCKET
    }

    private int clientId;
    private String remoteIp;
    private String protocolVersion;
    private Protocol protocol;
    private String accept;

    /** @return the id of the calling client */
    public int getClientId() {
        return clientId;
    }

    /** @param clientId the id of the calling client */
    public void setClientId(int clientId) {
        this.clientId = clientId;
    }

    /** @return the remote address the request originated from */
    public String getRemoteIp() {
        return remoteIp;
    }

    /** @param remoteIp the remote address the request originated from */
    public void setRemoteIp(String remoteIp) {
        this.remoteIp = remoteIp;
    }

    /** @return the transport protocol of the request */
    public Protocol getProtocol() {
        return protocol;
    }

    /** @param protocol the transport protocol of the request */
    public void setProtocol(Protocol protocol) {
        this.protocol = protocol;
    }

    /** @return the protocol version string */
    public String getProtocolVersion() {
        return protocolVersion;
    }

    /** @param protocolVersion the protocol version string */
    public void setProtocolVersion(String protocolVersion) {
        this.protocolVersion = protocolVersion;
    }

    /** @return the accepted serialization / content type */
    public String getAccept() {
        return accept;
    }

    /** @param accept the accepted serialization / content type */
    public void setAccept(String accept) {
        this.accept = accept;
    }
}
| apache-2.0 |
Jiemamy/dialect-postgresql | src/test/java/org/jiemamy/composer/exporter/SqlExporterPostgreSqlTest.java | 4775 | /*
* Copyright 2007-2012 Jiemamy Project and the Others.
* Created on 2008/07/12
*
* This file is part of Jiemamy.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.jiemamy.composer.exporter;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jiemamy.JiemamyContext;
import org.jiemamy.JiemamyContextTest;
import org.jiemamy.SimpleJmMetadata;
import org.jiemamy.SqlFacet;
import org.jiemamy.composer.Exporter;
import org.jiemamy.dialect.postgresql.PostgreSqlDialect;
/**
* {@link SqlExporter}のテストクラス。
*
* @author daisuke
*/
public class SqlExporterPostgreSqlTest {

    // Logger constants should be static final.
    private static final Logger logger = LoggerFactory.getLogger(SqlExporterPostgreSqlTest.class);

    /** ${WORKSPACE}/org.jiemamy.composer/target/sqlExporterTest1.sql */
    private static final File OUTPUT_FILE = new File("./target/testresult/SqlExporterPostgreSqlTest-1.sql");

    /** ${WORKSPACE}/org.jiemamy.composer/target/notExists/sqlExporterTest2.sql */
    private static final File OUTPUT_FILE_IN_NOT_EXISTS_DIR = new File(
            "./target/testresult/notExists/SqlExporterPostgreSqlTest-2.sql");

    private static final File NOT_EXISTS_DIR = new File("./target/testresult/notExists");

    /**
     * Initializes the test: makes sure the output directory exists.
     *
     * @throws Exception if initialization fails
     */
    @BeforeClass
    public static void setUp() throws Exception {
        File testOutDir = new File("./target/testresult");
        if (testOutDir.exists() == false) {
            testOutDir.mkdirs();
        }
    }

    /** Exporter under test. */
    private Exporter<SqlExportConfig> exporter = new SqlExporter();

    /**
     * Verifies that a SQL file can be exported from a model.
     *
     * @throws Exception if the test fails unexpectedly
     */
    @Test
    public void test02_() throws Exception {
        JiemamyContext context = JiemamyContextTest.random(SqlFacet.PROVIDER);
        deleteFile(OUTPUT_FILE);
        assertThat(OUTPUT_FILE.exists(), is(false));
        exportAndLog(context, OUTPUT_FILE);
        // UNDONE verify the contents of the exported SQL file
    }

    /**
     * Verifies that the exporter creates missing output directories.
     *
     * @throws Exception if the test fails unexpectedly
     */
    @Test
    public void test03_() throws Exception {
        JiemamyContext context = JiemamyContextTest.random(SqlFacet.PROVIDER);
        FileUtils.deleteDirectory(NOT_EXISTS_DIR);
        assertThat(NOT_EXISTS_DIR.exists(), is(false));
        exportAndLog(context, OUTPUT_FILE_IN_NOT_EXISTS_DIR);
        // UNDONE verify the contents of the exported SQL file
    }

    /**
     * Shared export procedure for both tests: sets the PostgreSQL dialect,
     * exports the context to the given file, asserts the file exists and
     * logs its contents line by line.
     */
    private void exportAndLog(JiemamyContext context, File outputFile) throws Exception {
        SimpleJmMetadata meta = new SimpleJmMetadata();
        meta.setDialectClassName(PostgreSqlDialect.class.getName());
        context.setMetadata(meta);
        SimpleSqlExportConfig config = new SimpleSqlExportConfig();
        config.setOutputFile(outputFile);
        config.setOverwrite(true);
        exporter.exportModel(context, config);
        assertThat(outputFile.exists(), is(true));
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(outputFile));
            String line;
            while ((line = reader.readLine()) != null) {
                logger.info(line);
            }
        } finally {
            IOUtils.closeQuietly(reader);
        }
    }

    // Deletes the file if present; fails the test if deletion is refused.
    private void deleteFile(File file) {
        if (file.exists() == false) {
            return;
        }
        if (file.delete() == false) {
            fail("Cannot delete file: " + file.getPath());
        }
    }
}
| apache-2.0 |
utgenome/utgb | utgb-core/src/main/java/org/utgenome/gwt/utgb/client/track/RangeSelectable.java | 1673 | /*--------------------------------------------------------------------------
* Copyright 2007 utgenome.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*--------------------------------------------------------------------------*/
//--------------------------------------
// GenomeBrowser Project
//
// RangeSelectable.java
// Since: Jun 13, 2007
//
// $URL$
// $Author$
//--------------------------------------
package org.utgenome.gwt.utgb.client.track;
import org.utgenome.gwt.utgb.client.track.lib.RulerTrack;
import org.utgenome.gwt.utgb.client.track.lib.SequenceRulerTrack;
import org.utgenome.gwt.utgb.client.ui.AbsoluteFocusPanel;
/**
 * An interface for tracks supporting range selection.
 * See also {@link TrackRangeSelector} to enable mouse click range selection
 * on your tracks.
 *
 * {@link SequenceRulerTrack} and {@link RulerTrack} are examples of using
 * the {@link RangeSelectable} interface together with {@link TrackRangeSelector}.
 *
 * @author leo
 */
public interface RangeSelectable
{
    /**
     * Called when the user has selected a range on the track.
     *
     * @param x1OnTrackWindow start x coordinate of the selection, in track window pixels
     * @param x2OnTrackWindow end x coordinate of the selection, in track window pixels
     */
    public void onRangeSelect(int x1OnTrackWindow, int x2OnTrackWindow);

    /**
     * Returns the focus panel on which the range selection is drawn.
     */
    public AbsoluteFocusPanel getAbsoluteFocusPanel();
}
| apache-2.0 |
wavelets/smile | Smile/test/smile/neighbor/BKTreeSpeedTest.java | 3710 | /******************************************************************************
* Confidential Proprietary *
* (c) Copyright Haifeng Li 2011, All Rights Reserved *
******************************************************************************/
package smile.neighbor;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import smile.math.distance.EditDistance;
/**
*
* @author Haifeng Li
*/
public class BKTreeSpeedTest {
List<String> words = new ArrayList<String>();
BKTree<String> bktree;
public BKTreeSpeedTest() {
long start = System.currentTimeMillis();
try {
InputStream stream = this.getClass().getResourceAsStream("/smile/neighbor/index.noun");
BufferedReader input = new BufferedReader(new InputStreamReader(stream));
String line = input.readLine();
while (line != null) {
if (!line.startsWith(" ")) {
String[] w = line.split("\\s");
words.add(w[0].replace('_', ' '));
}
line = input.readLine();
}
} catch (Exception e) {
System.err.println(e);
}
double time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("Loading data: %.2fs\n", time);
String[] data = words.toArray(new String[words.size()]);
start = System.currentTimeMillis();
bktree = new BKTree<String>(new EditDistance(50, true));
bktree.add(data);
time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("Building BK-tree: %.2fs\n", time);
}
@BeforeClass
public static void setUpClass() throws Exception {
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of range method, of class BKTree.
*/
@Test
public void testBKTreeSpeed() {
System.out.println("BK-Tree range 1 speed");
long start = System.currentTimeMillis();
List<Neighbor<String, String>> neighbors = new ArrayList<Neighbor<String, String>>();
for (int i = 1000; i < 1100; i++) {
bktree.range(words.get(i), 1, neighbors);
neighbors.clear();
}
double time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("BK-tree range 1 search: %.2fs\n", time);
start = System.currentTimeMillis();
for (int i = 1000; i < 1100; i++) {
bktree.range(words.get(i), 2, neighbors);
neighbors.clear();
}
time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("BK-tree range 2 search: %.2fs\n", time);
start = System.currentTimeMillis();
for (int i = 1000; i < 1100; i++) {
bktree.range(words.get(i), 3, neighbors);
neighbors.clear();
}
time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("BK-tree range 3 search: %.2fs\n", time);
start = System.currentTimeMillis();
for (int i = 1000; i < 1100; i++) {
bktree.range(words.get(i), 4, neighbors);
neighbors.clear();
}
time = (System.currentTimeMillis() - start) / 1000.0;
System.out.format("BK-tree range 4 search: %.2fs\n", time);
}
} | apache-2.0 |
ashqal/MD360Player4Android | vrlib/src/main/java/com/asha/vrlib/common/MDUtil.java | 607 | package com.asha.vrlib.common;
import android.content.ContentResolver;
import android.content.Context;
import android.content.res.Resources;
import android.net.Uri;
/**
 * Small static helpers for the VR library.
 * Created by hzqiujiadi on 2016/11/1.
 * hzqiujiadi ashqalcn@gmail.com
 */
public class MDUtil {
    /**
     * Builds an {@code android.resource://} Uri pointing at the given
     * drawable (or any other) resource id, resolved against the app's
     * own package.
     */
    public static Uri getDrawableUri(Context context, int resId){
        Resources res = context.getResources();
        String uriString = ContentResolver.SCHEME_ANDROID_RESOURCE
                + "://" + res.getResourcePackageName(resId)
                + '/' + res.getResourceTypeName(resId)
                + '/' + res.getResourceEntryName(resId);
        return Uri.parse(uriString);
    }
}
| apache-2.0 |
artsoftware-io/law-office-nuts | src/main/java/io/artsoftware/organization/text/UncheckedOrgName.java | 1856 | /*-
* #%L
* Law Office
* %%
* Copyright (C) 2017 Art Software
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.artsoftware.organization.text;
import io.artsoftware.text.PlainText;
import io.artsoftware.text.Text;
import lombok.EqualsAndHashCode;
import java.util.stream.IntStream;
/**
 * An {@link OrgName} that performs no validation of its own: every
 * operation — including the {@code valid()}/{@code invalid()} checks —
 * is delegated verbatim to the wrapped {@link Text}.
 */
@EqualsAndHashCode(of = "origin")
public final class UncheckedOrgName implements OrgName {
    private final Text origin;
    public UncheckedOrgName(CharSequence source) {
        this(new PlainText(source));
    }
    public UncheckedOrgName(Text source) {
        this.origin = source;
    }
    @Override
    public boolean valid() {
        return origin.valid();
    }
    @Override
    public boolean invalid() {
        return origin.invalid();
    }
    @Override
    public int length() {
        return origin.length();
    }
    @Override
    public char charAt(int index) {
        return origin.charAt(index);
    }
    @Override
    public CharSequence subSequence(int start, int end) {
        return origin.subSequence(start, end);
    }
    @Override
    public String toString() {
        return origin.toString();
    }
    @Override
    public IntStream chars() {
        return origin.chars();
    }
    @Override
    public IntStream codePoints() {
        return origin.codePoints();
    }
}
| apache-2.0 |
DanishRafique/Advanced_Android_Development | app/src/main/java/com/example/android/sunshine/app/sync/SunshineSyncAdapter.java | 30006 | package com.example.android.sunshine.app.sync;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.annotation.SuppressLint;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.AbstractThreadedSyncAdapter;
import android.content.ContentProviderClient;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SyncRequest;
import android.content.SyncResult;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.IntDef;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;
import android.text.format.Time;
import android.util.Log;
import com.bumptech.glide.Glide;
import com.example.android.sunshine.app.BuildConfig;
import com.example.android.sunshine.app.MainActivity;
import com.example.android.sunshine.app.R;
import com.example.android.sunshine.app.Utility;
import com.example.android.sunshine.app.data.WeatherContract;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Vector;
import java.util.concurrent.ExecutionException;
public class SunshineSyncAdapter extends AbstractThreadedSyncAdapter {
    public final String LOG_TAG = SunshineSyncAdapter.class.getSimpleName();
    // Interval at which to sync with the weather, in seconds.
    // 60 seconds (1 minute) * 180 = 3 hours
    public static final int SYNC_INTERVAL = 60 * 180;
    // Flex window for inexact periodic syncs (KitKat+): one third of the interval.
    public static final int SYNC_FLEXTIME = SYNC_INTERVAL/3;
    // Minimum gap between two weather notifications.
    private static final long DAY_IN_MILLIS = 1000 * 60 * 60 * 24;
    // Stable id so re-posting replaces the previous notification.
    private static final int WEATHER_NOTIFICATION_ID = 3004;
    // Projection used by the notification query; the INDEX_* constants below
    // must match this column order.
    private static final String[] NOTIFY_WEATHER_PROJECTION = new String[] {
            WeatherContract.WeatherEntry.COLUMN_WEATHER_ID,
            WeatherContract.WeatherEntry.COLUMN_MAX_TEMP,
            WeatherContract.WeatherEntry.COLUMN_MIN_TEMP,
            WeatherContract.WeatherEntry.COLUMN_SHORT_DESC
    };
    // these indices must match the projection
    private static final int INDEX_WEATHER_ID = 0;
    private static final int INDEX_MAX_TEMP = 1;
    private static final int INDEX_MIN_TEMP = 2;
    private static final int INDEX_SHORT_DESC = 3;
    // RetentionPolicy.SOURCE: the annotation is a compile-time lint aid only
    // and is discarded by the compiler.
    @Retention(RetentionPolicy.SOURCE)
    // @IntDef restricts annotated ints to the named LOCATION_STATUS_* constants.
    @IntDef({LOCATION_STATUS_OK, LOCATION_STATUS_SERVER_DOWN, LOCATION_STATUS_SERVER_INVALID,LOCATION_STATUS_UNKNOWN,LOCATION_STATUS_INVALID})
    public @interface LocationStatus{}
    public static final int LOCATION_STATUS_OK = 0;
    public static final int LOCATION_STATUS_SERVER_DOWN = 1;
    public static final int LOCATION_STATUS_SERVER_INVALID = 2;
    public static final int LOCATION_STATUS_UNKNOWN = 3;
    public static final int LOCATION_STATUS_INVALID = 4;
    /** Forwards straight to the framework base-class constructor. */
    public SunshineSyncAdapter(Context context, boolean autoInitialize) {
        super(context, autoInitialize);
    }
@Override
public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider, SyncResult syncResult) {
Log.d(LOG_TAG, "Starting sync");
String locationQuery = Utility.getPreferredLocation(getContext());
// These two need to be declared outside the try/catch
// so that they can be closed in the finally block.
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
// Will contain the raw JSON response as a string.
String forecastJsonStr = null;
String format = "json";
String units = "metric";
int numDays = 14;
try {
// Construct the URL for the OpenWeatherMap query
// Possible parameters are avaiable at OWM's forecast API page, at
// http://openweathermap.org/API#forecast
final String FORECAST_BASE_URL =
"http://api.openweathermap.org/data/2.5/forecast/daily?";
final String QUERY_PARAM = "q";
final String FORMAT_PARAM = "mode";
final String UNITS_PARAM = "units";
final String DAYS_PARAM = "cnt";
final String APPID_PARAM = "APPID";
Uri builtUri = Uri.parse(FORECAST_BASE_URL).buildUpon()
.appendQueryParameter(QUERY_PARAM, locationQuery)
.appendQueryParameter(FORMAT_PARAM, format)
.appendQueryParameter(UNITS_PARAM, units)
.appendQueryParameter(DAYS_PARAM, Integer.toString(numDays))
.appendQueryParameter(APPID_PARAM, BuildConfig.OPEN_WEATHER_MAP_API_KEY)
.build();
URL url = new URL(builtUri.toString());
// Create the request to OpenWeatherMap, and open the connection
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
// Read the input stream into a String
InputStream inputStream = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null) {
// Nothing to do.
return;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0) {
// Stream was empty. No point in parsing.
setLocationStatus(getContext(),LOCATION_STATUS_SERVER_DOWN);
return;
}
forecastJsonStr = buffer.toString();
getWeatherDataFromJson(forecastJsonStr, locationQuery);
} catch (IOException e) {
Log.e(LOG_TAG, "Error ", e);
// If the code didn't successfully get the weather data, there's no point in attempting
// to parse it.
setLocationStatus(getContext(),LOCATION_STATUS_SERVER_DOWN);
} catch (JSONException e) {
Log.e(LOG_TAG, e.getMessage(), e);
e.printStackTrace();
setLocationStatus(getContext(),LOCATION_STATUS_SERVER_INVALID);
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
if (reader != null) {
try {
reader.close();
} catch (final IOException e) {
Log.e(LOG_TAG, "Error closing stream", e);
}
}
}
return;
}
    /**
     * Take the String representing the complete forecast in JSON Format and
     * pull out the data we need to construct the Strings needed for the wireframes.
     *
     * Parses the OWM "daily forecast" payload: upserts the location row via
     * {@link #addLocation}, bulk-inserts one weather row per forecast day,
     * prunes rows older than today, fires the daily notification, and records
     * the fetch status (OK / INVALID / SERVER_DOWN / SERVER_INVALID) in
     * shared preferences.
     *
     * NOTE(review): JSONException is caught and handled internally, so the
     * declared {@code throws JSONException} never actually propagates.
     */
    private void getWeatherDataFromJson(String forecastJsonStr,
                                        String locationSetting)
            throws JSONException {
        // These are the names of the JSON objects that need to be extracted.
        // Location information
        final String OWM_CITY = "city";
        final String OWM_CITY_NAME = "name";
        final String OWM_COORD = "coord";
        // Location coordinate
        final String OWM_LATITUDE = "lat";
        final String OWM_LONGITUDE = "lon";
        // Weather information. Each day's forecast info is an element of the "list" array.
        final String OWM_LIST = "list";
        final String OWM_PRESSURE = "pressure";
        final String OWM_HUMIDITY = "humidity";
        final String OWM_WINDSPEED = "speed";
        final String OWM_WIND_DIRECTION = "deg";
        // All temperatures are children of the "temp" object.
        final String OWM_TEMPERATURE = "temp";
        final String OWM_MAX = "max";
        final String OWM_MIN = "min";
        final String OWM_WEATHER = "weather";
        final String OWM_DESCRIPTION = "main";
        final String OWM_WEATHER_ID = "id";
        final String OWM_MESSAGE_CODE ="cod";
        try {
            JSONObject forecastJson = new JSONObject(forecastJsonStr);
            // Server-side error? OWM reports it through the "cod" field.
            if( forecastJson.has(OWM_MESSAGE_CODE)){
                int errorCode = forecastJson.getInt(OWM_MESSAGE_CODE);
                switch(errorCode){
                    case HttpURLConnection.HTTP_OK:
                        break;
                    case HttpURLConnection.HTTP_NOT_FOUND:
                        // 404: the requested location does not exist.
                        setLocationStatus(getContext(),LOCATION_STATUS_INVALID);
                        return;
                    default:
                        setLocationStatus(getContext(),LOCATION_STATUS_SERVER_DOWN);
                        return;
                }
            }
            JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
            JSONObject cityJson = forecastJson.getJSONObject(OWM_CITY);
            String cityName = cityJson.getString(OWM_CITY_NAME);
            JSONObject cityCoord = cityJson.getJSONObject(OWM_COORD);
            double cityLatitude = cityCoord.getDouble(OWM_LATITUDE);
            double cityLongitude = cityCoord.getDouble(OWM_LONGITUDE);
            long locationId = addLocation(locationSetting, cityName, cityLatitude, cityLongitude);
            // Collect the new weather rows before a single bulk insert.
            Vector<ContentValues> cVVector = new Vector<ContentValues>(weatherArray.length());
            // OWM returns daily forecasts based upon the local time of the city that is being
            // asked for, which means that we need to know the GMT offset to translate this data
            // properly.
            // Since this data is also sent in-order and the first day is always the
            // current day, we're going to take advantage of that to get a nice
            // normalized UTC date for all of our weather.
            // NOTE(review): android.text.format.Time is deprecated in favor of
            // java.util.Calendar / java.time; kept here to preserve behavior.
            Time dayTime = new Time();
            dayTime.setToNow();
            // we start at the day returned by local time. Otherwise this is a mess.
            int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);
            // now we work exclusively in UTC
            dayTime = new Time();
            for(int i = 0; i < weatherArray.length(); i++) {
                // These are the values that will be collected.
                long dateTime;
                double pressure;
                int humidity;
                double windSpeed;
                double windDirection;
                double high;
                double low;
                String description;
                int weatherId;
                // Get the JSON object representing the day
                JSONObject dayForecast = weatherArray.getJSONObject(i);
                // Cheating to convert this to UTC time, which is what we want anyhow
                dateTime = dayTime.setJulianDay(julianStartDay+i);
                pressure = dayForecast.getDouble(OWM_PRESSURE);
                humidity = dayForecast.getInt(OWM_HUMIDITY);
                windSpeed = dayForecast.getDouble(OWM_WINDSPEED);
                windDirection = dayForecast.getDouble(OWM_WIND_DIRECTION);
                // Description is in a child array called "weather", which is 1 element long.
                // That element also contains a weather code.
                JSONObject weatherObject =
                        dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
                description = weatherObject.getString(OWM_DESCRIPTION);
                weatherId = weatherObject.getInt(OWM_WEATHER_ID);
                // Temperatures are in a child object called "temp". Try not to name variables
                // "temp" when working with temperature. It confuses everybody.
                JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
                high = temperatureObject.getDouble(OWM_MAX);
                low = temperatureObject.getDouble(OWM_MIN);
                ContentValues weatherValues = new ContentValues();
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_LOC_KEY, locationId);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DATE, dateTime);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_HUMIDITY, humidity);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_PRESSURE, pressure);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WIND_SPEED, windSpeed);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DEGREES, windDirection);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, high);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, low);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_SHORT_DESC, description);
                weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, weatherId);
                cVVector.add(weatherValues);
            }
            int inserted = 0;
            // add to database
            if ( cVVector.size() > 0 ) {
                ContentValues[] cvArray = new ContentValues[cVVector.size()];
                cVVector.toArray(cvArray);
                getContext().getContentResolver().bulkInsert(WeatherContract.WeatherEntry.CONTENT_URI, cvArray);
                // delete old data so we don't build up an endless history
                getContext().getContentResolver().delete(WeatherContract.WeatherEntry.CONTENT_URI,
                        WeatherContract.WeatherEntry.COLUMN_DATE + " <= ?",
                        new String[] {Long.toString(dayTime.setJulianDay(julianStartDay-1))});
                notifyWeather();
            }
            Log.d(LOG_TAG, "Sync Complete. " + cVVector.size() + " Inserted");
            setLocationStatus(getContext(),LOCATION_STATUS_OK);
        } catch (JSONException e) {
            // Malformed payload: the server answered, but not with valid JSON.
            Log.e(LOG_TAG, e.getMessage(), e);
            e.printStackTrace();
            setLocationStatus(getContext(),LOCATION_STATUS_SERVER_INVALID);
        }
    }
private void notifyWeather() {
Context context = getContext();
//checking the last update and notify if it' the first of the day
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String displayNotificationsKey = context.getString(R.string.pref_enable_notifications_key);
boolean displayNotifications = prefs.getBoolean(displayNotificationsKey,
Boolean.parseBoolean(context.getString(R.string.pref_enable_notifications_default)));
if ( displayNotifications ) {
String lastNotificationKey = context.getString(R.string.pref_last_notification);
long lastSync = prefs.getLong(lastNotificationKey, 0);
if (System.currentTimeMillis() - lastSync >= DAY_IN_MILLIS) {
// Last sync was more than 1 day ago, let's send a notification with the weather.
String locationQuery = Utility.getPreferredLocation(context);
Uri weatherUri = WeatherContract.WeatherEntry.buildWeatherLocationWithDate(locationQuery, System.currentTimeMillis());
// we'll query our contentProvider, as always
Cursor cursor = context.getContentResolver().query(weatherUri, NOTIFY_WEATHER_PROJECTION, null, null, null);
if (cursor.moveToFirst()) {
int weatherId = cursor.getInt(INDEX_WEATHER_ID);
double high = cursor.getDouble(INDEX_MAX_TEMP);
double low = cursor.getDouble(INDEX_MIN_TEMP);
String desc = cursor.getString(INDEX_SHORT_DESC);
int iconId = Utility.getIconResourceForWeatherCondition(weatherId);
Resources resources = context.getResources();
int artResourceId = Utility.getArtResourceForWeatherCondition(weatherId);
String artUrl = Utility.getArtUrlForWeatherCondition(context,weatherId);
//On Honeycomb and higher devices , we can retrieve the size of the large icon
//Prior to that , we use a fixed size
/**
*
* By executing lint --list (the lint tool is located in your sdk/tools directory)
* you can see a list of the valid issue id's. You can find the explanation of
* InlinedApi there :
"InlinedApi": Finds inlined fields that may or may not work on older platforms
*/
@SuppressLint("InlinedApi")
int largeIconWidth = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB ? resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_width)
: resources.getDimensionPixelSize(R.dimen.notification_large_icon_default);
@SuppressLint("InlinedApi")
int largeIconHeight = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB ? resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_height)
: resources.getDimensionPixelSize(R.dimen.notification_large_icon_default);
//Retrieve the large icon
Bitmap largeIcon;
try{
largeIcon = Glide.with(context)
.load(artUrl)
.asBitmap()
.error(artResourceId)
.fitCenter()
.into(largeIconWidth,largeIconHeight).get();
}catch(InterruptedException | ExecutionException e){
Log.e(LOG_TAG,"Error retrieving large icon from "+artUrl,e);
largeIcon= BitmapFactory.decodeResource(resources,artResourceId);
}
String title = context.getString(R.string.app_name);
// Define the text of the forecast.
String contentText = String.format(context.getString(R.string.format_notification),
desc,
Utility.formatTemperature(context, high),
Utility.formatTemperature(context, low));
// NotificationCompatBuilder is a very convenient way to build backward-compatible
// notifications. Just throw in some data.
NotificationCompat.Builder mBuilder =
new NotificationCompat.Builder(getContext())
.setColor(resources.getColor(R.color.primary_light))
.setSmallIcon(iconId)
.setLargeIcon(largeIcon)
.setContentTitle(title)
.setContentText(contentText);
// Make something interesting happen when the user clicks on the notification.
// In this case, opening the app is sufficient.
Intent resultIntent = new Intent(context, MainActivity.class);
// The stack builder object will contain an artificial back stack for the
// started Activity.
// This ensures that navigating backward from the Activity leads out of
// your application to the Home screen.
TaskStackBuilder stackBuilder = TaskStackBuilder.create(context);
stackBuilder.addNextIntent(resultIntent);
PendingIntent resultPendingIntent =
stackBuilder.getPendingIntent(
0,
PendingIntent.FLAG_UPDATE_CURRENT
);
mBuilder.setContentIntent(resultPendingIntent);
NotificationManager mNotificationManager =
(NotificationManager) getContext().getSystemService(Context.NOTIFICATION_SERVICE);
// WEATHER_NOTIFICATION_ID allows you to update the notification later on.
mNotificationManager.notify(WEATHER_NOTIFICATION_ID, mBuilder.build());
//refreshing last sync
SharedPreferences.Editor editor = prefs.edit();
editor.putLong(lastNotificationKey, System.currentTimeMillis());
editor.commit();
}
cursor.close();
}
}
}
/**
* Helper method to handle insertion of a new location in the weather database.
*
* @param locationSetting The location string used to request updates from the server.
* @param cityName A human-readable city name, e.g "Mountain View"
* @param lat the latitude of the city
* @param lon the longitude of the city
* @return the row ID of the added location.
*/
long addLocation(String locationSetting, String cityName, double lat, double lon) {
long locationId;
// First, check if the location with this city name exists in the db
Cursor locationCursor = getContext().getContentResolver().query(
WeatherContract.LocationEntry.CONTENT_URI,
new String[]{WeatherContract.LocationEntry._ID},
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
new String[]{locationSetting},
null);
if (locationCursor.moveToFirst()) {
int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
locationId = locationCursor.getLong(locationIdIndex);
} else {
// Now that the content provider is set up, inserting rows of data is pretty simple.
// First create a ContentValues object to hold the data you want to insert.
ContentValues locationValues = new ContentValues();
// Then add the data, along with the corresponding name of the data type,
// so the content provider knows what kind of value is being inserted.
locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
// Finally, insert location data into the database.
Uri insertedUri = getContext().getContentResolver().insert(
WeatherContract.LocationEntry.CONTENT_URI,
locationValues
);
// The resulting URI contains the ID for the row. Extract the locationId from the Uri.
locationId = ContentUris.parseId(insertedUri);
}
locationCursor.close();
// Wait, that worked? Yes!
return locationId;
}
/**
* Helper method to schedule the sync adapter periodic execution
*/
public static void configurePeriodicSync(Context context, int syncInterval, int flexTime) {
Account account = getSyncAccount(context);
String authority = context.getString(R.string.content_authority);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
// we can enable inexact timers in our periodic sync
SyncRequest request = new SyncRequest.Builder().
syncPeriodic(syncInterval, flexTime).
setSyncAdapter(account, authority).
setExtras(new Bundle()).build();
ContentResolver.requestSync(request);
} else {
ContentResolver.addPeriodicSync(account,
authority, new Bundle(), syncInterval);
}
}
/**
* Helper method to have the sync adapter sync immediately
* @param context The context used to access the account service
*/
public static void syncImmediately(Context context) {
Bundle bundle = new Bundle();
bundle.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, true);
bundle.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
ContentResolver.requestSync(getSyncAccount(context),
context.getString(R.string.content_authority), bundle);
}
/**
* Helper method to get the fake account to be used with SyncAdapter, or make a new one
* if the fake account doesn't exist yet. If we make a new account, we call the
* onAccountCreated method so we can initialize things.
*
* @param context The context used to access the account service
* @return a fake account.
*/
public static Account getSyncAccount(Context context) {
// Get an instance of the Android account manager
AccountManager accountManager =
(AccountManager) context.getSystemService(Context.ACCOUNT_SERVICE);
// Create the account type and default account
Account newAccount = new Account(
context.getString(R.string.app_name), context.getString(R.string.sync_account_type));
// If the password doesn't exist, the account doesn't exist
if ( null == accountManager.getPassword(newAccount) ) {
/*
* Add the account and account type, no password or user data
* If successful, return the Account object, otherwise report an error.
*/
if (!accountManager.addAccountExplicitly(newAccount, "", null)) {
return null;
}
/*
* If you don't set android:syncable="true" in
* in your <provider> element in the manifest,
* then call ContentResolver.setIsSyncable(account, AUTHORITY, 1)
* here.
*/
onAccountCreated(newAccount, context);
}
return newAccount;
}
    /**
     * One-time setup after the sync account is first created: schedules the
     * periodic sync, enables automatic syncing, and runs an immediate sync.
     */
    private static void onAccountCreated(Account newAccount, Context context) {
        /*
         * Since we've just created the account, configure its periodic schedule.
         */
        SunshineSyncAdapter.configurePeriodicSync(context, SYNC_INTERVAL, SYNC_FLEXTIME);
        /*
         * Without calling setSyncAutomatically, our periodic sync will not be enabled.
         */
        ContentResolver.setSyncAutomatically(newAccount, context.getString(R.string.content_authority), true);
        /*
         * Finally, let's do a sync to get things started.
         */
        syncImmediately(context);
    }
    /**
     * App-startup hook: ensures the sync account exists. Creating it for the
     * first time triggers onAccountCreated, which schedules the periodic sync
     * and starts an immediate one.
     */
    public static void initializeSyncAdapter(Context context) {
        getSyncAccount(context);
    }
/**
* Sets the location status into shared preference . This function should
* not be called from the UI thread beacuse it uses comit to write to the
* shared preferences
*
* @param c Context to get the PreferenceManager from .
* @param locationStatus The IntDef value to set
*/
static private void setLocationStatus(Context c, @LocationStatus int locationStatus){
/**The SharedPreferences class provides a general framework that
* allows you to save and retrieve persistent key-value pairs
* of primitive data types. You can use SharedPreferences to
*save any primitive data: booleans, floats, ints, longs,
* and strings. This data will persist across user sessions
* (even if your application is killed).*/
/**To write values:
Call edit() to get a SharedPreferences.Editor.
Add values with methods such as putBoolean() and putString().
Commit the new values with commit()**/
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(c);
SharedPreferences.Editor spe=sp.edit();
spe.putInt(c.getString(R.string.pref_location_status_key),locationStatus);
spe.commit();
}
} | apache-2.0 |
josdem/client | client-view/src/main/java/com/all/client/view/flows/ShowContactFeedInfoFlow.java | 13149 | /**
*
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 Eric Haddad Koenig
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.all.client.view.flows;
import java.awt.Component;
import com.all.action.ResponseCallback;
import com.all.appControl.control.ViewEngine;
import com.all.client.view.dialog.DialogFactory;
import com.all.core.actions.Actions;
import com.all.core.actions.ComposeView;
import com.all.core.actions.LoadContactLibraryAction;
import com.all.core.actions.LoadContactProfileAction;
import com.all.core.model.Model;
import com.all.core.model.SubViews;
import com.all.core.model.Views;
import com.all.shared.model.ContactInfo;
public class ShowContactFeedInfoFlow {

	private final ViewEngine viewEngine;
	private final DialogFactory dialogFactory;

	public ShowContactFeedInfoFlow(ViewEngine viewEngine, DialogFactory dialogFactory) {
		this.viewEngine = viewEngine;
		this.dialogFactory = dialogFactory;
	}

	/**
	 * Checks whether the given contact is accessible and then either opens the
	 * contact's content (own profile or other user's library) or shows the
	 * "add as a friend" dialog anchored at the given component.
	 */
	public void execute(final ContactInfo contact, final Component component) {
		viewEngine.request(Actions.Social.IS_CONTACT_ACCESSIBLE, contact, new ResponseCallback<Boolean>() {
			@Override
			public void onResponse(Boolean accessible) {
				if (!accessible) {
					// Not a friend yet: offer to add the contact instead of opening content.
					dialogFactory.showAddAsAFriendProfileDialog(contact, component);
					return;
				}
				String currentUserEmail = viewEngine.get(Model.CURRENT_USER).getEmail();
				if (contact.getEmail().equals(currentUserEmail)) {
					// The contact is the logged-in user: open the full profile view.
					viewEngine.send(Actions.Social.LOAD_USER_PROFILE,
							new LoadContactProfileAction(contact, new ComposeView(Views.PROFILE, SubViews.ALL)));
				} else {
					// Another user's feed: load that contact's library instead.
					viewEngine.send(Actions.Library.LOAD_CONTACT_LIBRARY, LoadContactLibraryAction.load(contact.getEmail()));
				}
			}
		});
	}
}
| apache-2.0 |
SnappyDataInc/snappy-store | gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/SortedIndexContainer.java | 3054 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.cache;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import com.gemstone.gemfire.cache.query.IndexMaintenanceException;
/**
 * An interface to describe a container for a local index that can be persisted
 * to and recovered from an Oplog.
 *
 * @author swale
 * @since gfxd 1.0
 */
public interface SortedIndexContainer {

  /** Get the unique system-wide ID of the container. */
  public String getUUID();

  /** Get the comparator to be used for sorting the index keys. */
  public Comparator<? super SortedIndexKey> getComparator();

  /** Get the base region that is indexed. */
  public LocalRegion getBaseRegion();

  /** Returns true if this is a unique index and false otherwise. */
  public boolean isUniqueIndex();

  /**
   * Get an index key object that can be inserted into this index given the
   * serialized index key bytes and reference to disk entry.
   *
   * @param indexKeyBytes serialized form of the index key
   * @param entry the region entry the resulting key will reference
   */
  public SortedIndexKey getIndexKey(byte[] indexKeyBytes, RegionEntry entry);

  /**
   * Get an index key object that can be inserted into this index given the
   * full value extracted from a RegionEntry, and reference to the RegionEntry.
   *
   * @param val the value extracted from the entry
   * @param entry the region entry the resulting key will reference
   */
  public SortedIndexKey getIndexKey(Object val, RegionEntry entry);

  /**
   * Initialize the index with given set of sorted entries. The will fail if
   * there is any previous history of operations on the index. The given
   * iterator is required to provide key, value pairs in sorted order as would
   * be required by this index's comparator.
   */
  public void buildIndexFromSorted(
      Iterator<Map.Entry<SortedIndexKey, Object>> entryIterator);

  /**
   * Merge two values for same index key into the first key. The result value
   * can be gotten from {@link SortedIndexKey#getTransientValue()} method of the
   * first argument.
   *
   * @throws IndexMaintenanceException
   *           if the index only allows unique values
   */
  public void mergeValuesForNonUnique(SortedIndexKey mergeInto,
      SortedIndexKey mergeFrom) throws IndexMaintenanceException;

  /**
   * New API added to account Index memory while index is getting loaded.
   *
   * @param cursorPosition for which we are trying to estimate memory
   * @param forceAccount whether to account irrespective of cursor position
   */
  public void accountMemoryForIndex(long cursorPosition, boolean forceAccount);
}
| apache-2.0 |
dhutchis/accumulo | server/tserver/src/main/java/org/apache/accumulo/tserver/InMemoryMap.java | 27913 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.tserver;
import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.UUID;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.accumulo.core.client.SampleNotPresentException;
import org.apache.accumulo.core.client.sample.Sampler;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ConfigurationCopy;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.conf.SiteConfiguration;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.ColumnUpdate;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.file.FileSKVWriter;
import org.apache.accumulo.core.file.rfile.RFile;
import org.apache.accumulo.core.file.rfile.RFileOperations;
import org.apache.accumulo.core.iterators.IteratorEnvironment;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.iterators.SortedMapIterator;
import org.apache.accumulo.core.iterators.WrappingIterator;
import org.apache.accumulo.core.iterators.system.EmptyIterator;
import org.apache.accumulo.core.iterators.system.InterruptibleIterator;
import org.apache.accumulo.core.iterators.system.LocalityGroupIterator;
import org.apache.accumulo.core.iterators.system.LocalityGroupIterator.LocalityGroup;
import org.apache.accumulo.core.iterators.system.SourceSwitchingIterator;
import org.apache.accumulo.core.iterators.system.SourceSwitchingIterator.DataSource;
import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
import org.apache.accumulo.core.sample.impl.SamplerFactory;
import org.apache.accumulo.core.util.CachedConfiguration;
import org.apache.accumulo.core.util.LocalityGroupUtil;
import org.apache.accumulo.core.util.LocalityGroupUtil.LocalityGroupConfigurationError;
import org.apache.accumulo.core.util.LocalityGroupUtil.Partitioner;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.util.PreAllocatedArray;
import org.apache.commons.lang.mutable.MutableLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
public class InMemoryMap {
  // Active map implementation; set to null once delete() has released the memory.
  private SimpleMap map = null;

  private static final Logger log = LoggerFactory.getLogger(InMemoryMap.class);

  // Path of the temporary RFile the map was dumped to (see delete()); null until a dump happens.
  private volatile String memDumpFile = null;
  // Directory (Property.TSERV_MEMDUMP_DIR) where memory dump files are written.
  private final String memDumpDir;
  // One of the TYPE_* constants below describing the concrete SimpleMap in use.
  private final String mapType;

  // Locality group name -> column families in that group, from the table configuration.
  private Map<String,Set<ByteSequence>> lggroups;

  /**
   * Builds the (sampler config, sampler) pair for the given configuration, or a
   * (null, null) pair when no sampler is configured on the table.
   */
  private static Pair<SamplerConfigurationImpl,Sampler> getSampler(AccumuloConfiguration config) {
    try {
      SamplerConfigurationImpl sampleConfig = SamplerConfigurationImpl.newSamplerConfig(config);
      if (sampleConfig == null) {
        return new Pair<>(null, null);
      }

      return new Pair<>(sampleConfig, SamplerFactory.newSampler(sampleConfig, config));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public static final String TYPE_NATIVE_MAP_WRAPPER = "NativeMapWrapper";
  public static final String TYPE_DEFAULT_MAP = "DefaultMap";
  public static final String TYPE_LOCALITY_GROUP_MAP = "LocalityGroupMap";
  public static final String TYPE_LOCALITY_GROUP_MAP_NATIVE = "LocalityGroupMap with native";

  // Lazily initialized (config, sampler) pair; null until the first write. See comment below.
  private AtomicReference<Pair<SamplerConfigurationImpl,Sampler>> samplerRef = new AtomicReference<>(null);

  private AccumuloConfiguration config;

  // defer creating sampler until first write. This was done because an empty sample map configured with no sampler will not flush after a user changes sample
  // config.
  private Sampler getOrCreateSampler() {
    Pair<SamplerConfigurationImpl,Sampler> pair = samplerRef.get();
    if (pair == null) {
      pair = getSampler(config);
      // If another thread won the initialization race, adopt its pair so all callers agree.
      if (!samplerRef.compareAndSet(null, pair)) {
        pair = samplerRef.get();
      }
    }

    return pair.getSecond();
  }
  /**
   * Creates an in-memory map laid out per the given table configuration:
   * native or pure-java maps, wrapped in a {@code LocalityGroupMap} when
   * locality groups are configured, with a parallel sample map maintained
   * for sampled scans (see {@code SampleMap}).
   *
   * @throws LocalityGroupConfigurationError if the configured locality groups are invalid
   */
  public InMemoryMap(AccumuloConfiguration config) throws LocalityGroupConfigurationError {

    boolean useNativeMap = config.getBoolean(Property.TSERV_NATIVEMAP_ENABLED);

    this.memDumpDir = config.get(Property.TSERV_MEMDUMP_DIR);
    this.lggroups = LocalityGroupUtil.getLocalityGroups(config);

    this.config = config;

    SimpleMap allMap;
    SimpleMap sampleMap;

    if (lggroups.size() == 0) {
      // no locality groups: a single flat map for all data
      allMap = newMap(useNativeMap);
      sampleMap = newMap(useNativeMap);
      mapType = useNativeMap ? TYPE_NATIVE_MAP_WRAPPER : TYPE_DEFAULT_MAP;
    } else {
      allMap = new LocalityGroupMap(lggroups, useNativeMap);
      sampleMap = new LocalityGroupMap(lggroups, useNativeMap);
      mapType = useNativeMap ? TYPE_LOCALITY_GROUP_MAP_NATIVE : TYPE_LOCALITY_GROUP_MAP;
    }

    map = new SampleMap(allMap, sampleMap);
  }
private static SimpleMap newMap(boolean useNativeMap) {
if (useNativeMap && NativeMap.isLoaded()) {
try {
return new NativeMapWrapper();
} catch (Throwable t) {
log.error("Failed to create native map", t);
}
}
return new DefaultMap();
}
/**
* Description of the type of SimpleMap that is created.
* <p>
* If no locality groups are present, the SimpleMap is either TYPE_DEFAULT_MAP or TYPE_NATIVE_MAP_WRAPPER. If there is one more locality groups, then the
* InMemoryMap has an array for simple maps that either contain either TYPE_LOCALITY_GROUP_MAP which contains DefaultMaps or TYPE_LOCALITY_GROUP_MAP_NATIVE
* which contains NativeMapWrappers.
*
* @return String that describes the Map type
*/
public String getMapType() {
return mapType;
}
  /**
   * Minimal sorted key/value map abstraction implemented by DefaultMap,
   * NativeMapWrapper, LocalityGroupMap, and SampleMap.
   */
  private interface SimpleMap {
    // Lookup a single value by key.
    Value get(Key key);

    // Iterate entries starting at startKey.
    Iterator<Entry<Key,Value>> iterator(Key startKey);

    // Number of entries currently held.
    int size();

    // Iterator over the map; samplerConfig selects sample data when supported.
    InterruptibleIterator skvIterator(SamplerConfigurationImpl samplerConfig);

    // Release all memory held by the map.
    void delete();

    // Estimated bytes of memory consumed.
    long getMemoryUsed();

    // Apply mutations, assigning kv counts starting at kvCount.
    void mutate(List<Mutation> mutations, int kvCount);
  }
  /**
   * SimpleMap decorator that keeps two maps in step: one holding every
   * key/value written and one holding only the entries accepted by the
   * table's configured Sampler. Scans that request sample data with a
   * matching sampler configuration are served from the sample map.
   */
  private class SampleMap implements SimpleMap {

    private SimpleMap map;     // holds all entries
    private SimpleMap sample;  // holds only sampler-accepted entries

    public SampleMap(SimpleMap map, SimpleMap sampleMap) {
      this.map = map;
      this.sample = sampleMap;
    }

    @Override
    public Value get(Key key) {
      return map.get(key);
    }

    @Override
    public Iterator<Entry<Key,Value>> iterator(Key startKey) {
      throw new UnsupportedOperationException();
    }

    @Override
    public int size() {
      return map.size();
    }

    @Override
    public InterruptibleIterator skvIterator(SamplerConfigurationImpl samplerConfig) {
      if (samplerConfig == null)
        return map.skvIterator(null);
      else {
        Pair<SamplerConfigurationImpl,Sampler> samplerAndConf = samplerRef.get();
        if (samplerAndConf == null) {
          // nothing has been written yet, so no sample data exists either
          return EmptyIterator.EMPTY_ITERATOR;
        } else if (samplerAndConf.getFirst() != null && samplerAndConf.getFirst().equals(samplerConfig)) {
          return sample.skvIterator(null);
        } else {
          // data was sampled with a different configuration than requested
          throw new SampleNotPresentException();
        }
      }
    }

    @Override
    public void delete() {
      map.delete();
      sample.delete();
    }

    @Override
    public long getMemoryUsed() {
      return map.getMemoryUsed() + sample.getMemoryUsed();
    }

    @Override
    public void mutate(List<Mutation> mutations, int kvCount) {
      map.mutate(mutations, kvCount);

      Sampler sampler = getOrCreateSampler();
      if (sampler != null) {
        List<Mutation> sampleMutations = null;

        // rebuild each mutation with only the column updates the sampler accepts
        for (Mutation m : mutations) {
          List<ColumnUpdate> colUpdates = m.getUpdates();
          List<ColumnUpdate> sampleColUpdates = null;
          for (ColumnUpdate cvp : colUpdates) {
            // construct the key the sampler would see for this column update
            Key k = new Key(m.getRow(), cvp.getColumnFamily(), cvp.getColumnQualifier(), cvp.getColumnVisibility(), cvp.getTimestamp(), cvp.isDeleted(), false);
            if (sampler.accept(k)) {
              if (sampleColUpdates == null) {
                sampleColUpdates = new ArrayList<>();
              }
              sampleColUpdates.add(cvp);
            }
          }

          if (sampleColUpdates != null) {
            if (sampleMutations == null) {
              sampleMutations = new ArrayList<>();
            }

            sampleMutations.add(new LocalityGroupUtil.PartitionedMutation(m.getRow(), sampleColUpdates));

          }
        }

        if (sampleMutations != null) {
          // same starting kvCount as the full map, so sampled entries carry matching counts
          sample.mutate(sampleMutations, kvCount);
        }
      }
    }
  }
  /**
   * SimpleMap that partitions mutations into one underlying map per configured
   * locality group, plus a final map for the default group (column families
   * not assigned to any group).
   */
  private static class LocalityGroupMap implements SimpleMap {

    // per-group map of column family -> count; the count value is always 1,
    // the map form is what Partitioner and LocalityGroup require
    private PreAllocatedArray<Map<ByteSequence,MutableLong>> groupFams;

    // the last map in the array is the default locality group
    private SimpleMap maps[];
    private Partitioner partitioner;
    // reusable per-group mutation lists; cleared after each mutate() call
    private PreAllocatedArray<List<Mutation>> partitioned;
    // union of all families belonging to some named group
    private Set<ByteSequence> nonDefaultColumnFamilies;

    LocalityGroupMap(Map<String,Set<ByteSequence>> groups, boolean useNativeMap) {
      this.groupFams = new PreAllocatedArray<>(groups.size());
      // one map per named group + one for the default group
      this.maps = new SimpleMap[groups.size() + 1];
      this.partitioned = new PreAllocatedArray<>(groups.size() + 1);
      this.nonDefaultColumnFamilies = new HashSet<ByteSequence>();

      for (int i = 0; i < maps.length; i++) {
        maps[i] = newMap(useNativeMap);
      }

      int count = 0;
      for (Set<ByteSequence> cfset : groups.values()) {
        HashMap<ByteSequence,MutableLong> map = new HashMap<ByteSequence,MutableLong>();
        for (ByteSequence bs : cfset)
          map.put(bs, new MutableLong(1));
        this.groupFams.set(count++, map);
        nonDefaultColumnFamilies.addAll(cfset);
      }

      partitioner = new LocalityGroupUtil.Partitioner(this.groupFams);

      for (int i = 0; i < partitioned.length; i++) {
        partitioned.set(i, new ArrayList<Mutation>());
      }
    }

    @Override
    public Value get(Key key) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Iterator<Entry<Key,Value>> iterator(Key startKey) {
      throw new UnsupportedOperationException();
    }

    @Override
    public int size() {
      // total entries across all groups
      int sum = 0;
      for (SimpleMap map : maps)
        sum += map.size();
      return sum;
    }

    @Override
    public InterruptibleIterator skvIterator(SamplerConfigurationImpl samplerConfig) {
      if (samplerConfig != null)
        throw new SampleNotPresentException();

      LocalityGroup groups[] = new LocalityGroup[maps.length];
      for (int i = 0; i < groups.length; i++) {
        if (i < groupFams.length)
          groups[i] = new LocalityGroup(maps[i].skvIterator(null), groupFams.get(i), false);
        else
          // last entry is the default group
          groups[i] = new LocalityGroup(maps[i].skvIterator(null), null, true);
      }

      return new LocalityGroupIterator(groups, nonDefaultColumnFamilies);
    }

    @Override
    public void delete() {
      for (SimpleMap map : maps)
        map.delete();
    }

    @Override
    public long getMemoryUsed() {
      long sum = 0;
      for (SimpleMap map : maps)
        sum += map.getMemoryUsed();
      return sum;
    }

    @Override
    public synchronized void mutate(List<Mutation> mutations, int kvCount) {
      // this method is synchronized because it reuses objects to avoid allocation,
      // currently, the method that calls this is synchronized so there is no
      // loss in parallelism.... synchronization was added here for future proofing
      try {
        partitioner.partition(mutations, partitioned);

        for (int i = 0; i < partitioned.length; i++) {
          if (partitioned.get(i).size() > 0) {
            maps[i].mutate(partitioned.get(i), kvCount);
            // advance the count past the updates written to this group
            for (Mutation m : partitioned.get(i))
              kvCount += m.getUpdates().size();
          }
        }
      } finally {
        // clear immediately so mutations can be garbage collected
        for (List<Mutation> list : partitioned) {
          list.clear();
        }
      }
    }

  }
  /**
   * Pure-java SimpleMap backed by a ConcurrentSkipListMap ordered by
   * MemKeyComparator, with atomic byte/entry accounting for memory estimates.
   */
  private static class DefaultMap implements SimpleMap {
    private ConcurrentSkipListMap<Key,Value> map = new ConcurrentSkipListMap<Key,Value>(new MemKeyComparator());
    // sum of serialized key and value bytes held (object overhead estimated separately)
    private AtomicLong bytesInMemory = new AtomicLong();
    private AtomicInteger size = new AtomicInteger();

    public void put(Key key, Value value) {
      // Always a MemKey, so account for the kvCount int
      bytesInMemory.addAndGet(key.getLength() + 4);
      bytesInMemory.addAndGet(value.getSize());
      // only count distinct keys; a replace does not grow the entry count
      if (map.put(key, value) == null)
        size.incrementAndGet();
    }

    @Override
    public Value get(Key key) {
      return map.get(key);
    }

    @Override
    public Iterator<Entry<Key,Value>> iterator(Key startKey) {
      Key lk = new Key(startKey);
      SortedMap<Key,Value> tm = map.tailMap(lk);
      return tm.entrySet().iterator();
    }

    @Override
    public int size() {
      return size.get();
    }

    @Override
    public InterruptibleIterator skvIterator(SamplerConfigurationImpl samplerConfig) {
      if (samplerConfig != null)
        throw new SampleNotPresentException();

      if (map == null)
        throw new IllegalStateException();

      return new SortedMapIterator(map);
    }

    @Override
    public synchronized void delete() {
      // drop the reference; outstanding iterators over the old map keep it alive
      map = null;
    }

    public long getOverheadPerEntry() {
      // all of the java objects that are used to hold the
      // data and make it searchable have overhead... this
      // overhead is estimated using test.EstimateInMemMapOverhead
      // and is in bytes.. the estimates were obtained by running
      // java 6_16 in 64 bit server mode
      return 200;
    }

    @Override
    public void mutate(List<Mutation> mutations, int kvCount) {
      for (Mutation m : mutations) {
        for (ColumnUpdate cvp : m.getUpdates()) {
          // each update gets the next kv count so readers can skip partial mutations
          Key newKey = new MemKey(m.getRow(), cvp.getColumnFamily(), cvp.getColumnQualifier(), cvp.getColumnVisibility(), cvp.getTimestamp(), cvp.isDeleted(),
              false, kvCount++);
          Value value = new Value(cvp.getValue());
          put(newKey, value);
        }
      }
    }

    @Override
    public long getMemoryUsed() {
      return bytesInMemory.get() + (size() * getOverheadPerEntry());
    }
  }
  /**
   * SimpleMap that delegates every operation to an off-heap NativeMap.
   * Sampling is not supported at this level (handled by SampleMap above).
   */
  private static class NativeMapWrapper implements SimpleMap {
    private NativeMap nativeMap;

    NativeMapWrapper() {
      nativeMap = new NativeMap();
    }

    @Override
    public Value get(Key key) {
      return nativeMap.get(key);
    }

    @Override
    public Iterator<Entry<Key,Value>> iterator(Key startKey) {
      return nativeMap.iterator(startKey);
    }

    @Override
    public int size() {
      return nativeMap.size();
    }

    @Override
    public InterruptibleIterator skvIterator(SamplerConfigurationImpl samplerConfig) {
      if (samplerConfig != null)
        throw new SampleNotPresentException();
      return (InterruptibleIterator) nativeMap.skvIterator();
    }

    @Override
    public void delete() {
      nativeMap.delete();
    }

    @Override
    public long getMemoryUsed() {
      return nativeMap.getMemoryUsed();
    }

    @Override
    public void mutate(List<Mutation> mutations, int kvCount) {
      nativeMap.mutate(mutations, kvCount);
    }
  }
  // Next kv count value to assign to a mutation's first key/value (counts start at 1).
  private AtomicInteger nextKVCount = new AtomicInteger(1);
  // Highest kv count whose mutation has been fully applied; readers only trust entries <= this.
  private AtomicInteger kvCount = new AtomicInteger(0);

  // Lock serializing writers, separate from this object's monitor (see comment in mutate).
  private Object writeSerializer = new Object();

  /**
   * Applies changes to a row in the InMemoryMap
   *
   */
  public void mutate(List<Mutation> mutations) {
    int numKVs = 0;
    for (int i = 0; i < mutations.size(); i++)
      numKVs += mutations.get(i).size();

    // Can not update mutationCount while writes that started before
    // are in progress, this would cause partial mutations to be seen.
    // Also, can not continue until mutation count is updated, because
    // a read may not see a successful write. Therefore writes must
    // wait for writes that started before to finish.
    //
    // using separate lock from this map, to allow read/write in parallel
    synchronized (writeSerializer) {
      int kv = nextKVCount.getAndAdd(numKVs);
      try {
        map.mutate(mutations, kv);
      } finally {
        // publish the highest fully-written count even if the write failed partway
        kvCount.set(kv + numKVs - 1);
      }
    }
  }
/**
* Returns a long representing the size of the InMemoryMap
*
* @return bytesInMemory
*/
public synchronized long estimatedSizeInBytes() {
if (map == null)
return 0;
return map.getMemoryUsed();
}
Iterator<Map.Entry<Key,Value>> iterator(Key startKey) {
return map.iterator(startKey);
}
public synchronized long getNumEntries() {
if (map == null)
return 0;
return map.size();
}
private final Set<MemoryIterator> activeIters = Collections.synchronizedSet(new HashSet<MemoryIterator>());
class MemoryDataSource implements DataSource {
private boolean switched = false;
private InterruptibleIterator iter;
private FileSKVIterator reader;
private MemoryDataSource parent;
private IteratorEnvironment env;
private AtomicBoolean iflag;
private SamplerConfigurationImpl iteratorSamplerConfig;
private SamplerConfigurationImpl getSamplerConfig() {
if (env != null) {
if (env.isSamplingEnabled()) {
return new SamplerConfigurationImpl(env.getSamplerConfiguration());
} else {
return null;
}
} else {
return iteratorSamplerConfig;
}
}
MemoryDataSource(SamplerConfigurationImpl samplerConfig) {
this(null, false, null, null, samplerConfig);
}
public MemoryDataSource(MemoryDataSource parent, boolean switched, IteratorEnvironment env, AtomicBoolean iflag, SamplerConfigurationImpl samplerConfig) {
this.parent = parent;
this.switched = switched;
this.env = env;
this.iflag = iflag;
this.iteratorSamplerConfig = samplerConfig;
}
@Override
public boolean isCurrent() {
if (switched)
return true;
else
return memDumpFile == null;
}
@Override
public DataSource getNewDataSource() {
if (switched)
throw new IllegalStateException();
if (!isCurrent()) {
switched = true;
iter = null;
try {
// ensure files are referenced even if iterator was never seeked before
iterator();
} catch (IOException e) {
throw new RuntimeException();
}
}
return this;
}
private synchronized FileSKVIterator getReader() throws IOException {
if (reader == null) {
Configuration conf = CachedConfiguration.getInstance();
FileSystem fs = FileSystem.getLocal(conf);
reader = new RFileOperations().newReaderBuilder().forFile(memDumpFile, fs, conf).withTableConfiguration(SiteConfiguration.getInstance())
.seekToBeginning().build();
if (iflag != null)
reader.setInterruptFlag(iflag);
if (getSamplerConfig() != null) {
reader = reader.getSample(getSamplerConfig());
}
}
return reader;
}
@Override
public SortedKeyValueIterator<Key,Value> iterator() throws IOException {
if (iter == null)
if (!switched) {
iter = map.skvIterator(getSamplerConfig());
if (iflag != null)
iter.setInterruptFlag(iflag);
} else {
if (parent == null)
iter = new MemKeyConversionIterator(getReader());
else
synchronized (parent) {
// synchronize deep copy operation on parent, this prevents multiple threads from deep copying the rfile shared from parent its possible that the
// thread deleting an InMemoryMap and scan thread could be switching different deep copies
iter = new MemKeyConversionIterator(parent.getReader().deepCopy(env));
}
}
return iter;
}
@Override
public DataSource getDeepCopyDataSource(IteratorEnvironment env) {
return new MemoryDataSource(parent == null ? this : parent, switched, env, iflag, iteratorSamplerConfig);
}
@Override
public void setInterruptFlag(AtomicBoolean flag) {
this.iflag = flag;
}
}
  /**
   * Iterator over this in-memory map that can be switched to a file-backed
   * source when the map is deleted, and must be close()d to release the file
   * reader and deregister from activeIters.
   */
  public class MemoryIterator extends WrappingIterator implements InterruptibleIterator {

    private AtomicBoolean closed;
    private SourceSwitchingIterator ssi;
    private MemoryDataSource mds;

    @Override
    protected SortedKeyValueIterator<Key,Value> getSource() {
      if (closed.get())
        throw new IllegalStateException("Memory iterator is closed");

      return super.getSource();
    }

    private MemoryIterator(InterruptibleIterator source) {
      this(source, new AtomicBoolean(false));
    }

    private MemoryIterator(SortedKeyValueIterator<Key,Value> source, AtomicBoolean closed) {
      setSource(source);
      // the closed flag is shared with deep copies, so closing one closes all
      this.closed = closed;
    }

    @Override
    public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
      return new MemoryIterator(getSource().deepCopy(env), closed);
    }

    public void close() {

      synchronized (this) {
        if (closed.compareAndSet(false, true)) {

          try {
            if (mds.reader != null)
              mds.reader.close();
          } catch (IOException e) {
            log.warn("{}", e.getMessage(), e);
          }
        }
      }

      // remove outside of sync to avoid deadlock
      activeIters.remove(this);
    }

    // Switches to the dump file; returns false if the iterator was already closed.
    private synchronized boolean switchNow() throws IOException {
      if (closed.get())
        return false;

      ssi.switchNow();
      return true;
    }

    @Override
    public void setInterruptFlag(AtomicBoolean flag) {
      ((InterruptibleIterator) getSource()).setInterruptFlag(flag);
    }

    private void setSSI(SourceSwitchingIterator ssi) {
      this.ssi = ssi;
    }

    public void setMDS(MemoryDataSource mds) {
      this.mds = mds;
    }

  }
  /**
   * Returns an iterator over this map that stays usable after delete() by
   * switching to the on-disk memory dump. Callers must close() the returned
   * iterator so the map can eventually release its resources.
   */
  public synchronized MemoryIterator skvIterator(SamplerConfigurationImpl iteratorSamplerConfig) {

    if (map == null)
      throw new NullPointerException();

    if (deleted)
      throw new IllegalStateException("Can not obtain iterator after map deleted");

    // snapshot of the highest fully-written kv count; entries above it are skipped
    int mc = kvCount.get();
    MemoryDataSource mds = new MemoryDataSource(iteratorSamplerConfig);
    // TODO seems like a bug that two MemoryDataSources are created... may need to fix in older branches
    SourceSwitchingIterator ssi = new SourceSwitchingIterator(mds);
    MemoryIterator mi = new MemoryIterator(new PartialMutationSkippingIterator(ssi, mc));
    mi.setSSI(ssi);
    mi.setMDS(mds);
    activeIters.add(mi);
    return mi;
  }

  /**
   * Iterator over all entries for compaction; only valid when no mutation is
   * in flight (nextKVCount and kvCount must be consistent).
   */
  public SortedKeyValueIterator<Key,Value> compactionIterator() {

    if (nextKVCount.get() - 1 != kvCount.get())
      throw new IllegalStateException("Memory map in unexpected state : nextKVCount = " + nextKVCount.get() + " kvCount = " + kvCount.get());

    return map.skvIterator(null);
  }

  // Set once delete() has been called; guards against double delete and new iterators.
  private boolean deleted = false;
  /**
   * Deletes this map, waiting up to waitTime ms for active iterators to close.
   * If iterators remain after the wait, the map is dumped to a temporary local
   * RFile and those iterators are switched to read the file before the map's
   * memory is released.
   */
  public void delete(long waitTime) {

    synchronized (this) {
      if (deleted)
        throw new IllegalStateException("Double delete");
      deleted = true;
    }

    long t1 = System.currentTimeMillis();

    while (activeIters.size() > 0 && System.currentTimeMillis() - t1 < waitTime) {
      sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
    }

    if (activeIters.size() > 0) {
      // dump memmap exactly as is to a tmp file on disk, and switch scans to that temp file
      try {
        Configuration conf = CachedConfiguration.getInstance();
        FileSystem fs = FileSystem.getLocal(conf);

        String tmpFile = memDumpDir + "/memDump" + UUID.randomUUID() + "." + RFile.EXTENSION;

        Configuration newConf = new Configuration(conf);
        newConf.setInt("io.seqfile.compress.blocksize", 100000);

        AccumuloConfiguration siteConf = SiteConfiguration.getInstance();

        if (getOrCreateSampler() != null) {
          // dump must carry the sampler config so sampled reads of the file work
          siteConf = createSampleConfig(siteConf);
        }

        FileSKVWriter out = new RFileOperations().newWriterBuilder().forFile(tmpFile, fs, newConf).withTableConfiguration(siteConf).build();

        InterruptibleIterator iter = map.skvIterator(null);

        HashSet<ByteSequence> allfams = new HashSet<ByteSequence>();

        // write each configured locality group, remembering its families so the
        // default group below can exclude them
        for (Entry<String,Set<ByteSequence>> entry : lggroups.entrySet()) {
          allfams.addAll(entry.getValue());
          out.startNewLocalityGroup(entry.getKey(), entry.getValue());
          iter.seek(new Range(), entry.getValue(), true);
          dumpLocalityGroup(out, iter);
        }

        out.startDefaultLocalityGroup();
        iter.seek(new Range(), allfams, false);

        dumpLocalityGroup(out, iter);

        out.close();

        log.debug("Created mem dump file " + tmpFile);

        memDumpFile = tmpFile;

        synchronized (activeIters) {
          for (MemoryIterator mi : activeIters) {
            mi.switchNow();
          }
        }

        // rely on unix behavior that file will be deleted when last
        // reader closes it
        fs.delete(new Path(memDumpFile), true);

      } catch (IOException ioe) {
        log.error("Failed to create mem dump file ", ioe);

        // fall back to waiting for all iterators to finish before freeing memory
        while (activeIters.size() > 0) {
          sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
        }
      }

    }

    SimpleMap tmpMap = map;

    synchronized (this) {
      map = null;
    }

    tmpMap.delete();
  }
private AccumuloConfiguration createSampleConfig(AccumuloConfiguration siteConf) {
ConfigurationCopy confCopy = new ConfigurationCopy(Iterables.filter(siteConf, input -> !input.getKey().startsWith(Property.TABLE_SAMPLER.getKey())));
for (Entry<String,String> entry : samplerRef.get().getFirst().toTablePropertiesMap().entrySet()) {
confCopy.set(entry.getKey(), entry.getValue());
}
siteConf = confCopy;
return siteConf;
}
  /**
   * Appends all remaining entries from the iterator into the writer's current locality group.
   * Stops early once no active iterators remain, since the dump only exists for their benefit.
   */
  private void dumpLocalityGroup(FileSKVWriter out, InterruptibleIterator iter) throws IOException {
    while (iter.hasTop() && activeIters.size() > 0) {
      // RFile does not support MemKey, so we move the kv count into the value only for the RFile.
      // There is no need to change the MemKey to a normal key because the kvCount info gets lost when it is written
      out.append(iter.getTopKey(), MemValue.encode(iter.getTopValue(), ((MemKey) iter.getTopKey()).getKVCount()));
      iter.next();
    }
  }
}
| apache-2.0 |
jiangqqlmj/Android-Universal-Image-Loader-Modify | library/src/main/java/com/nostra13/universalimageloader/utils/IoUtils.java | 4529 | /*******************************************************************************
* Copyright 2011-2014 Sergey Tarasevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.nostra13.universalimageloader.utils;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* Provides I/O operations
*
* @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
* @since 1.0.0
*/
/**
 * Provides I/O operations
 *
 * @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
 * @since 1.0.0
 */
public final class IoUtils {

    /** Default copy buffer size in bytes: {@value} */
    public static final int DEFAULT_BUFFER_SIZE = 32 * 1024; // 32 KB
    /** Fallback estimate of total stream size in bytes when it can't be determined: {@value} */
    public static final int DEFAULT_IMAGE_TOTAL_SIZE = 500 * 1024; // 500 Kb
    /** Once this percentage is copied, copying continues even if the listener asks to stop: {@value} */
    public static final int CONTINUE_LOADING_PERCENTAGE = 75;

    private IoUtils() {
    }

    /**
     * Copies stream, fires progress events by listener, can be interrupted by listener. Uses buffer size =
     * {@value #DEFAULT_BUFFER_SIZE} bytes.
     *
     * @param is       Input stream
     * @param os       Output stream
     * @param listener null-ok; Listener of copying progress and controller of copying interrupting
     * @return <b>true</b> - if stream copied successfully; <b>false</b> - if copying was interrupted by listener
     * @throws IOException if reading or writing fails
     */
    public static boolean copyStream(InputStream is, OutputStream os, CopyListener listener) throws IOException {
        return copyStream(is, os, listener, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Copies stream, fires progress events by listener, can be interrupted by listener.
     *
     * @param is         Input stream
     * @param os         Output stream
     * @param listener   null-ok; Listener of copying progress and controller of copying interrupting
     * @param bufferSize Buffer size for copying, also represents a step for firing progress listener callback, i.e.
     *                   progress event will be fired after every copied <b>bufferSize</b> bytes
     * @return <b>true</b> - if stream copied successfully; <b>false</b> - if copying was interrupted by listener
     * @throws IOException if reading or writing fails
     */
    public static boolean copyStream(InputStream is, OutputStream os, CopyListener listener, int bufferSize)
            throws IOException {
        int current = 0;
        // available() is only an estimate (it may legally report 0, e.g. for network streams);
        // fall back to a default so the reported progress percentage stays meaningful.
        int total = is.available();
        if (total <= 0) {
            total = DEFAULT_IMAGE_TOTAL_SIZE;
        }

        final byte[] bytes = new byte[bufferSize];
        int count;
        if (shouldStopLoading(listener, current, total)) return false;
        while ((count = is.read(bytes, 0, bufferSize)) != -1) {
            os.write(bytes, 0, count);
            current += count;
            if (shouldStopLoading(listener, current, total)) return false;
        }
        os.flush();
        return true;
    }

    /**
     * Decides whether copying should stop. Stopping is requested by the listener, but is only
     * honored while less than {@value #CONTINUE_LOADING_PERCENTAGE}% of the stream has been copied.
     */
    private static boolean shouldStopLoading(CopyListener listener, int current, int total) {
        if (listener != null) {
            boolean shouldContinue = listener.onBytesCopied(current, total);
            if (!shouldContinue) {
                // Use long arithmetic: "100 * current" overflows int for streams larger than
                // ~21 MB, which made the percentage negative and wrongly aborted the copy.
                if (100L * current / total < CONTINUE_LOADING_PERCENTAGE) {
                    return true; // if loaded more than 75% then continue loading anyway
                }
            }
        }
        return false;
    }

    /**
     * Reads all data from stream and close it silently
     *
     * @param is Input stream
     */
    public static void readAndCloseStream(InputStream is) {
        final byte[] bytes = new byte[DEFAULT_BUFFER_SIZE];
        try {
            // Drain the stream; the data itself is discarded.
            while (is.read(bytes, 0, DEFAULT_BUFFER_SIZE) != -1);
        } catch (IOException ignored) {
            // Best-effort drain: failures are deliberately ignored.
        } finally {
            closeSilently(is);
        }
    }

    /**
     * Closes the given resource, swallowing any exception. Safe to call with {@code null}.
     *
     * @param closeable resource to close, may be null
     */
    public static void closeSilently(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception ignored) {
                // Intentionally silent: callers use this in finally blocks.
            }
        }
    }

    /*
     * Listener and controller for copy process
     */
    public static interface CopyListener {
        /**
         * @param current Loaded bytes
         * @param total   Total bytes for loading
         * @return <b>true</b> - if copying should be continued; <b>false</b> - if copying should be interrupted
         */
        boolean onBytesCopied(int current, int total);
    }
}
| apache-2.0 |
ideastudios/utils | app/src/main/java/tech/oom/julian/media/recorder/StageListener.java | 472 | package tech.oom.julian.media.recorder;
/**
 * Callback holder for audio-recorder events. All methods are no-op defaults; subclasses
 * override only the events they care about.
 */
public class StageListener {
    /** Called with a chunk of raw recorded PCM samples; {@code length} is the valid sample count. */
    public void onRecordData(short[] data, int length) {
    }

    /** Called with the current input volume level. Units/range are not defined here — see the caller. */
    public void onRecordVolume(int volume) {
    }

    /** Called when recording starts for the given client. */
    public void onStartRecording(Client client) {
    }

    /** Called when recording stops for the given client. */
    public void onStopRecording(Client client) {
    }

    /** Called when recording fails; {@code error} is a human-readable description. */
    public void onRecordError(String error) {
    }

    /** Called when persisting the recording to a file fails. */
    public void onFileSaveFailed(String error) {
    }

    /** Called when the recording was saved; {@code fileUri} locates the written file. */
    public void onFileSaveSuccess(String fileUri) {
    }
}
| apache-2.0 |
jkiddo/jmdns | src/main/java/javax/jmdns/impl/DNSCache.java | 9263 | // Copyright 2003-2005 Arthur van Hoff Rick Blair
// Licensed under Apache License version 2.0
// Original license LGPL
package javax.jmdns.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import javax.jmdns.impl.constants.DNSRecordClass;
import javax.jmdns.impl.constants.DNSRecordType;
/**
* A table of DNS entries. This is a map table which can handle multiple entries with the same name.
* <p/>
* Storing multiple entries with the same name is implemented using a linked list. This is hidden from the user and can change in later implementation.
* <p/>
* Here's how to iterate over all entries:
*
* <pre>
* for (Iterator i=dnscache.allValues().iterator(); i.hasNext(); ) {
* DNSEntry entry = i.next();
* ...do something with entry...
* }
* </pre>
* <p/>
* And here's how to iterate over all entries having a given name:
*
* <pre>
* for (Iterator i=dnscache.getDNSEntryList(name).iterator(); i.hasNext(); ) {
* DNSEntry entry = i.next();
* ...do something with entry...
* }
* </pre>
*
* @author Arthur van Hoff, Werner Randelshofer, Rick Blair, Pierre Frisch
*/
public class DNSCache extends ConcurrentHashMap<String, List<DNSEntry>> {

    // private static Logger logger = LoggerFactory.getLogger(DNSCache.class.getName());

    private static final long serialVersionUID = 3024739453186759259L;

    /**
     * Creates a cache with a default initial capacity of 1024 buckets.
     */
    public DNSCache() {
        this(1024);
    }

    /**
     * Copy constructor.
     *
     * @param map cache to copy; may be null, in which case an empty cache is created
     */
    public DNSCache(DNSCache map) {
        this(map != null ? map.size() : 1024);
        if (map != null) {
            this.putAll(map);
        }
    }

    /**
     * Create a table with a given initial size.
     *
     * @param initialCapacity initial capacity of the underlying hash table
     */
    public DNSCache(int initialCapacity) {
        super(initialCapacity);
    }

    // ====================================================================
    // Map

    /**
     * {@inheritDoc}
     * <p>
     * Implemented via the copy constructor rather than {@code super.clone()}.
     */
    @Override
    protected Object clone() throws CloneNotSupportedException {
        return new DNSCache(this);
    }

    // ====================================================================

    /**
     * Returns all entries in the cache
     *
     * @return all entries in the cache
     */
    public Collection<DNSEntry> allValues() {
        List<DNSEntry> allValues = new ArrayList<DNSEntry>();
        for (List<? extends DNSEntry> entry : this.values()) {
            if (entry != null) {
                allValues.addAll(entry);
            }
        }
        return allValues;
    }

    /**
     * Iterate only over items with matching name. Returns an list of DNSEntry or null. To retrieve all entries, one must iterate over this linked list.
     *
     * @param name entry name (case-insensitive); may be null
     * @return snapshot list of matching DNSEntries, empty if none
     */
    public Collection<? extends DNSEntry> getDNSEntryList(String name) {
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            // Snapshot under the list's own lock so callers can iterate safely.
            synchronized (entryList) {
                entryList = new ArrayList<DNSEntry>(entryList);
            }
        } else {
            entryList = Collections.emptyList();
        }
        return entryList;
    }

    private Collection<? extends DNSEntry> _getDNSEntryList(String name) {
        // ConcurrentHashMap rejects null keys with a NullPointerException, so a null name
        // is mapped to "no entries" here instead of crashing.
        // NOTE(review): toLowerCase() uses the default locale; Locale.ROOT would be
        // locale-stable but must match how keys are produced elsewhere — left unchanged.
        return name != null ? this.get(name.toLowerCase()) : null;
    }

    /**
     * Get a matching DNS entry from the table (using isSameEntry). Returns the entry that was found.
     *
     * @param dnsEntry entry to look up; may be null
     * @return matching DNSEntry or null
     */
    public DNSEntry getDNSEntry(DNSEntry dnsEntry) {
        DNSEntry result = null;
        if (dnsEntry != null) {
            Collection<? extends DNSEntry> entryList = this._getDNSEntryList(dnsEntry.getKey());
            if (entryList != null) {
                synchronized (entryList) {
                    for (DNSEntry testDNSEntry : entryList) {
                        if (testDNSEntry.isSameEntry(dnsEntry)) {
                            result = testDNSEntry;
                            break;
                        }
                    }
                }
            }
        }
        return result;
    }

    /**
     * Get a matching DNS entry from the table.
     *
     * @param name entry name
     * @param type record type to match
     * @param recordClass record class to match
     * @return first matching DNSEntry or null
     */
    public DNSEntry getDNSEntry(String name, DNSRecordType type, DNSRecordClass recordClass) {
        DNSEntry result = null;
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            synchronized (entryList) {
                for (DNSEntry testDNSEntry : entryList) {
                    if (testDNSEntry.matchRecordType(type) && testDNSEntry.matchRecordClass(recordClass)) {
                        result = testDNSEntry;
                        break;
                    }
                }
            }
        }
        return result;
    }

    /**
     * Get all matching DNS entries from the table.
     *
     * @param name entry name
     * @param type record type to match
     * @param recordClass record class to match
     * @return snapshot list of matching entries, empty if none
     */
    public Collection<? extends DNSEntry> getDNSEntryList(String name, DNSRecordType type, DNSRecordClass recordClass) {
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            synchronized (entryList) {
                // Copy first, then filter the copy so the shared list is never mutated.
                entryList = new ArrayList<DNSEntry>(entryList);
                for (Iterator<? extends DNSEntry> i = entryList.iterator(); i.hasNext();) {
                    DNSEntry testDNSEntry = i.next();
                    if (!testDNSEntry.matchRecordType(type) || (!testDNSEntry.matchRecordClass(recordClass))) {
                        i.remove();
                    }
                }
            }
        } else {
            entryList = Collections.emptyList();
        }
        return entryList;
    }

    /**
     * Adds an entry to the table.
     *
     * @param dnsEntry entry to add; null is ignored
     * @return true if the entry was added
     */
    public boolean addDNSEntry(final DNSEntry dnsEntry) {
        boolean result = false;
        if (dnsEntry != null) {
            List<DNSEntry> entryList = this.get(dnsEntry.getKey());
            if (entryList == null) {
                // putIfAbsent handles the race where two threads create the list concurrently.
                this.putIfAbsent(dnsEntry.getKey(), new ArrayList<DNSEntry>());
                entryList = this.get(dnsEntry.getKey());
            }
            synchronized (entryList) {
                entryList.add(dnsEntry);
            }
            result = true;
        }
        return result;
    }

    /**
     * Removes a specific entry from the table. Returns true if the entry was found.
     *
     * @param dnsEntry entry to remove; null is ignored
     * @return true if the entry was removed
     */
    public boolean removeDNSEntry(DNSEntry dnsEntry) {
        boolean result = false;
        if (dnsEntry != null) {
            List<DNSEntry> entryList = this.get(dnsEntry.getKey());
            if (entryList != null) {
                synchronized (entryList) {
                    // Fix: report the actual outcome of the removal. Previously the result of
                    // List.remove() was discarded and this method always returned false,
                    // contradicting its documented contract.
                    result = entryList.remove(dnsEntry);
                }
            }
        }
        return result;
    }

    /**
     * Replace an existing entry by a new one.<br/>
     * <b>Note:</b> the 2 entries must have the same key.
     *
     * @param newDNSEntry replacement entry
     * @param existingDNSEntry entry to replace
     * @return <code>true</code> if the entry has been replace, <code>false</code> otherwise.
     */
    public boolean replaceDNSEntry(DNSEntry newDNSEntry, DNSEntry existingDNSEntry) {
        boolean result = false;
        if ((newDNSEntry != null) && (existingDNSEntry != null) && (newDNSEntry.getKey().equals(existingDNSEntry.getKey()))) {
            List<DNSEntry> entryList = this.get(newDNSEntry.getKey());
            if (entryList == null) {
                this.putIfAbsent(newDNSEntry.getKey(), new ArrayList<DNSEntry>());
                entryList = this.get(newDNSEntry.getKey());
            }
            synchronized (entryList) {
                // Remove-then-add under the list lock; the new entry is appended even when the
                // old one was absent, matching the original behavior.
                entryList.remove(existingDNSEntry);
                entryList.add(newDNSEntry);
            }
            result = true;
        }
        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized String toString() {
        StringBuilder aLog = new StringBuilder(2000);
        aLog.append("\t---- cache ----");
        for (String key : this.keySet()) {
            aLog.append("\n\t\t");
            aLog.append("\n\t\tname '");
            aLog.append(key);
            aLog.append("' ");
            List<? extends DNSEntry> entryList = this.get(key);
            if ((entryList != null) && (!entryList.isEmpty())) {
                synchronized (entryList) {
                    for (DNSEntry entry : entryList) {
                        aLog.append("\n\t\t\t");
                        aLog.append(entry.toString());
                    }
                }
            } else {
                aLog.append(" no entries");
            }
        }
        return aLog.toString();
    }
}
| apache-2.0 |
mjsax/aeolus | batching/src/test/java/de/hub/cs/dbis/aeolus/batching/SingleBatchSizeHashMapTest.java | 2857 | /*
* #!
* %
* Copyright (C) 2014 - 2016 Humboldt-Universität zu Berlin
* %
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #_
*/
package de.hub.cs.dbis.aeolus.batching;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
/**
* @author mjsax
*/
/**
 * Unit tests for SingleBatchSizeHashMap: a fixed-value map whose {@code get} always returns the
 * configured batch size and whose other Map operations are unsupported.
 *
 * @author mjsax
 */
public class SingleBatchSizeHashMapTest {

    /** get(...) must return the configured batch size for any key. */
    @Test
    public void testGet() {
        final long seed = System.currentTimeMillis();
        System.out.println("Test seed: " + seed);
        Random r = new Random(seed);
        final int batchSize = r.nextInt();
        // Integer.valueOf replaces the deprecated Integer(int) constructor; assertEquals
        // compares via equals(), so the outcome is unchanged.
        Assert.assertEquals(Integer.valueOf(batchSize), new SingleBatchSizeHashMap(batchSize).get(null));
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testSize() {
        new SingleBatchSizeHashMap(0).size();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testIsEmpty() {
        new SingleBatchSizeHashMap(0).isEmpty();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testContainsKey() {
        new SingleBatchSizeHashMap(0).containsKey(null);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testContainsValue() {
        new SingleBatchSizeHashMap(0).containsValue(null);
    }

    /** Putting the already-configured batch size is a silent no-op. */
    @Test
    public void testPut() {
        new SingleBatchSizeHashMap(0).put(null, Integer.valueOf(0));
    }

    /** Putting null is rejected. */
    @Test(expected = UnsupportedOperationException.class)
    public void testPutFail1() {
        new SingleBatchSizeHashMap(0).put(null, null);
    }

    /** Putting a value different from the configured batch size is rejected. */
    @Test(expected = UnsupportedOperationException.class)
    public void testPutFail2() {
        new SingleBatchSizeHashMap(0).put(null, Integer.valueOf(1));
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testRemove() {
        new SingleBatchSizeHashMap(0).remove(null);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testPutAll() {
        new SingleBatchSizeHashMap(0).putAll(null);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testClear() {
        new SingleBatchSizeHashMap(0).clear();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testKeySet() {
        new SingleBatchSizeHashMap(0).keySet();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testValues() {
        new SingleBatchSizeHashMap(0).values();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testEntrySet() {
        new SingleBatchSizeHashMap(0).entrySet();
    }
}
| apache-2.0 |
opennetworkinglab/spring-open | src/test/java/net/onrc/onos/api/flowmanager/FlowBatchOperationTest.java | 6353 | package net.onrc.onos.api.flowmanager;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import java.util.LinkedList;
import java.util.List;
import net.onrc.onos.api.batchoperation.BatchOperationEntry;
import net.onrc.onos.api.flowmanager.FlowBatchOperation.Operator;
import net.onrc.onos.core.matchaction.MatchActionId;
import net.onrc.onos.core.matchaction.MatchActionOperations;
import net.onrc.onos.core.matchaction.MatchActionOperationsId;
import net.onrc.onos.core.matchaction.match.Match;
import net.onrc.onos.core.util.IdGenerator;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link FlowBatchOperation}.
*/
/**
 * Unit tests for {@link FlowBatchOperation}.
 */
public class FlowBatchOperationTest {
    // Fixture flows created fresh for every test in setUp().
    private Flow flow1;
    private Flow flow2;
    private Flow flow3;

    /**
     * A subclass of {@link Flow} for testing purpose.
     * Both getMatch() and compile() return null — only the flow id matters in these tests.
     */
    final class TestFlow extends Flow {
        public TestFlow(FlowId id) {
            super(id);
        }

        @Override
        public Match getMatch() {
            return null;
        }

        @Override
        public List<MatchActionOperations> compile(Operator op,
                IdGenerator<MatchActionId> maIdGenerator,
                IdGenerator<MatchActionOperationsId> maoIdGenerator) {
            return null;
        }
    }

    @Before
    public void setUp() throws Exception {
        flow1 = new TestFlow(new FlowId(123L));
        flow2 = new TestFlow(new FlowId(456L));
        flow3 = new TestFlow(new FlowId(789L));
    }

    /**
     * Tests {@link FlowBatchOperation#FlowBatchOperation()} constructor.
     */
    @Test
    public void testConstructor() {
        // The no-arg constructor yields an empty operation list.
        FlowBatchOperation op1 = new FlowBatchOperation();
        assertNotNull(op1);
        assertEquals(0, op1.size());
    }

    /**
     * Tests {@link FlowBatchOperation#FlowBatchOperation(java.util.List)}
     * constructor.
     */
    @Test
    public void testConstructorWithList() {
        // Build a mixed list of three ADD and three REMOVE entries, then verify the
        // constructor preserves them all.
        List<BatchOperationEntry<Operator, ?>> batchOperations;
        batchOperations = new LinkedList<>();
        batchOperations.add(new BatchOperationEntry<Operator, Flow>(
                Operator.ADD, flow1));
        batchOperations.add(new BatchOperationEntry<Operator, Flow>(
                Operator.ADD, flow2));
        batchOperations.add(new BatchOperationEntry<Operator, Flow>(
                Operator.ADD, flow3));
        batchOperations.add(new BatchOperationEntry<Operator, FlowId>(
                Operator.REMOVE, new FlowId(1L)));
        batchOperations.add(new BatchOperationEntry<Operator, FlowId>(
                Operator.REMOVE, new FlowId(2L)));
        batchOperations.add(new BatchOperationEntry<Operator, FlowId>(
                Operator.REMOVE, new FlowId(3L)));

        FlowBatchOperation op1 = new FlowBatchOperation(batchOperations);
        assertNotNull(op1);
        assertEquals(6, op1.size());
        assertThat(op1.getOperations(), hasSize(6));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow1)));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow2)));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow3)));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(Operator.REMOVE,
                        new FlowId(1L))));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(Operator.REMOVE,
                        new FlowId(2L))));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(Operator.REMOVE,
                        new FlowId(3L))));
    }

    /**
     * Tests {@link FlowBatchOperation#addAddFlowOperation(Flow)} method.
     */
    @Test
    public void testAddAddFlowOperation() {
        FlowBatchOperation op1 = new FlowBatchOperation();

        // The method returns the operation itself, enabling call chaining.
        FlowBatchOperation op2 = op1.addAddFlowOperation(flow1);
        assertEquals(op1, op2);
        assertEquals(1, op1.size());
        assertThat(op1.getOperations(), hasSize(1));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow1)));

        // Chained adds accumulate in the same operation.
        op1.addAddFlowOperation(flow2).addAddFlowOperation(flow3);
        assertEquals(3, op1.size());
        assertThat(op1.getOperations(), hasSize(3));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow1)));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow2)));
        assertThat(op1.getOperations(), hasItem(
                new BatchOperationEntry<Operator, Flow>(Operator.ADD, flow3)));
    }

    /**
     * Tests {@link FlowBatchOperation#addRemoveFlowOperation(Flow)} method.
     */
    @Test
    public void testAddRemoveFlowOperation() {
        FlowBatchOperation op1 = new FlowBatchOperation();

        // The method returns the operation itself, enabling call chaining.
        FlowBatchOperation op2 = op1.addRemoveFlowOperation(new FlowId(123L));
        assertEquals(op1, op2);
        assertEquals(1, op1.size());
        assertThat(op1.getOperations(), hasSize(1));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(
                        Operator.REMOVE, new FlowId(123L))));

        // Chained removes accumulate in the same operation.
        op1.addRemoveFlowOperation(new FlowId(456L))
                .addRemoveFlowOperation(new FlowId(789L));
        assertEquals(3, op1.size());
        assertThat(op1.getOperations(), hasSize(3));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(
                        Operator.REMOVE, new FlowId(123L))));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(
                        Operator.REMOVE, new FlowId(456L))));
        assertThat(op1.getOperations(),
                hasItem(new BatchOperationEntry<Operator, FlowId>(
                        Operator.REMOVE, new FlowId(789L))));
    }
}
| apache-2.0 |
opetrovski/development | oscm-rest-api-trigger-unittests/javasrc/org/oscm/rest/trigger/unittests/DefinitionBackendTest.java | 14519 | /*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: Jun 10, 2016
*
*******************************************************************************/
package org.oscm.rest.trigger.unittests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;
import org.junit.Test;
import org.mockito.Mockito;
import org.oscm.internal.intf.TriggerDefinitionService;
import org.oscm.internal.types.exception.ConcurrentModificationException;
import org.oscm.internal.types.exception.DeletionConstraintException;
import org.oscm.internal.types.exception.ObjectNotFoundException;
import org.oscm.internal.types.exception.OperationNotPermittedException;
import org.oscm.internal.types.exception.TriggerDefinitionDataException;
import org.oscm.internal.types.exception.ValidationException;
import org.oscm.internal.vo.VOOrganization;
import org.oscm.internal.vo.VOTriggerDefinition;
import org.oscm.rest.common.Representation;
import org.oscm.rest.common.RepresentationCollection;
import org.oscm.rest.trigger.DefinitionBackend;
import org.oscm.rest.trigger.TriggerParameters;
import org.oscm.rest.trigger.data.DefinitionRepresentation;
/**
* Unit test for TriggerBackend
*
* @author miethaner
*/
/**
 * Unit test for TriggerBackend
 *
 * Each test wires a Mockito mock of {@link TriggerDefinitionService} into
 * {@link DefinitionBackend} and verifies both the success path and the mapping of
 * service exceptions to HTTP status codes.
 *
 * @author miethaner
 */
public class DefinitionBackendTest {

    // NOTE(review): empty placeholder test — appears vestigial; confirm whether it can be removed.
    @Test
    public void dummyTest() {
    }

    /** GET of a single trigger: success plus NOT_FOUND / FORBIDDEN error mappings. */
    @Test
    public void testGetItem() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        params.setId(id);

        VOTriggerDefinition trigger = new VOTriggerDefinition();
        trigger.setKey(id.longValue());
        trigger.setOrganization(new VOOrganization());

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);
        Mockito.when(service.getTriggerDefinition(params.getId())).thenReturn(
                trigger);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        Representation result = backend.getItem().get(params);

        assertEquals(id, result.getId());
        Mockito.verify(service).getTriggerDefinition(params.getId());

        // ObjectNotFoundException -> 404
        Mockito.when(service.getTriggerDefinition(params.getId())).thenThrow(
                new ObjectNotFoundException());
        try {
            backend.getItem().get(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.NOT_FOUND.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // A fresh id is used per stubbing because the previous stub already throws.
        // OperationNotPermittedException -> 403
        params.setId(new Long(2L));
        Mockito.when(service.getTriggerDefinition(params.getId())).thenThrow(
                new OperationNotPermittedException());
        try {
            backend.getItem().get(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // EJBAccessException -> 403
        params.setId(new Long(3L));
        Mockito.when(service.getTriggerDefinition(params.getId())).thenThrow(
                new javax.ejb.EJBAccessException());
        try {
            backend.getItem().get(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }
    }

    /** GET of the trigger collection: success plus FORBIDDEN error mapping. */
    @Test
    public void testGetCollection() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        params.setId(id);

        VOTriggerDefinition trigger = new VOTriggerDefinition();
        trigger.setKey(id.longValue());
        trigger.setOrganization(new VOOrganization());
        List<VOTriggerDefinition> col = new ArrayList<VOTriggerDefinition>();
        col.add(trigger);

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);
        Mockito.when(service.getTriggerDefinitions()).thenReturn(col);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        RepresentationCollection<DefinitionRepresentation> result = backend
                .getCollection().get(params);

        assertEquals(id,
                result.getItems().toArray(new DefinitionRepresentation[] {})[0]
                        .getId());
        Mockito.verify(service).getTriggerDefinitions();

        // EJBAccessException -> 403
        Mockito.when(service.getTriggerDefinitions()).thenThrow(
                new javax.ejb.EJBAccessException());
        try {
            backend.getCollection().get(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }
    }

    /** POST of a new trigger: success plus 400/409/403 error mappings. */
    @Test
    public void testPostCollection() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        DefinitionRepresentation trigger = new DefinitionRepresentation();

        VOTriggerDefinition definition = new VOTriggerDefinition();
        definition.setKey(id.longValue());

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);
        Mockito.when(service.createTriggerDefinition(definition))
                .thenReturn(id);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        backend.postCollection().post(trigger, params);

        Mockito.verify(service).createTriggerDefinition(
                Mockito.any(VOTriggerDefinition.class));

        // ValidationException -> 400
        Mockito.when(
                service.createTriggerDefinition(Mockito
                        .any(VOTriggerDefinition.class))).thenThrow(
                new ValidationException());
        try {
            backend.postCollection().post(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.BAD_REQUEST.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // reset() clears the previous throwing stub before installing the next one.
        // TriggerDefinitionDataException -> 409
        Mockito.reset(service);
        Mockito.when(
                service.createTriggerDefinition(Mockito
                        .any(VOTriggerDefinition.class))).thenThrow(
                new TriggerDefinitionDataException());
        try {
            backend.postCollection().post(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.CONFLICT.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // EJBAccessException -> 403
        Mockito.reset(service);
        Mockito.when(
                service.createTriggerDefinition(Mockito
                        .any(VOTriggerDefinition.class))).thenThrow(
                new javax.ejb.EJBAccessException());
        try {
            backend.postCollection().post(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }
    }

    /**
     * PUT without an If-Match header (no ETag): the backend must not read the current
     * definition first; also exercises all error mappings.
     */
    @Test
    public void testPutItemWithoutEtag() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        params.setId(id);
        DefinitionRepresentation trigger = new DefinitionRepresentation();
        trigger.setId(id);
        trigger.setETag(new Long(1L));
        trigger.setAction("SUBSCRIBE_TO_SERVICE");
        trigger.setDescription("desc");
        trigger.setTargetURL("abc");
        trigger.setType("REST_SERVICE");
        trigger.setSuspending(Boolean.TRUE);

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        backend.putItem().put(trigger, params);

        // Without an ETag there is no version check, hence no read of the definition.
        Mockito.verify(service, Mockito.never()).getTriggerDefinition(id);
        Mockito.verify(service).updateTriggerDefinition(
                Mockito.any(VOTriggerDefinition.class));

        // ObjectNotFoundException -> 404
        Mockito.doThrow(new ObjectNotFoundException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.NOT_FOUND.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // ValidationException -> 400
        Mockito.reset(service);
        Mockito.doThrow(new ValidationException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.BAD_REQUEST.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // ConcurrentModificationException -> 409
        Mockito.reset(service);
        Mockito.doThrow(new ConcurrentModificationException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.CONFLICT.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // TriggerDefinitionDataException -> 409
        Mockito.reset(service);
        Mockito.doThrow(new TriggerDefinitionDataException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.CONFLICT.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // OperationNotPermittedException -> 403
        Mockito.reset(service);
        Mockito.doThrow(new OperationNotPermittedException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // EJBAccessException -> 403
        Mockito.reset(service);
        Mockito.doThrow(new javax.ejb.EJBAccessException())
                .when(service)
                .updateTriggerDefinition(Mockito.any(VOTriggerDefinition.class));
        try {
            backend.putItem().put(trigger, params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }
    }

    /**
     * PUT with an If-Match header (ETag "3"): the backend must read the current definition to
     * compare versions before updating.
     */
    @Test
    public void testPutItemWithEtag() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        params.setId(id);
        params.setMatch("3");
        DefinitionRepresentation trigger = new DefinitionRepresentation();
        trigger.setId(id);

        // The stored definition's version matches the ETag, so the update proceeds.
        VOTriggerDefinition definition = new VOTriggerDefinition();
        definition.setKey(id.longValue());
        definition.setVersion(3);

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);
        Mockito.when(service.getTriggerDefinition(id)).thenReturn(definition);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        backend.putItem().put(trigger, params);

        Mockito.verify(service).getTriggerDefinition(id);
        Mockito.verify(service).updateTriggerDefinition(
                Mockito.any(VOTriggerDefinition.class));
    }

    /** DELETE of a trigger: success plus 404/409/403 error mappings. */
    @Test
    public void testDeleteItem() throws Exception {

        Long id = new Long(1L);
        TriggerParameters params = new TriggerParameters();
        params.setId(id);

        TriggerDefinitionService service = Mockito
                .mock(TriggerDefinitionService.class);

        DefinitionBackend backend = new DefinitionBackend();
        backend.setService(service);
        backend.deleteItem().delete(params);

        Mockito.verify(service).deleteTriggerDefinition(id.longValue());

        // ObjectNotFoundException -> 404
        Mockito.reset(service);
        Mockito.doThrow(new ObjectNotFoundException()).when(service)
                .deleteTriggerDefinition(params.getId().longValue());
        try {
            backend.deleteItem().delete(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.NOT_FOUND.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // DeletionConstraintException -> 409
        Mockito.reset(service);
        Mockito.doThrow(new DeletionConstraintException()).when(service)
                .deleteTriggerDefinition(params.getId().longValue());
        try {
            backend.deleteItem().delete(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.CONFLICT.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // OperationNotPermittedException -> 403
        Mockito.reset(service);
        Mockito.doThrow(new OperationNotPermittedException()).when(service)
                .deleteTriggerDefinition(params.getId().longValue());
        try {
            backend.deleteItem().delete(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }

        // EJBAccessException -> 403
        Mockito.reset(service);
        Mockito.doThrow(new javax.ejb.EJBAccessException()).when(service)
                .deleteTriggerDefinition(params.getId().longValue());
        try {
            backend.deleteItem().delete(params);
            fail();
        } catch (WebApplicationException e) {
            assertEquals(Status.FORBIDDEN.getStatusCode(), e.getResponse()
                    .getStatus());
        }
    }
}
| apache-2.0 |
EnMasseProject/enmasse | api-model/src/main/java/io/enmasse/admin/model/v1/StandardInfraConfig.java | 2919 | /*
* Copyright 2018, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.admin.model.v1;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.enmasse.common.model.CustomResourceWithAdditionalProperties;
import io.enmasse.common.model.DefaultCustomResource;
import io.fabric8.kubernetes.api.model.Namespaced;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.model.annotation.Group;
import io.fabric8.kubernetes.model.annotation.Version;
import io.sundr.builder.annotations.Buildable;
import io.sundr.builder.annotations.BuildableReference;
@Buildable(
        editableEnabled = false,
        generateBuilderPackage = false,
        builderPackage = "io.fabric8.kubernetes.api.builder",
        refs = {@BuildableReference(ObjectMeta.class)}
)
@DefaultCustomResource
@SuppressWarnings("serial")
@Version(AdminCrd.VERSION_V1BETA1)
@Group(AdminCrd.GROUP)
public class StandardInfraConfig extends CustomResourceWithAdditionalProperties<StandardInfraConfigSpec, StandardInfraConfigStatus> implements WithAdditionalProperties, InfraConfig, Namespaced {

    public static final String KIND = "StandardInfraConfig";

    // Kept as a local field so generated builders can populate it directly;
    // see https://github.com/fabric8io/kubernetes-client/pull/1346.
    private ObjectMeta metadata;
    private StandardInfraConfigSpec spec = new StandardInfraConfigSpec();

    @Override
    public ObjectMeta getMetadata() {
        return this.metadata;
    }

    @Override
    public void setMetadata(ObjectMeta metadata) {
        this.metadata = metadata;
    }

    public void setSpec(StandardInfraConfigSpec spec) {
        this.spec = spec;
    }

    public StandardInfraConfigSpec getSpec() {
        return this.spec;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final StandardInfraConfig other = (StandardInfraConfig) o;
        return Objects.equals(getMetadata(), other.getMetadata())
                && Objects.equals(this.spec, other.spec);
    }

    @Override
    public int hashCode() {
        return Objects.hash(getMetadata(), this.spec);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("StandardInfraConfig{");
        sb.append("metadata=").append(getMetadata());
        sb.append(", spec=").append(this.spec).append("}");
        return sb.toString();
    }

    /** The infra-config version recorded in the spec. */
    @Override
    @JsonIgnore
    public String getInfraConfigVersion() {
        return this.spec.getVersion();
    }

    /** The network policy carried by the spec, if any. */
    @Override
    @JsonIgnore
    public NetworkPolicy getNetworkPolicy() {
        return this.spec.getNetworkPolicy();
    }

    /** Whether the broker's persistent volume claim should be updated; false when unset. */
    @Override
    @JsonIgnore
    public boolean getUpdatePersistentVolumeClaim() {
        // Boolean.TRUE.equals maps null -> false exactly like the original ternary.
        return Boolean.TRUE.equals(this.spec.getBroker().getUpdatePersistentVolumeClaim());
    }
}
| apache-2.0 |
mcollovati/camel | components/camel-rss/src/generated/java/org/apache/camel/component/rss/RssEndpointConfigurer.java | 10221 | /* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.rss;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class RssEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    // NOTE: this file is generated by the Camel build tools; any manual edit
    // (including these comments) will be lost on the next regeneration.

    // Case-insensitive map of every supported endpoint option name to its type.
    // Exposed via getAllOptions and cleared via clearConfigurers on shutdown.
    // "feedUri" appears only here (it is not handled in configure/getOptionValue).
    private static final Map<String, Object> ALL_OPTIONS;
    static {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("feedUri", java.lang.String.class);
        map.put("bridgeErrorHandler", boolean.class);
        map.put("feedHeader", boolean.class);
        map.put("filter", boolean.class);
        map.put("lastUpdate", java.util.Date.class);
        map.put("password", java.lang.String.class);
        map.put("sendEmptyMessageWhenIdle", boolean.class);
        map.put("sortEntries", boolean.class);
        map.put("splitEntries", boolean.class);
        map.put("throttleEntries", boolean.class);
        map.put("username", java.lang.String.class);
        map.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
        map.put("exchangePattern", org.apache.camel.ExchangePattern.class);
        map.put("pollStrategy", org.apache.camel.spi.PollingConsumerPollStrategy.class);
        map.put("basicPropertyBinding", boolean.class);
        map.put("synchronous", boolean.class);
        map.put("backoffErrorThreshold", int.class);
        map.put("backoffIdleThreshold", int.class);
        map.put("backoffMultiplier", int.class);
        map.put("delay", long.class);
        map.put("greedy", boolean.class);
        map.put("initialDelay", long.class);
        map.put("repeatCount", long.class);
        map.put("runLoggingLevel", org.apache.camel.LoggingLevel.class);
        map.put("scheduledExecutorService", java.util.concurrent.ScheduledExecutorService.class);
        map.put("scheduler", java.lang.Object.class);
        map.put("schedulerProperties", java.util.Map.class);
        map.put("startScheduler", boolean.class);
        map.put("timeUnit", java.util.concurrent.TimeUnit.class);
        map.put("useFixedDelay", boolean.class);
        ALL_OPTIONS = map;
        // Register so the framework can release this map's memory on shutdown.
        ConfigurerStrategy.addConfigurerClearer(RssEndpointConfigurer::clearConfigurers);
    }

    // Sets a single option on the endpoint. Each option is matched either by
    // its all-lowercase form (when ignoreCase is true) or its exact camelCase
    // name; unknown names return false.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        RssEndpoint target = (RssEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "backofferrorthreshold":
        case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true;
        case "backoffidlethreshold":
        case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true;
        case "backoffmultiplier":
        case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true;
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
        case "exceptionhandler":
        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "exchangepattern":
        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
        case "feedheader":
        case "feedHeader": target.setFeedHeader(property(camelContext, boolean.class, value)); return true;
        case "filter": target.setFilter(property(camelContext, boolean.class, value)); return true;
        case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true;
        case "initialdelay":
        case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true;
        case "lastupdate":
        case "lastUpdate": target.setLastUpdate(property(camelContext, java.util.Date.class, value)); return true;
        case "password": target.setPassword(property(camelContext, java.lang.String.class, value)); return true;
        case "pollstrategy":
        case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true;
        case "repeatcount":
        case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true;
        case "runlogginglevel":
        case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
        case "scheduledexecutorservice":
        case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
        case "scheduler": target.setScheduler(property(camelContext, java.lang.Object.class, value)); return true;
        case "schedulerproperties":
        case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true;
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true;
        case "sortentries":
        case "sortEntries": target.setSortEntries(property(camelContext, boolean.class, value)); return true;
        case "splitentries":
        case "splitEntries": target.setSplitEntries(property(camelContext, boolean.class, value)); return true;
        case "startscheduler":
        case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true;
        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
        case "throttleentries":
        case "throttleEntries": target.setThrottleEntries(property(camelContext, boolean.class, value)); return true;
        case "timeunit":
        case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true;
        case "usefixeddelay":
        case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true;
        case "username": target.setUsername(property(camelContext, java.lang.String.class, value)); return true;
        default: return false;
        }
    }

    @Override
    public Map<String, Object> getAllOptions(Object target) {
        return ALL_OPTIONS;
    }

    // Intentionally empty: this configurer keeps no bootstrap-only state.
    public static void clearBootstrapConfigurers() {
    }

    public static void clearConfigurers() {
        ALL_OPTIONS.clear();
    }

    // Reads a single option from the endpoint; mirrors the name matching used
    // in configure(). Returns null for unknown option names.
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        RssEndpoint target = (RssEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "backofferrorthreshold":
        case "backoffErrorThreshold": return target.getBackoffErrorThreshold();
        case "backoffidlethreshold":
        case "backoffIdleThreshold": return target.getBackoffIdleThreshold();
        case "backoffmultiplier":
        case "backoffMultiplier": return target.getBackoffMultiplier();
        case "basicpropertybinding":
        case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "delay": return target.getDelay();
        case "exceptionhandler":
        case "exceptionHandler": return target.getExceptionHandler();
        case "exchangepattern":
        case "exchangePattern": return target.getExchangePattern();
        case "feedheader":
        case "feedHeader": return target.isFeedHeader();
        case "filter": return target.isFilter();
        case "greedy": return target.isGreedy();
        case "initialdelay":
        case "initialDelay": return target.getInitialDelay();
        case "lastupdate":
        case "lastUpdate": return target.getLastUpdate();
        case "password": return target.getPassword();
        case "pollstrategy":
        case "pollStrategy": return target.getPollStrategy();
        case "repeatcount":
        case "repeatCount": return target.getRepeatCount();
        case "runlogginglevel":
        case "runLoggingLevel": return target.getRunLoggingLevel();
        case "scheduledexecutorservice":
        case "scheduledExecutorService": return target.getScheduledExecutorService();
        case "scheduler": return target.getScheduler();
        case "schedulerproperties":
        case "schedulerProperties": return target.getSchedulerProperties();
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle();
        case "sortentries":
        case "sortEntries": return target.isSortEntries();
        case "splitentries":
        case "splitEntries": return target.isSplitEntries();
        case "startscheduler":
        case "startScheduler": return target.isStartScheduler();
        case "synchronous": return target.isSynchronous();
        case "throttleentries":
        case "throttleEntries": return target.isThrottleEntries();
        case "timeunit":
        case "timeUnit": return target.getTimeUnit();
        case "usefixeddelay":
        case "useFixedDelay": return target.isUseFixedDelay();
        case "username": return target.getUsername();
        default: return null;
        }
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-webrisk/v1/1.31.0/com/google/api/services/webrisk/v1/model/GoogleLongrunningListOperationsResponse.java | 3075 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.webrisk.v1.model;
/**
* The response message for Operations.ListOperations.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Web Risk API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleLongrunningListOperationsResponse extends com.google.api.client.json.GenericJson {

  // NOTE: generated model class — manual edits (including these comments)
  // will be lost when the client library is regenerated.

  /**
   * The standard List next-page token.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * A list of operations that matches the specified filter in the request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleLongrunningOperation> operations;

  /**
   * The standard List next-page token.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * The standard List next-page token.
   * @param nextPageToken nextPageToken or {@code null} for none
   * @return this object, for fluent chaining
   */
  public GoogleLongrunningListOperationsResponse setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * A list of operations that matches the specified filter in the request.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleLongrunningOperation> getOperations() {
    return operations;
  }

  /**
   * A list of operations that matches the specified filter in the request.
   * @param operations operations or {@code null} for none
   * @return this object, for fluent chaining
   */
  public GoogleLongrunningListOperationsResponse setOperations(java.util.List<GoogleLongrunningOperation> operations) {
    this.operations = operations;
    return this;
  }

  // Covariant overrides narrow the return type of the GenericJson base class
  // so set(...) and clone() can be used fluently with this concrete type.
  @Override
  public GoogleLongrunningListOperationsResponse set(String fieldName, Object value) {
    return (GoogleLongrunningListOperationsResponse) super.set(fieldName, value);
  }

  @Override
  public GoogleLongrunningListOperationsResponse clone() {
    return (GoogleLongrunningListOperationsResponse) super.clone();
  }
}
| apache-2.0 |
ceridwen-com/lcfserver | lcfserver-webservice/src/main/java/com/ceridwen/lcf/server/handlers/package-info.java | 644 | /*
* Copyright 2019 Ceridwen Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Handler classes for the Ceridwen LCF server web service module.
 *
 * <p>NOTE(review): summary inferred from the package and module names only —
 * confirm against the classes actually contained in this package.</p>
 */
package com.ceridwen.lcf.server.handlers;
| apache-2.0 |
pdrados/cas | support/cas-server-support-saml-idp-metadata-jpa/src/main/java/org/apereo/cas/config/SamlIdPJpaIdPMetadataConfiguration.java | 6728 | package org.apereo.cas.config;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.jpa.JpaConfigurationContext;
import org.apereo.cas.configuration.support.JpaBeans;
import org.apereo.cas.jpa.JpaBeanFactory;
import org.apereo.cas.support.saml.idp.metadata.JpaSamlIdPMetadataCipherExecutor;
import org.apereo.cas.support.saml.idp.metadata.JpaSamlIdPMetadataGenerator;
import org.apereo.cas.support.saml.idp.metadata.JpaSamlIdPMetadataLocator;
import org.apereo.cas.support.saml.idp.metadata.generator.SamlIdPMetadataGenerator;
import org.apereo.cas.support.saml.idp.metadata.generator.SamlIdPMetadataGeneratorConfigurationContext;
import org.apereo.cas.support.saml.idp.metadata.jpa.JpaSamlIdPMetadataDocumentFactory;
import org.apereo.cas.support.saml.idp.metadata.locator.SamlIdPMetadataLocator;
import org.apereo.cas.support.saml.idp.metadata.writer.SamlIdPCertificateAndKeyWriter;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.cipher.CipherExecutorUtils;
import org.apereo.cas.util.crypto.CipherExecutor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.List;
/**
* This is {@link SamlIdPJpaIdPMetadataConfiguration}.
*
* @author Misagh Moayyed
* @since 6.0.0
*/
@Configuration("samlIdPJpaIdPMetadataConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
@EnableTransactionManagement(proxyTargetClass = true)
@Slf4j
@ConditionalOnProperty(prefix = "cas.authn.saml-idp.metadata.jpa", name = "idp-metadata-enabled", havingValue = "true")
public class SamlIdPJpaIdPMetadataConfiguration {

    // The whole configuration is active only when
    // cas.authn.saml-idp.metadata.jpa.idp-metadata-enabled=true (see the
    // @ConditionalOnProperty above).

    @Autowired
    @Qualifier("jpaBeanFactory")
    private ObjectProvider<JpaBeanFactory> jpaBeanFactory;

    @Autowired
    @Qualifier("samlSelfSignedCertificateWriter")
    private ObjectProvider<SamlIdPCertificateAndKeyWriter> samlSelfSignedCertificateWriter;

    @Autowired
    private CasConfigurationProperties casProperties;

    @Autowired
    private ConfigurableApplicationContext applicationContext;

    /**
     * JPA vendor adapter built from the global CAS JDBC settings.
     */
    @RefreshScope
    @Bean
    public JpaVendorAdapter jpaSamlMetadataIdPVendorAdapter() {
        return jpaBeanFactory.getObject().newJpaVendorAdapter(casProperties.getJdbc());
    }

    /**
     * Data source for the SAML IdP metadata store, configured from
     * {@code cas.authn.saml-idp.metadata.jpa}. Can be overridden by supplying
     * a bean with the same name.
     */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "dataSourceSamlMetadataIdP")
    public DataSource dataSourceSamlMetadataIdP() {
        val idp = casProperties.getAuthn().getSamlIdp().getMetadata();
        return JpaBeans.newDataSource(idp.getJpa());
    }

    /**
     * Entity packages to scan; the entity type is chosen per configured JPA
     * dialect by {@link JpaSamlIdPMetadataDocumentFactory}.
     */
    @Bean
    @RefreshScope
    public List<String> jpaSamlMetadataIdPPackagesToScan() {
        val idp = casProperties.getAuthn().getSamlIdp().getMetadata();
        val type = new JpaSamlIdPMetadataDocumentFactory(idp.getJpa().getDialect()).getType();
        return CollectionUtils.wrapList(type.getPackage().getName());
    }

    /**
     * Entity manager factory for the dedicated "jpaSamlMetadataIdPContext"
     * persistence unit, wired with the vendor adapter, data source and entity
     * packages defined above.
     */
    @Lazy
    @Bean
    public LocalContainerEntityManagerFactoryBean samlMetadataIdPEntityManagerFactory() {
        val idp = casProperties.getAuthn().getSamlIdp().getMetadata();
        val factory = jpaBeanFactory.getObject();
        val ctx = JpaConfigurationContext.builder()
            .jpaVendorAdapter(jpaSamlMetadataIdPVendorAdapter())
            .persistenceUnitName("jpaSamlMetadataIdPContext")
            .dataSource(dataSourceSamlMetadataIdP())
            .packagesToScan(jpaSamlMetadataIdPPackagesToScan())
            .build();
        return factory.newEntityManagerFactoryBean(ctx, idp.getJpa());
    }

    /**
     * Transaction manager bound to the metadata entity manager factory.
     *
     * @param emf the factory produced by {@link #samlMetadataIdPEntityManagerFactory()}
     */
    @Autowired
    @Bean
    public PlatformTransactionManager transactionManagerSamlMetadataIdP(
        @Qualifier("samlMetadataIdPEntityManagerFactory") final EntityManagerFactory emf) {
        val mgmr = new JpaTransactionManager();
        mgmr.setEntityManagerFactory(emf);
        return mgmr;
    }

    /**
     * Cipher used to sign/encrypt stored metadata documents. Falls back to a
     * no-op cipher (with a warning) when crypto is disabled in the settings.
     */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "jpaSamlIdPMetadataCipherExecutor")
    public CipherExecutor jpaSamlIdPMetadataCipherExecutor() {
        val idp = casProperties.getAuthn().getSamlIdp();
        val crypto = idp.getMetadata().getJpa().getCrypto();
        if (crypto.isEnabled()) {
            return CipherExecutorUtils.newStringCipherExecutor(crypto, JpaSamlIdPMetadataCipherExecutor.class);
        }
        LOGGER.info("JPA SAML IdP metadata encryption/signing is turned off and "
            + "MAY NOT be safe in a production environment. "
            + "Consider using other choices to handle encryption, signing and verification of metadata artifacts");
        return CipherExecutor.noOp();
    }

    /**
     * Generator that produces IdP metadata (certificates/keys included) inside
     * a transaction managed by {@code transactionManagerSamlMetadataIdP}.
     *
     * @param mgr the metadata transaction manager
     */
    @Autowired
    @Bean
    public SamlIdPMetadataGenerator samlIdPMetadataGenerator(@Qualifier("transactionManagerSamlMetadataIdP") final PlatformTransactionManager mgr) {
        val transactionTemplate = new TransactionTemplate(mgr);
        val context = SamlIdPMetadataGeneratorConfigurationContext.builder()
            .samlIdPMetadataLocator(samlIdPMetadataLocator())
            .samlIdPCertificateAndKeyWriter(samlSelfSignedCertificateWriter.getObject())
            .applicationContext(applicationContext)
            .casProperties(casProperties)
            .metadataCipherExecutor(jpaSamlIdPMetadataCipherExecutor())
            .build();
        return new JpaSamlIdPMetadataGenerator(
            context,
            transactionTemplate);
    }

    /**
     * Locator that reads IdP metadata back from the JPA store, decrypting via
     * the configured cipher executor.
     */
    @RefreshScope
    @Bean
    @SneakyThrows
    public SamlIdPMetadataLocator samlIdPMetadataLocator() {
        return new JpaSamlIdPMetadataLocator(jpaSamlIdPMetadataCipherExecutor());
    }
}
| apache-2.0 |
Mackenzie-High/autumn | src/autumn/lang/compiler/CompiledProgram.java | 5963 | package autumn.lang.compiler;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import high.mackenzie.autumn.resources.Finished;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
/**
* An instance of this class is a compiled Autumn program.
*
* @author Mackenzie High
*/
@Finished("2014/08/19")
public final class CompiledProgram
{
    private final String main_class;

    private final List<ClassFile> classes = Lists.newLinkedList();

    private final List<URL> libraries = Lists.newLinkedList();

    /**
     * Constructor.
     *
     * <p>
     * The main-class parameter must be null, if no entry-point is specified.
     * For example, this would occur when the compiled program is a library only.
     * </p>
     *
     * @param main_class is the name of the class that contains the program's entry-point.
     * @param classes are the classes that the compiled program is composed of.
     * @throws NullPointerException if classes is null.
     */
    public CompiledProgram(final String main_class,
                           final List<ClassFile> classes)
    {
        Preconditions.checkNotNull(classes);

        this.main_class = main_class;
        this.classes.addAll(classes);
    }

    /**
     * Constructor.
     *
     * @param program is the original compiled program.
     * @param libraries are the paths to libraries that the program relies upon.
     * @throws NullPointerException if program is null.
     * @throws NullPointerException if libraries is null.
     */
    public CompiledProgram(final CompiledProgram program,
                           final List<URL> libraries)
    {
        this(program.main_class, program.classes);

        Preconditions.checkNotNull(libraries);

        this.libraries.addAll(libraries);
    }

    /**
     * This method retrieves the name of the module that contains the entry-point.
     *
     * <p>
     * This is the fully-qualified name as it would appear in source code.
     * </p>
     *
     * @return the aforedescribed name (may be null).
     */
    public String mainClass()
    {
        return main_class;
    }

    /**
     * This method retrieves the class-files that are part of the compiled program.
     *
     * @return an immutable copy of the program's class-files.
     */
    public List<ClassFile> classes()
    {
        return ImmutableList.copyOf(classes);
    }

    /**
     * This method retrieves the list of libraries that this program relies upon.
     *
     * @return an immutable list containing the URLs of the aforesaid libraries.
     */
    public List<URL> libraries()
    {
        return ImmutableList.copyOf(libraries);
    }

    /**
     * This method writes this compiled program to a specified JAR file.
     *
     * <p>
     * If the JAR already exists, then it will be overwritten.
     * If the JAR does not exist, then it will be created.
     * </p>
     *
     * @param path is the path to the new jar file.
     * @throws IOException if the jar file cannot be written.
     */
    public void jar(final File path)
            throws IOException
    {
        final Manifest manifest = createManifest();

        /**
         * try-with-resources guarantees that both streams are closed even when
         * writing an entry fails; the previous implementation leaked both
         * streams on any IOException raised inside the loop.
         */
        try (FileOutputStream fos = new FileOutputStream(path);
             JarOutputStream jos = new JarOutputStream(fos, manifest))
        {
            for (ClassFile file : classes)
            {
                writeClassFile(jos, file);
            }
        }
    }

    /**
     * This method creates the MANIFEST.MF file to put into the JAR file.
     *
     * @return the manifest as an object.
     */
    private Manifest createManifest()
    {
        final Manifest manifest = new Manifest();

        /**
         * Set the manifest version.
         */
        manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");

        /**
         * Set the main-class attribute, when an entry-point was specified.
         */
        if (mainClass() != null)
        {
            manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, mainClass());
        }

        return manifest;
    }

    /**
     * This method writes a class-file to a JAR file.
     *
     * @param jos is the the JAR file being written
     * @param file is the class-file to write to the JAR file.
     */
    private void writeClassFile(final JarOutputStream jos,
                                final ClassFile file)
            throws IOException
    {
        /**
         * Compute the name of the class-file as a regular file.
         * In other words, the package part of the name specifies a folder hierarchy.
         */
        final String name = file.name().replace('.', '/') + ".class";

        /**
         * Add the class-file to the jar-file.
         */
        final JarEntry entry = new JarEntry(name);
        entry.setTime(System.currentTimeMillis());
        jos.putNextEntry(entry);
        jos.write(file.bytecode());
        jos.closeEntry();
    }

    /**
     * This method creates a new class-loader and uses it to load this program.
     *
     * @param parent is the parent of the new class-loader.
     * @return the newly created class-loader with this program loaded into it.
     */
    public DynamicLoader load(final ClassLoader parent)
    {
        final DynamicLoader loader = new DynamicLoader(parent, this);

        return loader;
    }

    /**
     * This method creates a new class-loader and uses it to load this program.
     *
     * <p>
     * Note: The parent of the new class-loader is the system's class-loader.
     * </p>
     *
     * @return the newly created class-loader with this program loaded into it.
     */
    public DynamicLoader load()
    {
        return load(ClassLoader.getSystemClassLoader());
    }
}
| apache-2.0 |
otto-de/edison-microservice | edison-core/src/main/java/de/otto/edison/status/indicator/CompositeStatusDetailIndicator.java | 1042 | package de.otto.edison.status.indicator;
import de.otto.edison.status.domain.StatusDetail;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import static java.util.stream.Collectors.toList;
/**
* A StatusDetailIndicator that is a composite of multiple delegates.
*
* @author Guido Steinacker
* @since 04.09.15
*/
public class CompositeStatusDetailIndicator implements StatusDetailIndicator {

    // Immutable snapshot of the delegate indicators; never null or empty.
    private final List<StatusDetailIndicator> delegates;

    /**
     * Creates a composite over the given delegate indicators.
     *
     * @param delegates the indicators to aggregate; must be non-null and non-empty
     * @throws IllegalArgumentException if delegates is null or empty
     */
    public CompositeStatusDetailIndicator(final List<StatusDetailIndicator> delegates) {
        // Validate before touching the list, and take a defensive copy so
        // later mutation of the caller's list cannot affect this composite.
        if (delegates == null || delegates.isEmpty()) {
            throw new IllegalArgumentException("CompositeStatusDetailIndicator does not have any delegate indicators");
        }
        this.delegates = new ArrayList<>(delegates);
    }

    /**
     * Collects the status details of all delegates into a single flat list,
     * preserving delegate order.
     */
    @Override
    public List<StatusDetail> statusDetails() {
        return delegates
                .stream()
                .map(StatusDetailIndicator::statusDetails)
                .flatMap(Collection::stream)
                .collect(toList());
    }
}
| apache-2.0 |
consulo/consulo-java | java-impl/src/main/java/com/siyeh/ig/memory/StaticCollectionInspection.java | 2351 | /*
* Copyright 2003-2007 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.memory;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiModifier;
import com.intellij.psi.PsiType;
import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.psiutils.CollectionUtils;
import javax.annotation.Nonnull;
import javax.swing.*;
public class StaticCollectionInspection extends BaseInspection {
/**
* @noinspection PublicField
*/
public boolean m_ignoreWeakCollections = false;
@Nonnull
public String getDisplayName() {
return InspectionGadgetsBundle.message(
"static.collection.display.name");
}
@Nonnull
public String buildErrorString(Object... infos) {
return InspectionGadgetsBundle.message(
"static.collection.problem.descriptor");
}
public JComponent createOptionsPanel() {
return new SingleCheckboxOptionsPanel(InspectionGadgetsBundle.message(
"static.collection.ignore.option"),
this, "m_ignoreWeakCollections");
}
public BaseInspectionVisitor buildVisitor() {
return new StaticCollectionVisitor();
}
private class StaticCollectionVisitor extends BaseInspectionVisitor {
@Override
public void visitField(@Nonnull PsiField field) {
if (!field.hasModifierProperty(PsiModifier.STATIC)) {
return;
}
final PsiType type = field.getType();
if (!CollectionUtils.isCollectionClassOrInterface(type)) {
return;
}
if (!m_ignoreWeakCollections ||
CollectionUtils.isWeakCollectionClass(type)) {
return;
}
registerFieldError(field);
}
}
} | apache-2.0 |
t3t5u/common-android | src/main/java/com/github/t3t5u/common/android/InputStreamInvoker.java | 2318 | package com.github.t3t5u.common.android;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import android.content.Context;
import com.github.t3t5u.common.http.InputStreamConfiguration;
import com.github.t3t5u.common.http.InputStreamConfigurationBuilder;
import com.github.t3t5u.common.http.Method;
import com.github.t3t5u.common.http.Result;
public class InputStreamInvoker extends com.github.t3t5u.common.http.InputStreamInvoker {

    // Android context used to query active-network connectivity in
    // isRetry(...) and perform().
    private final Context context;

    /**
     * Creates an invoker with no query string, no extra request properties and
     * a default configuration.
     */
    public InputStreamInvoker(final Context context, final Method method, final String url) {
        this(context, method, url, null, null, new InputStreamConfigurationBuilder().build());
    }

    /**
     * Creates an invoker with no query string and no extra request properties.
     */
    public InputStreamInvoker(final Context context, final Method method, final String url, final InputStreamConfiguration configuration) {
        this(context, method, url, null, null, configuration);
    }

    /**
     * Creates an invoker with a query string, no extra request properties and
     * a default configuration.
     */
    public InputStreamInvoker(final Context context, final Method method, final String url, final String queryString) {
        this(context, method, url, queryString, null, new InputStreamConfigurationBuilder().build());
    }

    /**
     * Creates an invoker with a query string and no extra request properties.
     */
    public InputStreamInvoker(final Context context, final Method method, final String url, final String queryString, final InputStreamConfiguration configuration) {
        this(context, method, url, queryString, null, configuration);
    }

    /**
     * Creates an invoker with a query string and request properties, using a
     * default configuration.
     */
    public InputStreamInvoker(final Context context, final Method method, final String url, final String queryString, final Map<String, List<String>> requestProperties) {
        this(context, method, url, queryString, requestProperties, new InputStreamConfigurationBuilder().build());
    }

    /**
     * Fully-specified constructor; every other constructor delegates here.
     *
     * @param context Android context used for connectivity checks
     * @param method HTTP method to invoke
     * @param url target URL
     * @param queryString optional query string (may be null)
     * @param requestProperties optional request headers (may be null)
     * @param configuration invocation configuration
     */
    public InputStreamInvoker(final Context context, final Method method, final String url, final String queryString, final Map<String, List<String>> requestProperties, final InputStreamConfiguration configuration) {
        super(method, url, queryString, requestProperties, configuration);
        this.context = context;
    }

    protected Context getContext() {
        return context;
    }

    // Retry when the superclass requests one, or when the result is a
    // non-timeout failure — but only while an active network connection is
    // still available.
    @Override
    protected boolean isRetry(final Result<InputStream> result) {
        return (super.isRetry(result) || (!result.isOk() && !result.isTimeout())) && AndroidUtils.isActiveNetworkConnected(context);
    }

    // Skips the request entirely (returns null) when there is no active
    // network connection.
    @Override
    protected Result<InputStream> perform() {
        return AndroidUtils.isActiveNetworkConnected(context) ? super.perform() : null;
    }
}
| apache-2.0 |
nuxleus/closure-compiler | src/com/google/javascript/jscomp/ControlStructureCheck.java | 2279 | /*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
/**
* Check for usage of 'with'.
*
*/
class ControlStructureCheck implements HotSwapCompilerPass {

  static final DiagnosticType USE_OF_WITH = DiagnosticType.warning(
      "JSC_USE_OF_WITH",
      "The use of the 'with' structure should be avoided.");

  private final AbstractCompiler compiler;

  // Name of the script currently being traversed, for error reporting.
  private String sourceName = null;

  ControlStructureCheck(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    check(root);
  }

  @Override
  public void hotSwapScript(Node scriptRoot) {
    check(scriptRoot);
  }

  /**
   * Reports errors for any invalid use of control structures, recursing
   * over the whole subtree rooted at {@code node}.
   *
   * @param node Current node to check.
   */
  private void check(Node node) {
    int type = node.getType();
    if (type == Token.WITH) {
      JSDocInfo info = node.getJSDocInfo();
      // A @suppress {with} annotation silences the warning.
      if (info == null || !info.getSuppressions().contains("with")) {
        report(node, USE_OF_WITH);
      }
    } else if (type == Token.SCRIPT) {
      // Remember the source file name in case we need to report an error.
      sourceName = (String) node.getProp(Node.SOURCENAME_PROP);
    }
    for (Node child = node.getFirstChild(); child != null;) {
      // Capture the sibling first so traversal is robust even if the tree changes.
      Node next = child.getNext();
      check(child);
      child = next;
    }
  }

  private void report(Node n, DiagnosticType error) {
    compiler.report(JSError.make(sourceName, n, error));
  }
}
| apache-2.0 |
pankyopkey/pCloudy-sample-projects | Java/NewAppium_SampleProjects/Chapter2-TestNg+OnParallelDevice(Android)/src/com/pCloudy/testNG/Runner.java | 4757 | package com.pCloudy.testNG;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.io.FileUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import io.appium.java_client.AppiumDriver;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.service.local.AppiumDriverLocalService;
/**
 * TestNG runner that executes a sample Appium flight-booking scenario on the
 * pCloudy device cloud. The target device is parameterised via the TestNG
 * suite XML ("deviceName"), so the same test can run on several devices in
 * parallel.
 */
public class Runner {
    AppiumDriverLocalService service;   // not used by this runner; kept for suites that start a local Appium server
    AppiumDriver<WebElement> driver;    // remote driver for the current device session
    String folder_name;                 // directory that receives screenshots
    DateFormat df;                      // timestamp format for screenshot file names

    @BeforeTest
    public void setUpSuite() throws Exception {
    }

    /**
     * Opens a remote Appium session on pCloudy for the requested device.
     *
     * @param deviceName device manufacturer injected from the TestNG suite XML
     */
    @Parameters({"deviceName"})
    @BeforeMethod
    public void prepareTest(String deviceName) throws IOException, InterruptedException {
        DesiredCapabilities capabilities = new DesiredCapabilities();
        capabilities.setCapability("pCloudy_Username", "Enter your email-id");
        capabilities.setCapability("pCloudy_ApiKey", "Enter your API Key");
        capabilities.setCapability("pCloudy_ApplicationName", "pCloudyAppiumDemo.apk");
        capabilities.setCapability("pCloudy_DurationInMinutes", 10);
        capabilities.setCapability("pCloudy_DeviceManafacturer", deviceName);
        //capabilities.setCapability("pCloudy_DeviceVersion", "8.0.0");
        //capabilities.setCapability("pCloudy_DeviceFullName", "Samsung_GalaxyTabA_Android_7.1.1");
        capabilities.setCapability("newCommandTimeout", 600);
        capabilities.setCapability("launchTimeout", 90000);
        capabilities.setCapability("automationName", "uiautomator2");
        capabilities.setCapability("appPackage", "com.pcloudy.appiumdemo");
        capabilities.setCapability("appActivity", "com.ba.mobile.LaunchActivity");
        // Use the parameterised constructor (not the raw type) so the assignment
        // to the AppiumDriver<WebElement> field is type-checked.
        driver = new AndroidDriver<WebElement>(new URL("https://device.pcloudy.com/appiumcloud/wd/hub"), capabilities);
    }

    /**
     * Walks through the demo app's one-way flight search: accept the EULA,
     * pick origin/destination, choose a date and search. A screenshot is taken
     * after every step.
     */
    @Test
    public void Test() throws IOException, InterruptedException {
        //Click on Accept button
        driver.findElement(By.xpath("//android.widget.Button[@resource-id='com.pcloudy.appiumdemo:id/accept']")).click();
        captureScreenShots();
        //Click on Flight button
        driver.findElement(By.xpath("//android.widget.Button[@resource-id='com.pcloudy.appiumdemo:id/flightButton']")).click();
        captureScreenShots();
        //Select from location
        driver.findElement(By.xpath("//android.widget.Spinner[@resource-id='com.pcloudy.appiumdemo:id/spinnerfrom']")).click();
        captureScreenShots();
        driver.findElement(By.xpath("//android.widget.CheckedTextView[@resource-id='android:id/text1' and @text='Bangalore, India (BLR)']")).click();
        captureScreenShots();
        //Select to location
        driver.findElement(By.xpath("//android.widget.Spinner[@resource-id='com.pcloudy.appiumdemo:id/spinnerto']")).click();
        captureScreenShots();
        driver.findElement(By.xpath("//android.widget.CheckedTextView[@resource-id='android:id/text1' and @text='Pune, India (PNQ)']")).click();
        captureScreenShots();
        //Select one way trip
        driver.findElement(By.xpath("//android.widget.RadioButton[@resource-id='com.pcloudy.appiumdemo:id/singleTrip']")).click();
        captureScreenShots();
        //Select departure time
        driver.findElement(By.xpath("//android.widget.TextView[@resource-id='com.pcloudy.appiumdemo:id/txtdepart']")).click();
        captureScreenShots();
        driver.findElement(By.xpath("//android.widget.Button[@resource-id='android:id/button1' and @text='OK']")).click();
        captureScreenShots();
        //Click on search flights button
        driver.findElement(By.xpath("//android.widget.Button[@resource-id='com.pcloudy.appiumdemo:id/searchFlights']")).click();
        captureScreenShots();
    }

    @AfterMethod
    public void endTest() throws IOException {
        driver.quit();
    }

    /**
     * Saves a screenshot of the current screen into the "screenshot" folder,
     * named with the current timestamp.
     */
    public void captureScreenShots() throws IOException {
        folder_name = "screenshot";
        File f = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
        //Date format for screenshot file name
        df = new SimpleDateFormat("dd-MMM-yyyy__hh_mm_ssaa");
        // mkdirs() also creates missing parents and is a no-op when the
        // directory already exists (it returns false then, which is fine here).
        new File(folder_name).mkdirs();
        //Setting file name
        String file_name = df.format(new Date()) + ".png";
        //copy screenshot file into screenshot folder.
        FileUtils.copyFile(f, new File(folder_name + "/" + file_name));
    }
}
| apache-2.0 |
wentam/DefCol | src/com/wentam/defcol/paletteList/PaletteListAdapter.java | 4005 | //////////////////////////////////////////////////////////////////////////////
// Copyright 2012 Matthew Egeler
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//////////////////////////////////////////////////////////////////////////////
package com.wentam.defcol.paletteList;
import com.wentam.defcol.R;
import com.wentam.defcol.PaletteFile;
import com.wentam.defcol.palette.PaletteActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnLongClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
import android.content.Context;
import android.content.Intent;
import java.util.ArrayList;
import android.util.Log;
/**
 * Adapter backing the palette list. Each row shows a palette name; a tap
 * either opens the palette (in {@link PaletteActivity}) or, while a
 * multi-selection is in progress, toggles the row's checked state. A long
 * press always toggles the checked state.
 */
public class PaletteListAdapter extends BaseAdapter {
    private LayoutInflater layoutInflater;
    private ArrayList<String> items;     // palette names, one per row
    private PaletteFile paletteFile;     // storage backing the palettes
    private PaletteListAdapter me;       // self-reference used inside the anonymous listeners
    private Context context;
    private ListView lv;                 // owning list view; holds the checked state

    // --
    // listeners
    // --

    /**
     * Tap: while at least one row is checked, toggle this row's checked state;
     * otherwise open the tapped palette.
     */
    OnClickListener mainListener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            int position = (Integer) v.getTag();
            if (lv.getCheckedItemCount() > 0) {
                lv.setItemChecked(position, !lv.isItemChecked(position));
            } else {
                Intent intent = new Intent(me.context, PaletteActivity.class);
                // NOTE: the extra keys are misspelled ("pallete_*") but the
                // receiving activity expects exactly these keys - do not rename.
                intent.putExtra("pallete_id", "" + position);
                int[] tmp = {1};
                intent.putExtra("pallete_colors", "" + me.paletteFile.getRow(position, tmp));
                me.context.startActivity(intent);
            }
        }
    };

    /** Long press: always toggle the row's checked state. */
    OnLongClickListener longPress = new OnLongClickListener() {
        @Override
        public boolean onLongClick(View v) {
            int position = (Integer) v.getTag();
            lv.setItemChecked(position, !lv.isItemChecked(position));
            return true;
        }
    };

    // --
    // constructor
    // --

    /**
     * @param c context used for inflation and for launching PaletteActivity
     * @param i palette names to display
     * @param p palette storage used to resolve a row's colors
     * @param l list view that owns this adapter (source of the checked state)
     */
    public PaletteListAdapter(Context c, ArrayList<String> i, PaletteFile p, ListView l) {
        super();
        this.layoutInflater = (LayoutInflater) c.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        this.items = i;
        this.paletteFile = p;
        this.me = this;
        this.context = c;
        this.lv = l;
    }

    // --
    // methods
    // --

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            // Fresh row: inflate and wire the listeners once; the listeners
            // read the current position from the button's tag.
            convertView = layoutInflater.inflate(R.layout.palette_list_item, null);
            Button button = (Button) convertView.findViewById(R.id.main_btn);
            button.setOnClickListener(mainListener);
            button.setOnLongClickListener(longPress);
        }
        // Recycled or fresh: always refresh the tag so listeners see the
        // position this row is currently bound to.
        Button main_btn = (Button) convertView.findViewById(R.id.main_btn);
        main_btn.setTag(position);
        ((TextView) convertView.findViewById(R.id.textview)).setText(items.get(position));
        return convertView;
    }

    @Override
    public int getCount() {
        return this.items.size();
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public Object getItem(int position) {
        return this.items.get(position);
    }
}
| apache-2.0 |
werkt/bazel | src/test/java/com/google/devtools/build/lib/starlark/StarlarkAnnotationContractTest.java | 3679 | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.starlark;
import com.google.devtools.build.lib.util.Classpath;
import java.lang.reflect.Method;
import net.starlark.java.annot.StarlarkBuiltin;
import net.starlark.java.annot.StarlarkInterfaceUtils;
import net.starlark.java.annot.StarlarkMethod;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests that bazel usages of {@link StarlarkMethod} and {@link StarlarkBuiltin} abide by the
* contracts specified in their documentation.
*
* <p>Tests in this class use the java reflection API.
*
* <p>This verification *would* be done via annotation processor, but annotation processors in java
* don't have access to the full set of information that the java reflection API has.
*/
@RunWith(JUnit4.class)
public class StarlarkAnnotationContractTest {

  // Common prefix of packages in bazel that may have classes that implement or
  // extend a Starlark type.
  private static final String MODULES_PACKAGE_PREFIX = "com/google/devtools/build";

  /**
   * Verifies that every class in bazel that implements or extends a Starlark type has a clearly
   * resolvable type: {@code getStarlarkBuiltin} throws when a class inherits two unrelated
   * {@code @StarlarkBuiltin} types, so invoking it for every class is the assertion.
   */
  @Test
  public void testResolvableStarlarkBuiltins() throws Exception {
    for (Class<?> clazz : Classpath.findClasses(MODULES_PACKAGE_PREFIX)) {
      StarlarkInterfaceUtils.getStarlarkBuiltin(clazz);
    }
  }

  /**
   * Verifies that no class or interface declares a {@link StarlarkMethod}-annotated method unless
   * it is itself annotated with {@code @StarlarkGlobalLibrary} or {@code @StarlarkBuiltin}.
   */
  @Test
  public void testStarlarkCallableScope() throws Exception {
    for (Class<?> clazz : Classpath.findClasses(MODULES_PACKAGE_PREFIX)) {
      boolean isStarlarkScope =
          StarlarkInterfaceUtils.getStarlarkBuiltin(clazz) != null
              || StarlarkInterfaceUtils.hasStarlarkGlobalLibrary(clazz);
      if (isStarlarkScope) {
        continue;
      }
      for (Method method : clazz.getMethods()) {
        // Only methods declared directly on this class are its responsibility.
        if (method.getDeclaringClass() != clazz) {
          continue;
        }
        StarlarkMethod callable = StarlarkInterfaceUtils.getStarlarkMethod(method);
        if (callable == null) {
          continue;
        }
        throw new AssertionError(
            String.format(
                "Class %s has a StarlarkMethod method %s but is neither a @StarlarkBuiltin"
                    + " nor a @StarlarkGlobalLibrary",
                clazz, method.getName()));
      }
    }
  }
}
| apache-2.0 |
Mageswaran1989/aja | src/main/scala/org/aja/dhira/src/main/scala/org/dhira/core/nnet/conf/distribution/GaussianDistribution.java | 1309 | /*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.nn.conf.distribution;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * A Gaussian distribution. This class adds no behaviour of its own: it simply
 * forwards to the {@link NormalDistribution} constructor, so "gaussian" and
 * "normal" name the same distribution.
 */
public class GaussianDistribution extends NormalDistribution {
    /**
     * Create a gaussian distribution (equivalent to normal)
     * with the given mean and std.
     *
     * @param mean the mean
     * @param std the standard deviation
     */
    @JsonCreator
    public GaussianDistribution(@JsonProperty("mean") double mean, @JsonProperty("std") double std) {
        super(mean, std);
    }
}
| apache-2.0 |
nigel0913/EcustLock | src/com/nigel/ecustlock/MainActivity.java | 6684 | package com.nigel.ecustlock;
import java.io.File;
import com.nigel.service.LockService;
import com.support.Cfg;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.Switch;
import android.widget.TextView;
/**
 * Main screen shown after login. Offers training, password change and voice
 * authentication for every user; for the "admin" user it additionally exposes
 * the lock-service switch, user management, configuration and developer tools.
 */
public class MainActivity extends Activity implements OnClickListener {
    // Views bound in onCreate().
    View switchLayout = null;        // container row for the service on/off switch (admin only)
    Switch toggleService = null;     // starts/stops the background LockService
    Button btnModifyPwd = null;
    Button btnOpenTrain = null;
    Button btnConfig = null;
    // Button btnMfccTest = null;
    Button btnUserManager = null;
    Button btnTestAuth = null;
    Button btnDeveloper = null;
    View layTrain = null;            // whole "train" row; tappable like btnOpenTrain
    TextView tvTrainDesc = null;     // shows whether a voice model exists for the user

    // Log tag used for lifecycle tracing below.
    final String life_tag = "MainActivity";

    // True when both the feature file and the model file exist for the current
    // user (set by userCheck()).
    boolean trained = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Log.v(life_tag, "onCreate");
        // Bind all views.
        this.switchLayout = (View) super.findViewById(R.id.switch_layout);
        this.toggleService = (Switch) super.findViewById(R.id.service_switch);
        this.btnModifyPwd = (Button) super.findViewById(R.id.btn_modify_passwd);
        this.btnOpenTrain = (Button) super.findViewById(R.id.btn_open_train);
        this.btnConfig = (Button) super.findViewById(R.id.btn_config);
        // this.btnMfccTest = (Button) super.findViewById(R.id.btn_mfcctest);
        this.btnUserManager = (Button) super.findViewById(R.id.btn_user_manager);
        this.btnTestAuth = (Button) super.findViewById(R.id.btn_test_auth);
        this.btnDeveloper = (Button) super.findViewById(R.id.btn_record_30);
        this.tvTrainDesc = (TextView) super.findViewById(R.id.tv_train_desc);
        layTrain = super.findViewById(R.id.layout_train);
        this.btnModifyPwd.setOnClickListener(this);
        this.btnOpenTrain.setOnClickListener(this);
        this.btnConfig.setOnClickListener(this);
        // this.btnMfccTest.setOnClickListener(this);
        this.btnTestAuth.setOnClickListener(this);
        this.btnDeveloper.setOnClickListener(this);
        layTrain.setOnClickListener(this);
        // NOTE(review): "user_name" is assumed to always be supplied by the
        // launching activity; if it were absent this would throw an NPE - confirm.
        Intent intent = getIntent();
        String userName = intent.getStringExtra("user_name");
        if ( userName.equals("admin") ) {
            // Admin: expose management controls and mirror the service state in
            // the toggle before attaching its listener.
            this.btnUserManager.setVisibility(View.VISIBLE);
            this.switchLayout.setVisibility(View.VISIBLE);
            this.btnConfig.setVisibility(View.VISIBLE);
            this.btnDeveloper.setVisibility(View.VISIBLE);
            this.btnUserManager.setOnClickListener(this);
            // if (LockService.isRunning(getApplicationContext())) {
            if (LockService.getStatus() == LockService.Status.RUNNING) {
                this.toggleService.setChecked(true);
            }
            else {
                this.toggleService.setChecked(false);
            }
            this.toggleService.setOnCheckedChangeListener( new StartServiceOnCheckedChangeListenerImpl() );
            Cfg.getInstance().setUserName(userName);
        }
        else {
            // Regular user: hide all admin-only controls.
            this.btnUserManager.setVisibility(View.GONE);
            this.switchLayout.setVisibility(View.GONE);
            this.btnConfig.setVisibility(View.GONE);
            this.btnDeveloper.setVisibility(View.GONE);
            Cfg.getInstance().setUserName(userName);
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        Log.v(life_tag, "onStart");
        // Re-check on every start so the "trained" label reflects training done
        // in other activities.
        userCheck();
    }

    @Override
    protected void onResume() {
        super.onResume();
        Log.v(life_tag, "onResume");
    }

    @Override
    protected void onPause() {
        super.onPause();
        Log.v(life_tag, "onPause");
    }

    @Override
    protected void onStop() {
        super.onStop();
        Log.v(life_tag, "onStop");
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        Log.v(life_tag, "onDestroy");
    }

    @Override
    protected void onRestart() {
        super.onRestart();
        Log.v(life_tag, "onRestart");
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    /**
     * Checks the user's directory structure (original comment was GBK-encoded
     * Chinese, roughly "check user directory structure"): creates the per-user
     * directory if missing and sets {@code trained} / the label depending on
     * whether both the feature file and the model file exist.
     */
    private void userCheck() {
        String username = Cfg.getInstance().getUserName();
        String rootDir = Cfg.getInstance().getRootDir();
        String userFileDir = rootDir + Cfg.getInstance().getUsersPath() + File.separator
                + username + File.separator;
        File userDir = new File(userFileDir);
        Log.d("userDir", userFileDir);
        if (!userDir.exists())
            userDir.mkdirs();
        File featrue = new File(userFileDir + username + Cfg.getInstance().getFeaSuf());
        File model = new File(userFileDir + username + Cfg.getInstance().getMdlSuf());
        if ( featrue.exists() && model.exists() ) {
            trained = true;
            // UI string is GBK mojibake (apparently 已训练, "trained"); kept byte-identical.
            tvTrainDesc.setText("ÒÑѵÁ·");
        }
        else {
            trained = false;
            // UI string is GBK mojibake (apparently 尚未训练, "not yet trained"); kept byte-identical.
            tvTrainDesc.setText("ÉÐδѵÁ·");
        }
        Log.d("trained", ""+trained);
    }

    /**
     * Starts/stops the background LockService when the admin flips the switch.
     */
    private class StartServiceOnCheckedChangeListenerImpl implements OnCheckedChangeListener {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            Intent service = new Intent(MainActivity.this, LockService.class);
            if (isChecked) {
                MainActivity.this.startService(service);
            } else {
                MainActivity.this.stopService(service);
            }
        }
    }

    /**
     * Routes every tappable view to its activity.
     */
    @Override
    public void onClick(View v) {
        int id = v.getId();
        Intent intent = null;
        switch (id) {
        case R.id.btn_config:
            intent = new Intent(MainActivity.this, ConfigActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        case R.id.btn_open_train:
        case R.id.layout_train:
            intent = new Intent(MainActivity.this, TrainActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        case R.id.btn_modify_passwd:
            intent = new Intent(MainActivity.this, PasswordActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        // case R.id.btn_mfcctest:
        // intent = new Intent(MainActivity.this, MfccTestActivity.class);
        // MainActivity.this.startActivity(intent);
        // break;
        case R.id.btn_user_manager:
            intent = new Intent(MainActivity.this, UsersActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        case R.id.btn_test_auth:
            intent = new Intent(MainActivity.this, LockActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        case R.id.btn_record_30:
            intent = new Intent(MainActivity.this, DeveloperActivity.class);
            MainActivity.this.startActivity(intent);
            break;
        default:
            break;
        }
    }
}
| apache-2.0 |
agateau/equiv | app/src/main/java/com/agateau/utils/NTemplate.java | 1155 | /*
Copyright 2015 Aurélien Gâteau
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.agateau.utils;
import android.text.Html;
import android.text.Spanned;
import android.text.SpannedString;
/**
* Simple text template system. Replaces {keyword} with values.
* Works with Spanned strings.
*/
public class NTemplate {
    // HTML form of the template text; placeholders look like {key}.
    private String html;

    public NTemplate(CharSequence text) {
        html = Html.toHtml(new SpannedString(text));
    }

    /**
     * Replaces every occurrence of {@code {key}} with {@code value}.
     *
     * @return this template, for chaining
     */
    public NTemplate put(String key, String value) {
        final String placeholder = "{" + key + "}";
        html = html.replace(placeholder, value);
        return this;
    }

    /** @return the expanded template as a Spanned string */
    public Spanned toSpanned() {
        return Html.fromHtml(html);
    }
}
| apache-2.0 |
XillioQA/xill-platform-3.4 | xill-api/src/main/java/nl/xillio/xill/api/Issue.java | 2506 | /**
* Copyright (C) 2014 Xillio (support@xillio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.xillio.xill.api;
import nl.xillio.xill.api.components.RobotID;
/**
* This class represents an issue with the code.
*/
public class Issue {

    /**
     * The severity of an issue.
     *
     * @see Type#ERROR
     * @see Type#WARNING
     * @see Type#INFO
     */
    public enum Type {
        /** The code cannot be compiled. */
        ERROR,
        /** Something needs attention. */
        WARNING,
        /** A friendly notice. */
        INFO
    }

    private final String message;
    private final int line;
    private final Type severity;
    private final RobotID robot;

    /**
     * Creates a new issue.
     *
     * @param message  the message to display
     * @param line     the line where the issue occurred
     * @param severity the severity of the issue
     * @param robot    the robot in which the issue occurred
     * @see Type
     */
    public Issue(final String message, final int line, final Type severity, final RobotID robot) {
        this.message = message;
        this.line = line;
        this.severity = severity;
        this.robot = robot;
    }

    /** @return the message describing this issue */
    public String getMessage() {
        return message;
    }

    /** @return the line on which the issue occurred */
    public int getLine() {
        return line;
    }

    /** @return the severity of this issue */
    public Type getSeverity() {
        return severity;
    }

    /** @return the robot this issue belongs to */
    public RobotID getRobot() {
        return robot;
    }
}
| apache-2.0 |
rodionovsasha/ArmstrongNumbers | src/main/java/com/github/rodionovsasha/ArmstrongNumbersAll.java | 3651 | package com.github.rodionovsasha;
import java.math.BigInteger;
import java.util.Set;
import java.util.TreeSet;
import static java.math.BigInteger.ONE;
import static java.math.BigInteger.TEN;
import static java.math.BigInteger.ZERO;
/*
 * Finds all Armstrong (narcissistic) numbers: n-digit numbers equal to the sum
 * of the n-th powers of their digits. Because that sum does not depend on digit
 * order, the search enumerates only one representative per digit multiset
 * (numbers with non-decreasing digits, plus the multiples of ten used as
 * stepping stones) instead of every integer.
 */
class ArmstrongNumbersAll {
    private static final int MAX_DIGITS_AMOUNT = 39; // max digits amount where armstrong numbers exist(39)
    // log10(2): converts BigInteger.bitLength() into an approximate decimal digit count.
    private static double factor = Math.log(2) / Math.log(10);
    // 1-based index printed next to each Armstrong number in main().
    private static int counter = 1;
    public static void main(String[] args) {
        long startTime = System.currentTimeMillis();
        Set<BigInteger> result = getNumbers();
        for (BigInteger armstrongNumber : result) {
            System.out.println(counter++ + ". " + armstrongNumber);
        }
        // Report wall-clock time and a rough heap-usage figure.
        long executionTime = System.currentTimeMillis() - startTime;
        long minutes = (executionTime / 1000) / 60;
        long seconds = (executionTime / 1000) % 60;
        System.out.println(String.format("Execution time: %dm%ds", minutes, seconds));
        System.out.println("Used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024 * 1024) + "mb");
    }
    /**
     * Enumerates candidate digit combinations and collects, in sorted order,
     * every sum-of-powers value that is itself an Armstrong number.
     */
    private static Set<BigInteger> getNumbers() {
        Set<BigInteger> armstrongNumbers = new TreeSet<>();
        //Main loop
        for (BigInteger i = ONE; getDigitsAmount(i) <= MAX_DIGITS_AMOUNT; i = getNextNumber(i)) {
            BigInteger sumOfPowers = getSumOfPowers(i);
            if (isArmstrongNumber(sumOfPowers)) {
                armstrongNumbers.add(sumOfPowers);
            }
        }
        return armstrongNumbers;
    }
    /**
     * Returns the next candidate in the digit-multiset enumeration.
     * If the current number does not end in 0 we simply increment; otherwise we
     * skip ahead so the digits stay "sorted" (e.g. after 20 comes 22, after
     * 3000 comes 3300), which visits each digit multiset only once.
     */
    private static BigInteger getNextNumber(BigInteger number) {
        BigInteger copyOfNumber = number;
        if (isGrowingNumber(copyOfNumber)) { // here we have numbers where each digit not less than previous one and not more than next one: 12, 1557, 333 and so on.
            return copyOfNumber.add(ONE);
        }
        // here we have numbers which end in zero: 10, 20, ..., 100, 110, 5000, 1000000 and so on.
        BigInteger lastNumber = ONE; //can be: 1,2,3..., 10,20,30,...,100,200,300,...
        while (copyOfNumber.mod(TEN).equals(ZERO)) {// 5000 -> 500 -> 50: try to get the last non-zero digit
            copyOfNumber = copyOfNumber.divide(TEN);
            lastNumber = lastNumber.multiply(TEN);
        }
        BigInteger lastNonZeroDigit = copyOfNumber.mod(TEN);
        return number.add(lastNonZeroDigit.multiply(lastNumber).divide(TEN)); //e.g. number=100, lastNumber=10, lastNonZeroDigit=1
    }
    /*
     * True when the last digit is non-zero, i.e. a plain increment is safe
     * (135 -> true, 150 -> false). Implemented as "(n + 1) mod 10 != 1", which
     * holds exactly when n mod 10 != 0.
     * NOTE(review): despite the name this is NOT a digits-are-non-decreasing
     * test - 153 also returns true. It only detects trailing zeros, which is
     * all getNextNumber() needs.
     */
    private static boolean isGrowingNumber(BigInteger number) {
        return !number.add(ONE).mod(TEN).equals(ONE);
    }
    /**
     * Sums d^k over the digits d of {@code number}, where k is the digit count
     * of {@code number} itself.
     */
    private static BigInteger getSumOfPowers(BigInteger number) {
        BigInteger currentNumber = number;
        int power = getDigitsAmount(currentNumber);
        BigInteger currentSum = ZERO;
        while (currentNumber.compareTo(ZERO) > 0) { // currentNumber > 0
            currentSum = currentSum.add(currentNumber.mod(TEN).pow(power)); // add lastDigit^power
            currentNumber = currentNumber.divide(TEN);
        }
        return currentSum;
    }
    // A number is an Armstrong number iff it equals its own sum of digit powers.
    private static boolean isArmstrongNumber(BigInteger number) {
        return number.equals(getSumOfPowers(number));
    }
    /**
     * Decimal digit count, estimated from the bit length via log10(2); the
     * estimate can overshoot by one, which the 10^(digitCount-1) comparison
     * corrects.
     */
    private static int getDigitsAmount(BigInteger number) {
        int digitCount = (int) (factor * number.bitLength() + 1);
        if (TEN.pow(digitCount - 1).compareTo(number) > 0) {
            return digitCount - 1;
        }
        return digitCount;
    }
}
} | apache-2.0 |
jdcasey/EMB | api/src/main/java/org/commonjava/emb/conf/mgmt/LoadOnFinish.java | 739 | /*
* Copyright 2010 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.emb.conf.mgmt;
/**
 * Contract for components that should be notified once an execution has
 * finished. NOTE(review): when and by what the callback is invoked is defined
 * by the framework that discovers these components - confirm against the EMB
 * lifecycle documentation.
 */
public interface LoadOnFinish
{
    /**
     * Called when execution has finished.
     *
     * @param managementView view onto EMB's management state at completion time
     */
    void executionFinished( EMBManagementView managementView );
}
| apache-2.0 |
lmjacksoniii/hazelcast | hazelcast/src/main/java/com/hazelcast/internal/cluster/impl/Versions.java | 881 | /*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.cluster.impl;
import com.hazelcast.nio.Version;
/**
 * Holder for {@link Version} constants used across the cluster implementation.
 */
public final class Versions {
    /**
     * Represents cluster version 3.8
     */
    public static final Version V3_8 = Version.of(8);
    // Constants holder; not meant to be instantiated.
    private Versions() {
    }
}
| apache-2.0 |
thomsonreuters/assertj-swagger | src/main/java/io/github/robwin/swagger/test/SwaggerAssertionConfig.java | 3872 | package io.github.robwin.swagger.test;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
public class SwaggerAssertionConfig {
private static final String PREFIX = "assertj.swagger.";
private static final String IGNORE_MISSING_PATHS = "pathsToIgnoreInExpected";
private static final String IGNORE_MISSING_DEFINITIONS = "definitionsToIgnoreInExpected";
private static final String IGNORE_MISSING_PROPERTIES = "propertiesToIgnoreInExpected";
private static final String PATHS_PREPEND_EXPECTED = "pathsPrependExpected";
private Map<SwaggerAssertionType, Boolean> swaggerAssertionFlags = new HashMap<>();
private Set<String> pathsToIgnoreInExpected = Collections.emptySet();
private Set<String> propertiesToIgnoreInExpected = Collections.emptySet();
private Set<String> definitionsToIgnoreInExpected = Collections.emptySet();
private String pathsPrependExpected;
/**
 * Construct a {@link SwaggerAssertionConfig} with every assertion type set to
 * its default enabled state.
 */
public SwaggerAssertionConfig() {
    for (final SwaggerAssertionType assertionType : SwaggerAssertionType.values()) {
        swaggerAssertionFlags.put(assertionType, assertionType.isEnabledByDefault());
    }
}
/**
 * Construct a {@link SwaggerAssertionConfig}. All checks start at their default
 * enabled state and are then overridden by the supplied properties.
 *
 * @param props properties. Typically sourced from root of classpath
 */
public SwaggerAssertionConfig(final Properties props) {
    this();
    // Override a flag only where the properties explicitly set one; this()
    // already installed the default for every assertion type, so the previous
    // else branch that re-stored the default was redundant.
    for (final SwaggerAssertionType assertionType : SwaggerAssertionType.values()) {
        final String value = props.getProperty(PREFIX + assertionType.getBarePropertyName());
        if (value != null) {
            swaggerAssertionFlags.put(assertionType, Boolean.TRUE.toString().equals(value));
        }
    }
    final String ignoreMissingPathsStr = props.getProperty(PREFIX + IGNORE_MISSING_PATHS);
    if (!StringUtils.isBlank(ignoreMissingPathsStr)) {
        pathsToIgnoreInExpected = splitCommaDelimStrIntoSet(ignoreMissingPathsStr);
    }
    final String ignoreMissingDefinitionsStr = props.getProperty(PREFIX + IGNORE_MISSING_DEFINITIONS);
    if (!StringUtils.isBlank(ignoreMissingDefinitionsStr)) {
        definitionsToIgnoreInExpected = splitCommaDelimStrIntoSet(ignoreMissingDefinitionsStr);
    }
    final String ignoreMissingPropertiesStr = props.getProperty(PREFIX + IGNORE_MISSING_PROPERTIES);
    if (!StringUtils.isBlank(ignoreMissingPropertiesStr)) {
        propertiesToIgnoreInExpected = splitCommaDelimStrIntoSet(ignoreMissingPropertiesStr);
    }
    pathsPrependExpected = props.getProperty(PREFIX + PATHS_PREPEND_EXPECTED);
}
public boolean swaggerAssertionEnabled(SwaggerAssertionType assertionType) {
final Boolean flag = swaggerAssertionFlags.get(assertionType);
return (flag != null ? flag : assertionType.isEnabledByDefault());
}
public Set<String> getPathsToIgnoreInExpected() {
return pathsToIgnoreInExpected;
}
public Set<String> getDefinitionsToIgnoreInExpected() {
return definitionsToIgnoreInExpected;
}
public Set<String> getPropertiesToIgnoreInExpected() { return propertiesToIgnoreInExpected; }
public String getPathsPrependExpected() {
return pathsPrependExpected;
}
private Set<String> splitCommaDelimStrIntoSet(String str) {
final String[] strs = str.split("\\s*,\\s*");
return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(strs)));
}
} | apache-2.0 |
rlabancz/casa-glass | src/ca/rldesigns/casa/android/glass/model/Place.java | 1561 | package ca.rldesigns.casa.android.glass.model;
/**
 * Immutable point of interest: a geographic position (latitude/longitude)
 * together with a user-visible name, an icon and a price label.
 */
public class Place {

    private final int icon;
    private final double latitude;
    private final double longitude;
    private final String name;
    private final String price;

    /**
     * Creates a new place.
     *
     * @param icon the icon of the place
     * @param latitude the latitude of the place
     * @param longitude the longitude of the place
     * @param name the name of the place
     * @param price the price of the place
     */
    public Place(int icon, double latitude, double longitude, String name, String price) {
        this.icon = icon;
        this.latitude = latitude;
        this.longitude = longitude;
        this.name = name;
        this.price = price;
    }

    /** @return the icon of the place */
    public int getIcon() {
        return icon;
    }

    /** @return the latitude of the place */
    public double getLatitude() {
        return latitude;
    }

    /** @return the longitude of the place */
    public double getLongitude() {
        return longitude;
    }

    /** @return the name of the place */
    public String getName() {
        return name;
    }

    /** @return the price of the place */
    public String getPrice() {
        return price;
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-sagemaker/src/main/java/com/amazonaws/services/sagemaker/model/transform/CreateDataQualityJobDefinitionResultJsonUnmarshaller.java | 3040 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sagemaker.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.sagemaker.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * CreateDataQualityJobDefinitionResult JSON Unmarshaller.
 *
 * Walks the JSON token stream and populates a
 * CreateDataQualityJobDefinitionResult from fields found one nesting level
 * below the starting depth. Code-generated; only comments edited here.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateDataQualityJobDefinitionResultJsonUnmarshaller implements Unmarshaller<CreateDataQualityJobDefinitionResult, JsonUnmarshallerContext> {

    public CreateDataQualityJobDefinitionResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        CreateDataQualityJobDefinitionResult createDataQualityJobDefinitionResult = new CreateDataQualityJobDefinitionResult();
        // Remember where we started so we can detect leaving this object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this result live exactly one level below the start depth.
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A JSON null maps to an empty (default-initialized) result.
            return createDataQualityJobDefinitionResult;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                // Only one field is expected in this response shape.
                if (context.testExpression("JobDefinitionArn", targetDepth)) {
                    context.nextToken();
                    createDataQualityJobDefinitionResult.setJobDefinitionArn(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the object we started in is closed.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return createDataQualityJobDefinitionResult;
    }

    private static CreateDataQualityJobDefinitionResultJsonUnmarshaller instance;

    /**
     * @return the shared unmarshaller instance. Lazy initialization is not
     *         synchronized; the class has no instance state, so a racy
     *         double-creation is benign here.
     */
    public static CreateDataQualityJobDefinitionResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateDataQualityJobDefinitionResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
s3phir0th/SameGame | src/View/StatusBar.java | 2338 | package View;
import java.awt.Color;
import javax.swing.BorderFactory;
import javax.swing.JLabel;
import javax.swing.JPanel;
import translator.TranslatableGUIElement;
/**
 * StatusBar is a panel that displays information about the current game
 * state in labels, e.g. the current time, points and minimum stones.
 * (Comments translated from German; behavior unchanged except for removing
 * a duplicated statement in {@link #setJTime(double)}.)
 */
public class StatusBar extends JPanel {

    private static final long serialVersionUID = 1L;
    private JLabel JPointLabel = null;
    private JLabel JPoints = null;
    private JLabel JTimeLabel = null;
    private JLabel JTime = null;
    private JLabel JStoneLabel = null;
    private JLabel minStones = null;
    private TranslatableGUIElement guiBuilder;

    /**
     * Creates a status bar in the appropriate language using the given
     * GUI builder.
     *
     * @param guiBuilder builder used to create translated labels
     */
    public StatusBar(TranslatableGUIElement guiBuilder) {
        this.guiBuilder = guiBuilder;
        // Create the label objects, then attach them to this panel.
        createElements();
        addElements();
        setVisible(true);
    }

    /**
     * Creates the required label objects and the panel border.
     */
    private void createElements() {
        JPointLabel = guiBuilder.generateJLabel("statusPoints");
        JPoints = new JLabel();
        JPoints.setForeground(Color.black);
        JTime = new JLabel();
        JTime.setForeground(Color.black);
        JTimeLabel = guiBuilder.generateJLabel("statusTime");
        JStoneLabel = guiBuilder.generateJLabel("statusStones");
        minStones = new JLabel();
        setBorder(BorderFactory.createLineBorder(Color.black));
    }

    /**
     * Adds the created labels to the status bar.
     */
    private void addElements() {
        add(JPointLabel);
        add(JPoints);
        add(JTimeLabel);
        add(JTime);
        add(JStoneLabel);
        add(minStones);
    }

    /**
     * Updates the time label to match the given time; negative time is shown
     * in red, otherwise in black.
     * Fix: the original duplicated the identical {@code setText} call in both
     * branches — only the colour depends on the sign of the time.
     *
     * @param time time value (fraction truncated for display)
     */
    public void setJTime(double time) {
        JTime.setForeground(time < 0 ? Color.red : Color.black);
        JTime.setText(String.valueOf((int) time));
    }

    /**
     * Setter for the points label.
     *
     * @param points current score (fraction truncated for display)
     */
    public void setJPoints(double points) {
        JPoints.setText(String.valueOf((int) points));
    }

    /**
     * Setter for the minimum-stones label.
     *
     * @param stones minimum number of stones
     */
    public void setMinStones(int stones) {
        minStones.setText(String.valueOf(stones));
    }
}
| apache-2.0 |
abitofalchemy/sde_technical_interviews | TechnicalInterview/src/projecteuler/prob4.java | 2683 | package projecteuler;
import common.questions.GenericQueue;
public class prob4 {

    /**
     * Project Euler problem 4.
     *
     * A palindromic number reads the same both ways. The largest palindrome
     * made from the product of two 2-digit numbers is 9009 = 91 x 99.
     * Find the largest palindrome made from the product of two 3-digit numbers.
     *
     * Fixes over the original implementation:
     * - The odd-digit-count branch compared two distinct queue objects with
     *   {@code ==} (reference equality), which is always false, so every
     *   odd-length product was reported as a non-palindrome.
     * - The odd branch also built queues of mismatched sizes (the middle
     *   digit was not skipped).
     * - {@code main} printed the first palindrome found per factor instead of
     *   tracking the maximum; it now reports the true answer (906609).
     */
    public static void main(String[] args) {
        System.out.println("Largest palindrome product of two 3-digit numbers: "
                + largestPalindromeProduct(3));
    }

    /**
     * Finds the largest palindromic product of two factors that each have
     * the given number of decimal digits.
     *
     * @param digits number of digits per factor (e.g. 2 or 3), must be >= 1
     * @return the largest palindromic product, or -1 if none exists
     * @throws IllegalArgumentException if digits < 1
     */
    static int largestPalindromeProduct(int digits) {
        if (digits < 1) {
            throw new IllegalArgumentException("digits must be >= 1: " + digits);
        }
        final int max = (int) Math.pow(10, digits) - 1;     // e.g. 999
        final int min = (int) Math.pow(10, digits - 1);     // e.g. 100
        int best = -1;
        for (int i = max; i >= min; --i) {
            // Once i*max can no longer beat the best found, no smaller i can either.
            if (i * (long) max <= best) {
                break;
            }
            for (int j = i; j >= min; --j) { // j <= i avoids checking duplicate pairs
                final int product = i * j;
                if (product <= best) {
                    break; // products only shrink as j decreases
                }
                if (isProductAPalindrome(product)) {
                    best = product;
                }
            }
        }
        return best;
    }

    /**
     * Tests whether the decimal representation of the number reads the same
     * forwards and backwards. Package-private so it can be unit-tested.
     *
     * @param intNumber number to test
     * @return true if the number is a decimal palindrome
     */
    static boolean isProductAPalindrome(int intNumber) {
        final String s = Integer.toString(intNumber);
        // Two-pointer scan from both ends; the middle digit (odd length)
        // needs no comparison.
        for (int lo = 0, hi = s.length() - 1; lo < hi; ++lo, --hi) {
            if (s.charAt(lo) != s.charAt(hi)) {
                return false;
            }
        }
        return true;
    }
}
| apache-2.0 |
jcommand/jcommand | infrastructure/targetplatform/osgi.3rdParty/jafama/src/net/jafama/FastMath.java | 110227 | /*
* Copyright 2012-2015 Jeff Hain
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* =============================================================================
* Notice of fdlibm package this program is partially derived from:
*
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunSoft, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* =============================================================================
*/
package net.jafama;
/**
* Class providing math treatments that:
* - are meant to be faster than java.lang.Math class equivalents (if any),
* - are still somehow accurate and robust (handling of NaN and such),
* - do not (or not directly) generate objects at run time (no "new").
*
* Other than optimized treatments, a valuable feature of this class is the
* presence of angles normalization methods, derived from those used in
* java.lang.Math (for which, sadly, no API is provided, letting everyone
* with the terrible responsibility of writing their own ones).
*
* Non-redefined methods Math methods are also available, for easy replacement,
* even though for some of them, such as for incrementExact, you might want to
* stick to Math versions to benefit from eventual JVM intrinsics.
*
* Use of look-up tables: around 1 Mo total, and initialized lazily or on first
* call to initTables().
*
* Depending on JVM, or JVM options, these treatments can actually be slower
* than Math ones.
* In particular, they can be slower if not optimized by the JIT, which you
* can see with -Xint JVM option.
* Another cause of slowness can be cache-misses on look-up tables.
* Also, look-up tables initialization typically takes multiple hundreds of
* milliseconds (and is about twice slower in J6 than in J5, and in J7 than in
* J6, possibly due to intrinsifications preventing optimizations such as use
* of hardware sqrt, and Math delegating to StrictMath with JIT optimizations
* not yet up during class load).
* As a result, you might want to make these treatments not use tables,
* and delegate to corresponding Math methods, when they are available in the
* lowest supported Java version, by using the appropriate property (see below).
*
* Methods with same signature than Math ones, are meant to return
* "good" approximations on all range.
* Methods terminating with "Fast" are meant to return "good" approximation
* on a reduced range only.
* Methods terminating with "Quick" are meant to be quick, but do not
* return a good approximation, and might only work on a reduced range.
*
* Properties:
*
* - jafama.usejdk (boolean, default is false):
* If true, for redefined methods, as well as their "Fast" or "Quick"
* terminated counterparts, instead of using redefined computations,
* delegating to Math, when available in required Java version.
*
* - jafama.fastlog (boolean, default is false):
* If true, using redefined computations for log(double) and
* log10(double), else they delegate to Math.log(double) and
* Math.log10(double).
* False by default because Math.log(double) and Math.log10(double)
* seem usually fast (redefined log(double) might be even faster,
* but is less accurate).
*
* - jafama.fastsqrt (boolean, default is false):
* If true, using redefined computation for sqrt(double),
* else it delegates to Math.sqrt(double).
* False by default because Math.sqrt(double) seems usually fast.
*/
public final class FastMath extends CmnFastMath {
//--------------------------------------------------------------------------
// CONFIGURATION
//--------------------------------------------------------------------------
private static final boolean USE_JDK_MATH = FM_USE_JDK_MATH;
private static final boolean USE_REDEFINED_LOG = FM_USE_REDEFINED_LOG;
private static final boolean USE_REDEFINED_SQRT = FM_USE_REDEFINED_SQRT;
private static final boolean USE_POWTABS_FOR_ASIN = FM_USE_POWTABS_FOR_ASIN;
//--------------------------------------------------------------------------
// PUBLIC METHODS
//--------------------------------------------------------------------------
/*
* trigonometry
*/
    /**
     * Computes the sine of the given angle using look-up tables plus a
     * 4th-order polynomial correction, with an accurate modulo-PI/2
     * reduction (quadrant-tracked) for huge arguments.
     *
     * @param angle Angle in radians.
     * @return Angle sine.
     */
    public static double sin(double angle) {
        if (USE_JDK_MATH) {
            return Math.sin(angle);
        }
        // sin(-x) = -sin(x): work on |angle| and negate at the end.
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle > SIN_COS_MAX_VALUE_FOR_INT_MODULO) {
            if (false) {
                // Disabled variant, kept for reference.
                // Can give very bad relative error near PI (mod 2*PI).
                angle = remainderTwoPi(angle);
                if (angle < 0.0) {
                    angle = -angle;
                    negateResult = !negateResult;
                }
            } else {
                // Reduce modulo PI/2, keeping the quadrant so the result can
                // be expressed via sin/cos of the small remainder.
                final long remAndQuad = remainderPiO2(angle);
                angle = decodeRemainder(remAndQuad);
                final double sin;
                final int q = decodeQuadrant(remAndQuad);
                if (q == 0) {
                    sin = sin(angle);
                } else if (q == 1) {
                    sin = cos(angle);
                } else if (q == 2) {
                    sin = -sin(angle);
                } else {
                    sin = -cos(angle);
                }
                return (negateResult ? -sin : sin);
            }
        }
        // index: possibly outside tables range.
        int index = (int)(angle * SIN_COS_INDEXER + 0.5);
        // delta computed in two parts (hi/lo constants) to limit rounding error.
        double delta = (angle - index * SIN_COS_DELTA_HI) - index * SIN_COS_DELTA_LO;
        // Making sure index is within tables range.
        // Last value of each table is the same than first,
        // so we ignore it (tabs size minus one) for modulo.
        index &= (SIN_COS_TABS_SIZE-2); // index % (SIN_COS_TABS_SIZE-1)
        double indexSin = MyTSinCos.sinTab[index];
        double indexCos = MyTSinCos.cosTab[index];
        // Taylor expansion around the tabulated point:
        // sin(a+d) ~ sin(a) + d*cos(a) - d^2*sin(a)/2! - d^3*cos(a)/3! + d^4*sin(a)/4!
        double result = indexSin + delta * (indexCos + delta * (-indexSin * ONE_DIV_F2 + delta * (-indexCos * ONE_DIV_F3 + delta * indexSin * ONE_DIV_F4)));
        return negateResult ? -result : result;
    }
/**
* Quick sin, with accuracy of about 1.6e-3 (PI/<look-up tabs size>)
* for |angle| < 6588395.0 (Integer.MAX_VALUE * (2*PI/<look-up tabs size>) - 2)
* (- 2 due to removing PI/2 before using cosine tab),
* and no accuracy at all for larger values.
*
* @param angle Angle in radians.
* @return Angle sine.
*/
public static double sinQuick(double angle) {
if (USE_JDK_MATH) {
return Math.sin(angle);
}
return MyTSinCos.cosTab[((int)(Math.abs(angle-Math.PI/2) * SIN_COS_INDEXER + 0.5)) & (SIN_COS_TABS_SIZE-2)];
}
    /**
     * Computes the cosine of the given angle using look-up tables plus a
     * 4th-order polynomial correction, with an accurate modulo-PI/2
     * reduction (quadrant-tracked) for huge arguments.
     *
     * @param angle Angle in radians.
     * @return Angle cosine.
     */
    public static double cos(double angle) {
        if (USE_JDK_MATH) {
            return Math.cos(angle);
        }
        // cos(-x) = cos(x): only |angle| matters.
        angle = Math.abs(angle);
        if (angle > SIN_COS_MAX_VALUE_FOR_INT_MODULO) {
            if (false) {
                // Disabled variant, kept for reference.
                // Can give very bad relative error near PI (mod 2*PI).
                angle = remainderTwoPi(angle);
                if (angle < 0.0) {
                    angle = -angle;
                }
            } else {
                // Reduce modulo PI/2 with quadrant tracking.
                final long remAndQuad = remainderPiO2(angle);
                angle = decodeRemainder(remAndQuad);
                final double cos;
                final int q = decodeQuadrant(remAndQuad);
                if (q == 0) {
                    cos = cos(angle);
                } else if (q == 1) {
                    cos = -sin(angle);
                } else if (q == 2) {
                    cos = -cos(angle);
                } else {
                    cos = sin(angle);
                }
                return cos;
            }
        }
        // index: possibly outside tables range.
        int index = (int)(angle * SIN_COS_INDEXER + 0.5);
        // delta computed in two parts (hi/lo constants) to limit rounding error.
        double delta = (angle - index * SIN_COS_DELTA_HI) - index * SIN_COS_DELTA_LO;
        // Making sure index is within tables range.
        // Last value of each table is the same than first,
        // so we ignore it (tabs size minus one) for modulo.
        index &= (SIN_COS_TABS_SIZE-2); // index % (SIN_COS_TABS_SIZE-1)
        double indexCos = MyTSinCos.cosTab[index];
        double indexSin = MyTSinCos.sinTab[index];
        // Taylor expansion of cos around the tabulated point.
        return indexCos + delta * (-indexSin + delta * (-indexCos * ONE_DIV_F2 + delta * (indexSin * ONE_DIV_F3 + delta * indexCos * ONE_DIV_F4)));
    }
/**
* Quick cos, with accuracy of about 1.6e-3 (PI/<look-up tabs size>)
* for |angle| < 6588397.0 (Integer.MAX_VALUE * (2*PI/<look-up tabs size>)),
* and no accuracy at all for larger values.
*
* @param angle Angle in radians.
* @return Angle cosine.
*/
public static double cosQuick(double angle) {
if (USE_JDK_MATH) {
return Math.cos(angle);
}
return MyTSinCos.cosTab[((int)(Math.abs(angle) * SIN_COS_INDEXER + 0.5)) & (SIN_COS_TABS_SIZE-2)];
}
    /**
     * Computes sine and cosine together, sharing the argument reduction and
     * table lookup (cheaper than calling sin and cos separately).
     *
     * @param angle Angle in radians.
     * @param cosine (out) Angle cosine.
     * @return Angle sine.
     */
    public static double sinAndCos(double angle, DoubleWrapper cosine) {
        if (USE_JDK_MATH) {
            cosine.value = Math.cos(angle);
            return Math.sin(angle);
        }
        // Using the same algorithm than sin(double) method,
        // and computing also cosine at the end.
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle > SIN_COS_MAX_VALUE_FOR_INT_MODULO) {
            if (false) {
                // Disabled variant, kept for reference.
                // Can give very bad relative error near PI (mod 2*PI).
                angle = remainderTwoPi(angle);
                if (angle < 0.0) {
                    angle = -angle;
                    negateResult = !negateResult;
                }
            } else {
                // Reduce modulo PI/2 with quadrant tracking; both outputs are
                // derived from sin/cos of the small remainder.
                final long remAndQuad = remainderPiO2(angle);
                angle = decodeRemainder(remAndQuad);
                final double sin;
                final int q = decodeQuadrant(remAndQuad);
                if (q == 0) {
                    sin = sin(angle);
                    cosine.value = cos(angle);
                } else if (q == 1) {
                    sin = cos(angle);
                    cosine.value = -sin(angle);
                } else if (q == 2) {
                    sin = -sin(angle);
                    cosine.value = -cos(angle);
                } else {
                    sin = -cos(angle);
                    cosine.value = sin(angle);
                }
                // Note: only the sine is negated for negative input;
                // cosine is even, so cosine.value stays as computed.
                return (negateResult ? -sin : sin);
            }
        }
        int index = (int)(angle * SIN_COS_INDEXER + 0.5);
        double delta = (angle - index * SIN_COS_DELTA_HI) - index * SIN_COS_DELTA_LO;
        index &= (SIN_COS_TABS_SIZE-2); // index % (SIN_COS_TABS_SIZE-1)
        double indexSin = MyTSinCos.sinTab[index];
        double indexCos = MyTSinCos.cosTab[index];
        // Could factor some multiplications (delta * factorials), but then is less accurate.
        cosine.value = indexCos + delta * (-indexSin + delta * (-indexCos * ONE_DIV_F2 + delta * (indexSin * ONE_DIV_F3 + delta * indexCos * ONE_DIV_F4)));
        double result = indexSin + delta * (indexCos + delta * (-indexSin * ONE_DIV_F2 + delta * (-indexCos * ONE_DIV_F3 + delta * indexSin * ONE_DIV_F4)));
        return negateResult ? -result : result;
    }
    /**
     * Computes the tangent of the given angle.
     *
     * Can have very bad relative error near +-PI/2,
     * but of the same magnitude than the relative delta between
     * StrictMath.tan(PI/2) and StrictMath.tan(nextDown(PI/2)).
     *
     * @param angle Angle in radians.
     * @return Angle tangent.
     */
    public static double tan(double angle) {
        if (USE_JDK_MATH) {
            return Math.tan(angle);
        }
        // tan(-x) = -tan(x): work on |angle| and negate at the end.
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle > TAN_MAX_VALUE_FOR_INT_MODULO) {
            // tan has period PI: reduce huge angles modulo PI.
            angle = remainderPi(angle);
            if (angle < 0.0) {
                angle = -angle;
                negateResult = !negateResult;
            }
        }
        // index: possibly outside tables range.
        int index = (int)(angle * TAN_INDEXER + 0.5);
        // delta computed in two parts (hi/lo constants) to limit rounding error.
        double delta = (angle - index * TAN_DELTA_HI) - index * TAN_DELTA_LO;
        // Making sure index is within tables range.
        // index modulo PI, i.e. 2*(virtual tab size minus one).
        index &= (2*(TAN_VIRTUAL_TABS_SIZE-1)-1); // index % (2*(TAN_VIRTUAL_TABS_SIZE-1))
        // Here, index is in [0,2*(TAN_VIRTUAL_TABS_SIZE-1)-1], i.e. indicates an angle in [0,PI[.
        if (index > (TAN_VIRTUAL_TABS_SIZE-1)) {
            // Angle in ]PI/2,PI[: fold back using tan(PI-x) = -tan(x).
            index = (2*(TAN_VIRTUAL_TABS_SIZE-1)) - index;
            delta = -delta;
            negateResult = !negateResult;
        }
        double result;
        if (index < TAN_TABS_SIZE) {
            // 4th-order Taylor expansion around the tabulated point.
            result = MyTTan.tanTab[index]
                    + delta * (MyTTan.tanDer1DivF1Tab[index]
                            + delta * (MyTTan.tanDer2DivF2Tab[index]
                                    + delta * (MyTTan.tanDer3DivF3Tab[index]
                                            + delta * MyTTan.tanDer4DivF4Tab[index])));
        } else { // angle in ]TAN_MAX_VALUE_FOR_TABS,TAN_MAX_VALUE_FOR_INT_MODULO], or angle is NaN
            // Using tan(angle) == 1/tan(PI/2-angle) formula: changing angle (index and delta), and inverting.
            index = (TAN_VIRTUAL_TABS_SIZE-1) - index;
            result = 1/(MyTTan.tanTab[index]
                    - delta * (MyTTan.tanDer1DivF1Tab[index]
                            - delta * (MyTTan.tanDer2DivF2Tab[index]
                                    - delta * (MyTTan.tanDer3DivF3Tab[index]
                                            - delta * MyTTan.tanDer4DivF4Tab[index]))));
        }
        return negateResult ? -result : result;
    }
    /**
     * Computes the arcsine: table-driven up to ASIN_MAX_VALUE_FOR_TABS,
     * then (optionally) power-spaced tables, then an fdlibm-derived
     * rational approximation near 1.
     *
     * @param value Value in [-1,1].
     * @return Value arcsine, in radians, in [-PI/2,PI/2]
     *         (NaN for |value| > 1 or NaN input).
     */
    public static double asin(double value) {
        if (USE_JDK_MATH) {
            return Math.asin(value);
        }
        // asin(-x) = -asin(x): work on |value| and negate at the end.
        boolean negateResult = false;
        if (value < 0.0) {
            value = -value;
            negateResult = true;
        }
        if (value <= ASIN_MAX_VALUE_FOR_TABS) {
            int index = (int)(value * ASIN_INDEXER + 0.5);
            double delta = value - index * ASIN_DELTA;
            // 4th-order Taylor expansion around the tabulated point.
            double result = MyTAsin.asinTab[index]
                    + delta * (MyTAsin.asinDer1DivF1Tab[index]
                            + delta * (MyTAsin.asinDer2DivF2Tab[index]
                                    + delta * (MyTAsin.asinDer3DivF3Tab[index]
                                            + delta * MyTAsin.asinDer4DivF4Tab[index])));
            return negateResult ? -result : result;
        } else if (USE_POWTABS_FOR_ASIN && (value <= ASIN_MAX_VALUE_FOR_POWTABS)) {
            // Power-spaced tables: sample points are denser close to 1.
            int index = (int)(powFast(value * ASIN_POWTABS_ONE_DIV_MAX_VALUE, ASIN_POWTABS_POWER) * ASIN_POWTABS_SIZE_MINUS_ONE + 0.5);
            double delta = value - MyTAsinPow.asinParamPowTab[index];
            double result = MyTAsinPow.asinPowTab[index]
                    + delta * (MyTAsinPow.asinDer1DivF1PowTab[index]
                            + delta * (MyTAsinPow.asinDer2DivF2PowTab[index]
                                    + delta * (MyTAsinPow.asinDer3DivF3PowTab[index]
                                            + delta * MyTAsinPow.asinDer4DivF4PowTab[index])));
            return negateResult ? -result : result;
        } else { // value > ASIN_MAX_VALUE_FOR_TABS, or value is NaN
            // This part is derived from fdlibm.
            if (value < 1.0) {
                double t = (1.0 - value)*0.5;
                double p = t*(ASIN_PS0+t*(ASIN_PS1+t*(ASIN_PS2+t*(ASIN_PS3+t*(ASIN_PS4+t*ASIN_PS5)))));
                double q = 1.0+t*(ASIN_QS1+t*(ASIN_QS2+t*(ASIN_QS3+t*ASIN_QS4)));
                double s = sqrt(t);
                double z = s+s*(p/q);
                // PI/2 split into hi/lo parts for extra precision.
                double result = ASIN_PIO2_HI-((z+z)-ASIN_PIO2_LO);
                return negateResult ? -result : result;
            } else { // value >= 1.0, or value is NaN
                if (value == 1.0) {
                    return negateResult ? -Math.PI/2 : Math.PI/2;
                } else {
                    return Double.NaN;
                }
            }
        }
    }
/**
* If value is not NaN and is outside [-1,1] range, closest value in this range is used.
*
* @param value Value in [-1,1].
* @return Value arcsine, in radians, in [-PI/2,PI/2].
*/
public static double asinInRange(double value) {
if (value <= -1.0) {
return -Math.PI/2;
} else if (value >= 1.0) {
return Math.PI/2;
} else {
return asin(value);
}
}
/**
* @param value Value in [-1,1].
* @return Value arccosine, in radians, in [0,PI].
*/
public static double acos(double value) {
if (USE_JDK_MATH) {
return Math.acos(value);
}
return Math.PI/2 - asin(value);
}
/**
* If value is not NaN and is outside [-1,1] range,
* closest value in this range is used.
*
* @param value Value in [-1,1].
* @return Value arccosine, in radians, in [0,PI].
*/
public static double acosInRange(double value) {
if (value <= -1.0) {
return Math.PI;
} else if (value >= 1.0) {
return 0.0;
} else {
return acos(value);
}
}
    /**
     * Computes the arctangent: "exact" at 1, table-driven up to
     * ATAN_MAX_VALUE_FOR_TABS, then an fdlibm-derived approximation.
     *
     * @param value A double value.
     * @return Value arctangent, in radians, in [-PI/2,PI/2].
     */
    public static double atan(double value) {
        if (USE_JDK_MATH) {
            return Math.atan(value);
        }
        // atan(-x) = -atan(x): work on |value| and negate at the end.
        boolean negateResult = false;
        if (value < 0.0) {
            value = -value;
            negateResult = true;
        }
        if (value == 1.0) {
            // We want "exact" result for 1.0.
            return negateResult ? -Math.PI/4 : Math.PI/4;
        } else if (value <= ATAN_MAX_VALUE_FOR_TABS) {
            int index = (int)(value * ATAN_INDEXER + 0.5);
            double delta = value - index * ATAN_DELTA;
            // 4th-order Taylor expansion around the tabulated point.
            double result = MyTAtan.atanTab[index]
                    + delta * (MyTAtan.atanDer1DivF1Tab[index]
                            + delta * (MyTAtan.atanDer2DivF2Tab[index]
                                    + delta * (MyTAtan.atanDer3DivF3Tab[index]
                                            + delta * MyTAtan.atanDer4DivF4Tab[index])));
            return negateResult ? -result : result;
        } else { // value > ATAN_MAX_VALUE_FOR_TABS, or value is NaN
            // This part is derived from fdlibm.
            if (value < TWO_POW_66) {
                // Works on x = -1/value (atan of large values computed via
                // the reciprocal), with a polynomial in x^2/x^4.
                double x = -1/value;
                double x2 = x*x;
                double x4 = x2*x2;
                double s1 = x2*(ATAN_AT0+x4*(ATAN_AT2+x4*(ATAN_AT4+x4*(ATAN_AT6+x4*(ATAN_AT8+x4*ATAN_AT10)))));
                double s2 = x4*(ATAN_AT1+x4*(ATAN_AT3+x4*(ATAN_AT5+x4*(ATAN_AT7+x4*ATAN_AT9))));
                double result = ATAN_HI3-((x*(s1+s2)-ATAN_LO3)-x);
                return negateResult ? -result : result;
            } else { // value >= 2^66, or value is NaN
                if (value != value) {
                    // NaN check (NaN != NaN).
                    return Double.NaN;
                } else {
                    // Beyond 2^66 the result saturates at +-PI/2.
                    return negateResult ? -Math.PI/2 : Math.PI/2;
                }
            }
        }
    }
    /**
     * For special values for which multiple conventions could be adopted,
     * behaves like Math.atan2(double,double).
     *
     * @param y Coordinate on y axis.
     * @param x Coordinate on x axis.
     * @return Angle from x axis positive side to (x,y) position, in radians, in [-PI,PI].
     *         Angle measure is positive when going from x axis to y axis (positive sides).
     */
    public static double atan2(double y, double x) {
        if (USE_JDK_MATH) {
            return Math.atan2(y,x);
        }
        /*
         * Using sub-methods, to make method lighter for general case,
         * and to avoid JIT-optimization crash on NaN.
         */
        if (x > 0.0) {
            if (y == 0.0) {
                // atan2(+-0,+x) is +-0: the sign of y is preserved.
                return y;
            }
            if (x == Double.POSITIVE_INFINITY) {
                // Infinite x: special-case handling in a sub-method.
                return atan2_pinf_yyy(y);
            } else {
                return atan(y/x);
            }
        } else if (x < 0.0) {
            if (y == 0.0) {
                // atan2(+-0,-x) is +-PI, sign taken from y's sign bit.
                return signFromBit(y) * Math.PI;
            }
            if (x == Double.NEGATIVE_INFINITY) {
                return atan2_ninf_yyy(y);
            } else if (y > 0.0) {
                // Second quadrant, via the identity atan2(y,x) = PI/2 - atan(x/y).
                return Math.PI/2 - atan(x/y);
            } else if (y < 0.0) {
                // Third quadrant.
                return -Math.PI/2 - atan(x/y);
            } else {
                // y is NaN.
                return Double.NaN;
            }
        } else {
            // x is +-0.0 or NaN: special-case handling in a sub-method.
            return atan2_yyy_zeroOrNaN(y, x);
        }
    }
/**
* Gives same result as Math.toRadians for some particular values
* like 90.0, 180.0 or 360.0, but is faster (no division).
*
* @param angdeg Angle value in degrees.
* @return Angle value in radians.
*/
public static double toRadians(double angdeg) {
if (USE_JDK_MATH) {
return Math.toRadians(angdeg);
}
return angdeg * (Math.PI/180);
}
/**
* Gives same result as Math.toDegrees for some particular values
* like Math.PI/2, Math.PI or 2*Math.PI, but is faster (no division).
*
* @param angrad Angle value in radians.
* @return Angle value in degrees.
*/
public static double toDegrees(double angrad) {
if (USE_JDK_MATH) {
return Math.toDegrees(angrad);
}
return angrad * (180/Math.PI);
}
/**
* @param sign Sign of the angle: true for positive, false for negative.
* @param degrees Degrees, in [0,180].
* @param minutes Minutes, in [0,59].
* @param seconds Seconds, in [0.0,60.0[.
* @return Angle in radians.
*/
public static double toRadians(boolean sign, int degrees, int minutes, double seconds) {
return toRadians(toDegrees(sign, degrees, minutes, seconds));
}
/**
* @param sign Sign of the angle: true for positive, false for negative.
* @param degrees Degrees, in [0,180].
* @param minutes Minutes, in [0,59].
* @param seconds Seconds, in [0.0,60.0[.
* @return Angle in degrees.
*/
public static double toDegrees(boolean sign, int degrees, int minutes, double seconds) {
double signFactor = sign ? 1.0 : -1.0;
return signFactor * (degrees + (1.0/60)*(minutes + (1.0/60)*seconds));
}
/**
* @param angrad Angle in radians.
* @param degrees (out) Degrees, in [0,180].
* @param minutes (out) Minutes, in [0,59].
* @param seconds (out) Seconds, in [0.0,60.0[.
* @return true if the resulting angle in [-180deg,180deg] is positive, false if it is negative.
*/
public static boolean toDMS(double angrad, IntWrapper degrees, IntWrapper minutes, DoubleWrapper seconds) {
// Computing longitude DMS.
double tmp = toDegrees(normalizeMinusPiPi(angrad));
boolean isNeg = (tmp < 0.0);
if (isNeg) {
tmp = -tmp;
}
degrees.value = (int)tmp;
tmp = (tmp-degrees.value)*60.0;
minutes.value = (int)tmp;
seconds.value = Math.min((tmp-minutes.value)*60.0,DOUBLE_BEFORE_60);
return !isNeg;
}
    /**
     * NB: Since 2*Math.PI < 2*PI, a span of 2*Math.PI does not mean full angular range.
     * ex.: isInClockwiseDomain(0.0, 2*Math.PI, -1e-20) returns false.
     * ---> For full angular range, use a span > 2*Math.PI, like 2*PI_SUP constant of this class.
     *
     * @param startAngRad An angle, in radians.
     * @param angSpanRad An angular span, >= 0.0, in radians.
     * @param angRad An angle, in radians.
     * @return true if angRad is in the clockwise angular domain going from startAngRad, over angSpanRad,
     *         extremities included, false otherwise.
     */
    public static boolean isInClockwiseDomain(double startAngRad, double angSpanRad, double angRad) {
        if (Math.abs(angRad) < -TWO_MATH_PI_IN_MINUS_PI_PI) {
            // special case for angular values of small magnitude
            if (angSpanRad <= 2*Math.PI) {
                if (angSpanRad < 0.0) {
                    // empty domain
                    return false;
                }
                // angSpanRad is in [0,2*PI]
                startAngRad = normalizeMinusPiPi(startAngRad);
                double endAngRad = normalizeMinusPiPi(startAngRad + angSpanRad);
                // After normalization the domain may or may not wrap around +-PI.
                if (startAngRad <= endAngRad) {
                    // Non-wrapping domain: simple interval test.
                    return (angRad >= startAngRad) && (angRad <= endAngRad);
                } else {
                    // Wrapping domain: inside if on either side of the wrap point.
                    return (angRad >= startAngRad) || (angRad <= endAngRad);
                }
            } else { // angSpanRad > 2*Math.PI, or is NaN
                // Span covers the full circle: true unless span is NaN
                // (x == x is the non-NaN check).
                return (angSpanRad == angSpanRad);
            }
        } else {
            // general case
            return (normalizeZeroTwoPi(angRad - startAngRad) <= angSpanRad);
        }
    }
/*
* hyperbolic trigonometry
*/
    /**
     * Some properties of sinh(x) = (exp(x)-exp(-x))/2:
     * 1) defined on ]-Infinity,+Infinity[
     * 2) result in ]-Infinity,+Infinity[
     * 3) sinh(x) = -sinh(-x) (implies sinh(0) = 0)
     * 4) sinh(epsilon) ~= epsilon
     * 5) lim(sinh(x),x->+Infinity) = +Infinity
     *    (y increasing exponentially faster than x)
     * 6) reaches +Infinity (double overflow) for x >= 710.475860073944,
     *    i.e. a bit further than exp(x)
     *
     * @param value A double value.
     * @return Value hyperbolic sine.
     */
    public static double sinh(double value) {
        if (USE_JDK_MATH) {
            return Math.sinh(value);
        }
        // sinh(x) = (exp(x)-exp(-x))/2
        // h carries both the 1/2 factor and the sign of the input.
        double h;
        if (value < 0.0) {
            value = -value;
            h = -0.5;
        } else {
            h = 0.5;
        }
        if (value < 22.0) {
            if (value < TWO_POW_N28) {
                // Tiny input: sinh(x) ~= x to double precision.
                return (h < 0.0) ? -value : value;
            } else {
                // sinh(x)
                // = (exp(x)-exp(-x))/2
                // = (exp(x)-1/exp(x))/2
                // = (expm1(x) + 1 - 1/(expm1(x)+1))/2
                // = (expm1(x) + (expm1(x)+1)/(expm1(x)+1) - 1/(expm1(x)+1))/2
                // = (expm1(x) + expm1(x)/(expm1(x)+1))/2
                double t = expm1(value);
                // Might be more accurate, if value < 1: return h*((t+t)-t*t/(t+1.0)).
                return h * (t + t/(t+1.0));
            }
        } else if (value < LOG_DOUBLE_MAX_VALUE) {
            // For x >= 22, exp(-x) is negligible vs exp(x): sinh(x) ~= exp(x)/2.
            return h * exp(value);
        } else {
            // exp(value) would overflow: compute via exp(value/2) squared.
            double t = exp(value*0.5);
            return (h*t)*t;
        }
    }
/**
 * Some properties of cosh(x) = (exp(x)+exp(-x))/2:
 * 1) defined on ]-Infinity,+Infinity[
 * 2) result in [1,+Infinity[
 * 3) cosh(0) = 1
 * 4) cosh(x) = cosh(-x)
 * 5) lim(cosh(x),x->+Infinity) = +Infinity
 * (y increasing exponentially faster than x)
 * 6) reaches +Infinity (double overflow) for x >= 710.475860073944,
 * i.e. a bit further than exp(x)
 *
 * @param value A double value.
 * @return Value hyperbolic cosine.
 */
public static double cosh(double value) {
    if (USE_JDK_MATH) {
        return Math.cosh(value);
    }
    // cosh(x) = (exp(x)+exp(-x))/2
    // cosh is even: work on |value|.
    if (value < 0.0) {
        value = -value;
    }
    if (value < LOG_TWO_POW_27) {
        if (value < TWO_POW_N27) {
            // cosh(x)
            // = (exp(x)+exp(-x))/2
            // = ((1+x+x^2/2!+...) + (1-x+x^2/2!-...))/2
            // = 1+x^2/2!+x^4/4!+...
            // For value of x small in magnitude, the sum of the terms does not add to 1.
            return 1;
        } else {
            // cosh(x)
            // = (exp(x)+exp(-x))/2
            // = (exp(x)+1/exp(x))/2
            double t = exp(value);
            return 0.5 * (t+1/t);
        }
    } else if (value < LOG_DOUBLE_MAX_VALUE) {
        // exp(-x) is negligible versus exp(x) here.
        return 0.5 * exp(value);
    } else {
        // exp(value) would overflow: compute exp(value/2) and square it.
        double t = exp(value*0.5);
        return (0.5*t)*t;
    }
}
/**
 * Much more accurate than cosh(value)-1,
 * for arguments (and results) close to zero.
 *
 * coshm1(-0.0) = -0.0, for homogeneity with
 * acosh1p(-0.0) = -0.0.
 *
 * @param value A double value.
 * @return Value hyperbolic cosine, minus 1.
 */
public static double coshm1(double value) {
    // cosh(x)-1 = (exp(x)+exp(-x))/2 - 1
    // coshm1 is even (except for the -0.0 convention): work on |value|.
    if (value < 0.0) {
        value = -value;
    }
    if (value < LOG_TWO_POW_27) {
        if (value < TWO_POW_N27) {
            if (value == 0.0) {
                // +-0.0: return the argument itself to preserve -0.0.
                return value;
            }
            // Using (expm1(x)+expm1(-x))/2
            // is not accurate for tiny values,
            // for expm1 results are of higher
            // magnitude than the result and
            // of different signs, such as their
            // sum is not accurate.
            // cosh(x) - 1
            // = (exp(x)+exp(-x))/2 - 1
            // = ((1+x+x^2/2!+...) + (1-x+x^2/2!-...))/2 - 1
            // = x^2/2!+x^4/4!+...
            // ~= x^2 * (1/2 + x^2 * 1/24)
            // = x^2 * 0.5 (since x < 2^-27)
            return 0.5 * value*value;
        } else {
            // cosh(x) - 1
            // = (exp(x)+exp(-x))/2 - 1
            // = (exp(x)-1+exp(-x)-1)/2
            // = (expm1(x)+expm1(-x))/2
            return 0.5 * (expm1(value)+expm1(-value));
        }
    } else if (value < LOG_DOUBLE_MAX_VALUE) {
        // exp(-x) negligible; the -1 still matters at this magnitude.
        return 0.5 * exp(value) - 1.0;
    } else {
        // No need to subtract 1 from result.
        double t = exp(value*0.5);
        return (0.5*t)*t;
    }
}
/**
 * Computes hyperbolic sine and hyperbolic cosine together.
 *
 * @param value A double value.
 * @param hcosine (out) Value hyperbolic cosine.
 * @return Value hyperbolic sine.
 */
public static double sinhAndCosh(double value, DoubleWrapper hcosine) {
    if (USE_JDK_MATH) {
        hcosine.value = Math.cosh(value);
        return Math.sinh(value);
    }
    // Mixup of sinh and cosh treatments: if you modify them,
    // you might want to also modify this.
    // h carries the sinh sign and the 1/2 factor (sinh is odd,
    // cosh is even, so only sinh needs the sign).
    double h;
    if (value < 0.0) {
        value = -value;
        h = -0.5;
    } else {
        h = 0.5;
    }
    final double hsine;
    // LOG_TWO_POW_27 = 18.714973875118524
    if (value < LOG_TWO_POW_27) { // test from cosh
        // sinh
        if (value < TWO_POW_N28) {
            // sinh(x) rounds to x for such tiny |x|.
            hsine = (h < 0.0) ? -value : value;
        } else {
            double t = expm1(value);
            hsine = h * (t + t/(t+1.0));
        }
        // cosh
        if (value < TWO_POW_N27) {
            hcosine.value = 1;
        } else {
            double t = exp(value);
            hcosine.value = 0.5 * (t+1/t);
        }
    } else if (value < 22.0) { // test from sinh
        // Here, value is in [18.714973875118524,22.0[.
        // Share one expm1 call between both results.
        double t = expm1(value);
        hsine = h * (t + t/(t+1.0));
        hcosine.value = 0.5 * (t+1.0);
    } else {
        // For x >= 22, sinh(x) and cosh(x) are equal
        // at double precision (exp(-x) negligible).
        if (value < LOG_DOUBLE_MAX_VALUE) {
            hsine = h * exp(value);
        } else {
            // exp(value) would overflow: square exp(value/2).
            double t = exp(value*0.5);
            hsine = (h*t)*t;
        }
        hcosine.value = Math.abs(hsine);
    }
    return hsine;
}
/**
 * Some properties of tanh(x) = sinh(x)/cosh(x) = (exp(2*x)-1)/(exp(2*x)+1):
 * 1) defined on ]-Infinity,+Infinity[
 * 2) result in ]-1,1[
 * 3) tanh(x) = -tanh(-x) (implies tanh(0) = 0)
 * 4) tanh(epsilon) ~= epsilon
 * 5) lim(tanh(x),x->+Infinity) = 1
 * 6) reaches 1 (double loss of precision) for x = 19.061547465398498
 *
 * @param value A double value.
 * @return Value hyperbolic tangent.
 */
public static double tanh(double value) {
    if (USE_JDK_MATH) {
        return Math.tanh(value);
    }
    // tanh(x) = sinh(x)/cosh(x)
    // = (exp(x)-exp(-x))/(exp(x)+exp(-x))
    // = (exp(2*x)-1)/(exp(2*x)+1)
    // tanh is odd: work on |value|, restore sign at the end.
    boolean negateResult = false;
    if (value < 0.0) {
        value = -value;
        negateResult = true;
    }
    double z;
    // NOTE(review): TANH_1_THRESHOLD is presumably the x above which
    // tanh(x) rounds to 1.0 (~19.06 per the javadoc) - confirm
    // against its declaration.
    if (value < TANH_1_THRESHOLD) {
        if (value < TWO_POW_N55) {
            // tanh(x) ~= x for tiny |x|; with value < 2^-55 the
            // (1.0+-value) factor presumably rounds away, leaving +-value.
            return negateResult ? -value*(1.0-value) : value*(1.0+value);
        } else if (value >= 1) {
            // 1 - 2/(exp(2x)+1), via expm1 to keep accuracy.
            z = 1.0-2.0/(expm1(value+value)+2.0);
        } else {
            // For |x| < 1, use expm1(-2x) = t:
            // tanh(x) = -t/(t+2).
            double t = expm1(-(value+value));
            z = -t/(t+2.0);
        }
    } else {
        // Saturated to 1, except for NaN ((x != x) is true only for NaN).
        z = (value != value) ? Double.NaN : 1.0;
    }
    return negateResult ? -z : z;
}
/**
 * Some properties of asinh(x) = log(x + sqrt(x^2 + 1))
 * 1) defined on ]-Infinity,+Infinity[
 * 2) result in ]-Infinity,+Infinity[
 * 3) asinh(x) = -asinh(-x) (implies asinh(0) = 0)
 * 4) asinh(epsilon) ~= epsilon
 * 5) lim(asinh(x),x->+Infinity) = +Infinity
 * (y increasing logarithmically slower than x)
 *
 * @param value A double value.
 * @return Value hyperbolic arcsine.
 */
public static double asinh(double value) {
    // asinh(x) = log(x + sqrt(x^2 + 1))
    // asinh is odd: work on |value|, restore sign at the end.
    boolean negateResult = false;
    if (value < 0.0) {
        value = -value;
        negateResult = true;
    }
    double result;
    // (about) smallest possible for
    // non-log1p case to be accurate.
    if (value < ASINH_LOG1P_THRESHOLD) {
        // Around this range, FDLIBM uses
        // log1p(value+value*value/(1+sqrt(value*value+1))),
        // but it's slower, so we don't use it.
        /*
         * If x is close to zero, log argument is close to 1,
         * so to avoid precision loss we use log1p(double),
         * with
         * (1+x)^p = 1 + p * x + (p*(p-1))/2! * x^2 + (p*(p-1)*(p-2))/3! * x^3 + ...
         * (1+x)^p = 1 + p * x * (1 + (p-1)/2 * x * (1 + (p-2)/3 * x + ...)
         * (1+x)^0.5 = 1 + 0.5 * x * (1 + (0.5-1)/2 * x * (1 + (0.5-2)/3 * x + ...)
         * (1+x^2)^0.5 = 1 + 0.5 * x^2 * (1 + (0.5-1)/2 * x^2 * (1 + (0.5-2)/3 * x^2 + ...)
         * x + (1+x^2)^0.5 = 1 + x * (1 + 0.5 * x * (1 + (0.5-1)/2 * x^2 * (1 + (0.5-2)/3 * x^2 + ...))
         * so
         * asinh(x) = log1p(x * (1 + 0.5 * x * (1 + (0.5-1)/2 * x^2 * (1 + (0.5-2)/3 * x^2 + ...)))
         */
        final double x = value;
        final double x2 = x*x;
        // Enough terms for good accuracy,
        // given our threshold.
        final double argLog1p = (x *
        (1 + 0.5 * x
        * (1 + (0.5-1)/2 * x2
        * (1 + (0.5-2)/3 * x2
        * (1 + (0.5-3)/4 * x2
        * (1 + (0.5-4)/5 * x2
        ))))));
        result = log1p(argLog1p);
    } else if (value < ASINH_ACOSH_SQRT_ELISION_THRESHOLD) {
        // Around this range, FDLIBM uses
        // log(2*value+1/(value+sqrt(value*value+1))),
        // but it involves an additional division
        // so we don't use it.
        result = log(value + sqrt(value*value + 1.0));
    } else {
        // Here sqrt(x^2+1) ~= x, so asinh(x) ~= log(2*x).
        // log(2*value) would overflow for value > Double.MAX_VALUE/2,
        // so we compute otherwise.
        result = LOG_2 + log(value);
    }
    return negateResult ? -result : result;
}
/**
 * Some properties of acosh(x) = log(x + sqrt(x^2 - 1)):
 * 1) defined on [1,+Infinity[
 * 2) result in ]0,+Infinity[ (by convention, since cosh(x) = cosh(-x))
 * 3) acosh(1) = 0
 * 4) acosh(1+epsilon) ~= log(1 + sqrt(2*epsilon)) ~= sqrt(2*epsilon)
 * 5) lim(acosh(x),x->+Infinity) = +Infinity
 * (y increasing logarithmically slower than x)
 *
 * @param value A double value.
 * @return Value hyperbolic arccosine.
 */
public static double acosh(double value) {
    // !(value > 1.0) also catches NaN.
    if (!(value > 1.0)) {
        // NaN, or value <= 1
        if (ANTI_JIT_OPTIM_CRASH_ON_NAN) {
            // value - 1.0 yields 0.0 for value == 1.0, and propagates NaN;
            // the arithmetic form avoids a JIT issue this flag guards against.
            return (value < 1.0) ? Double.NaN : value - 1.0;
        } else {
            return (value == 1.0) ? 0.0 : Double.NaN;
        }
    }
    double result;
    if (value < ASINH_ACOSH_SQRT_ELISION_THRESHOLD) {
        // Around this range, FDLIBM uses
        // log(2*value-1/(value+sqrt(value*value-1))),
        // but it involves an additional division
        // so we don't use it.
        result = log(value + sqrt(value*value - 1.0));
    } else {
        // Here sqrt(x^2-1) ~= x, so acosh(x) ~= log(2*x).
        // log(2*value) would overflow for value > Double.MAX_VALUE/2,
        // so we compute otherwise.
        result = LOG_2 + log(value);
    }
    return result;
}
/**
 * Much more accurate than acosh(1+value),
 * for arguments (and results) close to zero.
 *
 * acosh1p(-0.0) = -0.0, for homogeneity with
 * sqrt(-0.0) = -0.0, which looks about the same
 * near 0.
 *
 * @param value A double value.
 * @return Hyperbolic arccosine of (1+value).
 */
public static double acosh1p(double value) {
    // !(value > 0.0) also catches NaN.
    if (!(value > 0.0)) {
        // NaN, or value <= 0.
        // If value is -0.0, returning -0.0.
        if (ANTI_JIT_OPTIM_CRASH_ON_NAN) {
            return (value < 0.0) ? Double.NaN : value;
        } else {
            return (value == 0.0) ? value : Double.NaN;
        }
    }
    double result;
    if (value < (ASINH_ACOSH_SQRT_ELISION_THRESHOLD-1)) {
        // acosh(1+x)
        // = log((1+x) + sqrt((1+x)^2 - 1))
        // = log(1 + x + sqrt(1 + 2*x + x^2 - 1))
        // = log1p(x + sqrt(2*x + x^2))
        // = log1p(x + sqrt(x * (2 + x))
        result = log1p(value + sqrt(value * (2 + value)));
    } else {
        // Large argument: acosh(1+x) ~= log(2*(1+x)),
        // split to avoid overflow of 2*(1+value).
        result = LOG_2 + log(1+value);
    }
    return result;
}
/**
 * Some properties of atanh(x) = log((1+x)/(1-x))/2:
 * 1) defined on ]-1,1[
 * 2) result in ]-Infinity,+Infinity[
 * 3) atanh(-1) = -Infinity (by continuity)
 * 4) atanh(1) = +Infinity (by continuity)
 * 5) atanh(epsilon) ~= epsilon
 * 6) lim(atanh(x),x->1) = +Infinity
 *
 * @param value A double value.
 * @return Value hyperbolic arctangent.
 */
public static double atanh(double value) {
    // atanh is odd: work on the absolute value,
    // and restore the sign at the end.
    final boolean flipSign = (value < 0.0);
    final double abs = flipSign ? -value : value;
    final double res;
    if (abs < 1.0) {
        // For abs < 0.5, FDLIBM uses
        // 0.5 * log1p((abs+abs) + (abs+abs)*abs/(1-abs)),
        // instead, but this is good enough for us.
        // atanh(x)
        // = log((1+x)/(1-x))/2
        // = log((1-x+2x)/(1-x))/2
        // = log1p(2x/(1-x))/2
        res = 0.5 * log1p((abs + abs) / (1.0 - abs));
    } else {
        // NaN, or abs >= 1.
        if (ANTI_JIT_OPTIM_CRASH_ON_NAN) {
            res = (abs > 1.0) ? Double.NaN : Double.POSITIVE_INFINITY + abs;
        } else {
            res = (abs == 1.0) ? Double.POSITIVE_INFINITY : Double.NaN;
        }
    }
    return flipSign ? -res : res;
}
/*
* exponentials
*/
/**
 * @param value A double value.
 * @return e^value.
 */
public static double exp(double value) {
    if (USE_JDK_MATH) {
        return Math.exp(value);
    }
    // exp(x) = exp([x])*exp(y)
    // with [x] the integer part of x, and y = x-[x]
    // ===>
    // We find an approximation of y, called z.
    // ===>
    // exp(x) = exp([x])*(exp(z)*exp(epsilon))
    // with epsilon = y - z
    // ===>
    // We have exp([x]) and exp(z) pre-computed in tables, we "just" have to compute exp(epsilon).
    //
    // We use the same indexing (cast to int) to compute x integer part and the
    // table index corresponding to z, to avoid two int casts.
    // Also, to optimize index multiplication and division, we use powers of two,
    // so that we can do it with bits shifts.
    // NOTE(review): EXP_OVERFLOW_LIMIT / EXP_UNDERFLOW_LIMIT are presumably
    // near ln(Double.MAX_VALUE) / the underflow-to-zero bound - confirm
    // against their declarations.
    if (value > EXP_OVERFLOW_LIMIT) {
        return Double.POSITIVE_INFINITY;
    } else if (!(value >= EXP_UNDERFLOW_LIMIT)) {
        // Also catches NaN via the negated comparison.
        return (value != value) ? Double.NaN : 0.0;
    }
    final int indexes = (int)(value*EXP_LO_INDEXING);
    // Truncate toward zero in both signs to get the integer part index.
    final int valueInt;
    if (indexes >= 0) {
        valueInt = (indexes>>EXP_LO_INDEXING_DIV_SHIFT);
    } else {
        valueInt = -((-indexes)>>EXP_LO_INDEXING_DIV_SHIFT);
    }
    // exp([x]) from table (offset so the smallest index is 0).
    final double hiTerm = MyTExp.expHiTab[valueInt-(int)EXP_UNDERFLOW_LIMIT];
    final int zIndex = indexes - (valueInt<<EXP_LO_INDEXING_DIV_SHIFT);
    final double y = (value-valueInt);
    final double z = zIndex*(1.0/EXP_LO_INDEXING);
    final double eps = y-z;
    final double expZ = MyTExp.expLoPosTab[zIndex+EXP_LO_TAB_MID_INDEX];
    // Degree-4 Taylor expansion of exp(eps); eps is small by construction.
    final double expEps = (1+eps*(1+eps*(1.0/2+eps*(1.0/6+eps*(1.0/24)))));
    final double loTerm = expZ * expEps;
    return hiTerm * loTerm;
}
/**
 * Quick exp, with a max relative error of about 2.94e-2 for |value| < 700.0 or so,
 * and no accuracy at all outside this range.
 * Derived from a note by Nicol N. Schraudolph, IDSIA, 1998.
 *
 * @param value A double value.
 * @return e^value.
 */
public static double expQuick(double value) {
    if (USE_JDK_MATH) {
        return Math.exp(value);
    }
    /*
     * Cast of double values, even in long range, into long, is slower than
     * from double to int for values in int range, and then from int to long.
     * For that reason, we only work with integer values in int range
     * (corresponding to the 32 first bits of the long, containing sign,
     * exponent, and highest significant bits of double's mantissa),
     * and cast twice.
     *
     * Constants determined empirically, using a random-based metaheuristic.
     * Should be possible to find better ones.
     */
    // Builds the high 32 bits of the result double directly:
    // a linear map of value approximates the biased exponent+mantissa.
    return Double.longBitsToDouble(((long)(int)(1512775.3952 * value + 1.0726481222E9))<<32);
}
/**
 * Much more accurate than exp(value)-1,
 * for arguments (and results) close to zero.
 *
 * @param value A double value.
 * @return e^value-1.
 */
public static double expm1(double value) {
    if (USE_JDK_MATH) {
        return Math.expm1(value);
    }
    // If value is far from zero, we use exp(value)-1.
    //
    // If value is close to zero, we use the following formula:
    // exp(value)-1
    // = exp(valueApprox)*exp(epsilon)-1
    // = exp(valueApprox)*(exp(epsilon)-exp(-valueApprox))
    // = exp(valueApprox)*(1+epsilon+epsilon^2/2!+...-exp(-valueApprox))
    // = exp(valueApprox)*((1-exp(-valueApprox))+epsilon+epsilon^2/2!+...)
    // exp(valueApprox) and exp(-valueApprox) being stored in tables.
    if (Math.abs(value) < EXP_LO_DISTANCE_TO_ZERO) {
        // Taking int part instead of rounding, which takes too long.
        int i = (int)(value*EXP_LO_INDEXING);
        double delta = value-i*(1.0/EXP_LO_INDEXING);
        // Table lookups for exp(+-valueApprox), then a degree-5
        // Taylor polynomial in delta (the epsilon above).
        return MyTExp.expLoPosTab[i+EXP_LO_TAB_MID_INDEX]*(MyTExp.expLoNegTab[i+EXP_LO_TAB_MID_INDEX]+delta*(1+delta*(1.0/2+delta*(1.0/6+delta*(1.0/24+delta*(1.0/120))))));
    } else {
        // Far from zero: plain exp(value)-1 is accurate enough.
        return exp(value)-1;
    }
}
/*
* logarithms
*/
/**
 * @param value A double value.
 * @return Value logarithm (base e).
 */
public static double log(double value) {
    if (USE_JDK_MATH || (!USE_REDEFINED_LOG)) {
        return Math.log(value);
    }
    if (value > 0.0) {
        if (value == Double.POSITIVE_INFINITY) {
            return Double.POSITIVE_INFINITY;
        }
        // For normal values not close to 1.0, we use the following formula:
        // log(value)
        // = log(2^exponent*1.mantissa)
        // = log(2^exponent) + log(1.mantissa)
        // = exponent * log(2) + log(1.mantissa)
        // = exponent * log(2) + log(1.mantissaApprox) + log(1.mantissa/1.mantissaApprox)
        // = exponent * log(2) + log(1.mantissaApprox) + log(1+epsilon)
        // = exponent * log(2) + log(1.mantissaApprox) + epsilon-epsilon^2/2+epsilon^3/3-epsilon^4/4+...
        // with:
        // 1.mantissaApprox <= 1.mantissa,
        // log(1.mantissaApprox) in table,
        // epsilon = (1.mantissa/1.mantissaApprox)-1
        //
        // To avoid bad relative error for small results,
        // values close to 1.0 are treated aside, with the formula:
        // log(x) = z*(2+z^2*((2.0/3)+z^2*((2.0/5))+z^2*((2.0/7))+...)))
        // with z=(x-1)/(x+1)
        double h;
        if (value > 0.95) {
            if (value < 1.14) {
                // Near 1: atanh-style series in z = (x-1)/(x+1).
                double z = (value-1.0)/(value+1.0);
                double z2 = z*z;
                return z*(2+z2*((2.0/3)+z2*((2.0/5)+z2*((2.0/7)+z2*((2.0/9)+z2*((2.0/11)))))));
            }
            h = 0.0;
        } else if (value < DOUBLE_MIN_NORMAL) {
            // Ensuring value is normal.
            value *= TWO_POW_52;
            // log(x) = log(x*2^52) - ln(2^52)
            //        = log(x*2^52) - 52*ln(2)
            // so h compensates for the scaling.
            h = -52*LOG_2;
        } else {
            h = 0.0;
        }
        int valueBitsHi = (int)(Double.doubleToRawLongBits(value)>>32);
        int valueExp = (valueBitsHi>>20)-MAX_DOUBLE_EXPONENT;
        // Getting the first LOG_BITS bits of the mantissa.
        int xIndex = ((valueBitsHi<<12)>>>(32-LOG_BITS));
        // 1.mantissa/1.mantissaApprox - 1
        double z = (value * twoPowNormalOrSubnormal(-valueExp)) * MyTLog.logXInvTab[xIndex] - 1;
        // log(1+z) ~= z - z^2/2 + z^3/3 (z is small).
        z *= (1-z*((1.0/2)-z*((1.0/3))));
        return h + valueExp * LOG_2 + (MyTLog.logXLogTab[xIndex] + z);
    } else if (value == 0.0) {
        return Double.NEGATIVE_INFINITY;
    } else { // value < 0.0, or value is NaN
        return Double.NaN;
    }
}
/**
 * Quick log, with a max relative error of about 1.9e-3
 * for values in ]Double.MIN_NORMAL,+Infinity[, and
 * worse accuracy outside this range.
 *
 * @param value A double value, in ]0,+Infinity[ (strictly positive and finite).
 * @return Value logarithm (base e).
 */
public static double logQuick(double value) {
    if (USE_JDK_MATH) {
        return Math.log(value);
    }
    /*
     * Inverse of Schraudolph's method for exp, is very inaccurate near 1,
     * and not that fast (even using floats), especially with added if's
     * to deal with values near 1, so we don't use it, and use a simplified
     * version of our log's redefined algorithm.
     */
    // Simplified version of log's redefined algorithm:
    // log(value) ~= exponent * log(2) + log(1.mantissaApprox)
    double h;
    if (value > 0.87) {
        if (value < 1.16) {
            // Near 1: first term only of the atanh-style series,
            // log(x) ~= 2*(x-1)/(x+1).
            return 2.0 * (value-1.0)/(value+1.0);
        }
        h = 0.0;
    } else if (value < DOUBLE_MIN_NORMAL) {
        // Scale subnormals up to normal range; h compensates (see log).
        value *= TWO_POW_52;
        h = -52*LOG_2;
    } else {
        h = 0.0;
    }
    int valueBitsHi = (int)(Double.doubleToRawLongBits(value)>>32);
    int valueExp = (valueBitsHi>>20)-MAX_DOUBLE_EXPONENT;
    // First LOG_BITS bits of the mantissa index the log table;
    // no correction term here, unlike in log(double).
    int xIndex = ((valueBitsHi<<12)>>>(32-LOG_BITS));
    return h + valueExp * LOG_2 + MyTLog.logXLogTab[xIndex];
}
/**
 * @param value A double value.
 * @return Value logarithm (base 10).
 */
public static double log10(double value) {
    if (USE_JDK_MATH || (!USE_REDEFINED_LOG)) {
        return Math.log10(value);
    }
    // log10(x) = ln(x) / ln(10) = ln(x) * (1/ln(10)).
    // INV_LOG_10 is < 1, but there is no risk of log(double)
    // overflow (positive or negative) while the end result shouldn't,
    // since log(Double.MIN_VALUE) and log(Double.MAX_VALUE) have
    // magnitudes of just a few hundreds.
    final double naturalLog = log(value);
    return naturalLog * INV_LOG_10;
}
/**
 * Much more accurate than log(1+value),
 * for arguments (and results) close to zero.
 *
 * @param value A double value.
 * @return Logarithm (base e) of (1+value).
 */
public static double log1p(double value) {
    if (USE_JDK_MATH) {
        return Math.log1p(value);
    }
    /*
     * A simpler but a bit slower alternative (kept for reference,
     * previously dead code behind "if (false)"):
     *   if (value == Double.POSITIVE_INFINITY) {
     *       return Double.POSITIVE_INFINITY;
     *   }
     *   double valuePlusOne = 1+value;
     *   if (valuePlusOne == 1.0) {
     *       return value;
     *   } else {
     *       return log(valuePlusOne)*(value/(valuePlusOne-1.0));
     *   }
     */
    if (value > -1.0) {
        if (value == Double.POSITIVE_INFINITY) {
            return Double.POSITIVE_INFINITY;
        }
        // ln'(x) = 1/x
        // so
        // log(x+epsilon) ~= log(x) + epsilon/x
        //
        // Let u be 1+value rounded:
        // 1+value = u+epsilon
        //
        // log(1+value)
        // = log(u+epsilon)
        // ~= log(u) + epsilon/value
        // We compute log(u) as done in log(double), and then add the corrective term.
        double valuePlusOne = 1.0+value;
        if (valuePlusOne == 1.0) {
            // 1+value rounds to 1: log1p(value) ~= value.
            return value;
        } else if (Math.abs(value) < 0.15) {
            // Near zero: atanh-style series in z = value/(value+2),
            // as in log(double) for arguments near 1.
            double z = value/(value+2.0);
            double z2 = z*z;
            return z*(2+z2*((2.0/3)+z2*((2.0/5)+z2*((2.0/7)+z2*((2.0/9)+z2*((2.0/11)))))));
        }
        int valuePlusOneBitsHi = (int)(Double.doubleToRawLongBits(valuePlusOne)>>32) & 0x7FFFFFFF;
        int valuePlusOneExp = (valuePlusOneBitsHi>>20)-MAX_DOUBLE_EXPONENT;
        // Getting the first LOG_BITS bits of the mantissa.
        int xIndex = ((valuePlusOneBitsHi<<12)>>>(32-LOG_BITS));
        // 1.mantissa/1.mantissaApprox - 1
        double z = (valuePlusOne * twoPowNormalOrSubnormal(-valuePlusOneExp)) * MyTLog.logXInvTab[xIndex] - 1;
        z *= (1-z*((1.0/2)-z*(1.0/3)));
        // Adding epsilon/valuePlusOne to z,
        // with
        // epsilon = value - (valuePlusOne-1)
        // (valuePlusOne + epsilon ~= 1+value (not rounded))
        return valuePlusOneExp * LOG_2 + MyTLog.logXLogTab[xIndex] + (z + (value - (valuePlusOne-1))/valuePlusOne);
    } else if (value == -1.0) {
        return Double.NEGATIVE_INFINITY;
    } else { // value < -1.0, or value is NaN
        return Double.NaN;
    }
}
/*
* powers
*/
/**
 * 1e-13ish accuracy or better on whole double range.
 *
 * @param value A double value.
 * @param power A power.
 * @return value^power.
 */
public static double pow(double value, double power) {
    if (USE_JDK_MATH) {
        return Math.pow(value,power);
    }
    if (power == 0.0) {
        // Also matches power == -0.0 and, per Math.pow convention,
        // applies before the NaN checks below.
        return 1.0;
    } else if (power == 1.0) {
        return value;
    }
    if (value <= 0.0) {
        // Non-positive base: the sign of the result (and whether it is
        // NaN) depends on the parity of power.
        // powerInfo: 0 if not integer, 1 if even integer, -1 if odd integer
        int powerInfo;
        if (Math.abs(power) >= (TWO_POW_52*2)) {
            // The binary digit just before comma is outside mantissa,
            // thus it is always 0: power is an even integer.
            powerInfo = 1;
        } else {
            // If power's magnitude permits, we cast into int instead of into long,
            // as it is faster.
            if (Math.abs(power) <= (double)Integer.MAX_VALUE) {
                int powerAsInt = (int)power;
                if (power == (double)powerAsInt) {
                    powerInfo = ((powerAsInt & 1) == 0) ? 1 : -1;
                } else { // power is not an integer (and not NaN, due to test against Integer.MAX_VALUE)
                    powerInfo = 0;
                }
            } else {
                long powerAsLong = (long)power;
                if (power == (double)powerAsLong) {
                    powerInfo = ((powerAsLong & 1) == 0) ? 1 : -1;
                } else { // power is not an integer, or is NaN
                    if (power != power) {
                        return Double.NaN;
                    }
                    powerInfo = 0;
                }
            }
        }
        if (value == 0.0) {
            if (power < 0.0) {
                // 1/value preserves the sign of zero:
                // odd power keeps -0.0 -> -Infinity.
                return (powerInfo < 0) ? 1/value : Double.POSITIVE_INFINITY;
            } else { // power > 0.0 (0 and NaN cases already treated)
                // Odd power keeps the signed zero.
                return (powerInfo < 0) ? value : 0.0;
            }
        } else { // value < 0.0
            if (value == Double.NEGATIVE_INFINITY) {
                if (powerInfo < 0) { // power odd integer
                    return (power < 0.0) ? -0.0 : Double.NEGATIVE_INFINITY;
                } else { // power even integer, or not an integer
                    return (power < 0.0) ? 0.0 : Double.POSITIVE_INFINITY;
                }
            } else {
                // Finite negative base: |value|^power, negated for odd power,
                // NaN for non-integer power.
                return (powerInfo == 0) ? Double.NaN : powerInfo * exp(power*log(-value));
            }
        }
    } else { // value > 0.0, or value is NaN
        // NaN propagates through log/exp.
        return exp(power*log(value));
    }
}
/**
 * Quick pow, with a max relative error of about 1e-2
 * for value >= Double.MIN_NORMAL and 1e-10 < |value^power| < 1e10,
 * of about 6e-2 for value >= Double.MIN_NORMAL and 1e-40 < |value^power| < 1e40,
 * and worse accuracy otherwise.
 *
 * @param value A double value, in ]0,+Infinity[ (strictly positive and finite).
 * @param power A double value.
 * @return value^power.
 */
public static double powQuick(double value, double power) {
    if (USE_JDK_MATH) {
        return Math.pow(value,power);
    }
    // value^power = exp(power * ln(value)), using the quick log variant.
    final double quickLog = logQuick(value);
    return exp(power * quickLog);
}
/**
 * This treatment is somehow accurate for low values of |power|,
 * and for |power*getExponent(value)| < 1023 or so (to stay away
 * from double extreme magnitudes (large and small)).
 *
 * @param value A double value.
 * @param power A power.
 * @return value^power.
 */
public static double powFast(double value, int power) {
    if (USE_JDK_MATH) {
        return Math.pow(value,power);
    }
    if (power >= 3) {
        // Exponentiation by squaring; odd bits of the power
        // accumulate their factor here. If power <= 5 it is
        // faster to finish outside the loop.
        double oddFactors = 1.0;
        while (power > 5) {
            if ((power & 1) != 0) {
                oddFactors *= value;
            }
            value *= value;
            power >>= 1; // power = power / 2
        }
        // Here, power is in [3,5].
        if (power == 3) {
            return oddFactors * value * value * value;
        }
        final double squared = value * value;
        return (power == 4)
                ? oddFactors * squared * squared
                : oddFactors * squared * squared * value; // power == 5
    }
    if (power >= 0) {
        // Here, power is in [0,2].
        if (power == 2) { // Most common case first.
            return value * value;
        }
        return (power == 1) ? value : 1.0;
    }
    // Negative power: value^power = 1/value^(-power).
    // Opposite of Integer.MIN_VALUE does not exist as int:
    // use Integer.MIN_VALUE = -(Integer.MAX_VALUE+1).
    if (power == Integer.MIN_VALUE) {
        return 1.0/(powFast(value,Integer.MAX_VALUE) * value);
    }
    return 1.0/powFast(value,-power);
}
/**
 * @param value A float value.
 * @return value*value.
 */
public static float pow2(float value) {
    final float squared = value * value;
    return squared;
}
/**
 * @param value A double value.
 * @return value*value.
 */
public static double pow2(double value) {
    final double squared = value * value;
    return squared;
}
/**
 * @param value A float value.
 * @return value*value*value.
 */
public static float pow3(float value) {
    final float cubed = value * value * value;
    return cubed;
}
/**
 * @param value A double value.
 * @return value*value*value.
 */
public static double pow3(double value) {
    final double cubed = value * value * value;
    return cubed;
}
/*
* roots
*/
/**
 * @param value A double value.
 * @return Value square root.
 */
public static double sqrt(double value) {
    if (USE_JDK_MATH || (!USE_REDEFINED_SQRT)) {
        return Math.sqrt(value);
    }
    // See cbrt for comments, sqrt uses the same ideas.
    if (!(value > 0.0)) { // value <= 0.0, or value is NaN
        if (ANTI_JIT_OPTIM_CRASH_ON_NAN) {
            return (value < 0.0) ? Double.NaN : value;
        } else {
            return (value == 0.0) ? value : Double.NaN;
        }
    } else if (value == Double.POSITIVE_INFINITY) {
        return Double.POSITIVE_INFINITY;
    }
    // h is the final scaling factor: 2 (compensating the *0.25 below),
    // further scaled down for subnormals (2^-26 = sqrt(2^-52)).
    double h;
    if (value < DOUBLE_MIN_NORMAL) {
        // Scale subnormals up to normal range; sqrt(x*2^52) = sqrt(x)*2^26.
        value *= TWO_POW_52;
        h = 2*TWO_POW_N26;
    } else {
        h = 2.0;
    }
    // Table-driven initial guess from exponent and top mantissa bits.
    int valueBitsHi = (int)(Double.doubleToRawLongBits(value)>>32);
    int valueExponentIndex = (valueBitsHi>>20)+(-MAX_DOUBLE_EXPONENT-MIN_DOUBLE_EXPONENT);
    int xIndex = ((valueBitsHi<<12)>>>(32-SQRT_LO_BITS));
    double result = MyTSqrt.sqrtXSqrtHiTab[valueExponentIndex] * MyTSqrt.sqrtXSqrtLoTab[xIndex];
    double slope = MyTSqrt.sqrtSlopeHiTab[valueExponentIndex] * MyTSqrt.sqrtSlopeLoTab[xIndex];
    // Work on value/4, so result approximates sqrt(value)/2;
    // the factor 2 in h restores the true magnitude at the end.
    value *= 0.25;
    // Three fixed-slope correction steps (Newton-like, slope not
    // recomputed between steps - initial slope is accurate enough).
    result += (value - result * result) * slope;
    result += (value - result * result) * slope;
    return h*(result + (value - result * result) * slope);
}
/**
 * Quick sqrt, with with a max relative error of about 3.41e-2
 * for values in [Double.MIN_NORMAL,Double.MAX_VALUE], and worse
 * accuracy outside this range.
 *
 * @param value A double value.
 * @return Value square root.
 */
public static double sqrtQuick(double value) {
    if (USE_JDK_MATH) {
        return Math.sqrt(value);
    }
    final long bits = Double.doubleToRawLongBits(value);
    /*
     * Constant determined empirically, using a random-based metaheuristic.
     * Should be possible to find a better one.
     */
    // Halving the (biased) exponent via the bit shift approximates
    // the square root; the added constant re-centers the bias.
    return Double.longBitsToDouble((bits+4606859074900000000L)>>>1);
}
/**
 * Quick inverse of square root, with a max relative error of about 3.44e-2
 * for values in [Double.MIN_NORMAL,Double.MAX_VALUE], and worse accuracy
 * outside this range.
 *
 * This implementation uses zero step of Newton's method.
 * Here are the max relative errors on [Double.MIN_NORMAL,Double.MAX_VALUE]
 * depending on number of steps, if you want to copy-paste this code
 * and use your own number:
 * n=0: about 3.44e-2
 * n=1: about 1.75e-3
 * n=2: about 4.6e-6
 * n=3: about 3.17e-11
 * n=4: about 3.92e-16
 * n=5: about 3.03e-16
 *
 * @param value A double value.
 * @return Inverse of value square root.
 */
public static double invSqrtQuick(double value) {
    if (USE_JDK_MATH) {
        return 1/Math.sqrt(value);
    }
    /*
     * http://en.wikipedia.org/wiki/Fast_inverse_square_root
     *
     * Variant with one Newton step, kept for reference (previously
     * dead code behind "if (false)"; much slower than
     * 1/Math.sqrt(double) if not optimized):
     *   final double halfInitial = value * 0.5;
     *   long bits = Double.doubleToRawLongBits(value);
     *   // If n=0, 6910474759270000000L might be better (3.38e-2 max relative error).
     *   bits = 0x5FE6EB50C7B537A9L - (bits>>1);
     *   value = Double.longBitsToDouble(bits);
     *   value = value * (1.5 - halfInitial * value * value); // Newton step, can repeat.
     *   return value;
     */
    // Zero Newton steps: just the magic-constant bit hack
    // (64-bit analogue of the classic 0x5F3759DF float trick).
    return Double.longBitsToDouble(0x5FE6EB50C7B537A9L - (Double.doubleToRawLongBits(value)>>1));
}
/**
 * @param value A double value.
 * @return Value cubic root.
 */
public static double cbrt(double value) {
    if (USE_JDK_MATH) {
        return Math.cbrt(value);
    }
    // h carries the result sign, the factor 2 compensating the *0.125
    // below, and (for subnormals) the inverse of cbrt(2^78) = 2^26.
    double h;
    if (value < 0.0) {
        if (value == Double.NEGATIVE_INFINITY) {
            return Double.NEGATIVE_INFINITY;
        }
        // cbrt is odd: work on -value, with negative h.
        value = -value;
        // Making sure value is normal.
        if (value < DOUBLE_MIN_NORMAL) {
            value *= (TWO_POW_52*TWO_POW_26);
            // h = <result_sign> * <result_multiplicator_to_avoid_overflow> / <cbrt(value_multiplicator_to_avoid_subnormal)>
            h = -2*TWO_POW_N26;
        } else {
            h = -2.0;
        }
    } else {
        if (!(value < Double.POSITIVE_INFINITY)) { // value is +Infinity, or value is NaN
            return value;
        }
        // Making sure value is normal.
        if (value < DOUBLE_MIN_NORMAL) {
            if (value == 0.0) {
                // cbrt(0.0) = 0.0, cbrt(-0.0) = -0.0
                return value;
            }
            value *= (TWO_POW_52*TWO_POW_26);
            h = 2*TWO_POW_N26;
        } else {
            h = 2.0;
        }
    }
    // Normal value is (2^<value exponent> * <a value in [1,2[>).
    // First member cubic root is computed, and multiplied with an approximation
    // of the cubic root of the second member, to end up with a good guess of
    // the result before using Newton's (or Archimedes's) method.
    // To compute the cubic root approximation, we use the formula "cbrt(value) = cbrt(x) * cbrt(value/x)",
    // choosing x as close to value as possible but inferior to it, so that cbrt(value/x) is close to 1
    // (we could iterate on this method, using value/x as new value for each iteration,
    // but finishing with Newton's method is faster).
    // Shift and cast into an int, which overall is faster than working with a long.
    int valueBitsHi = (int)(Double.doubleToRawLongBits(value)>>32);
    int valueExponentIndex = (valueBitsHi>>20)+(-MAX_DOUBLE_EXPONENT-MIN_DOUBLE_EXPONENT);
    // Getting the first CBRT_LO_BITS bits of the mantissa.
    int xIndex = ((valueBitsHi<<12)>>>(32-CBRT_LO_BITS));
    double result = MyTCbrt.cbrtXCbrtHiTab[valueExponentIndex] * MyTCbrt.cbrtXCbrtLoTab[xIndex];
    double slope = MyTCbrt.cbrtSlopeHiTab[valueExponentIndex] * MyTCbrt.cbrtSlopeLoTab[xIndex];
    // Lowering values to avoid overflows when using Newton's method
    // (we will then just have to return twice the result).
    // result^3 = value
    // (result/2)^3 = value/8
    value *= 0.125;
    // No need to divide result here, as division is factorized in result computation tables.
    // result *= 0.5;
    // Newton's method, looking for y = x^(1/p):
    // y(n) = y(n-1) + (x-y(n-1)^p) * slope(y(n-1))
    // y(n) = y(n-1) + (x-y(n-1)^p) * (1/p)*(x(n-1)^(1/p-1))
    // y(n) = y(n-1) + (x-y(n-1)^p) * (1/p)*(x(n-1)^((1-p)/p))
    // with x(n-1)=y(n-1)^p, i.e.:
    // y(n) = y(n-1) + (x-y(n-1)^p) * (1/p)*(y(n-1)^(1-p))
    //
    // For p=3:
    // y(n) = y(n-1) + (x-y(n-1)^3) * (1/(3*y(n-1)^2))
    // To save time, we don't recompute the slope between Newton's method steps,
    // as initial slope is good enough for a few iterations.
    //
    // NB: slope = 1/(3*trueResult*trueResult)
    // As we have result = trueResult/2 (to avoid overflows), we have:
    // slope = 4/(3*result*result)
    // = (4/3)*resultInv*resultInv
    // with newResultInv = 1/newResult
    // = 1/(oldResult+resultDelta)
    // = (oldResultInv)*1/(1+resultDelta/oldResult)
    // = (oldResultInv)*1/(1+resultDelta*oldResultInv)
    // ~= (oldResultInv)*(1-resultDelta*oldResultInv)
    // ===> Successive slopes could be computed without division, if needed,
    // by computing resultInv (instead of slope right away) and retrieving
    // slopes from it.
    // Three fixed-slope correction steps.
    result += (value - result * result * result) * slope;
    result += (value - result * result * result) * slope;
    return h*(result + (value - result * result * result) * slope);
}
/**
 * @return sqrt(x^2+y^2) without intermediate overflow or underflow.
 */
public static double hypot(double x, double y) {
    if (USE_JDK_MATH) {
        return Math.hypot(x,y);
    }
    x = Math.abs(x);
    y = Math.abs(y);
    // Ensuring x <= y.
    if (y < x) {
        double a = x;
        x = y;
        y = a;
    } else if (!(y >= x)) { // Testing if we have some NaN.
        // hypot_NaN is a helper defined elsewhere in this file;
        // presumably it applies the NaN-vs-Infinity precedence
        // rules of Math.hypot - confirm against its definition.
        return hypot_NaN(x, y);
    }
    if (y-x == y) {
        // x too small to subtract from y.
        return y;
    } else {
        // Rescale both operands into a safe range so that x*x+y*y
        // neither overflows nor underflows; undo the scaling at the end.
        double factor;
        if (y > HYPOT_MAX_MAG) {
            // y is too large: scaling down.
            x *= (1/HYPOT_FACTOR);
            y *= (1/HYPOT_FACTOR);
            factor = HYPOT_FACTOR;
        } else if (x < (1/HYPOT_MAX_MAG)) {
            // x is too small: scaling up.
            x *= HYPOT_FACTOR;
            y *= HYPOT_FACTOR;
            factor = (1/HYPOT_FACTOR);
        } else {
            factor = 1.0;
        }
        return factor * sqrt(x*x+y*y);
    }
}
/**
 * @return sqrt(x^2+y^2+z^2) without intermediate overflow or underflow.
 */
public static double hypot(double x, double y, double z) {
    if (USE_JDK_MATH) {
        // No simple JDK equivalent.
        // (Intentionally empty: Math has no 3-argument hypot.)
    }
    x = Math.abs(x);
    y = Math.abs(y);
    z = Math.abs(z);
    /*
     * Considering that z magnitude is the most likely to be the smaller,
     * hence ensuring z <= y <= x, and not x <= y <= z, for less swaps.
     */
    // Ensuring z <= y.
    if (z > y) {
        // y < z: swapping y and z
        double a = z;
        z = y;
        y = a;
    } else if (!(z <= y)) { // Testing if y or z is NaN.
        // hypot_NaN: helper defined elsewhere in this file,
        // presumably applying NaN-vs-Infinity precedence rules.
        return hypot_NaN(x, y, z);
    }
    // Ensuring y <= x.
    if (z > x) {
        // x < z <= y: moving x
        // Rotate (x,y,z) -> (y,z,x) to restore z <= y <= x.
        double oldZ = z;
        z = x;
        double oldY = y;
        y = oldZ;
        x = oldY;
    } else if (y > x) {
        // z <= x < y: swapping x and y
        double a = y;
        y = x;
        x = a;
    } else if (x != x) { // Testing if x is NaN.
        return hypot_NaN(x, y, z);
    }
    if (x-y == x) {
        // y, hence z, too small to subtract from x.
        return x;
    } else if (y-z == y) {
        // z too small to subtract from y, hence x.
        // Degenerates to the 2D case: same scaling scheme
        // as hypot(double,double).
        double factor;
        if (x > HYPOT_MAX_MAG) {
            // x is too large: scaling down.
            x *= (1/HYPOT_FACTOR);
            y *= (1/HYPOT_FACTOR);
            factor = HYPOT_FACTOR;
        } else if (y < (1/HYPOT_MAX_MAG)) {
            // y is too small: scaling up.
            x *= HYPOT_FACTOR;
            y *= HYPOT_FACTOR;
            factor = (1/HYPOT_FACTOR);
        } else {
            factor = 1.0;
        }
        return factor * sqrt(x*x+y*y);
    } else {
        // Full 3D case: rescale all three so that the sum of squares
        // neither overflows nor underflows.
        double factor;
        if (x > HYPOT_MAX_MAG) {
            // x is too large: scaling down.
            x *= (1/HYPOT_FACTOR);
            y *= (1/HYPOT_FACTOR);
            z *= (1/HYPOT_FACTOR);
            factor = HYPOT_FACTOR;
        } else if (z < (1/HYPOT_MAX_MAG)) {
            // z is too small: scaling up.
            x *= HYPOT_FACTOR;
            y *= HYPOT_FACTOR;
            z *= HYPOT_FACTOR;
            factor = (1/HYPOT_FACTOR);
        } else {
            factor = 1.0;
        }
        // Adding smaller magnitudes together first.
        return factor * sqrt(x*x+(y*y+z*z));
    }
}
/*
* close values
*/
    /**
     * Floor computed directly on the IEEE 754 bit pattern when possible.
     *
     * @param value A float value.
     * @return Floor of value.
     */
    public static float floor(float value) {
        final int exponent = getExponent(value);
        if (exponent < 0) {
            // abs(value) < 1.
            if (value < 0.0f) {
                return -1.0f;
            } else {
                // 0.0f, or -0.0f if value is -0.0f
                return 0.0f * value;
            }
        } else if (exponent < 23) {
            // A bit faster than using casts.
            // Mask keeps sign, exponent, and the mantissa bits that are
            // before the "comma" for this exponent, i.e. truncates towards zero.
            final int bits = Float.floatToRawIntBits(value);
            final int anteCommaBits = bits & (0xFF800000>>exponent);
            if ((value < 0.0f) && (anteCommaBits != bits)) {
                // Truncation dropped fractional bits of a negative value:
                // floor is one below the truncation.
                return Float.intBitsToFloat(anteCommaBits) - 1.0f;
            } else {
                return Float.intBitsToFloat(anteCommaBits);
            }
        } else {
            // +-Infinity, NaN, or a mathematical integer.
            return value;
        }
    }
    /**
     * Floor; uses int casts where they are cheap, bit tricks otherwise.
     *
     * @param value A double value.
     * @return Floor of value.
     */
    public static double floor(double value) {
        if (USE_JDK_MATH) {
            return Math.floor(value);
        }
        if (ANTI_SLOW_CASTS) {
            double valueAbs = Math.abs(value);
            if (valueAbs <= (double)Integer.MAX_VALUE) {
                if (value > 0.0) {
                    return (double)(int)value;
                } else if (value < 0.0) {
                    double anteCommaDigits = (double)(int)value;
                    if (value != anteCommaDigits) {
                        return anteCommaDigits - 1.0;
                    } else {
                        return anteCommaDigits;
                    }
                } else { // value is +-0.0 (not NaN due to test against Integer.MAX_VALUE)
                    return value;
                }
            } else if (valueAbs < TWO_POW_52) {
                // We split the value in two:
                // high part, which is a mathematical integer,
                // and the rest, for which we can get rid of the
                // post comma digits by casting into an int.
                double highPart = ((int)(value * TWO_POW_N26)) * TWO_POW_26;
                if (value > 0.0) {
                    return highPart + (double)((int)(value - highPart));
                } else {
                    double anteCommaDigits = highPart + (double)((int)(value - highPart));
                    if (value != anteCommaDigits) {
                        return anteCommaDigits - 1.0;
                    } else {
                        return anteCommaDigits;
                    }
                }
            } else { // abs(value) >= 2^52, or value is NaN
                return value;
            }
        } else {
            final int exponent = getExponent(value);
            if (exponent < 0) {
                // abs(value) < 1.
                if (value < 0.0) {
                    return -1.0;
                } else {
                    // 0.0, or -0.0 if value is -0.0
                    return 0.0 * value;
                }
            } else if (exponent < 52) {
                // A bit faster than working on bits.
                final long matIntPart = (long)value;
                final double matIntToValue = value-(double)matIntPart;
                if (matIntToValue >= 0.0) {
                    return (double)matIntPart;
                } else {
                    return (double)(matIntPart - 1);
                }
            } else {
                // +-Infinity, NaN, or a mathematical integer.
                return value;
            }
        }
    }
    /**
     * Ceiling via the floor identity ceil(x) = -floor(-x).
     *
     * @param value A float value.
     * @return Ceiling of value.
     */
    public static float ceil(float value) {
        return -floor(-value);
    }
    /**
     * Ceiling via the floor identity ceil(x) = -floor(-x).
     *
     * @param value A double value.
     * @return Ceiling of value.
     */
    public static double ceil(double value) {
        if (USE_JDK_MATH) {
            return Math.ceil(value);
        }
        return -floor(-value);
    }
    /**
     * Might have different semantics than Math.round(float),
     * see bugs 6430675 and 8010430.
     *
     * @param value A float value.
     * @return Value rounded to nearest int, choosing superior int in case two
     *         are equally close (i.e. rounding-up).
     */
    public static int round(float value) {
        // Algorithm by Dmitry Nadezhin
        // (http://mail.openjdk.java.net/pipermail/core-libs-dev/2013-August/020247.html).
        final int bits = Float.floatToRawIntBits(value);
        final int biasedExp = ((bits>>23)&0xFF);
        // Shift to get rid of bits past comma except first one: will need to
        // 1-shift to the right to end up with correct magnitude.
        final int shift = (23 - 1 + MAX_FLOAT_EXPONENT) - biasedExp;
        if ((shift & -32) == 0) {
            // shift in [0,31], so unbiased exp in [-9,22].
            // Restore the implicit leading mantissa bit before shifting.
            int extendedMantissa = (0x00800000 | (bits & 0x007FFFFF));
            if (bits < 0) {
                extendedMantissa = -extendedMantissa;
            }
            // If value is positive and first bit past comma is 0, rounding
            // to lower integer, else to upper one, which is what "+1" and
            // then ">>1" do.
            return ((extendedMantissa >> shift) + 1) >> 1;
        } else {
            // +-Infinity, NaN, or a mathematical integer.
            if (false && ANTI_SLOW_CASTS) { // not worth it
                if (Math.abs(value) >= -(float)Integer.MIN_VALUE) {
                    // +-Infinity or a mathematical integer (mostly) out of int range.
                    return (value < 0.0) ? Integer.MIN_VALUE : Integer.MAX_VALUE;
                }
                // NaN or a mathematical integer (mostly) in int range.
            }
            return (int)value;
        }
    }
    /**
     * Might have different semantics than Math.round(double),
     * see bugs 6430675 and 8010430.
     *
     * @param value A double value.
     * @return Value rounded to nearest long, choosing superior long in case two
     *         are equally close (i.e. rounding-up).
     */
    public static long round(double value) {
        final long bits = Double.doubleToRawLongBits(value);
        final int biasedExp = (((int)(bits>>52))&0x7FF);
        // Shift to get rid of bits past comma except first one: will need to
        // 1-shift to the right to end up with correct magnitude.
        final int shift = (52 - 1 + MAX_DOUBLE_EXPONENT) - biasedExp;
        if ((shift & -64) == 0) {
            // shift in [0,63], so unbiased exp in [-12,51].
            // Restore the implicit leading mantissa bit before shifting.
            long extendedMantissa = (0x0010000000000000L | (bits & 0x000FFFFFFFFFFFFFL));
            if (bits < 0) {
                extendedMantissa = -extendedMantissa;
            }
            // If value is positive and first bit past comma is 0, rounding
            // to lower integer, else to upper one, which is what "+1" and
            // then ">>1" do.
            return ((extendedMantissa >> shift) + 1L) >> 1;
        } else {
            // +-Infinity, NaN, or a mathematical integer.
            if (ANTI_SLOW_CASTS) {
                if (Math.abs(value) >= -(double)Long.MIN_VALUE) {
                    // +-Infinity or a mathematical integer (mostly) out of long range.
                    return (value < 0.0) ? Long.MIN_VALUE : Long.MAX_VALUE;
                }
                // NaN or a mathematical integer (mostly) in long range.
            }
            return (long)value;
        }
    }
    /**
     * Round-half-even to int, working on |value| and restoring the sign.
     *
     * @param value A float value.
     * @return Value rounded to nearest int, choosing even int in case two
     *         are equally close.
     */
    public static int roundEven(float value) {
        final int sign = signFromBit(value);
        value = Math.abs(value);
        if (ANTI_SLOW_CASTS) {
            if (value < TWO_POW_23_F) {
                // Getting rid of post-comma bits.
                // Adding then subtracting 2^23 forces the FPU's
                // round-to-nearest-even at integer granularity.
                value = ((value + TWO_POW_23_F) - TWO_POW_23_F);
                return sign * (int)value;
            } else if (value < (float)Integer.MAX_VALUE) { // <= doesn't work because of float precision
                // value is in [-Integer.MAX_VALUE,Integer.MAX_VALUE]
                return sign * (int)value;
            }
        } else {
            if (value < TWO_POW_23_F) {
                // Getting rid of post-comma bits.
                value = ((value + TWO_POW_23_F) - TWO_POW_23_F);
            }
        }
        return (int)(sign * value);
    }
    /**
     * Round-half-even to long, working on |value| and restoring the sign.
     *
     * @param value A double value.
     * @return Value rounded to nearest long, choosing even long in case two
     *         are equally close.
     */
    public static long roundEven(double value) {
        final int sign = (int)signFromBit(value);
        value = Math.abs(value);
        if (value < TWO_POW_52) {
            // Getting rid of post-comma bits.
            // Same add/subtract trick as the float version, at 2^52.
            value = ((value + TWO_POW_52) - TWO_POW_52);
        }
        if (ANTI_SLOW_CASTS) {
            if (value <= (double)Integer.MAX_VALUE) {
                // value is in [-Integer.MAX_VALUE,Integer.MAX_VALUE]
                return sign * (int)value;
            }
        }
        return (long)(sign * value);
    }
    /**
     * @param value A float value.
     * @return The float mathematical integer closest to the specified value,
     *         choosing even one if two are equally close, or respectively
     *         NaN, +-Infinity or +-0.0f if the value is any of these.
     */
    public static float rint(float value) {
        final int sign = signFromBit(value);
        value = Math.abs(value);
        if (value < TWO_POW_23_F) {
            // Getting rid of post-comma bits.
            value = ((TWO_POW_23_F + value ) - TWO_POW_23_F);
        }
        // Restoring original sign.
        return sign * value;
    }
    /**
     * @param value A double value.
     * @return The double mathematical integer closest to the specified value,
     *         choosing even one if two are equally close, or respectively
     *         NaN, +-Infinity or +-0.0 if the value is any of these.
     */
    public static double rint(double value) {
        if (USE_JDK_MATH) {
            return Math.rint(value);
        }
        final int sign = (int)signFromBit(value);
        value = Math.abs(value);
        if (value < TWO_POW_52) {
            // Getting rid of post-comma bits.
            value = ((TWO_POW_52 + value ) - TWO_POW_52);
        }
        // Restoring original sign.
        return sign * value;
    }
/*
* ranges
*/
    /**
     * Clamps value into [min,max]; delegates to NumbersUtils.
     *
     * @param min A float value.
     * @param max A float value.
     * @param value A float value.
     * @return min if value < min, max if value > max, value otherwise.
     */
    public static float toRange(float min, float max, float value) {
        return NumbersUtils.toRange(min, max, value);
    }
    /**
     * Clamps value into [min,max]; delegates to NumbersUtils.
     *
     * @param min A double value.
     * @param max A double value.
     * @param value A double value.
     * @return min if value < min, max if value > max, value otherwise.
     */
    public static double toRange(double min, double max, double value) {
        return NumbersUtils.toRange(min, max, value);
    }
/*
* binary operators (/,%)
*/
/**
* Returns dividend - divisor * n, where n is the mathematical integer
* closest to dividend/divisor.
* If dividend/divisor is equally close to surrounding integers,
* we choose n to be the integer of smallest magnitude, which makes
* this treatment differ from Math.IEEEremainder(double,double),
* where n is chosen to be the even integer.
* Note that the choice of n is not done considering the double
* approximation of dividend/divisor, because it could cause
* result to be outside [-|divisor|/2,|divisor|/2] range.
* The practical effect is that if multiple results would be possible,
* we always choose the result that is the closest to (and has the same
* sign as) the dividend.
* Ex. :
* - for (-3.0,2.0), this method returns -1.0,
* whereas Math.IEEEremainder returns 1.0.
* - for (-5.0,2.0), both this method and Math.IEEEremainder return -1.0.
*
* If the remainder is zero, its sign is the same as the sign of the first argument.
* If either argument is NaN, or the first argument is infinite,
* or the second argument is positive zero or negative zero,
* then the result is NaN.
* If the first argument is finite and the second argument is
* infinite, then the result is the same as the first argument.
*
* NB:
* - Modulo operator (%) returns a value in ]-|divisor|,|divisor|[,
* which sign is the same as dividend.
* - As for modulo operator, the sign of the divisor has no effect on the result.
* - On some architecture, % operator has been observed to return NaN
* for some subnormal values of divisor, when dividend exponent is 1023,
* which impacts the correctness of this method.
*
* @param dividend Dividend.
* @param divisor Divisor.
* @return Remainder of dividend/divisor, i.e. a value in [-|divisor|/2,|divisor|/2].
*/
public static double remainder(double dividend, double divisor) {
if (Double.isInfinite(divisor)) {
if (Double.isInfinite(dividend)) {
return Double.NaN;
} else {
return dividend;
}
}
double value = dividend % divisor;
if (Math.abs(value+value) > Math.abs(divisor)) {
return value + ((value > 0.0) ? -Math.abs(divisor) : Math.abs(divisor));
} else {
return value;
}
}
    /**
     * Accurate normalization into [-PI,PI].
     *
     * @param angle Angle in radians.
     * @return The same angle, in radians, but in [-PI,PI].
     */
    public static double normalizeMinusPiPi(double angle) {
        // Not modifying values in output range.
        if ((angle >= -Math.PI) && (angle <= Math.PI)) {
            return angle;
        }
        return remainderTwoPi(angle);
    }
    /**
     * Not accurate for large values.
     *
     * @param angle Angle in radians.
     * @return The same angle, in radians, but in [-PI,PI].
     */
    public static double normalizeMinusPiPiFast(double angle) {
        // Not modifying values in output range.
        if ((angle >= -Math.PI) && (angle <= Math.PI)) {
            return angle;
        }
        return remainderTwoPiFast(angle);
    }
    /**
     * Accurate normalization into [0,2*PI].
     *
     * @param angle Angle in radians.
     * @return The same angle, in radians, but in [0,2*PI].
     */
    public static double normalizeZeroTwoPi(double angle) {
        // Not modifying values in output range.
        if ((angle >= 0.0) && (angle <= 2*Math.PI)) {
            return angle;
        }
        angle = remainderTwoPi(angle);
        if (angle < 0.0) {
            // Shift a [-PI,0) remainder up by 2*PI.
            // LO then HI is theoretically better (when starting near 0).
            return (angle + TWOPI_LO) + TWOPI_HI;
        } else {
            return angle;
        }
    }
    /**
     * Not accurate for large values.
     *
     * @param angle Angle in radians.
     * @return The same angle, in radians, but in [0,2*PI].
     */
    public static double normalizeZeroTwoPiFast(double angle) {
        // Not modifying values in output range.
        if ((angle >= 0.0) && (angle <= 2*Math.PI)) {
            return angle;
        }
        angle = remainderTwoPiFast(angle);
        if (angle < 0.0) {
            // LO then HI is theoretically better (when starting near 0).
            return (angle + TWOPI_LO) + TWOPI_HI;
        } else {
            return angle;
        }
    }
    /**
     * Accurate normalization modulo PI.
     *
     * @param angle Angle in radians.
     * @return Angle value modulo PI, in radians, in [-PI/2,PI/2].
     */
    public static double normalizeMinusHalfPiHalfPi(double angle) {
        // Not modifying values in output range.
        if ((angle >= -Math.PI/2) && (angle <= Math.PI/2)) {
            return angle;
        }
        return remainderPi(angle);
    }
    /**
     * Not accurate for large values.
     *
     * @param angle Angle in radians.
     * @return Angle value modulo PI, in radians, in [-PI/2,PI/2].
     */
    public static double normalizeMinusHalfPiHalfPiFast(double angle) {
        // Not modifying values in output range.
        if ((angle >= -Math.PI/2) && (angle <= Math.PI/2)) {
            return angle;
        }
        return remainderPiFast(angle);
    }
/*
* floating points utils
*/
    /**
     * Delegates to NumbersUtils.
     *
     * @param value A float value.
     * @return true if the specified value is NaN or +-Infinity, false otherwise.
     */
    public static boolean isNaNOrInfinite(float value) {
        return NumbersUtils.isNaNOrInfinite(value);
    }
    /**
     * Delegates to NumbersUtils.
     *
     * @param value A double value.
     * @return true if the specified value is NaN or +-Infinity, false otherwise.
     */
    public static boolean isNaNOrInfinite(double value) {
        return NumbersUtils.isNaNOrInfinite(value);
    }
    /**
     * Extracts the biased exponent from the bit pattern and removes the bias.
     *
     * @param value A float value.
     * @return Value unbiased exponent.
     */
    public static int getExponent(float value) {
        return ((Float.floatToRawIntBits(value)>>23)&0xFF)-MAX_FLOAT_EXPONENT;
    }
    /**
     * Extracts the biased exponent from the bit pattern and removes the bias.
     *
     * @param value A double value.
     * @return Value unbiased exponent.
     */
    public static int getExponent(double value) {
        return (((int)(Double.doubleToRawLongBits(value)>>52))&0x7FF)-MAX_DOUBLE_EXPONENT;
    }
    /**
     * @param value A float value.
     * @return -1.0f if the specified value is < 0, 1.0f if it is > 0,
     *         and the value itself if it is NaN or +-0.0f.
     */
    public static float signum(float value) {
        if (USE_JDK_MATH) {
            return Math.signum(value);
        }
        // +-0.0f and NaN pass through unchanged.
        if ((value == 0.0f) || (value != value)) {
            return value;
        }
        return (float)signFromBit(value);
    }
    /**
     * @param value A double value.
     * @return -1.0 if the specified value is < 0, 1.0 if it is > 0,
     *         and the value itself if it is NaN or +-0.0.
     */
    public static double signum(double value) {
        if (USE_JDK_MATH) {
            return Math.signum(value);
        }
        // +-0.0 and NaN pass through unchanged.
        if ((value == 0.0) || (value != value)) {
            return value;
        }
        if (ANTI_SLOW_CASTS) {
            // signFromBit(double) returns a long; going through int
            // avoids the (possibly slow) long-to-double cast.
            return (double)(int)signFromBit(value);
        } else {
            return (double)signFromBit(value);
        }
    }
/**
* @param value A float value.
* @return -1 if sign bit is 1, 1 if sign bit is 0.
*/
public static int signFromBit(float value) {
return ((Float.floatToRawIntBits(value)>>30)|1);
}
/**
* @param value A double value.
* @return -1 if sign bit is 1, 1 if sign bit is 0.
*/
public static long signFromBit(double value) {
// Returning a long, to avoid useless cast into int.
return ((Double.doubleToRawLongBits(value)>>62)|1);
}
/**
* A sign of NaN can be interpreted as positive or negative.
*
* @param magnitude A float value.
* @param sign A float value.
* @return A value with the magnitude of the first argument, and the sign
* of the second argument.
*/
public static float copySign(float magnitude, float sign) {
return Float.intBitsToFloat(
(Float.floatToRawIntBits(sign) & Integer.MIN_VALUE)
| (Float.floatToRawIntBits(magnitude) & Integer.MAX_VALUE));
}
/**
* A sign of NaN can be interpreted as positive or negative.
*
* @param magnitude A double value.
* @param sign A double value.
* @return A value with the magnitude of the first argument, and the sign
* of the second argument.
*/
public static double copySign(double magnitude, double sign) {
return Double.longBitsToDouble(
(Double.doubleToRawLongBits(sign) & Long.MIN_VALUE)
| (Double.doubleToRawLongBits(magnitude) & Long.MAX_VALUE));
}
    /**
     * The ULP (Unit in the Last Place) is the distance to the next value larger
     * in magnitude.
     *
     * @param value A float value.
     * @return The size of an ulp of the specified value, or Float.MIN_VALUE
     *         if it is +-0.0f, or +Infinity if it is +-Infinity, or NaN
     *         if it is NaN.
     */
    public static float ulp(float value) {
        if (USE_JDK_MATH) {
            return Math.ulp(value);
        }
        /*
         * Look-up table not really worth it in micro-benchmark,
         * so should be worse with cache-misses.
         */
        final int exponent = getExponent(value);
        if (exponent >= (MIN_FLOAT_NORMAL_EXPONENT+23)) {
            // ULP itself is a normal value.
            if (exponent == MAX_FLOAT_EXPONENT+1) {
                // NaN or +-Infinity
                return Math.abs(value);
            }
            // normal: returning 2^(exponent-23)
            return Float.intBitsToFloat((exponent+(MAX_FLOAT_EXPONENT-23))<<23);
        } else {
            if (exponent == MIN_FLOAT_NORMAL_EXPONENT-1) {
                // +-0.0f or subnormal
                return Float.MIN_VALUE;
            }
            // subnormal result
            return Float.intBitsToFloat(1<<(exponent-MIN_FLOAT_NORMAL_EXPONENT));
        }
    }
    /**
     * The ULP (Unit in the Last Place) is the distance to the next value larger
     * in magnitude.
     *
     * @param value A double value.
     * @return The size of an ulp of the specified value, or Double.MIN_VALUE
     *         if it is +-0.0, or +Infinity if it is +-Infinity, or NaN
     *         if it is NaN.
     */
    public static double ulp(double value) {
        if (USE_JDK_MATH) {
            return Math.ulp(value);
        }
        /*
         * Look-up table not really worth it in micro-benchmark,
         * so should be worse with cache-misses.
         */
        final int exponent = getExponent(value);
        if (exponent >= (MIN_DOUBLE_NORMAL_EXPONENT+52)) {
            // ULP itself is a normal value.
            if (exponent == MAX_DOUBLE_EXPONENT+1) {
                // NaN or +-Infinity
                return Math.abs(value);
            }
            // normal: returning 2^(exponent-52)
            return Double.longBitsToDouble((exponent+(MAX_DOUBLE_EXPONENT-52L))<<52);
        } else {
            if (exponent == MIN_DOUBLE_NORMAL_EXPONENT-1) {
                // +-0.0f or subnormal
                return Double.MIN_VALUE;
            }
            // subnormal result
            return Double.longBitsToDouble(1L<<(exponent-MIN_DOUBLE_NORMAL_EXPONENT));
        }
    }
/**
* If both arguments are +-0.0(f), (float)direction is returned.
*
* If both arguments are +Infinity or -Infinity,
* respectively +Infinity or -Infinity is returned.
*
* @param start A float value.
* @param direction A double value.
* @return The float adjacent to start towards direction, considering that
* +(-)Float.MIN_VALUE is adjacent to +(-)0.0f, and that
* +(-)Float.MAX_VALUE is adjacent to +(-)Infinity,
* or NaN if any argument is NaN.
*/
public static float nextAfter(float start, double direction) {
if (direction < start) {
// Going towards -Infinity.
if (start == 0.0f) {
// +-0.0f
return -Float.MIN_VALUE;
}
final int bits = Float.floatToRawIntBits(start);
return Float.intBitsToFloat(bits + ((bits > 0) ? -1 : 1));
} else if (direction > start) {
// Going towards +Infinity.
// +0.0f to get rid of eventual -0.0f
final int bits = Float.floatToRawIntBits(start + 0.0f);
return Float.intBitsToFloat(bits + (bits >= 0 ? 1 : -1));
} else if (start == direction) {
return (float)direction;
} else {
// Returning a NaN derived from the input NaN(s).
return start + (float)direction;
}
}
/**
* If both arguments are +-0.0, direction is returned.
*
* If both arguments are +Infinity or -Infinity,
* respectively +Infinity or -Infinity is returned.
*
* @param start A double value.
* @param direction A double value.
* @return The double adjacent to start towards direction, considering that
* +(-)Double.MIN_VALUE is adjacent to +(-)0.0, and that
* +(-)Double.MAX_VALUE is adjacent to +(-)Infinity,
* or NaN if any argument is NaN.
*/
public static double nextAfter(double start, double direction) {
if (direction < start) {
// Going towards -Infinity.
if (start == 0.0) {
// +-0.0
return -Double.MIN_VALUE;
}
final long bits = Double.doubleToRawLongBits(start);
return Double.longBitsToDouble(bits + ((bits > 0) ? -1 : 1));
} else if (direction > start) {
// Going towards +Infinity.
// +0.0 to get rid of eventual -0.0
final long bits = Double.doubleToRawLongBits(start + 0.0f);
return Double.longBitsToDouble(bits + (bits >= 0 ? 1 : -1));
} else if (start == direction) {
return direction;
} else {
// Returning a NaN derived from the input NaN(s).
return start + direction;
}
}
/**
* Semantically equivalent to nextAfter(start,Double.NEGATIVE_INFINITY).
*/
public static float nextDown(float start) {
if (start > Float.NEGATIVE_INFINITY) {
if (start == 0.0f) {
// +-0.0f
return -Float.MIN_VALUE;
}
final int bits = Float.floatToRawIntBits(start);
return Float.intBitsToFloat(bits + ((bits > 0) ? -1 : 1));
} else if (start == Float.NEGATIVE_INFINITY) {
return Float.NEGATIVE_INFINITY;
} else {
// NaN
return start;
}
}
/**
* Semantically equivalent to nextAfter(start,Double.NEGATIVE_INFINITY).
*/
public static double nextDown(double start) {
if (start > Double.NEGATIVE_INFINITY) {
if (start == 0.0) {
// +-0.0
return -Double.MIN_VALUE;
}
final long bits = Double.doubleToRawLongBits(start);
return Double.longBitsToDouble(bits + ((bits > 0) ? -1 : 1));
} else if (start == Double.NEGATIVE_INFINITY) {
return Double.NEGATIVE_INFINITY;
} else {
// NaN
return start;
}
}
/**
* Semantically equivalent to nextAfter(start,Double.POSITIVE_INFINITY).
*/
public static float nextUp(float start) {
if (start < Float.POSITIVE_INFINITY) {
// +0.0f to get rid of eventual -0.0f
final int bits = Float.floatToRawIntBits(start + 0.0f);
return Float.intBitsToFloat(bits + (bits >= 0 ? 1 : -1));
} else if (start == Float.POSITIVE_INFINITY) {
return Float.POSITIVE_INFINITY;
} else {
// NaN
return start;
}
}
/**
* Semantically equivalent to nextAfter(start,Double.POSITIVE_INFINITY).
*/
public static double nextUp(double start) {
if (start < Double.POSITIVE_INFINITY) {
// +0.0 to get rid of eventual -0.0
final long bits = Double.doubleToRawLongBits(start + 0.0);
return Double.longBitsToDouble(bits + (bits >= 0 ? 1 : -1));
} else if (start == Double.POSITIVE_INFINITY) {
return Double.POSITIVE_INFINITY;
} else {
// NaN
return start;
}
}
    /**
     * Precision may be lost if the result is subnormal.
     *
     * @param value A float value.
     * @param scaleFactor An int value.
     * @return value * 2^scaleFactor, or a value equivalent to the specified
     *         one if it is NaN, +-Infinity or +-0.0f.
     */
    public static float scalb(float value, int scaleFactor) {
        // Large enough to imply overflow or underflow for
        // a finite non-zero value.
        final int MAX_SCALE = 2*MAX_FLOAT_EXPONENT+23+1;
        // Making sure scaling factor is in a reasonable range.
        scaleFactor = Math.max(Math.min(scaleFactor, MAX_SCALE), -MAX_SCALE);
        // Computed in double so a single multiply suffices.
        return (float)(((double)value) * twoPowNormal(scaleFactor));
    }
    /**
     * Precision may be lost if the result is subnormal.
     *
     * @param value A double value.
     * @param scaleFactor An int value.
     * @return value * 2^scaleFactor, or a value equivalent to the specified
     *         one if it is NaN, +-Infinity or +-0.0.
     */
    public static double scalb(double value, int scaleFactor) {
        if ((scaleFactor > -MAX_DOUBLE_EXPONENT) && (scaleFactor <= MAX_DOUBLE_EXPONENT)) {
            // Quick case (as done in apache FastMath).
            return value * twoPowNormal(scaleFactor);
        }
        // Large enough to imply overflow or underflow for
        // a finite non-zero value.
        final int MAX_SCALE = 2*MAX_DOUBLE_EXPONENT+52+1;
        // Making sure scaling factor is in a reasonable range.
        final int exponentAdjust;
        final int scaleIncrement;
        final double exponentDelta;
        if (scaleFactor < 0) {
            scaleFactor = Math.max(scaleFactor, -MAX_SCALE);
            scaleIncrement = -512;
            exponentDelta = TWO_POW_N512;
        } else {
            scaleFactor = Math.min(scaleFactor, MAX_SCALE);
            scaleIncrement = 512;
            exponentDelta = TWO_POW_512;
        }
        // Calculating (scaleFactor % +-512), 512 = 2^9, using
        // technique from "Hacker's Delight" section 10-2.
        final int t = ((scaleFactor >> (9-1)) >>> (32-9));
        exponentAdjust = ((scaleFactor + t) & (512-1)) - t;
        // Apply the sub-512 part in one multiply, then the rest
        // in +-512-exponent chunks.
        value *= twoPowNormal(exponentAdjust);
        scaleFactor -= exponentAdjust;
        while (scaleFactor != 0) {
            value *= exponentDelta;
            scaleFactor -= scaleIncrement;
        }
        return value;
    }
/*
* Non-redefined Math public values and treatments.
*/
    /** Delegates to {@link Math#abs(float)}. */
    public static float abs(float a) {
        return Math.abs(a);
    }
    /** Delegates to {@link Math#abs(double)}. */
    public static double abs(double a) {
        return Math.abs(a);
    }
    /** Delegates to {@link Math#min(float, float)}. */
    public static float min(float a, float b) {
        return Math.min(a,b);
    }
    /** Delegates to {@link Math#min(double, double)}. */
    public static double min(double a, double b) {
        return Math.min(a,b);
    }
    /** Delegates to {@link Math#max(float, float)}. */
    public static float max(float a, float b) {
        return Math.max(a,b);
    }
    /** Delegates to {@link Math#max(double, double)}. */
    public static double max(double a, double b) {
        return Math.max(a,b);
    }
    /** Delegates to {@link Math#IEEEremainder(double, double)}. */
    public static double IEEEremainder(double f1, double f2) {
        return Math.IEEEremainder(f1,f2);
    }
    /** Delegates to {@link Math#random()}. */
    public static double random() {
        return Math.random();
    }
//--------------------------------------------------------------------------
// PRIVATE METHODS
//--------------------------------------------------------------------------
    /**
     * Non-instantiable: this class only exposes static methods.
     */
    private FastMath() {
    }
/*
* Remainders (accurate).
*/
    /**
     * Accurate reduction modulo 2*PI, using the two-word (TWOPI_HI/TWOPI_LO)
     * representation of 2*PI to limit rounding error, and falling back to a
     * heavy reduction for large angles.
     *
     * @param angle Angle in radians.
     * @return Remainder of (angle % (2*PI)), in [-PI,PI].
     */
    private static double remainderTwoPi(double angle) {
        if (USE_JDK_MATH) {
            return jdkRemainderTwoPi(angle);
        }
        // Work on |angle| and restore the sign at the end.
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle <= (4*NORMALIZE_ANGLE_MAX_MEDIUM_DOUBLE_PIO2)) {
            // fn = number of 2*PI periods closest to angle.
            double fn = (double)(int)(angle*TWOPI_INV+0.5);
            angle = (angle - fn*TWOPI_HI) - fn*TWOPI_LO;
            // Ensuring range.
            // HI/LO can help a bit, even though we are always far from 0.
            if (angle < -Math.PI) {
                angle = (angle + TWOPI_HI) + TWOPI_LO;
            } else if (angle > Math.PI) {
                angle = (angle - TWOPI_HI) - TWOPI_LO;
            }
            return negateResult ? -angle : angle;
        } else if (angle < Double.POSITIVE_INFINITY) {
            angle = heavyRemainderTwoPi(angle);
            return negateResult ? -angle : angle;
        } else { // angle is +Infinity or NaN
            return Double.NaN;
        }
    }
    /**
     * Accurate reduction modulo PI (same structure as remainderTwoPi,
     * with the PI_HI/PI_LO two-word constant).
     *
     * @param angle Angle in radians.
     * @return Remainder of (angle % PI), in [-PI/2,PI/2].
     */
    private static double remainderPi(double angle) {
        if (USE_JDK_MATH) {
            return jdkRemainderPi(angle);
        }
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle <= (2*NORMALIZE_ANGLE_MAX_MEDIUM_DOUBLE_PIO2)) {
            // fn = number of PI periods closest to angle.
            double fn = (double)(int)(angle*PI_INV+0.5);
            angle = (angle - fn*PI_HI) - fn*PI_LO;
            // Ensuring range.
            // HI/LO can help a bit, even though we are always far from 0.
            if (angle < -Math.PI/2) {
                angle = (angle + PI_HI) + PI_LO;
            } else if (angle > Math.PI/2) {
                angle = (angle - PI_HI) - PI_LO;
            }
            return negateResult ? -angle : angle;
        } else if (angle < Double.POSITIVE_INFINITY) {
            angle = heavyRemainderPi(angle);
            return negateResult ? -angle : angle;
        } else { // angle is +Infinity or NaN
            return Double.NaN;
        }
    }
    /**
     * Reduction modulo PI/2 that also tracks which quadrant the angle
     * falls in; remainder and quadrant are packed into a long by
     * encodeRemainderAndQuadrant.
     *
     * @param angle Angle in radians.
     * @return Bits of double corresponding to remainder of (angle % (PI/2)),
     *         in [-PI/4,PI/4], with quadrant encoded in exponent bits.
     */
    private static long remainderPiO2(double angle) {
        if (USE_JDK_MATH) {
            return jdkRemainderPiO2(angle, false);
        }
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        if (angle <= NORMALIZE_ANGLE_MAX_MEDIUM_DOUBLE_PIO2) {
            // n = number of PI/2 periods closest to angle; its low two
            // bits give the quadrant.
            int n = (int)(angle*PIO2_INV+0.5);
            double fn = (double)n;
            angle = (angle - fn*PIO2_HI) - fn*PIO2_LO;
            // Ensuring range.
            // HI/LO can help a bit, even though we are always far from 0.
            if (angle < -Math.PI/4) {
                angle = (angle + PIO2_HI) + PIO2_LO;
                n--;
            } else if (angle > Math.PI/4) {
                angle = (angle - PIO2_HI) - PIO2_LO;
                n++;
            }
            if (negateResult) {
                angle = -angle;
            }
            return encodeRemainderAndQuadrant(angle, n&3);
        } else if (angle < Double.POSITIVE_INFINITY) {
            return heavyRemainderPiO2(angle, negateResult);
        } else { // angle is +Infinity or NaN
            return encodeRemainderAndQuadrant(Double.NaN, 0);
        }
    }
/*
* Remainders (fast).
*/
    /**
     * Fast reduction modulo 2*PI: a two-stage multiply-and-subtract with
     * the TWOPI_HI/TWOPI_LO two-word constant, with no heavy fallback.
     *
     * Not accurate for large values.
     *
     * @param angle Angle in radians.
     * @return Remainder of (angle % (2*PI)), in [-PI,PI].
     */
    private static double remainderTwoPiFast(double angle) {
        if (USE_JDK_MATH) {
            return jdkRemainderTwoPi(angle);
        }
        // Work on |angle| and restore the sign at the end.
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        // - We don't bother with values higher than (2*PI*(2^52)),
        //   since they are spaced by 2*PI or more from each other.
        // - For large values, we don't use % because it might be very slow,
        //   and we split computation in two, because cast from double to int
        //   with large numbers might be very slow also.
        if (angle <= TWO_POW_26*(2*Math.PI)) {
            // ok
        } else if (angle <= TWO_POW_52*(2*Math.PI)) {
            // Computing remainder of angle modulo TWO_POW_26*(2*PI).
            double fn = (double)(int)(angle*(TWOPI_INV/TWO_POW_26)+0.5);
            angle = (angle - fn*(TWOPI_HI*TWO_POW_26)) - fn*(TWOPI_LO*TWO_POW_26);
            // Here, angle is in [-TWO_POW_26*PI,TWO_POW_26*PI], or so.
            if (angle < 0.0) {
                angle = -angle;
                negateResult = !negateResult;
            }
        } else if (angle < Double.POSITIVE_INFINITY) {
            return 0.0;
        } else { // angle is +Infinity or NaN
            return Double.NaN;
        }
        // Computing remainder of angle modulo 2*PI.
        double fn = (double)(int)(angle*TWOPI_INV+0.5);
        angle = (angle - fn*TWOPI_HI) - fn*TWOPI_LO;
        // Ensuring range.
        // HI/LO can help a bit, even though we are always far from 0.
        if (angle < -Math.PI) {
            angle = (angle + TWOPI_HI) + TWOPI_LO;
        } else if (angle > Math.PI) {
            angle = (angle - TWOPI_HI) - TWOPI_LO;
        }
        return negateResult ? -angle : angle;
    }
    /**
     * Fast reduction modulo PI (same two-stage structure as
     * remainderTwoPiFast, with the PI_HI/PI_LO constant).
     *
     * Not accurate for large values.
     *
     * @param angle Angle in radians.
     * @return Remainder of (angle % PI), in [-PI/2,PI/2].
     */
    private static double remainderPiFast(double angle) {
        if (USE_JDK_MATH) {
            return jdkRemainderPi(angle);
        }
        boolean negateResult = false;
        if (angle < 0.0) {
            angle = -angle;
            negateResult = true;
        }
        // - We don't bother with values higher than (PI*(2^52)),
        //   since they are spaced by PI or more from each other.
        // - For large values, we don't use % because it might be very slow,
        //   and we split computation in two, because cast from double to int
        //   with large numbers might be very slow also.
        if (angle <= TWO_POW_26*Math.PI) {
            // ok
        } else if (angle <= TWO_POW_52*Math.PI) {
            // Computing remainder of angle modulo TWO_POW_26*PI.
            double fn = (double)(int)(angle*(PI_INV/TWO_POW_26)+0.5);
            angle = (angle - fn*(PI_HI*TWO_POW_26)) - fn*(PI_LO*TWO_POW_26);
            // Here, angle is in [-TWO_POW_26*PI/2,TWO_POW_26*PI/2], or so.
            if (angle < 0.0) {
                angle = -angle;
                negateResult = !negateResult;
            }
        } else if (angle < Double.POSITIVE_INFINITY) {
            return 0.0;
        } else { // angle is +Infinity or NaN
            return Double.NaN;
        }
        // Computing remainder of angle modulo PI.
        double fn = (double)(int)(angle*PI_INV+0.5);
        angle = (angle - fn*PI_HI) - fn*PI_LO;
        // Ensuring range.
        // HI/LO can help a bit, even though we are always far from 0.
        if (angle < -Math.PI/2) {
            angle = (angle + PI_HI) + PI_LO;
        } else if (angle > Math.PI/2) {
            angle = (angle - PI_HI) - PI_LO;
        }
        return negateResult ? -angle : angle;
    }
}
| apache-2.0 |
arturgaleno/Android2Java | src/test/java/com/android2java/sparsearraytest/Foo.java | 704 | package com.android2java.sparsearraytest;
import java.lang.ref.WeakReference;
import com.android2java.SparseLongObjArray;
public class Foo {
    /**
     * Shared pool mapping an id to a weakly-referenced description, so that
     * descriptions can be reclaimed by the GC when memory is tight.
     */
    private static final SparseLongObjArray<WeakReference<String>> DESCRIPTION_POOL = new SparseLongObjArray<>(100000);
    // Identifier used as the pool key.
    private int id;
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    /**
     * Stores the description for this instance's id, unless a live entry
     * already exists.
     *
     * Fix: an entry whose WeakReference referent was garbage-collected is
     * now replaced (previously the stale reference was kept forever, so
     * getDesc() would keep returning null even after a new setDesc call).
     *
     * @param desc the description to pool for this id.
     */
    public void setDesc(String desc) {
        final WeakReference<String> ref = DESCRIPTION_POOL.get(this.id);
        if (ref == null || ref.get() == null) {
            DESCRIPTION_POOL.put(id, new WeakReference<String>(desc));
        }
    }
    /**
     * @return the pooled description for this id, or null if none was set
     *         or it has been garbage-collected (previously threw a
     *         NullPointerException when no entry existed for the id).
     */
    public String getDesc() {
        final WeakReference<String> ref = DESCRIPTION_POOL.get(this.id);
        return (ref == null) ? null : ref.get();
    }
    /** @return the shared description pool (exposed for tests). */
    public static SparseLongObjArray<WeakReference<String>> getPool() {
        return DESCRIPTION_POOL;
    }
}
| apache-2.0 |
kisskys/incubator-asterixdb | asterixdb/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java | 164187 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.aql.translator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.asterix.api.common.APIFramework;
import org.apache.asterix.api.common.SessionConfig;
import org.apache.asterix.api.common.SessionConfig.OutputFormat;
import org.apache.asterix.app.external.CentralFeedManager;
import org.apache.asterix.app.external.ExternalIndexingOperations;
import org.apache.asterix.app.external.FeedJoint;
import org.apache.asterix.app.external.FeedLifecycleListener;
import org.apache.asterix.app.external.FeedOperations;
import org.apache.asterix.common.config.AsterixExternalProperties;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.config.MetadataConstants;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.external.feed.api.IFeed;
import org.apache.asterix.external.feed.api.IFeed.FeedType;
import org.apache.asterix.external.feed.api.IFeedJoint;
import org.apache.asterix.external.feed.api.IFeedJoint.FeedJointType;
import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
import org.apache.asterix.external.feed.management.FeedConnectionId;
import org.apache.asterix.external.feed.management.FeedConnectionRequest;
import org.apache.asterix.external.feed.management.FeedId;
import org.apache.asterix.external.feed.management.FeedJointKey;
import org.apache.asterix.external.feed.management.FeedLifecycleEventSubscriber;
import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
import org.apache.asterix.external.indexing.ExternalFile;
import org.apache.asterix.file.DatasetOperations;
import org.apache.asterix.file.DataverseOperations;
import org.apache.asterix.file.IndexOperations;
import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
import org.apache.asterix.lang.common.base.IRewriterFactory;
import org.apache.asterix.lang.common.base.IStatementRewriter;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.TypeExpression;
import org.apache.asterix.lang.common.statement.CompactStatement;
import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
import org.apache.asterix.lang.common.statement.CreateDataverseStatement;
import org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement;
import org.apache.asterix.lang.common.statement.CreateFeedStatement;
import org.apache.asterix.lang.common.statement.CreateFunctionStatement;
import org.apache.asterix.lang.common.statement.CreateIndexStatement;
import org.apache.asterix.lang.common.statement.CreatePrimaryFeedStatement;
import org.apache.asterix.lang.common.statement.CreateSecondaryFeedStatement;
import org.apache.asterix.lang.common.statement.DatasetDecl;
import org.apache.asterix.lang.common.statement.DataverseDecl;
import org.apache.asterix.lang.common.statement.DataverseDropStatement;
import org.apache.asterix.lang.common.statement.DeleteStatement;
import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
import org.apache.asterix.lang.common.statement.DropStatement;
import org.apache.asterix.lang.common.statement.ExternalDetailsDecl;
import org.apache.asterix.lang.common.statement.FeedDropStatement;
import org.apache.asterix.lang.common.statement.FeedPolicyDropStatement;
import org.apache.asterix.lang.common.statement.FunctionDecl;
import org.apache.asterix.lang.common.statement.FunctionDropStatement;
import org.apache.asterix.lang.common.statement.IDatasetDetailsDecl;
import org.apache.asterix.lang.common.statement.IndexDropStatement;
import org.apache.asterix.lang.common.statement.InsertStatement;
import org.apache.asterix.lang.common.statement.InternalDetailsDecl;
import org.apache.asterix.lang.common.statement.LoadStatement;
import org.apache.asterix.lang.common.statement.NodeGroupDropStatement;
import org.apache.asterix.lang.common.statement.NodegroupDecl;
import org.apache.asterix.lang.common.statement.Query;
import org.apache.asterix.lang.common.statement.RefreshExternalDatasetStatement;
import org.apache.asterix.lang.common.statement.RunStatement;
import org.apache.asterix.lang.common.statement.SetStatement;
import org.apache.asterix.lang.common.statement.TypeDecl;
import org.apache.asterix.lang.common.statement.TypeDropStatement;
import org.apache.asterix.lang.common.statement.WriteStatement;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.IDatasetDetails;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.api.IMetadataEntity;
import org.apache.asterix.metadata.dataset.hints.DatasetHints;
import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
import org.apache.asterix.metadata.declared.AqlMetadataProvider;
import org.apache.asterix.metadata.entities.CompactionPolicy;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Datatype;
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
import org.apache.asterix.metadata.entities.Feed;
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
import org.apache.asterix.metadata.entities.Function;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.entities.NodeGroup;
import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
import org.apache.asterix.metadata.utils.DatasetUtils;
import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
import org.apache.asterix.metadata.utils.KeyFieldTypeUtils;
import org.apache.asterix.metadata.utils.MetadataLockManager;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.types.TypeSignature;
import org.apache.asterix.om.util.AsterixAppContextInfo;
import org.apache.asterix.om.util.AsterixClusterProperties;
import org.apache.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
import org.apache.asterix.result.ResultReader;
import org.apache.asterix.result.ResultUtils;
import org.apache.asterix.transaction.management.service.transaction.DatasetIdFactory;
import org.apache.asterix.translator.AbstractLangTranslator;
import org.apache.asterix.translator.CompiledStatements.CompiledConnectFeedStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledIndexCompactStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement;
import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import org.apache.asterix.translator.TypeTranslator;
import org.apache.asterix.translator.util.ValidateUtil;
import org.apache.asterix.util.FlushDatasetUtils;
import org.apache.asterix.util.JobUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
import org.apache.hyracks.algebricks.data.IAWriterFactory;
import org.apache.hyracks.algebricks.data.IResultSerializerFactoryProvider;
import org.apache.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
import org.apache.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.dataset.IHyracksDataset;
import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.file.FileSplit;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.common.collect.Lists;
/*
 * Provides functionality for executing a batch of statements (DDL, DML, and queries)
* sequentially.
*/
public class QueryTranslator extends AbstractLangTranslator {
    // Class-wide logger for translation/execution diagnostics.
    private static Logger LOGGER = Logger.getLogger(QueryTranslator.class.getName());
    // Tracks how far a metadata-mutating (DDL) operation has progressed, so a
    // failure after the pending-op record was committed can be compensated.
    private enum ProgressState {
        NO_PROGRESS,
        ADDED_PENDINGOP_RECORD_TO_METADATA
    }
    // How query results are delivered back to the client (synchronously, via an
    // async handle, or via a handle deferred until job completion).
    public enum ResultDelivery {
        SYNC,
        ASYNC,
        ASYNC_DEFERRED
    }
    public static final boolean IS_DEBUG_MODE = false;// true
    // Statements to compile and execute, in the order submitted by the client.
    private final List<Statement> statements;
    // Per-session output configuration (format, output stream, flags).
    private final SessionConfig sessionConfig;
    // Dataverse made current by the last "use dataverse" statement, if any.
    private Dataverse activeDefaultDataverse;
    // Function declarations made earlier in this batch, visible to later queries.
    private final List<FunctionDecl> declaredFunctions;
    // Compilation front-end used for queries.
    private final APIFramework apiFramework;
    // Factory for language-specific AST rewriters applied before compilation.
    private final IRewriterFactory rewriterFactory;
public QueryTranslator(List<Statement> aqlStatements, SessionConfig conf,
ILangCompilationProvider compliationProvider) {
this.statements = aqlStatements;
this.sessionConfig = conf;
this.declaredFunctions = getDeclaredFunctions(aqlStatements);
this.apiFramework = new APIFramework(compliationProvider);
this.rewriterFactory = compliationProvider.getRewriterFactory();
}
private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
for (Statement st : statements) {
if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
functionDecls.add((FunctionDecl) st);
}
}
return functionDecls;
}
    /**
     * Compiles and executes, in order, the list of statements given at construction
     * time. Convenience overload that discards execution statistics.
     *
     * @param hcc
     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
     * @param hdc
     *            A Hyracks dataset client object that is used to read the results.
     * @param resultDelivery
     *            Whether results are delivered synchronously, via an async handle,
     *            or via a handle deferred until job completion.
     * @throws Exception
     */
    public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery)
            throws Exception {
        compileAndExecute(hcc, hdc, resultDelivery, new ResultUtils.Stats());
    }
    /**
     * Compiles and executes, in order, the list of statements given at construction
     * time, dispatching each statement kind to its dedicated handler.
     * Result-set ids are assigned sequentially to QUERY statements; a WRITE
     * statement changes the writer factory / output file used by the statements
     * that follow it; SET statements accumulate into the shared config map.
     *
     * @param hcc
     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
     * @param hdc
     *            A Hyracks dataset client object that is used to read the results.
     * @param resultDelivery
     *            Whether results are delivered synchronously, via an async handle,
     *            or via a handle deferred until job completion.
     * @param stats
     *            accumulator for execution statistics, filled in by query handlers
     * @throws Exception
     */
    public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery,
            ResultUtils.Stats stats) throws Exception {
        int resultSetIdCounter = 0;
        FileSplit outputFile = null;
        IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
        IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
        Map<String, String> config = new HashMap<String, String>();
        for (Statement stmt : statements) {
            if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
                sessionConfig.out().println(APIFramework.HTML_STATEMENT_SEPARATOR);
            }
            validateOperation(activeDefaultDataverse, stmt);
            rewriteStatement(stmt); // Rewrite the statement's AST.
            // Fresh metadata provider per statement, seeded with the session's
            // current writer/output/config state.
            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse,
                    CentralFeedManager.getInstance());
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            switch (stmt.getKind()) {
                case SET: {
                    handleSetStatement(metadataProvider, stmt, config);
                    break;
                }
                case DATAVERSE_DECL: {
                    activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_DATAVERSE: {
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                }
                case DATASET_DECL: {
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CREATE_INDEX: {
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DECL: {
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DECL: {
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                }
                case DATAVERSE_DROP: {
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DATASET_DROP: {
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INDEX_DROP: {
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case TYPE_DROP: {
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                }
                case NODEGROUP_DROP: {
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                }
                case CREATE_FUNCTION: {
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                }
                case FUNCTION_DROP: {
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                }
                case LOAD: {
                    handleLoadStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case INSERT:
                case UPSERT: {
                    handleInsertUpsertStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DELETE: {
                    handleDeleteStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CREATE_PRIMARY_FEED:
                case CREATE_SECONDARY_FEED: {
                    handleCreateFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DROP_FEED: {
                    handleDropFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DROP_FEED_POLICY: {
                    handleDropFeedPolicyStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CONNECT_FEED: {
                    handleConnectFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case DISCONNECT_FEED: {
                    handleDisconnectFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case SUBSCRIBE_FEED: {
                    handleSubscribeFeedStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case CREATE_FEED_POLICY: {
                    handleCreateFeedPolicyStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case QUERY: {
                    // Each query gets its own result-set id; async modes register the
                    // result for later retrieval instead of streaming it now.
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    metadataProvider.setResultAsyncMode(
                            resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.ASYNC_DEFERRED);
                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery, stats);
                    break;
                }
                case COMPACT: {
                    handleCompactStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case EXTERNAL_DATASET_REFRESH: {
                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
                    break;
                }
                case WRITE: {
                    // WRITE mutates the session-level output sink used by later statements.
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt);
                    if (result.first != null) {
                        writerFactory = result.first;
                    }
                    outputFile = result.second;
                    break;
                }
                case RUN: {
                    handleRunStatement(metadataProvider, stmt, hcc);
                    break;
                }
                default:
                    break;
            }
        }
    }
private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config) {
SetStatement ss = (SetStatement) stmt;
String pname = ss.getPropName();
String pvalue = ss.getPropValue();
config.put(pname, pvalue);
}
private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws InstantiationException, IllegalAccessException, ClassNotFoundException {
WriteStatement ws = (WriteStatement) stmt;
File f = new File(ws.getFileName());
FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
IAWriterFactory writerFactory = null;
if (ws.getWriterClassName() != null) {
writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
}
return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
}
private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
throws Exception {
DataverseDecl dvd = (DataverseDecl) stmt;
String dvName = dvd.getDataverseName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
if (dv == null) {
throw new MetadataException("Unknown dataverse " + dvName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return dv;
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw new MetadataException(e);
} finally {
MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
}
}
    /**
     * Handles "create dataverse": adds a new dataverse record unless one with the
     * same name already exists, in which case IF NOT EXISTS makes it a no-op and
     * otherwise the statement fails.
     */
    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
        CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
        String dvName = stmtCreateDataverse.getDataverseName().getValue();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // NOTE(review): only a *read* lock is taken here, mirroring the lookup path;
        // uniqueness appears to be ultimately enforced by the metadata manager — confirm.
        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
        try {
            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
            if (dv != null) {
                if (stmtCreateDataverse.getIfNotExists()) {
                    // Already present and IF NOT EXISTS given: commit and succeed silently.
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
                }
            }
            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(),
                    new Dataverse(dvName, stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            // Roll back the metadata transaction before surfacing the failure.
            abort(e, e, mdTxnCtx);
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
        }
    }
private void validateCompactionPolicy(String compactionPolicy, Map<String, String> compactionPolicyProperties,
MetadataTransactionContext mdTxnCtx, boolean isExternalDataset) throws AsterixException, Exception {
CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
if (compactionPolicyEntity == null) {
throw new AsterixException("Unknown compaction policy: " + compactionPolicy);
}
String compactionPolicyFactoryClassName = compactionPolicyEntity.getClassName();
ILSMMergePolicyFactory mergePolicyFactory = (ILSMMergePolicyFactory) Class
.forName(compactionPolicyFactoryClassName).newInstance();
if (isExternalDataset && mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
throw new AsterixException("The correlated-prefix merge policy cannot be used with external dataset.");
}
if (compactionPolicyProperties == null) {
if (mergePolicyFactory.getName().compareTo("no-merge") != 0) {
throw new AsterixException("Compaction policy properties are missing.");
}
} else {
for (Map.Entry<String, String> entry : compactionPolicyProperties.entrySet()) {
if (!mergePolicyFactory.getPropertiesNames().contains(entry.getKey())) {
throw new AsterixException("Invalid compaction policy property: " + entry.getKey());
}
}
for (String p : mergePolicyFactory.getPropertiesNames()) {
if (!compactionPolicyProperties.containsKey(p)) {
throw new AsterixException("Missing compaction policy property: " + p);
}
}
}
}
private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws AsterixException, Exception {
ProgressState progress = ProgressState.NO_PROGRESS;
DatasetDecl dd = (DatasetDecl) stmt;
String dataverseName = getActiveDataverse(dd.getDataverse());
String datasetName = dd.getName().getValue();
DatasetType dsType = dd.getDatasetType();
String itemTypeDataverseName = dd.getItemTypeDataverse().getValue();
String itemTypeName = dd.getItemTypeName().getValue();
String metaItemTypeDataverseName = dd.getMetaItemTypeDataverse().getValue();
String metaItemTypeName = dd.getMetaItemTypeName().getValue();
Identifier ngNameId = dd.getNodegroupName();
String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
String compactionPolicy = dd.getCompactionPolicy();
Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
boolean defaultCompactionPolicy = (compactionPolicy == null);
boolean temp = dd.getDatasetDetailsDecl().isTemp();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, itemTypeDataverseName,
itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
dataverseName + "." + datasetName, defaultCompactionPolicy);
Dataset dataset = null;
try {
IDatasetDetails datasetDetails = null;
Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName);
if (ds != null) {
if (dd.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
}
}
Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
itemTypeDataverseName, itemTypeName);
if (dt == null) {
throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
}
String ngName = ngNameId != null ? ngNameId.getValue()
: configureNodegroupForDataset(dd, dataverseName, mdTxnCtx);
if (compactionPolicy == null) {
compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
} else {
validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
}
switch (dd.getDatasetType()) {
case INTERNAL: {
IAType itemType = dt.getDatatype();
if (itemType.getTypeTag() != ATypeTag.RECORD) {
throw new AlgebricksException("Dataset type has to be a record type.");
}
IAType metaItemType = null;
if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
}
if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.RECORD) {
throw new AlgebricksException("Dataset meta type has to be a record type.");
}
ARecordType metaRecType = (ARecordType) metaItemType;
List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
.getPartitioningExprs();
List<Integer> keySourceIndicators = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
.getKeySourceIndicators();
boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
ARecordType aRecordType = (ARecordType) itemType;
List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType,
metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
if (filterField != null) {
ValidateUtil.validateFilterField(aRecordType, filterField);
}
if (compactionPolicy == null) {
if (filterField != null) {
// If the dataset has a filter and the user didn't specify a merge
// policy, then we will pick the
// correlated-prefix as the default merge policy.
compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
}
}
datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
break;
}
case EXTERNAL: {
String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(),
ExternalDatasetTransactionState.COMMIT);
break;
}
}
// #. initialize DatasetIdFactory if it is not initialized.
if (!DatasetIdFactory.isInitialized()) {
DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
}
// #. add a new dataset with PendingAddOp
dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName,
metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties,
datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(),
IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
if (dd.getDatasetType() == DatasetType.INTERNAL) {
Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
dataverseName);
JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
metadataProvider);
// #. make metadataTxn commit before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
// #. runJob
JobUtils.runJob(hcc, jobSpec, true);
// #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
// #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// #. execute compensation operations
// remove the index in NC
// [Notice]
// As long as we updated(and committed) metadata, we should remove any effect of the job
// because an exception occurs during runJob.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
try {
JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, jobSpec, true);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
+ "." + datasetName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, itemTypeDataverseName,
itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
dataverseName + "." + datasetName, defaultCompactionPolicy);
}
}
private void validateIfResourceIsActiveInFeed(String dataverseName, String datasetName) throws AsterixException {
List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
boolean resourceInUse = false;
StringBuilder builder = new StringBuilder();
if (activeFeedConnections != null && !activeFeedConnections.isEmpty()) {
for (FeedConnectionId connId : activeFeedConnections) {
if (connId.getDatasetName().equals(datasetName)) {
resourceInUse = true;
builder.append(connId + "\n");
}
}
}
if (resourceInUse) {
throw new AsterixException("Dataset " + datasetName + " is currently being "
+ "fed into by the following feed(s).\n" + builder.toString() + "\n" + "Operation not supported");
}
}
private String getNodeGroupName(Identifier ngNameId, DatasetDecl dd, String dataverse) {
if (ngNameId != null) {
return ngNameId.getValue();
}
String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
if (hintValue == null) {
return MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
} else {
return (dataverse + ":" + dd.getName().getValue());
}
}
private String configureNodegroupForDataset(DatasetDecl dd, String dataverse, MetadataTransactionContext mdTxnCtx)
throws AsterixException {
int nodegroupCardinality = -1;
String nodegroupName;
String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
if (hintValue == null) {
nodegroupName = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
return nodegroupName;
} else {
int numChosen = 0;
boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
dd.getHints().get(DatasetNodegroupCardinalityHint.NAME)).first;
if (!valid) {
throw new AsterixException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
} else {
nodegroupCardinality = Integer.parseInt(dd.getHints().get(DatasetNodegroupCardinalityHint.NAME));
}
List<String> nodeNames = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodeNames();
List<String> nodeNamesClone = new ArrayList<String>(nodeNames);
String metadataNodeName = AsterixAppContextInfo.getInstance().getMetadataProperties().getMetadataNodeName();
List<String> selectedNodes = new ArrayList<String>();
selectedNodes.add(metadataNodeName);
numChosen++;
nodeNamesClone.remove(metadataNodeName);
if (numChosen < nodegroupCardinality) {
Random random = new Random();
String[] nodes = nodeNamesClone.toArray(new String[] {});
int[] b = new int[nodeNamesClone.size()];
for (int i = 0; i < b.length; i++) {
b[i] = i;
}
for (int i = 0; i < nodegroupCardinality - numChosen; i++) {
int selected = i + random.nextInt(nodeNamesClone.size() - i);
int selNodeIndex = b[selected];
selectedNodes.add(nodes[selNodeIndex]);
int temp = b[0];
b[0] = b[selected];
b[selected] = temp;
}
}
nodegroupName = dataverse + ":" + dd.getName().getValue();
MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodegroupName, selectedNodes));
return nodegroupName;
}
}
    /**
     * Translates a CREATE INDEX statement and materializes the secondary index.
     *
     * Uses the "pending operation" metadata protocol across several metadata
     * transactions: (1) validate and record the index with PENDING_ADD_OP,
     * (2) run the Hyracks jobs that create and load the physical index on the
     * NCs outside any metadata transaction, (3) replace the record with a
     * PENDING_NO_OP entry. If anything fails after the pending record was
     * added, the catch block runs compensation jobs and metadata deletions to
     * undo the partial work. For external datasets, the first secondary index
     * additionally snapshots the external files and builds/replicates the
     * special "files index".
     *
     * @param metadataProvider provider for metadata transactions and job construction
     * @param stmt the CreateIndexStatement being executed
     * @param hcc connection used to run the index build/load jobs on the cluster
     * @throws Exception on validation failure or any metadata/runtime error; an
     *         IllegalStateException indicates compensation itself failed and
     *         the metadata may be left inconsistent
     */
    private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        ProgressState progress = ProgressState.NO_PROGRESS;
        CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
        String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
        String datasetName = stmtCreateIndex.getDatasetName().getValue();
        List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // Serialize concurrent DDL on the same dataset; released in the finally block.
        MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);
        String indexName = null;
        JobSpecification spec = null;
        Dataset ds = null;
        // For external datasets
        ArrayList<ExternalFile> externalFilesSnapshot = null;
        boolean firstExternalDatasetIndex = false;
        boolean filesIndexReplicated = false;
        Index filesIndex = null;
        boolean datasetLocked = false;
        try {
            ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName);
            if (ds == null) {
                throw new AlgebricksException(
                        "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
            }
            indexName = stmtCreateIndex.getIndexName().getValue();
            Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName, indexName);
            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                    ds.getItemTypeDataverseName(), ds.getItemTypeName());
            ARecordType aRecordType = (ARecordType) dt.getDatatype();
            ARecordType metaRecordType = null;
            if (ds.hasMetaPart()) {
                Datatype metaDt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                        ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
                metaRecordType = (ARecordType) metaDt.getDatatype();
            }
            // Resolve each key field's type, descending into nested (closed) record fields.
            List<List<String>> indexFields = new ArrayList<List<String>>();
            List<IAType> indexFieldTypes = new ArrayList<IAType>();
            int keyIndex = 0;
            for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
                IAType fieldType = null;
                ARecordType subType = KeyFieldTypeUtils.chooseSource(keySourceIndicators, keyIndex, aRecordType,
                        metaRecordType);
                boolean isOpen = subType.isOpen();
                int i = 0;
                if (fieldExpr.first.size() > 1 && !isOpen) {
                    // Walk the field path until we either reach the last step or hit an open type.
                    for (; i < fieldExpr.first.size() - 1;) {
                        subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
                        i++;
                        if (subType.isOpen()) {
                            isOpen = true;
                            break;
                        } ; // NOTE(review): stray ';' — harmless empty statement
                    }
                }
                if (fieldExpr.second == null) {
                    // No explicit type given: derive it from the record type.
                    fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
                } else {
                    // Explicit type: only allowed on enforced indexes over open fields.
                    if (!stmtCreateIndex.isEnforced()) {
                        throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
                                + "\" field without enforcing it's type");
                    }
                    if (!isOpen) {
                        throw new AlgebricksException("Typed index on \"" + fieldExpr.first
                                + "\" field could be created only for open datatype");
                    }
                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
                            indexName, dataverseName);
                    TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
                    fieldType = typeMap.get(typeSignature);
                }
                if (fieldType == null) {
                    throw new AlgebricksException(
                            "Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
                }
                indexFields.add(fieldExpr.first);
                indexFieldTypes.add(fieldType);
                ++keyIndex;
            }
            ValidateUtil.validateKeyFields(aRecordType, metaRecordType, indexFields, keySourceIndicators,
                    indexFieldTypes, stmtCreateIndex.getIndexType());
            if (idx != null) {
                if (stmtCreateIndex.getIfNotExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
                }
            }
            // Checks whether a user is trying to create an inverted secondary index on a dataset
            // with a variable-length primary key.
            // Currently, we do not support this. Therefore, as a temporary solution, we print an
            // error message and stop.
            if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
                    || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
                List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
                for (List<String> partitioningKey : partitioningKeys) {
                    IAType keyType = aRecordType.getSubFieldType(partitioningKey);
                    ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
                    // If it is not a fixed length
                    if (typeTrait.getFixedLength() < 0) {
                        throw new AlgebricksException("The keyword or ngram index -" + indexName
                                + " cannot be created on the dataset -" + datasetName
                                + " due to its variable-length primary key field - " + partitioningKey);
                    }
                }
            }
            if (ds.getDatasetType() == DatasetType.INTERNAL) {
                validateIfResourceIsActiveInFeed(dataverseName, datasetName);
            } else {
                // External dataset
                // Check if the dataset is indexible
                if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
                    throw new AlgebricksException(
                            "dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
                                    + " Adapter can't be indexed");
                }
                // Check if the name of the index is valid
                if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
                    throw new AlgebricksException("external dataset index name is invalid");
                }
                // Check if the files index exist
                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                firstExternalDatasetIndex = (filesIndex == null);
                // Lock external dataset
                ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
                datasetLocked = true;
                if (firstExternalDatasetIndex) {
                    // Verify that no one has created an index before we acquire the lock
                    filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                            dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                    if (filesIndex != null) {
                        // Lost the race: downgrade to a non-first index lock.
                        ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
                        firstExternalDatasetIndex = false;
                        ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
                    }
                }
                if (firstExternalDatasetIndex) {
                    // Get snapshot from External File System
                    externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
                    // Add an entry for the files index
                    filesIndex = new Index(dataverseName, datasetName,
                            ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
                            ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES, null,
                            ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
                            IMetadataEntity.PENDING_ADD_OP);
                    MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                    // Add files to the external files index
                    for (ExternalFile file : externalFilesSnapshot) {
                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                    }
                    // This is the first index for the external dataset, replicate the files index
                    spec = ExternalIndexingOperations.buildFilesIndexReplicationJobSpec(ds, externalFilesSnapshot,
                            metadataProvider, true);
                    if (spec == null) {
                        throw new AsterixException(
                                "Failed to create job spec for replicating Files Index For external dataset");
                    }
                    filesIndexReplicated = true;
                    JobUtils.runJob(hcc, spec, true);
                }
            }
            // check whether there exists another enforced index on the same field
            if (stmtCreateIndex.isEnforced()) {
                List<Index> indexes = MetadataManager.INSTANCE
                        .getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                for (Index index : indexes) {
                    if (index.getKeyFieldNames().equals(indexFields)
                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds()) {
                        throw new AsterixException("Cannot create index " + indexName + " , enforced index "
                                + index.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',')
                                + "\" is already defined with type \"" + index.getKeyFieldTypes() + "\"");
                    }
                }
            }
            // #. add a new index with PendingAddOp
            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields,
                    keySourceIndicators, indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(),
                    false, IMetadataEntity.PENDING_ADD_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
            ARecordType enforcedType = null;
            if (stmtCreateIndex.isEnforced()) {
                enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType,
                        Lists.newArrayList(index));
            }
            // #. prepare to create the index artifact in NC.
            CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                    index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
                    index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, enforcedType, metadataProvider);
            if (spec == null) {
                throw new AsterixException("Failed to create job spec for creating index '"
                        + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
            }
            // Commit before running the job: jobs must run outside metadata transactions.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            // #. create the index artifact in NC.
            JobUtils.runJob(hcc, spec, true);
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. load data into the index in NC.
            cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
                    index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                    index.getGramLength(), index.getIndexType());
            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, enforcedType, metadataProvider);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            JobUtils.runJob(hcc, spec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. add another new index with PendingNoOp after deleting the index with PendingAddOp
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                    indexName);
            index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
            // add another new files index with PendingNoOp after deleting the index with
            // PendingAddOp
            if (firstExternalDatasetIndex) {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                        filesIndex.getIndexName());
                filesIndex.setPendingOp(IMetadataEntity.PENDING_NO_OP);
                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                // update transaction timestamp
                ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            // If files index was replicated for external dataset, it should be cleaned up on NC side
            if (filesIndexReplicated) {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                        ExternalIndexingOperations.getFilesIndexName(datasetName));
                try {
                    JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                            metadataProvider, ds);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    bActiveTxn = false;
                    JobUtils.runJob(hcc, jobSpec, true);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    if (bActiveTxn) {
                        abort(e, e2, mdTxnCtx);
                    }
                }
            }
            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
                // #. execute compensation operations
                // remove the index in NC
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
                try {
                    JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                            ds);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    bActiveTxn = false;
                    JobUtils.runJob(hcc, jobSpec, true);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    if (bActiveTxn) {
                        abort(e, e2, mdTxnCtx);
                    }
                }
                if (firstExternalDatasetIndex) {
                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
                    try {
                        // Drop External Files from metadata
                        MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    } catch (Exception e2) {
                        e.addSuppressed(e2);
                        abort(e, e2, mdTxnCtx);
                        throw new IllegalStateException("System is inconsistent state: pending files for("
                                + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
                    }
                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
                    try {
                        // Drop the files index from metadata
                        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                                datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    } catch (Exception e2) {
                        e.addSuppressed(e2);
                        abort(e, e2, mdTxnCtx);
                        throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
                                + "." + datasetName + "." + ExternalIndexingOperations.getFilesIndexName(datasetName)
                                + ") couldn't be removed from the metadata", e);
                    }
                }
                // remove the record from the metadata.
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, indexName);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is in inconsistent state: pending index(" + dataverseName
                            + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
                }
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
            if (datasetLocked) {
                ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
            }
        }
    }
private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
TypeDecl stmtCreateType = (TypeDecl) stmt;
String dataverseName = getActiveDataverse(stmtCreateType.getDataverseName());
String typeName = stmtCreateType.getIdent().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.createTypeBegin(dataverseName, dataverseName + "." + typeName);
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
throw new AlgebricksException("Unknown dataverse " + dataverseName);
}
Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (dt != null) {
if (!stmtCreateType.getIfNotExists()) {
throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
}
} else {
if (builtinTypeMap.get(typeName) != null) {
throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
} else {
Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx,
stmtCreateType.getTypeDef(), stmtCreateType.getIdent().getValue(), dataverseName);
TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
IAType type = typeMap.get(typeSignature);
MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
}
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.createTypeEnd(dataverseName, dataverseName + "." + typeName);
}
}
private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
String dataverseName = stmtDelete.getDataverseName().getValue();
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
if (stmtDelete.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
}
}
// # disconnect all feeds from any datasets in the dataverse.
List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE
.getActiveFeedConnections(null);
DisconnectFeedStatement disStmt = null;
Identifier dvId = new Identifier(dataverseName);
for (FeedConnectionId connection : activeFeedConnections) {
FeedId feedId = connection.getFeedId();
if (feedId.getDataverse().equals(dataverseName)) {
disStmt = new DisconnectFeedStatement(dvId, new Identifier(feedId.getFeedName()),
new Identifier(connection.getDatasetName()));
try {
handleDisconnectFeedStatement(metadataProvider, disStmt, hcc);
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Disconnected feed " + feedId.getFeedName() + " from dataset "
+ connection.getDatasetName());
}
} catch (Exception exception) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Unable to disconnect feed " + feedId.getFeedName() + " from dataset "
+ connection.getDatasetName() + ". Encountered exception " + exception);
}
}
// prepare job to remove feed log storage
jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(
MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedId.getFeedName())));
}
}
// #. prepare jobs which will drop corresponding datasets with indexes.
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
for (int j = 0; j < datasets.size(); j++) {
String datasetName = datasets.get(j).getDatasetName();
DatasetType dsType = datasets.get(j).getDatasetType();
if (dsType == DatasetType.INTERNAL) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
datasetName);
for (int k = 0; k < indexes.size(); k++) {
if (indexes.get(k).isSecondaryIndex()) {
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
indexes.get(k).getIndexName());
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
datasets.get(j)));
}
}
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
} else {
// External dataset
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
datasetName);
for (int k = 0; k < indexes.size(); k++) {
if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
indexes.get(k).getIndexName());
jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
metadataProvider, datasets.get(j)));
} else {
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
indexes.get(k).getIndexName());
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
datasets.get(j)));
}
}
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
}
}
jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
// #. mark PendingDropOp on the dataverse record by
// first, deleting the dataverse record from the DATAVERSE_DATASET
// second, inserting the dataverse record with the PendingDropOp value into the
// DATAVERSE_DATASET
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
new Dataverse(dataverseName, dv.getDataFormat(), IMetadataEntity.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the dataverse.
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dataverseName) {
activeDefaultDataverse = null;
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dataverseName) {
activeDefaultDataverse = null;
}
// #. execute compensation operations
// remove the all indexes in NC
try {
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
} catch (Exception e2) {
// do no throw exception since still the metadata needs to be compensated.
e.addSuppressed(e2);
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName
+ ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
}
}
    /**
     * Translates a DATASET DROP statement.
     *
     * For internal datasets: disconnects active feeds, prepares drop jobs for
     * the dataset and its secondary indexes, marks the dataset record as
     * PENDING_DROP_OP, runs the jobs outside any metadata transaction, then
     * deletes the record and (if non-default) the dataset's nodegroup. For
     * external datasets the same protocol applies, but files indexes get their
     * dedicated drop jobs and no feeds are involved. On failure after the
     * pending record was added, compensation re-runs the jobs and removes the
     * record.
     *
     * @param metadataProvider provider for metadata transactions and job construction
     * @param stmt the DropStatement being executed
     * @param hcc connection used to run the drop jobs on the cluster
     * @throws Exception if the dataset is missing (without IF EXISTS) or any
     *         metadata/runtime step fails; IllegalStateException signals that
     *         compensation failed and metadata may be inconsistent
     */
    private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        DropStatement stmtDelete = (DropStatement) stmt;
        String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
        String datasetName = stmtDelete.getDatasetName().getValue();
        ProgressState progress = ProgressState.NO_PROGRESS;
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // Serialize concurrent DDL on the same dataset; released in the finally block.
        MetadataLockManager.INSTANCE.dropDatasetBegin(dataverseName, dataverseName + "." + datasetName);
        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
        try {
            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
            if (ds == null) {
                if (stmtDelete.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                            + dataverseName + ".");
                }
            }
            Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<FeedConnectionId, Pair<JobSpecification, Boolean>>();
            if (ds.getDatasetType() == DatasetType.INTERNAL) {
                // prepare job spec(s) that would disconnect any active feeds involving the dataset.
                List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
                if (feedConnections != null && !feedConnections.isEmpty()) {
                    for (FeedConnectionId connection : feedConnections) {
                        Pair<JobSpecification, Boolean> p = FeedOperations.buildDisconnectFeedJobSpec(metadataProvider,
                                connection);
                        disconnectJobList.put(connection, p);
                        if (LOGGER.isLoggable(Level.INFO)) {
                            LOGGER.info("Disconnecting feed " + connection.getFeedId().getFeedName() + " from dataset "
                                    + datasetName + " as dataset is being dropped");
                        }
                        // prepare job to remove feed log storage
                        jobsToExecute
                                .add(FeedOperations.buildRemoveFeedStorageJob(MetadataManager.INSTANCE.getFeed(mdTxnCtx,
                                        connection.getFeedId().getDataverse(), connection.getFeedId().getFeedName())));
                    }
                }
                // #. prepare jobs to drop the datatset and the indexes in NC
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int j = 0; j < indexes.size(); j++) {
                    if (indexes.get(j).isSecondaryIndex()) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                indexes.get(j).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
                    }
                }
                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
                // #. mark the existing dataset as PendingDropOp
                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                MetadataManager.INSTANCE.addDataset(mdTxnCtx,
                        new Dataset(dataverseName, datasetName, ds.getItemTypeDataverseName(), ds.getItemTypeName(),
                                ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName(), ds.getNodeGroupName(),
                                ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(),
                                ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
                                IMetadataEntity.PENDING_DROP_OP));
                // Commit before running jobs: jobs must run outside metadata transactions.
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
                // # disconnect the feeds
                for (Pair<JobSpecification, Boolean> p : disconnectJobList.values()) {
                    JobUtils.runJob(hcc, p.first, true);
                }
                // #. run the jobs
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
            } else {
                // External dataset
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
                // #. prepare jobs to drop the datatset and the indexes in NC
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int j = 0; j < indexes.size(); j++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                indexes.get(j).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
                    } else {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                indexes.get(j).getIndexName());
                        jobsToExecute
                                .add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds, metadataProvider, ds));
                    }
                }
                // #. mark the existing dataset as PendingDropOp
                // NOTE(review): unlike the internal branch above, this constructor call omits
                // the meta item type dataverse/name arguments — presumably external datasets
                // never carry a meta part; confirm against the Dataset constructor overloads.
                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                MetadataManager.INSTANCE.addDataset(mdTxnCtx,
                        new Dataset(dataverseName, datasetName, ds.getItemTypeDataverseName(), ds.getItemTypeName(),
                                ds.getNodeGroupName(), ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(),
                                ds.getDatasetDetails(), ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
                                IMetadataEntity.PENDING_DROP_OP));
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
                // #. run the jobs
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
                // NOTE(review): removeDatasetInfo was already called at the top of this
                // branch; this second call looks redundant — presumably idempotent, confirm.
                if (indexes.size() > 0) {
                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                bActiveTxn = true;
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
            }
            // #. finally, delete the dataset.
            MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
            // Drop the associated nodegroup
            String nodegroup = ds.getNodeGroupName();
            if (!nodegroup.equalsIgnoreCase(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, dataverseName + ":" + datasetName);
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
                // #. execute compensation operations
                // remove the all indexes in NC
                try {
                    for (JobSpecification jobSpec : jobsToExecute) {
                        JobUtils.runJob(hcc, jobSpec, true);
                    }
                } catch (Exception e2) {
                    // do no throw exception since still the metadata needs to be compensated.
                    e.addSuppressed(e2);
                }
                // remove the record from the metadata.
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
                            + "." + datasetName + ") couldn't be removed from the metadata", e);
                }
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.dropDatasetEnd(dataverseName, dataverseName + "." + datasetName);
        }
    }
private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
String datasetName = stmtIndexDrop.getDatasetName().getValue();
String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);
String indexName = null;
// For external index
boolean dropFilesIndex = false;
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
try {
Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
if (ds == null) {
throw new AlgebricksException(
"There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
}
List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
boolean resourceInUse = false;
if (feedConnections != null && !feedConnections.isEmpty()) {
StringBuilder builder = new StringBuilder();
for (FeedConnectionId connection : feedConnections) {
if (connection.getDatasetName().equals(datasetName)) {
resourceInUse = true;
builder.append(connection + "\n");
}
}
if (resourceInUse) {
throw new AsterixException(
"Dataset" + datasetName + " is currently being fed into by the following feeds " + "."
+ builder.toString() + "\nOperation not supported.");
}
}
if (ds.getDatasetType() == DatasetType.INTERNAL) {
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
if (stmtIndexDrop.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
}
// #. prepare a job to drop the index in NC.
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
// #. mark PendingDropOp on the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.addIndex(mdTxnCtx,
new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(),
index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(),
index.isEnforcingKeyFileds(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
// #. commit the existing transaction before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
// #. begin a new transaction
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
} else {
// External dataset
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
if (stmtIndexDrop.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
} else if (ExternalIndexingOperations.isFileIndex(index)) {
throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
}
// #. prepare a job to drop the index in NC.
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
datasetName);
if (datasetIndexes.size() == 2) {
dropFilesIndex = true;
// only one index + the files index, we need to delete both of the indexes
for (Index externalIndex : datasetIndexes) {
if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
cds = new CompiledIndexDropStatement(dataverseName, datasetName,
externalIndex.getIndexName());
jobsToExecute.add(
ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds, metadataProvider, ds));
// #. mark PendingDropOp on the existing files index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
externalIndex.getIndexName());
MetadataManager.INSTANCE.addIndex(mdTxnCtx,
new Index(dataverseName, datasetName, externalIndex.getIndexName(),
externalIndex.getIndexType(), externalIndex.getKeyFieldNames(),
externalIndex.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(),
index.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(),
IMetadataEntity.PENDING_DROP_OP));
}
}
}
// #. mark PendingDropOp on the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.addIndex(mdTxnCtx,
new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(),
index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(),
index.isEnforcingKeyFileds(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
// #. commit the existing transaction before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
// #. begin a new transaction
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (dropFilesIndex) {
// delete the files index too
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
ExternalIndexingOperations.getFilesIndexName(datasetName));
MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
}
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// #. execute compensation operations
// remove the all indexes in NC
try {
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
} catch (Exception e2) {
// do no throw exception since still the metadata needs to be compensated.
e.addSuppressed(e2);
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName, indexName);
if (dropFilesIndex) {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "."
+ datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
MetadataLockManager.INSTANCE.dropIndexEnd(dataverseName, dataverseName + "." + datasetName);
}
}
private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
String dataverseName = getActiveDataverse(stmtTypeDrop.getDataverseName());
String typeName = stmtTypeDrop.getTypeName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.dropTypeBegin(dataverseName, dataverseName + "." + typeName);
try {
Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (dt == null) {
if (!stmtTypeDrop.getIfExists()) {
throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
}
} else {
MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.dropTypeEnd(dataverseName, dataverseName + "." + typeName);
}
}
private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
String nodegroupName = stmtDelete.getNodeGroupName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(nodegroupName);
try {
NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
if (ng == null) {
if (!stmtDelete.getIfExists()) {
throw new AlgebricksException("There is no nodegroup with this name " + nodegroupName + ".");
}
} else {
MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.releaseNodeGroupWriteLock(nodegroupName);
}
}
private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
String dataverse = getActiveDataverseName(cfs.getSignature().getNamespace());
String functionName = cfs.getaAterixFunction().getName();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.functionStatementBegin(dataverse, dataverse + "." + functionName);
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
if (dv == null) {
throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
}
Function function = new Function(dataverse, functionName, cfs.getaAterixFunction().getArity(),
cfs.getParamList(), Function.RETURNTYPE_VOID, cfs.getFunctionBody(), Function.LANGUAGE_AQL,
FunctionKind.SCALAR.toString());
MetadataManager.INSTANCE.addFunction(mdTxnCtx, function);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.functionStatementEnd(dataverse, dataverse + "." + functionName);
}
}
private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
FunctionSignature signature = stmtDropFunction.getFunctionSignature();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.functionStatementBegin(signature.getNamespace(),
signature.getNamespace() + "." + signature.getName());
try {
Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
if (function == null) {
if (!stmtDropFunction.getIfExists()) {
throw new AlgebricksException("Unknonw function " + signature);
}
} else {
MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.functionStatementEnd(signature.getNamespace(),
signature.getNamespace() + "." + signature.getName());
}
}
private void handleLoadStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
throws Exception {
LoadStatement loadStmt = (LoadStatement) stmt;
String dataverseName = getActiveDataverse(loadStmt.getDataverseName());
String datasetName = loadStmt.getDatasetName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.modifyDatasetBegin(dataverseName, dataverseName + "." + datasetName);
try {
CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName,
loadStmt.getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
loadStmt.dataIsAlreadySorted());
JobSpecification spec = apiFramework.compileQuery(null, metadataProvider, null, 0, null, sessionConfig,
cls);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
if (spec != null) {
JobUtils.runJob(hcc, spec, true);
}
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
throw e;
} finally {
MetadataLockManager.INSTANCE.modifyDatasetEnd(dataverseName, dataverseName + "." + datasetName);
}
}
    /**
     * Executes an INSERT or UPSERT statement: compiles the source query into a
     * job that writes into the target dataset and runs it on the cluster.
     * The metadata transaction is committed before the job runs; locks on the
     * target dataset and all datasets read by the query are held until the end.
     *
     * @param metadataProvider provider bound to the metadata transaction
     * @param stmt the InsertStatement (kind INSERT or UPSERT) to execute
     * @param hcc connection used to submit the compiled job to Hyracks
     * @throws AlgebricksException if the statement kind is neither INSERT nor UPSERT
     * @throws Exception if compilation or job execution fails
     */
    private void handleInsertUpsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        InsertStatement stmtInsertUpsert = (InsertStatement) stmt;
        String dataverseName = getActiveDataverse(stmtInsertUpsert.getDataverseName());
        Query query = stmtInsertUpsert.getQuery();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // Lock the target dataset plus everything the source query reads.
        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
                dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(), query.getDatasets());
        try {
            metadataProvider.setWriteTransaction(true);
            CompiledInsertStatement clfrqs = null;
            switch (stmtInsertUpsert.getKind()) {
                case INSERT:
                    clfrqs = new CompiledInsertStatement(dataverseName, stmtInsertUpsert.getDatasetName().getValue(),
                            query, stmtInsertUpsert.getVarCounter());
                    break;
                case UPSERT:
                    // CompiledUpsertStatement is a subtype of CompiledInsertStatement.
                    clfrqs = new CompiledUpsertStatement(dataverseName, stmtInsertUpsert.getDatasetName().getValue(),
                            query, stmtInsertUpsert.getVarCounter());
                    break;
                default:
                    throw new AlgebricksException("Unsupported statement type " + stmtInsertUpsert.getKind());
            }
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, query, clfrqs);
            // Commit the metadata transaction before running the (potentially long) job.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                JobUtils.runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            // Abort only while the metadata transaction is still open.
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
                    dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(),
                    query.getDatasets());
        }
    }
    /**
     * Executes a DELETE statement: compiles the delete (with its optional WHERE
     * condition) into a job against the target dataset and runs it on the
     * cluster. The metadata transaction is committed before the job runs.
     *
     * @param metadataProvider provider bound to the metadata transaction
     * @param stmt the DeleteStatement to execute
     * @param hcc connection used to submit the compiled job to Hyracks
     * @throws Exception if compilation or job execution fails
     */
    private void handleDeleteStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        DeleteStatement stmtDelete = (DeleteStatement) stmt;
        String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // Lock the target dataset plus everything referenced by the delete query.
        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
                dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
                stmtDelete.getDatasets());
        try {
            metadataProvider.setWriteTransaction(true);
            CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
                    stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
                    stmtDelete.getQuery());
            JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
            // Commit the metadata transaction before running the (potentially long) job.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            if (compiled != null) {
                JobUtils.runJob(hcc, compiled, true);
            }
        } catch (Exception e) {
            // Abort only while the metadata transaction is still open.
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
                    dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
                    stmtDelete.getDatasets());
        }
    }
private JobSpecification rewriteCompileQuery(AqlMetadataProvider metadataProvider, Query query,
ICompiledDmlStatement stmt)
throws AsterixException, RemoteException, AlgebricksException, JSONException, ACIDException {
// Query Rewriting (happens under the same ongoing metadata transaction)
Pair<Query, Integer> reWrittenQuery = apiFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
sessionConfig);
// Query Compilation (happens under the same ongoing metadata transaction)
JobSpecification spec = apiFramework.compileQuery(declaredFunctions, metadataProvider, reWrittenQuery.first,
reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, stmt);
return spec;
}
    /**
     * Executes CREATE FEED (primary or secondary): registers a new feed entity in
     * the metadata catalog, honoring the IF NOT EXISTS clause. A primary feed
     * carries an adaptor name/configuration; a secondary feed references a source
     * feed instead.
     *
     * @param metadataProvider provider bound to the metadata transaction
     * @param stmt a CreatePrimaryFeedStatement or CreateSecondaryFeedStatement
     * @param hcc Hyracks client connection (not used by this handler)
     * @throws AlgebricksException if a feed with the same name exists (without IF NOT EXISTS)
     * @throws IllegalStateException if the statement kind is not a feed-creation kind
     */
    private void handleCreateFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        CreateFeedStatement cfs = (CreateFeedStatement) stmt;
        String dataverseName = getActiveDataverse(cfs.getDataverseName());
        String feedName = cfs.getFeedName().getValue();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        MetadataLockManager.INSTANCE.createFeedBegin(dataverseName, dataverseName + "." + feedName);
        Feed feed = null;
        try {
            feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
            if (feed != null) {
                // Feed already present: IF NOT EXISTS makes this a no-op.
                if (cfs.getIfNotExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("A feed with this name " + feedName + " already exists.");
                }
            }
            switch (stmt.getKind()) {
                case CREATE_PRIMARY_FEED:
                    CreatePrimaryFeedStatement cpfs = (CreatePrimaryFeedStatement) stmt;
                    String adaptorName = cpfs.getAdaptorName();
                    // A primary feed is its own source; it carries the adaptor configuration.
                    feed = new Feed(dataverseName, feedName, cfs.getAppliedFunction(), FeedType.PRIMARY, feedName,
                            adaptorName, cpfs.getAdaptorConfiguration());
                    break;
                case CREATE_SECONDARY_FEED:
                    CreateSecondaryFeedStatement csfs = (CreateSecondaryFeedStatement) stmt;
                    // A secondary feed references a source feed and has no adaptor of its own.
                    feed = new Feed(dataverseName, feedName, csfs.getAppliedFunction(), FeedType.SECONDARY,
                            csfs.getSourceFeedName(), null, null);
                    break;
                default:
                    throw new IllegalStateException();
            }
            MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            abort(e, e, mdTxnCtx);
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.createFeedEnd(dataverseName, dataverseName + "." + feedName);
        }
    }
private void handleCreateFeedPolicyStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
String dataverse;
String policy;
FeedPolicyEntity newPolicy = null;
CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
dataverse = getActiveDataverse(null);
policy = cfps.getPolicyName();
MetadataLockManager.INSTANCE.createFeedPolicyBegin(dataverse, dataverse + "." + policy);
try {
FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE
.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
if (feedPolicy != null) {
if (cfps.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("A policy with this name " + policy + " already exists.");
}
}
boolean extendingExisting = cfps.getSourcePolicyName() != null;
String description = cfps.getDescription() == null ? "" : cfps.getDescription();
if (extendingExisting) {
FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE
.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
if (sourceFeedPolicy == null) {
sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
if (sourceFeedPolicy == null) {
throw new AlgebricksException("Unknown policy " + cfps.getSourcePolicyName());
}
}
Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
policyProperties.putAll(cfps.getProperties());
newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
} else {
Properties prop = new Properties();
try {
InputStream stream = new FileInputStream(cfps.getSourcePolicyFile());
prop.load(stream);
} catch (Exception e) {
throw new AlgebricksException("Unable to read policy file" + cfps.getSourcePolicyFile());
}
Map<String, String> policyProperties = new HashMap<String, String>();
for (Entry<Object, Object> entry : prop.entrySet()) {
policyProperties.put((String) entry.getKey(), (String) entry.getValue());
}
newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
}
MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.createFeedPolicyEnd(dataverse, dataverse + "." + policy);
}
}
private void handleDropFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
FeedDropStatement stmtFeedDrop = (FeedDropStatement) stmt;
String dataverseName = getActiveDataverse(stmtFeedDrop.getDataverseName());
String feedName = stmtFeedDrop.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.dropFeedBegin(dataverseName, dataverseName + "." + feedName);
try {
Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
if (feed == null) {
if (!stmtFeedDrop.getIfExists()) {
throw new AlgebricksException("There is no feed with this name " + feedName + ".");
}
}
FeedId feedId = new FeedId(dataverseName, feedName);
List<FeedConnectionId> activeConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(feedId);
if (activeConnections != null && !activeConnections.isEmpty()) {
StringBuilder builder = new StringBuilder();
for (FeedConnectionId connectionId : activeConnections) {
builder.append(connectionId.getDatasetName() + "\n");
}
throw new AlgebricksException("Feed " + feedId
+ " is currently active and connected to the following dataset(s) \n" + builder.toString());
} else {
JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(
MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getFeedName()));
JobUtils.runJob(hcc, spec, true);
MetadataManager.INSTANCE.dropFeed(mdTxnCtx, dataverseName, feedName);
}
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Removed feed " + feedId);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.dropFeedEnd(dataverseName, dataverseName + "." + feedName);
}
}
private void handleDropFeedPolicyStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
FeedPolicyDropStatement stmtFeedPolicyDrop = (FeedPolicyDropStatement) stmt;
String dataverseName = getActiveDataverse(stmtFeedPolicyDrop.getDataverseName());
String policyName = stmtFeedPolicyDrop.getPolicyName().getValue();
MetadataLockManager.INSTANCE.dropFeedPolicyBegin(dataverseName, dataverseName + "." + policyName);
try {
FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
if (feedPolicy == null) {
if (!stmtFeedPolicyDrop.getIfExists()) {
throw new AlgebricksException("Unknown policy " + policyName + " in dataverse " + dataverseName);
}
}
MetadataManager.INSTANCE.dropFeedPolicy(mdTxnCtx, dataverseName, policyName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
MetadataLockManager.INSTANCE.dropFeedPolicyEnd(dataverseName, dataverseName + "." + policyName);
}
}
    /**
     * Executes CONNECT FEED: validates the feed/dataset/policy, builds a feed
     * connection request, optionally starts a feed-intake job (when the source
     * primary feed is not already active), and waits for the collect phase to
     * start (and, if configured, to end).
     *
     * @param metadataProvider provider bound to the metadata transaction
     * @param stmt the ConnectFeedStatement to execute
     * @param hcc connection used to run the intake job
     * @throws AsterixException if the feed is already connected to the dataset
     */
    private void handleConnectFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
        String dataverseName = getActiveDataverse(cfs.getDataverseName());
        String feedName = cfs.getFeedName();
        String datasetName = cfs.getDatasetName().getValue();
        boolean bActiveTxn = true;
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        boolean subscriberRegistered = false;
        IFeedLifecycleEventSubscriber eventSubscriber = new FeedLifecycleEventSubscriber();
        FeedConnectionId feedConnId = null;
        // Lock both the target dataset and the feed for the duration of the connect.
        MetadataLockManager.INSTANCE.connectFeedBegin(dataverseName, dataverseName + "." + datasetName,
                dataverseName + "." + feedName);
        try {
            metadataProvider.setWriteTransaction(true);
            CompiledConnectFeedStatement cbfs = new CompiledConnectFeedStatement(dataverseName, cfs.getFeedName(),
                    cfs.getDatasetName().getValue(), cfs.getPolicy(), cfs.getQuery(), cfs.getVarCounter());
            FeedMetadataUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(),
                    metadataProvider.getMetadataTxnContext());
            Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName(),
                    metadataProvider.getMetadataTxnContext());
            feedConnId = new FeedConnectionId(dataverseName, cfs.getFeedName(), cfs.getDatasetName().getValue());
            // NOTE: isFeedConnectionActive also registers eventSubscriber if the
            // connection is active; here it is used purely as an "already connected" check.
            subscriberRegistered = FeedLifecycleListener.INSTANCE.isFeedConnectionActive(feedConnId, eventSubscriber);
            if (subscriberRegistered) {
                throw new AsterixException("Feed " + cfs.getFeedName() + " is already connected to dataset "
                        + cfs.getDatasetName().getValue());
            }
            FeedPolicyEntity feedPolicy = FeedMetadataUtil.validateIfPolicyExists(dataverseName, cbfs.getPolicyName(),
                    mdTxnCtx);
            // All Metadata checks have passed. Feed connect request is valid. //
            FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(feedPolicy.getProperties());
            // triple = (connection request, whether an intake job must be started,
            //           the feed joints to register).
            Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> triple = getFeedConnectionRequest(dataverseName,
                    feed, cbfs.getDatasetName(), feedPolicy, mdTxnCtx);
            FeedConnectionRequest connectionRequest = triple.first;
            boolean createFeedIntakeJob = triple.second;
            FeedLifecycleListener.INSTANCE.registerFeedEventSubscriber(feedConnId, eventSubscriber);
            subscriberRegistered = true;
            if (createFeedIntakeJob) {
                // The source primary feed is not active: build and run its intake job.
                FeedId feedId = connectionRequest.getFeedJointKey().getFeedId();
                Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(),
                        feedId.getFeedName());
                Pair<JobSpecification, IAdapterFactory> pair = FeedOperations.buildFeedIntakeJobSpec(primaryFeed,
                        metadataProvider, policyAccessor);
                // adapter configuration are valid at this stage
                // register the feed joints (these are auto-de-registered)
                int numOfPrividers = pair.second.getPartitionConstraint().getLocations().length;
                for (IFeedJoint fj : triple.third) {
                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj, numOfPrividers);
                }
                JobUtils.runJob(hcc, pair.first, false);
                /*
                 * TODO: Fix record tracking
                 * IFeedAdapterFactory adapterFactory = pair.second;
                 * if (adapterFactory.isRecordTrackingEnabled()) {
                 * FeedLifecycleListener.INSTANCE.registerFeedIntakeProgressTracker(feedConnId,
                 * adapterFactory.createIntakeProgressTracker());
                 * }
                 */
                eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_INTAKE_STARTED);
            } else {
                for (IFeedJoint fj : triple.third) {
                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj, 0);
                }
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            // Wait until the collect phase of the connection has started.
            eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_COLLECT_STARTED);
            if (Boolean.valueOf(metadataProvider.getConfig().get(ConnectFeedStatement.WAIT_FOR_COMPLETION))) {
                eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_COLLECT_ENDED); // blocking call
            }
        } catch (Exception e) {
            // Abort only while the metadata transaction is still open.
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.connectFeedEnd(dataverseName, dataverseName + "." + datasetName,
                    dataverseName + "." + feedName);
            if (subscriberRegistered) {
                FeedLifecycleListener.INSTANCE.deregisterFeedEventSubscriber(feedConnId, eventSubscriber);
            }
        }
    }
    /**
     * Generates a subscription request corresponding to a connect feed request. In addition, provides a boolean
     * flag indicating if feed intake job needs to be started (source primary feed not found to be active).
     *
     * @param dataverse the dataverse containing the feed and dataset
     * @param feed the feed being connected
     * @param dataset the name of the target dataset
     * @param feedPolicy the ingestion policy governing the connection
     * @param mdTxnCtx the ongoing metadata transaction
     * @return a triple of (connection request, whether an intake job must be started,
     *         the feed joints that must be registered)
     * @throws MetadataException if a feed lookup fails
     */
    private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> getFeedConnectionRequest(String dataverse,
            Feed feed, String dataset, FeedPolicyEntity feedPolicy, MetadataTransactionContext mdTxnCtx)
            throws MetadataException {
        IFeedJoint sourceFeedJoint = null;
        FeedConnectionRequest request = null;
        List<String> functionsToApply = new ArrayList<String>();
        boolean needIntakeJob = false;
        List<IFeedJoint> jointsToRegister = new ArrayList<IFeedJoint>();
        FeedConnectionId connectionId = new FeedConnectionId(feed.getFeedId(), dataset);
        FeedRuntimeType connectionLocation = null;
        // Key identifying the joint at which this feed's tuples would be available.
        FeedJointKey feedJointKey = getFeedJointKey(feed, mdTxnCtx);
        boolean isFeedJointAvailable = FeedLifecycleListener.INSTANCE.isFeedJointAvailable(feedJointKey);
        if (!isFeedJointAvailable) {
            // No exact joint: look for the closest available joint on the feed's lineage.
            sourceFeedJoint = FeedLifecycleListener.INSTANCE.getAvailableFeedJoint(feedJointKey);
            if (sourceFeedJoint == null) { // the feed is currently not being ingested, i.e., it is unavailable.
                connectionLocation = FeedRuntimeType.INTAKE;
                FeedId sourceFeedId = feedJointKey.getFeedId(); // the root/primary feedId
                Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverse, sourceFeedId.getFeedName());
                FeedJointKey intakeFeedJointKey = new FeedJointKey(sourceFeedId, new ArrayList<String>());
                sourceFeedJoint = new FeedJoint(intakeFeedJointKey, primaryFeed.getFeedId(), connectionLocation,
                        FeedJointType.INTAKE, connectionId);
                jointsToRegister.add(sourceFeedJoint);
                needIntakeJob = true;
            } else {
                connectionLocation = sourceFeedJoint.getConnectionLocation();
            }
            // The functions still to be applied are the suffix of this feed's function
            // chain beyond what the source joint already provides.
            String[] functions = feedJointKey.getStringRep()
                    .substring(sourceFeedJoint.getFeedJointKey().getStringRep().length()).trim().split(":");
            for (String f : functions) {
                if (f.trim().length() > 0) {
                    functionsToApply.add(f);
                }
            }
            // register the compute feed point that represents the final output from the collection of
            // functions that will be applied.
            if (!functionsToApply.isEmpty()) {
                FeedJointKey computeFeedJointKey = new FeedJointKey(feed.getFeedId(), functionsToApply);
                IFeedJoint computeFeedJoint = new FeedJoint(computeFeedJointKey, feed.getFeedId(),
                        FeedRuntimeType.COMPUTE, FeedJointType.COMPUTE, connectionId);
                jointsToRegister.add(computeFeedJoint);
            }
        } else {
            // An exact joint already exists: subscribe directly, no extra computation.
            sourceFeedJoint = FeedLifecycleListener.INSTANCE.getFeedJoint(feedJointKey);
            connectionLocation = sourceFeedJoint.getConnectionLocation();
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Feed joint " + sourceFeedJoint + " is available! need not apply any further computation");
            }
        }
        request = new FeedConnectionRequest(sourceFeedJoint.getFeedJointKey(), connectionLocation, functionsToApply,
                dataset, feedPolicy.getPolicyName(), feedPolicy.getProperties(), feed.getFeedId());
        sourceFeedJoint.addConnectionRequest(request);
        return new Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>>(request, needIntakeJob, jointsToRegister);
    }
    /*
     * Gets the feed joint corresponding to the feed definition. Tuples constituting the feed are
     * available at this feed joint.
     */
    private FeedJointKey getFeedJointKey(Feed feed, MetadataTransactionContext ctx) throws MetadataException {
        Feed sourceFeed = feed;
        List<String> appliedFunctions = new ArrayList<String>();
        // Walk up the chain of secondary feeds to the primary (root) feed, collecting
        // applied functions; prepending keeps them in source-to-sink order.
        while (sourceFeed.getFeedType().equals(IFeed.FeedType.SECONDARY)) {
            if (sourceFeed.getAppliedFunction() != null) {
                appliedFunctions.add(0, sourceFeed.getAppliedFunction().getName());
            }
            // NOTE(review): the lookup uses feed.getDataverseName() (the original feed's
            // dataverse), not sourceFeed.getDataverseName() -- fine only if a feed chain
            // never crosses dataverses; confirm that invariant holds.
            Feed parentFeed = MetadataManager.INSTANCE.getFeed(ctx, feed.getDataverseName(),
                    sourceFeed.getSourceFeedName());
            sourceFeed = parentFeed;
        }
        // Include the primary feed's own applied function, if any.
        if (sourceFeed.getAppliedFunction() != null) {
            appliedFunctions.add(0, sourceFeed.getAppliedFunction().getName());
        }
        return new FeedJointKey(sourceFeed.getFeedId(), appliedFunctions);
    }
    /**
     * Executes DISCONNECT FEED: validates that the feed is actively connected to
     * the dataset, builds and runs a disconnect job, and waits for the collect
     * phase to end.
     *
     * @param metadataProvider provider bound to the metadata transaction
     * @param stmt the DisconnectFeedStatement to execute
     * @param hcc connection used to run the disconnect job
     * @throws AsterixException if the feed is not currently connected to the dataset
     */
    private void handleDisconnectFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
            IHyracksClientConnection hcc) throws Exception {
        DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
        String dataverseName = getActiveDataverse(cfs.getDataverseName());
        String datasetName = cfs.getDatasetName().getValue();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        FeedMetadataUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(), mdTxnCtx);
        Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
        FeedConnectionId connectionId = new FeedConnectionId(feed.getFeedId(), cfs.getDatasetName().getValue());
        IFeedLifecycleEventSubscriber eventSubscriber = new FeedLifecycleEventSubscriber();
        // Registers eventSubscriber when active; disconnecting requires an active connection.
        boolean isFeedConnectionActive = FeedLifecycleListener.INSTANCE.isFeedConnectionActive(connectionId,
                eventSubscriber);
        if (!isFeedConnectionActive) {
            throw new AsterixException("Feed " + feed.getFeedId().getFeedName() + " is currently not connected to "
                    + cfs.getDatasetName().getValue() + ". Invalid operation!");
        }
        MetadataLockManager.INSTANCE.disconnectFeedBegin(dataverseName, dataverseName + "." + datasetName,
                dataverseName + "." + cfs.getFeedName());
        try {
            Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
                    dataverseName, cfs.getDatasetName().getValue());
            if (dataset == null) {
                throw new AsterixException(
                        "Unknown dataset :" + cfs.getDatasetName().getValue() + " in dataverse " + dataverseName);
            }
            // second == whether the job performs a complete (vs. partial) disconnection.
            Pair<JobSpecification, Boolean> specDisconnectType = FeedOperations
                    .buildDisconnectFeedJobSpec(metadataProvider, connectionId);
            JobSpecification jobSpec = specDisconnectType.first;
            // Commit metadata work before running the disconnect job.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            JobUtils.runJob(hcc, jobSpec, true);
            if (!specDisconnectType.second) {
                // Partial disconnection: clean up activity records and notify the listener.
                CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(connectionId);
                FeedLifecycleListener.INSTANCE.reportPartialDisconnection(connectionId);
            }
            // Block until the collect phase of the connection has ended.
            eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_COLLECT_ENDED);
        } catch (Exception e) {
            // Abort only while the metadata transaction is still open.
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            throw e;
        } finally {
            MetadataLockManager.INSTANCE.disconnectFeedEnd(dataverseName, dataverseName + "." + datasetName,
                    dataverseName + "." + cfs.getFeedName());
        }
    }
/**
 * Executes a subscribe-feed statement: compiles the subscription query inside a metadata
 * transaction, alters the resulting job specification to route records through the feed
 * pipeline, and submits the altered job.
 *
 * @param metadataProvider provider whose metadata transaction context is (re)set here;
 *                         switched to write-transaction mode for the subscription
 * @param stmt             the SubscribeFeedStatement to execute
 * @param hcc              Hyracks connection used to run the generated job
 * @throws Exception on compilation, metadata, or job-submission failure (the metadata
 *                   transaction is aborted if still active)
 */
private void handleSubscribeFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Subscriber Feed Statement :" + stmt);
    }
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    metadataProvider.setWriteTransaction(true);
    SubscribeFeedStatement bfs = (SubscribeFeedStatement) stmt;
    bfs.initialize(metadataProvider.getMetadataTxnContext());
    CompiledSubscribeFeedStatement csfs = new CompiledSubscribeFeedStatement(bfs.getSubscriptionRequest(),
            bfs.getVarCounter());
    // Let the compiled plan use internal functions, and hand the feed policy and
    // collect locations to the compiler via the provider's config map.
    metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
    metadataProvider.getConfig().put(FeedActivityDetails.FEED_POLICY_NAME, "" + bfs.getPolicy());
    metadataProvider.getConfig().put(FeedActivityDetails.COLLECT_LOCATIONS,
            StringUtils.join(bfs.getLocations(), ','));
    JobSpecification compiled = rewriteCompileQuery(metadataProvider, bfs.getQuery(), csfs);
    FeedConnectionId feedConnectionId = new FeedConnectionId(bfs.getSubscriptionRequest().getReceivingFeedId(),
            bfs.getSubscriptionRequest().getTargetDataset());
    String dataverse = feedConnectionId.getFeedId().getDataverse();
    String dataset = feedConnectionId.getDatasetName();
    MetadataLockManager.INSTANCE.subscribeFeedBegin(dataverse, dataverse + "." + dataset,
            dataverse + "." + feedConnectionId.getFeedId().getFeedName());
    try {
        JobSpecification alteredJobSpec = FeedMetadataUtil.alterJobSpecificationForFeed(compiled, feedConnectionId,
                bfs.getSubscriptionRequest().getPolicyParameters());
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (compiled != null) {
            JobUtils.runJob(hcc, alteredJobSpec, false);
        }
    } catch (Exception e) {
        // Fix: removed stray e.printStackTrace() -- abort() already reports in debug
        // mode and the exception is rethrown to the caller for proper handling.
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.subscribeFeedEnd(dataverse, dataverse + "." + dataset,
                dataverse + "." + feedConnectionId.getFeedId().getFeedName());
    }
}
/**
 * Handles COMPACT DATASET: builds compaction job specifications for the dataset and each
 * of its indexes, commits the metadata transaction, then runs all jobs.
 *
 * @param metadataProvider provider whose metadata transaction context is (re)set here
 * @param stmt             the CompactStatement to execute
 * @param hcc              Hyracks connection used to run the compaction jobs
 * @throws Exception if the dataset does not exist, has no indexes, or a job fails
 */
private void handleCompactStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    CompactStatement compactStatement = (CompactStatement) stmt;
    String dataverseName = getActiveDataverse(compactStatement.getDataverseName());
    String datasetName = compactStatement.getDatasetName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.compactBegin(dataverseName, dataverseName + "." + datasetName);
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {
        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
        }
        String itemTypeName = ds.getItemTypeName();
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                ds.getItemTypeDataverseName(), itemTypeName);
        // Prepare jobs to compact the dataset and its indexes.
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.size() == 0) {
            // Fix: corrected "extrenal" typo in the user-facing message.
            throw new AlgebricksException(
                    "Cannot compact the external dataset " + datasetName + " because it has no indexes");
        }
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
                dataverseName);
        jobsToExecute.add(DatasetOperations.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
        ARecordType aRecordType = (ARecordType) dt.getDatatype();
        ARecordType enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType, indexes);
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            for (int j = 0; j < indexes.size(); j++) {
                if (indexes.get(j).isSecondaryIndex()) {
                    // TODO(review): this schedules the *dataset* compaction spec once more per
                    // secondary index rather than an index-specific compaction job. Confirm
                    // whether IndexOperations.buildSecondaryIndexCompactJobSpec was intended.
                    jobsToExecute
                            .add(DatasetOperations.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
                }
            }
        } else {
            // External dataset: compact every non-files index plus the files index itself.
            for (int j = 0; j < indexes.size(); j++) {
                if (!ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
                    CompiledIndexCompactStatement cics = new CompiledIndexCompactStatement(dataverseName,
                            datasetName, indexes.get(j).getIndexName(), indexes.get(j).getKeyFieldNames(),
                            indexes.get(j).getKeyFieldTypes(), indexes.get(j).isEnforcingKeyFileds(),
                            indexes.get(j).getGramLength(), indexes.get(j).getIndexType());
                    jobsToExecute.add(IndexOperations.buildSecondaryIndexCompactJobSpec(cics, aRecordType,
                            enforcedType, metadataProvider, ds));
                }
            }
            jobsToExecute.add(ExternalIndexingOperations.compactFilesIndexJobSpec(ds, metadataProvider));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.compactEnd(dataverseName, dataverseName + "." + datasetName);
    }
}
/**
 * Compiles and (optionally) executes a query, delivering results per the requested mode:
 * <ul>
 * <li>ASYNC: print the result handle immediately, then wait for job completion;</li>
 * <li>SYNC: wait for completion and stream the results to the session output;</li>
 * <li>ASYNC_DEFERRED: wait for completion, then print the result handle.</li>
 * </ul>
 *
 * @param metadataProvider provider whose metadata transaction context is (re)set here
 * @param query            the query to compile and run
 * @param hcc              Hyracks connection used to run the compiled job
 * @param hdc              dataset interface used to read the job's result partitions
 * @param resultDelivery   how results should be delivered to the client
 * @param stats            accumulator for result statistics
 * @throws Exception on compilation, metadata, or execution failure
 */
private void handleQuery(AqlMetadataProvider metadataProvider, Query query, IHyracksClientConnection hcc,
        IHyracksDataset hdc, ResultDelivery resultDelivery, ResultUtils.Stats stats) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.queryBegin(activeDefaultDataverse, query.getDataverses(), query.getDatasets());
    JobSpecification compiled = null;
    try {
        compiled = rewriteCompileQuery(metadataProvider, query, null);
        // The metadata transaction is only needed for compilation; commit before running.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (sessionConfig.isExecuteQuery() && compiled != null) {
            GlobalConfig.ASTERIX_LOGGER.info(compiled.toJSON().toString(1));
            JobId jobId = JobUtils.runJob(hcc, compiled, false);
            JSONObject response = new JSONObject();
            switch (resultDelivery) {
                case ASYNC:
                    // Hand the client a (jobId, resultSetId) handle before waiting.
                    JSONArray handle = new JSONArray();
                    handle.put(jobId.getId());
                    handle.put(metadataProvider.getResultSetId().getId());
                    response.put("handle", handle);
                    sessionConfig.out().print(response);
                    sessionConfig.out().flush();
                    hcc.waitForCompletion(jobId);
                    break;
                case SYNC:
                    hcc.waitForCompletion(jobId);
                    ResultReader resultReader = new ResultReader(hcc, hdc);
                    resultReader.open(jobId, metadataProvider.getResultSetId());
                    // In this case (the normal case), we don't use the
                    // "response" JSONObject - just stream the results
                    // to the "out" PrintWriter
                    if (sessionConfig.fmt() == OutputFormat.CSV
                            && sessionConfig.is(SessionConfig.FORMAT_CSV_HEADER)) {
                        ResultUtils.displayCSVHeader(metadataProvider.findOutputRecordType(), sessionConfig);
                    }
                    ResultUtils.displayResults(resultReader, sessionConfig, stats);
                    break;
                case ASYNC_DEFERRED:
                    // Same handle as ASYNC, but only printed once the job finishes.
                    handle = new JSONArray();
                    handle.put(jobId.getId());
                    handle.put(metadataProvider.getResultSetId().getId());
                    response.put("handle", handle);
                    hcc.waitForCompletion(jobId);
                    sessionConfig.out().print(response);
                    sessionConfig.out().flush();
                    break;
                default:
                    break;
            }
        }
    } catch (Exception e) {
        // Fix: removed stray e.printStackTrace() -- abort() already reports in debug
        // mode and the exception is rethrown to the caller.
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.queryEnd(query.getDataverses(), query.getDatasets());
        // release external datasets' locks acquired during compilation of the query
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
/**
 * Handles CREATE NODEGROUP: registers a new node group in the metadata unless one with
 * the same name already exists (in which case IF NOT EXISTS makes this a no-op).
 *
 * @param metadataProvider provider whose metadata transaction context is set here
 * @param stmt             the NodegroupDecl to execute
 * @throws Exception if the nodegroup exists without IF NOT EXISTS, or on metadata failure
 */
private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodegroupDecl nodegroupDecl = (NodegroupDecl) stmt;
    String ngName = nodegroupDecl.getNodegroupName().getValue();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(ngName);
    try {
        NodeGroup existing = MetadataManager.INSTANCE.getNodegroup(txnCtx, ngName);
        if (existing == null) {
            // Collect the node controller names and register the new group.
            List<String> ncNames = new ArrayList<String>(nodegroupDecl.getNodeControllerNames().size());
            for (Identifier ncId : nodegroupDecl.getNodeControllerNames()) {
                ncNames.add(ncId.getValue());
            }
            MetadataManager.INSTANCE.addNodegroup(txnCtx, new NodeGroup(ngName, ncNames));
        } else if (!nodegroupDecl.getIfNotExists()) {
            throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        abort(e, e, txnCtx);
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.releaseNodeGroupWriteLock(ngName);
    }
}
/**
 * Handles REFRESH EXTERNAL DATASET: diffs the dataset's file snapshot recorded in the
 * metadata against the external file system and, when they differ, runs a multi-phase
 * transaction that updates the files index and every non-files index over the delta.
 * <p>
 * Progress is tracked via {@code transactionState}:
 * COMMIT (not started / fully done) -&gt; BEGIN (index-update jobs running) -&gt;
 * READY_TO_COMMIT (updates finished, commit job running) -&gt; COMMIT.
 * The catch block uses this state to decide how far recovery must unwind.
 *
 * @param metadataProvider provider whose metadata transaction context is (re)set here
 * @param stmt             the RefreshExternalDatasetStatement to execute
 * @param hcc              Hyracks connection used to run the generated jobs
 * @throws Exception on metadata or job failure; IllegalStateException when recovery
 *                   itself fails and the system may be left inconsistent
 */
private void handleExternalDatasetRefreshStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    RefreshExternalDatasetStatement stmtRefresh = (RefreshExternalDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtRefresh.getDataverseName());
    String datasetName = stmtRefresh.getDatasetName().getValue();
    ExternalDatasetTransactionState transactionState = ExternalDatasetTransactionState.COMMIT;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    MetadataLockManager.INSTANCE.refreshDatasetBegin(dataverseName, dataverseName + "." + datasetName);
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    JobSpecification spec = null;
    Dataset ds = null;
    List<ExternalFile> metadataFiles = null;
    List<ExternalFile> deletedFiles = null;
    List<ExternalFile> addedFiles = null;
    List<ExternalFile> appendedFiles = null;
    List<Index> indexes = null;
    Dataset transactionDataset = null;
    boolean lockAquired = false;
    boolean success = false;
    try {
        ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        // Dataset exists ?
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        // Dataset external ?
        if (ds.getDatasetType() != DatasetType.EXTERNAL) {
            throw new AlgebricksException(
                    "dataset " + datasetName + " in dataverse " + dataverseName + " is not an external dataset");
        }
        // Dataset has indexes ?
        indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.size() == 0) {
            throw new AlgebricksException("External dataset " + datasetName + " in dataverse " + dataverseName
                    + " doesn't have any index");
        }
        // Record transaction time
        Date txnTime = new Date();
        // refresh lock here
        ExternalDatasetsRegistry.INSTANCE.refreshBegin(ds);
        lockAquired = true;
        // Get internal files
        metadataFiles = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, ds);
        deletedFiles = new ArrayList<ExternalFile>();
        addedFiles = new ArrayList<ExternalFile>();
        appendedFiles = new ArrayList<ExternalFile>();
        // Compute delta
        // Now we compare snapshot with external file system
        // (isDatasetUptodate fills the added/deleted/appended lists as a side effect)
        if (ExternalIndexingOperations.isDatasetUptodate(ds, metadataFiles, addedFiles, deletedFiles,
                appendedFiles)) {
            // Nothing changed: just stamp the refresh time and commit.
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(txnTime);
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            // latch will be released in the finally clause
            return;
        }
        // At this point, we know data has changed in the external file system, record
        // transaction in metadata and start
        transactionDataset = ExternalIndexingOperations.createTransactionDataset(ds);
        /*
         * Remove old dataset record and replace it with a new one
         */
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        // Add delta files to the metadata
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        for (ExternalFile file : appendedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        // Create the files index update job
        spec = ExternalIndexingOperations.buildFilesIndexUpdateOp(ds, metadataFiles, deletedFiles, addedFiles,
                appendedFiles, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = ExternalDatasetTransactionState.BEGIN;
        // run the files update job
        JobUtils.runJob(hcc, spec, true);
        // Then update every non-files index over the same delta.
        for (Index index : indexes) {
            if (!ExternalIndexingOperations.isFileIndex(index)) {
                spec = ExternalIndexingOperations.buildIndexUpdateOp(ds, index, metadataFiles, deletedFiles,
                        addedFiles, appendedFiles, metadataProvider);
                // run the files update job
                JobUtils.runJob(hcc, spec, true);
            }
        }
        // all index updates has completed successfully, record transaction state
        spec = ExternalIndexingOperations.buildCommitJob(ds, indexes, metadataProvider);
        // Aquire write latch again -> start a transaction and record the decision to commit
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails())
                .setState(ExternalDatasetTransactionState.READY_TO_COMMIT);
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails()).setRefreshTimestamp(txnTime);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = ExternalDatasetTransactionState.READY_TO_COMMIT;
        // We don't release the latch since this job is expected to be quick
        JobUtils.runJob(hcc, spec, true);
        // Start a new metadata transaction to record the final state of the transaction
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;
        // Fold the append delta back into the original file records.
        for (ExternalFile file : metadataFiles) {
            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
                Iterator<ExternalFile> iterator = appendedFiles.iterator();
                while (iterator.hasNext()) {
                    ExternalFile appendedFile = iterator.next();
                    if (file.getFileName().equals(appendedFile.getFileName())) {
                        // delete existing file
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                        // delete existing appended file
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, appendedFile);
                        // add the original file with appended information
                        appendedFile.setFileNumber(file.getFileNumber());
                        appendedFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, appendedFile);
                        iterator.remove();
                    }
                }
            }
        }
        // remove the deleted files delta
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
        }
        // insert new files
        // (drop clears the pending-add delta record, then the file is re-added as NO_OP)
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            file.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        // mark the transaction as complete
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails())
                .setState(ExternalDatasetTransactionState.COMMIT);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        // commit metadata transaction
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        success = true;
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        // Recovery depends on how far the refresh got before failing.
        if (transactionState == ExternalDatasetTransactionState.READY_TO_COMMIT) {
            throw new IllegalStateException("System is inconsistent state: commit of (" + dataverseName + "."
                    + datasetName + ") refresh couldn't carry out the commit phase", e);
        }
        if (transactionState == ExternalDatasetTransactionState.COMMIT) {
            // Nothing to do , everything should be clean
            throw e;
        }
        if (transactionState == ExternalDatasetTransactionState.BEGIN) {
            // transaction failed, need to do the following
            // clean NCs removing transaction components
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            spec = ExternalIndexingOperations.buildAbortOp(ds, indexes, metadataProvider);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            try {
                JobUtils.runJob(hcc, spec, true);
            } catch (Exception e2) {
                // This should never happen -- fix throw illegal
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to abort refresh", e);
            }
            // remove the delta of files
            // return the state of the dataset to committed
            try {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                for (ExternalFile file : deletedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : addedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : appendedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
                // commit metadata transaction
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                abort(e, e2, mdTxnCtx);
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to drop delta files", e);
            }
        }
    } finally {
        if (lockAquired) {
            ExternalDatasetsRegistry.INSTANCE.refreshEnd(ds, success);
        }
        MetadataLockManager.INSTANCE.refreshDatasetEnd(dataverseName, dataverseName + "." + datasetName);
    }
}
/**
 * Handles a RUN statement by dispatching to the requested external system.
 * Currently only "pregel"/"pregelix" are supported.
 *
 * @param metadataProvider provider passed through to the system-specific handler
 * @param stmt             the RunStatement to execute
 * @param hcc              Hyracks connection passed through to the handler
 * @throws Exception if the requested system is unsupported or the handler fails
 */
private void handleRunStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
        throws AsterixException, Exception {
    RunStatement runStmt = (RunStatement) stmt;
    String system = runStmt.getSystem();
    if (system.equals("pregel") || system.equals("pregelix")) {
        handlePregelixStatement(metadataProvider, runStmt, hcc);
    } else {
        throw new AlgebricksException(
                "The system \"" + runStmt.getSystem() + "\" specified in your run statement is not supported.");
    }
}
/**
 * Runs a Pregelix job against AsterixDB data: prepares the source/sink datasets, locates
 * the Pregelix installation (env var, then Java property, then AsterixDB config), builds
 * the external command line, and executes it as a child process.
 *
 * @param metadataProvider provider whose metadata transaction context is set here
 * @param stmt             the RunStatement describing the Pregelix job
 * @param hcc              Hyracks connection used for dataset preparation
 * @throws Exception if preparation fails or the external Pregelix process exits non-zero
 */
private void handlePregelixStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    RunStatement pregelixStmt = (RunStatement) stmt;
    boolean bActiveTxn = true;
    String dataverseNameFrom = getActiveDataverse(pregelixStmt.getDataverseNameFrom());
    String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo());
    String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue();
    String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Lock the sink dataset for writing and the source for reading.
    List<String> readDataverses = new ArrayList<String>();
    readDataverses.add(dataverseNameFrom);
    List<String> readDatasets = new ArrayList<String>();
    readDatasets.add(datasetNameFrom);
    MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseNameTo, datasetNameTo, readDataverses,
            readDatasets);
    try {
        // Validates datasets, recreates the sink, and flushes the source.
        prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, dataverseNameFrom, dataverseNameTo,
                datasetNameFrom, datasetNameTo, mdTxnCtx);
        String pregelixHomeKey = "PREGELIX_HOME";
        // Finds PREGELIX_HOME in system environment variables.
        String pregelixHome = System.getenv(pregelixHomeKey);
        // Finds PREGELIX_HOME in Java properties.
        if (pregelixHome == null) {
            pregelixHome = System.getProperty(pregelixHomeKey);
        }
        // Finds PREGELIX_HOME in AsterixDB configuration.
        if (pregelixHome == null) {
            // Since there is a default value for PREGELIX_HOME in AsterixCompilerProperties,
            // pregelixHome can never be null.
            pregelixHome = AsterixAppContextInfo.getInstance().getCompilerProperties().getPregelixHome();
        }
        // Constructs the pregelix command line.
        List<String> cmd = constructPregelixCommand(pregelixStmt, dataverseNameFrom, datasetNameFrom,
                dataverseNameTo, datasetNameTo);
        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(new File(pregelixHome));
        pb.redirectErrorStream(true);
        // Commit metadata work before handing control to the external process.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // Executes the Pregelix command.
        int resultState = executeExternalShellProgram(pb);
        // Checks the return state of the external Pregelix command.
        if (resultState != 0) {
            throw new AlgebricksException(
                    "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster needs to be restarted. "
                            + "Check the following things: Are the datatypes of Asterix and Pregelix matching? "
                            + "Is the server configuration correct (node names, buffer sizes, framesize)? Check the logfiles for more details.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseNameTo, datasetNameTo, readDataverses,
                readDatasets);
    }
}
/**
 * Prepares to run a program on an external runtime (e.g. Pregelix): validates that the
 * source and sink datasets exist, recreates the sink dataset from scratch (drop, then
 * create with the same primary key and types), and flushes the source dataset so the
 * external engine observes up-to-date data.
 *
 * @param metadataProvider  provider used for dataset lookups and DDL execution
 * @param hcc               Hyracks connection used to run drop/create jobs
 * @param pregelixStmt      the RUN statement carrying source/sink identifiers
 * @param dataverseNameFrom source dataverse name
 * @param dataverseNameTo   sink dataverse name
 * @param datasetNameFrom   source dataset name
 * @param datasetNameTo     sink dataset name
 * @param mdTxnCtx          active metadata transaction context
 * @throws Exception if either dataset is missing or the sink cannot be recreated
 */
private void prepareRunExternalRuntime(AqlMetadataProvider metadataProvider, IHyracksClientConnection hcc,
        RunStatement pregelixStmt, String dataverseNameFrom, String dataverseNameTo, String datasetNameFrom,
        String datasetNameTo, MetadataTransactionContext mdTxnCtx)
        throws AlgebricksException, AsterixException, Exception {
    // Validates the source/sink dataverses and datasets.
    Dataset fromDataset = metadataProvider.findDataset(dataverseNameFrom, datasetNameFrom);
    if (fromDataset == null) {
        throw new AsterixException("The source dataset " + datasetNameFrom + " in dataverse " + dataverseNameFrom
                + " could not be found for the Run command");
    }
    Dataset toDataset = metadataProvider.findDataset(dataverseNameTo, datasetNameTo);
    if (toDataset == null) {
        throw new AsterixException("The sink dataset " + datasetNameTo + " in dataverse " + dataverseNameTo
                + " could not be found for the Run command");
    }
    try {
        // Find the primary index of the sink dataset.
        Index toIndex = null;
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo,
                pregelixStmt.getDatasetNameTo().getValue());
        for (Index index : indexes) {
            if (index.isPrimaryIndex()) {
                toIndex = index;
                break;
            }
        }
        if (toIndex == null) {
            throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo);
        }
        // Cleans up the sink dataset -- Drop and then Create.
        DropStatement dropStmt = new DropStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(),
                true);
        this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc);
        IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(),
                toIndex.getKeyFieldSourceIndicators(), false, null, toDataset.getDatasetDetails().isTemp());
        DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo),
                pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeDataverseName()),
                new Identifier(toDataset.getItemTypeName()),
                new Identifier(toDataset.getMetaItemTypeDataverseName()),
                new Identifier(toDataset.getMetaItemTypeName()), new Identifier(toDataset.getNodeGroupName()),
                toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(),
                toDataset.getDatasetType(), idd, false);
        this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc);
    } catch (Exception e) {
        // Fix: preserve the original failure as the cause instead of printing and
        // discarding it (previously e.printStackTrace() + a cause-less exception).
        throw new AlgebricksException("Error cleaning the result dataset. This should not happen.", e);
    }
    // Flushes source dataset.
    FlushDatasetUtils.flushDataset(hcc, metadataProvider, mdTxnCtx, dataverseNameFrom, datasetNameFrom,
            datasetNameFrom);
}
/**
 * Executes an external shell command, mirroring its output to the logger line by line.
 * Lines containing "Exception" or "Error" are logged at SEVERE level, and a few known
 * failure signatures are translated into descriptive AlgebricksExceptions.
 *
 * @param pb the configured ProcessBuilder to start
 * @return the process exit value
 * @throws IOException          if the process cannot be started or its output read
 * @throws AlgebricksException  if a known failure signature appears in the output
 * @throws InterruptedException if interrupted while waiting for the process
 */
private int executeExternalShellProgram(ProcessBuilder pb)
        throws IOException, AlgebricksException, InterruptedException {
    Process process = pb.start();
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
        for (String outputLine = reader.readLine(); outputLine != null; outputLine = reader.readLine()) {
            LOGGER.info(outputLine);
            boolean looksLikeFailure = outputLine.contains("Exception") || outputLine.contains("Error");
            if (looksLikeFailure) {
                LOGGER.severe(outputLine);
                if (outputLine.contains("Connection refused")) {
                    throw new AlgebricksException(
                            "The connection to your Pregelix cluster was refused. Is it running? Is the port in the query correct?");
                }
                if (outputLine.contains("Could not find or load main class")) {
                    throw new AlgebricksException(
                            "The main class of your Pregelix query was not found. Is the path to your .jar file correct?");
                }
                if (outputLine.contains("ClassNotFoundException")) {
                    throw new AlgebricksException(
                            "The vertex class of your Pregelix query was not found. Does it exist? Is the spelling correct?");
                }
            }
        }
        process.waitFor();
    }
    // Gets the exit value of the program.
    return process.exitValue();
}
/**
 * Constructs the Pregelix command line for a RUN statement: the launcher script, the
 * user's jar and main class, the user's parameters, and a "-cust-prop" argument that
 * carries the AsterixDB connection/source/sink properties (with default input/output
 * vertex converter classes appended when the user did not supply them).
 *
 * @param pregelixStmt      the RUN statement (parameters: 0 = jar, 1 = class, 2 = args)
 * @param fromDataverseName source dataverse name
 * @param fromDatasetName   source dataset name
 * @param toDataverseName   sink dataverse name
 * @param toDatasetName     sink dataset name
 * @return the argv list to hand to a ProcessBuilder
 */
private List<String> constructPregelixCommand(RunStatement pregelixStmt, String fromDataverseName,
        String fromDatasetName, String toDataverseName, String toDatasetName) {
    // Constructs AsterixDB parameters, e.g., URL, source dataset and sink dataset.
    AsterixExternalProperties externalProperties = AsterixAppContextInfo.getInstance().getExternalProperties();
    AsterixClusterProperties clusterProperties = AsterixClusterProperties.INSTANCE;
    String clientIP = clusterProperties.getCluster().getMasterNode().getClientIp();
    StringBuilder asterixdbParameterBuilder = new StringBuilder();
    asterixdbParameterBuilder.append(
            "pregelix.asterixdb.url=" + "http://" + clientIP + ":" + externalProperties.getAPIServerPort() + ",");
    asterixdbParameterBuilder.append("pregelix.asterixdb.source=true,");
    asterixdbParameterBuilder.append("pregelix.asterixdb.sink=true,");
    asterixdbParameterBuilder.append("pregelix.asterixdb.input.dataverse=" + fromDataverseName + ",");
    asterixdbParameterBuilder.append("pregelix.asterixdb.input.dataset=" + fromDatasetName + ",");
    asterixdbParameterBuilder.append("pregelix.asterixdb.output.dataverse=" + toDataverseName + ",");
    asterixdbParameterBuilder.append("pregelix.asterixdb.output.dataset=" + toDatasetName + ",");
    asterixdbParameterBuilder.append("pregelix.asterixdb.output.cleanup=false,");
    // construct command
    List<String> cmds = new ArrayList<String>();
    cmds.add("bin/pregelix");
    cmds.add(pregelixStmt.getParameters().get(0)); // jar
    cmds.add(pregelixStmt.getParameters().get(1)); // class
    String customizedPregelixProperty = "-cust-prop";
    String inputConverterClassKey = "pregelix.asterixdb.input.converterclass";
    String inputConverterClassValue = "=org.apache.pregelix.example.converter.VLongIdInputVertexConverter,";
    String outputConverterClassKey = "pregelix.asterixdb.output.converterclass";
    String outputConverterClassValue = "=org.apache.pregelix.example.converter.VLongIdOutputVertexConverter,";
    boolean custPropAdded = false;
    boolean meetCustProp = false;
    // User parameters.
    // meetCustProp is set when the previous token was "-cust-prop", so that the
    // following token (the user's property list) gets the AsterixDB properties and
    // any missing converter classes prepended to it.
    for (String s : pregelixStmt.getParameters().get(2).split(" ")) {
        if (meetCustProp) {
            if (!s.contains(inputConverterClassKey)) {
                asterixdbParameterBuilder.append(inputConverterClassKey + inputConverterClassValue);
            }
            if (!s.contains(outputConverterClassKey)) {
                asterixdbParameterBuilder.append(outputConverterClassKey + outputConverterClassValue);
            }
            cmds.add(asterixdbParameterBuilder.toString() + s);
            meetCustProp = false;
            custPropAdded = true;
            continue;
        }
        cmds.add(s);
        if (s.equals(customizedPregelixProperty)) {
            meetCustProp = true;
        }
    }
    // If the user never passed -cust-prop, add it with the full default property list.
    if (!custPropAdded) {
        cmds.add(customizedPregelixProperty);
        // Appends default converter classes to asterixdbParameterBuilder.
        asterixdbParameterBuilder.append(inputConverterClassKey + inputConverterClassValue);
        asterixdbParameterBuilder.append(outputConverterClassKey + outputConverterClassValue);
        // Remove the last comma.
        asterixdbParameterBuilder.delete(asterixdbParameterBuilder.length() - 1,
                asterixdbParameterBuilder.length());
        cmds.add(asterixdbParameterBuilder.toString());
    }
    return cmds;
}
/**
 * Resolves the effective dataverse name: the explicit argument wins; otherwise falls
 * back to the session's active default dataverse.
 *
 * @param dataverse explicit dataverse name, or null to use the session default
 * @return the resolved dataverse name
 * @throws AlgebricksException if no dataverse is given and no default is active
 */
private String getActiveDataverseName(String dataverse) throws AlgebricksException {
    if (dataverse == null) {
        if (activeDefaultDataverse == null) {
            throw new AlgebricksException("dataverse not specified");
        }
        return activeDefaultDataverse.getDataverseName();
    }
    return dataverse;
}
/**
 * Identifier-based convenience overload of {@code getActiveDataverseName}.
 *
 * @param dataverse explicit dataverse identifier, or null to use the session default
 * @return the resolved dataverse name
 * @throws AlgebricksException if no dataverse is given and no default is active
 */
private String getActiveDataverse(Identifier dataverse) throws AlgebricksException {
    String dataverseName = null;
    if (dataverse != null) {
        dataverseName = dataverse.getValue();
    }
    return getActiveDataverseName(dataverseName);
}
/**
 * Aborts the given metadata transaction in response to a failure. In debug mode the
 * root failure's stack trace is printed first. If the abort itself fails, the abort
 * failure is attached as a suppressed exception to {@code parentE} (which the caller
 * is about to throw) and an IllegalStateException wrapping the root cause is raised.
 *
 * @param rootE    the original failure that triggered the abort
 * @param parentE  the exception the caller will rethrow (often the same as rootE)
 * @param mdTxnCtx the metadata transaction to abort
 */
private void abort(Exception rootE, Exception parentE, MetadataTransactionContext mdTxnCtx) {
    try {
        if (IS_DEBUG_MODE) {
            rootE.printStackTrace();
        }
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
    } catch (Exception abortFailure) {
        parentE.addSuppressed(abortFailure);
        throw new IllegalStateException(rootE);
    }
}
/**
 * Rewrites the given statement in place using a freshly created statement rewriter.
 *
 * @param stmt the statement to rewrite
 * @throws AsterixException if rewriting fails
 */
private void rewriteStatement(Statement stmt) throws AsterixException {
    rewriterFactory.createStatementRewriter().rewrite(stmt);
}
}
| apache-2.0 |
xSAVIKx/openweathermap-java-api | api-query/src/main/java/org/openweathermap/api/query/uvi/history/ByGeographicCoordinates.java | 1720 | /*
* Copyright 2021, Yurii Serhiichuk
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.openweathermap.api.query.uvi.history;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.openweathermap.api.common.Coordinate;
import org.openweathermap.api.query.uvi.AbstractByGeographicCoordinates;
/**
 * Historical-UVI query addressed by geographic coordinates. Adds the time window
 * (start/end) and an optional result-count limit to the coordinate-based query string.
 */
@EqualsAndHashCode(callSuper = true)
@Data
public class ByGeographicCoordinates extends AbstractByGeographicCoordinates implements HistoryUviQuery {
    // Unix timestamps bounding the requested UVI history window.
    private long start;
    private long end;
    // Maximum number of results to request; only appended when positive.
    private int count;

    public ByGeographicCoordinates(Coordinate coordinate) {
        super(coordinate);
    }

    @Override
    protected String getSearchPath() {
        // History queries live under the base UVI path plus the HISTORY segment.
        return super.getSearchPath() + HISTORY;
    }

    @Override
    public String toStringRepresentation(String apiKey) {
        StringBuilder query = new StringBuilder(super.toStringRepresentation(apiKey))
                .append(AND).append("start=").append(getStart())
                .append(AND).append("end=").append(getEnd());
        if (getCount() > 0) {
            query.append(AND).append("cnt=").append(getCount());
        }
        return query.toString();
    }
}
| apache-2.0 |
datanucleus/tests | jdo/rdbms/src/java/org/datanucleus/samples/rdbms/views/FNameView.java | 1681 | /**********************************************************************
Copyright (c) 2015 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.samples.rdbms.views;
import javax.jdo.annotations.Extension;
import javax.jdo.annotations.IdentityType;
import javax.jdo.annotations.PersistenceCapable;
@PersistenceCapable(detachable="true", identityType=IdentityType.NONDURABLE)
@Extension(vendorName="datanucleus", key="view-definition",
value="CREATE VIEW {this} ({this.id},{this.name}) AS "+
"SELECT {NameObject}.{NameObject.id}, {NameObject}.{NameObject.name} FROM {NameObject} WHERE {NameObject}.{NameObject.name} LIKE 'F%'")
@Extension(vendorName="datanucleus", key="view-imports", value="import org.datanucleus.samples.rdbms.views.NameObject;")
public class FNameView
{
Long id;
String name;
public FNameView(long id, String name)
{
this.id = id;
this.name = name;
}
public String getName()
{
return name;
}
public Long getId()
{
return id;
}
} | apache-2.0 |
rd-shinetech/quicktest | 03-Development/dto/src/main/java/co/shinetech/dto/User.java | 1671 | /**
*
*/
package co.shinetech.dto;
import java.util.Arrays;
/**
* @author Robin
*
*/
/**
 * User DTO. Identity (equals/hashCode) is based solely on the immutable {@code id};
 * the remaining fields are mutable attributes.
 */
@SuppressWarnings("serial")
public class User implements Domain {
    private final long id;
    private String login;
    // Credential material; kept as char[] so callers can zero it out after use.
    private char[] password;
    private Profile profile;

    public User(long id) {
        this.id = id;
    }

    public User(long id, Profile profile) {
        super();
        this.id = id;
        this.profile = profile;
    }

    public User(long id, String login, char[] password, Profile profile) {
        super();
        this.id = id;
        this.login = login;
        this.password = password;
        this.profile = profile;
    }

    public String getLogin() {
        return login;
    }

    public void setLogin(String login) {
        this.login = login;
    }

    // NOTE(review): returns/stores the internal array by reference, so callers can
    // mutate it -- consider defensive copies; behavior kept for compatibility.
    public char[] getPassword() {
        return password;
    }

    public void setPassword(char[] password) {
        this.password = password;
    }

    public Profile getProfile() {
        return profile;
    }

    public void setProfile(Profile profile) {
        this.profile = profile;
    }

    public long getPk() {
        return this.id;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (int) (id ^ (id >>> 32));
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        User other = (User) obj;
        if (id != other.id)
            return false;
        return true;
    }

    @Override
    public String toString() {
        // SECURITY FIX: do not expose credential material in diagnostic output
        // (previously printed the raw password via Arrays.toString(password)).
        return "User [id=" + id + ", login=" + login + ", password=****, profile=" + profile + "]";
    }
}
| apache-2.0 |
velvia/filo | schema/flatbuffers/gen-java/org/velvia/filo/vector/NaMask.java | 2057 | // automatically generated, do not modify
package org.velvia.filo.vector;
import java.nio.*;
import java.lang.*;
import java.util.*;
import com.google.flatbuffers.*;
/**
 * FlatBuffers accessor for the {@code NaMask} table.
 *
 * <p>Generated code — regenerate from the schema instead of hand-editing the
 * logic.  {@code maskType} selects the mask representation; {@code bitMask}
 * is a vector of longs used for the SimpleBitMask case.</p>
 */
@SuppressWarnings("unused")
public final class NaMask extends Table {
  /** Wraps the root object of {@code _bb} in a fresh {@code NaMask} accessor. */
  public static NaMask getRootAsNaMask(ByteBuffer _bb) { return getRootAsNaMask(_bb, new NaMask()); }
  /** As above but reuses {@code obj}; forces little-endian order as FlatBuffers requires. */
  public static NaMask getRootAsNaMask(ByteBuffer _bb, NaMask obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__init(_bb.getInt(_bb.position()) + _bb.position(), _bb)); }
  /** Re-points this accessor at table position {@code _i} of {@code _bb}. */
  public NaMask __init(int _i, ByteBuffer _bb) { bb_pos = _i; bb = _bb; return this; }
  /** Mask type discriminant (vtable slot 4); defaults to 2 when the field is absent. */
  public byte maskType() { int o = __offset(4); return o != 0 ? bb.get(o + bb_pos) : 2; }
  /**
   * for type = SimpleBitMask: element {@code j} of the bit-mask vector
   * (vtable slot 6, 8-byte elements); 0 when the field is absent.
   */
  public long bitMask(int j) { int o = __offset(6); return o != 0 ? bb.getLong(__vector(o) + j * 8) : 0; }
  /** Number of longs in the bit-mask vector, or 0 when absent. */
  public int bitMaskLength() { int o = __offset(6); return o != 0 ? __vector_len(o) : 0; }
  /** Raw view of the bit-mask vector bytes. */
  public ByteBuffer bitMaskAsByteBuffer() { return __vector_as_bytebuffer(6, 8); }

  /** Convenience builder: writes both fields and finishes the table. */
  public static int createNaMask(FlatBufferBuilder builder,
      byte maskType,
      int bitMask) {
    builder.startObject(2);
    NaMask.addBitMask(builder, bitMask);
    NaMask.addMaskType(builder, maskType);
    return NaMask.endNaMask(builder);
  }

  public static void startNaMask(FlatBufferBuilder builder) { builder.startObject(2); }
  public static void addMaskType(FlatBufferBuilder builder, byte maskType) { builder.addByte(0, maskType, 2); }
  public static void addBitMask(FlatBufferBuilder builder, int bitMaskOffset) { builder.addOffset(1, bitMaskOffset, 0); }
  /** Serializes {@code data} as the bit-mask vector (FlatBuffers vectors are written back-to-front). */
  public static int createBitMaskVector(FlatBufferBuilder builder, long[] data) { builder.startVector(8, data.length, 8); for (int i = data.length - 1; i >= 0; i--) builder.addLong(data[i]); return builder.endVector(); }
  public static void startBitMaskVector(FlatBufferBuilder builder, int numElems) { builder.startVector(8, numElems, 8); }
  public static int endNaMask(FlatBufferBuilder builder) {
    int o = builder.endObject();
    return o;
  }
};
| apache-2.0 |
ckclark/leetcode | java/leetcode/best_time_to_buy_and_sell_stock_with_cooldown/Solution.java | 555 | package leetcode.best_time_to_buy_and_sell_stock_with_cooldown;
public class Solution {
/**
 * Computes the maximum profit from buying/selling a stock when every sale is
 * followed by a one-day cooldown, using an O(1)-space state-machine DP.
 *
 * <p>States carried through the scan:
 * {@code buy}  — best profit while holding a share,
 * {@code sell} — profit if we sell on the current day,
 * {@code sold} — best profit while idle (cooldown or waiting).</p>
 *
 * @param prices daily prices; {@code null} or fewer than two days yields 0
 * @return the maximum achievable profit (never negative)
 */
public int maxProfit(int[] prices) {
    // Robustness fix: also guard against null input (previously an NPE).
    if (prices == null || prices.length < 2)
        return 0;
    int sell = 0, sold = 0, buy = -prices[0];
    for (int price : prices) {
        int nextSell = price + buy;                 // sell the held share today
        int nextSold = Math.max(sold, sell);        // stay idle / enter cooldown
        int nextBuy = Math.max(sold - price, buy);  // buy today (only from idle)
        sell = nextSell;
        sold = nextSold;
        buy = nextBuy;
    }
    // The answer never ends in the "holding" state.
    return Math.max(sell, sold);
}
} | apache-2.0 |
PerfCake/PerfClipse | org.perfclipse.core/src/org/perfclipse/core/commands/AddHeaderCommand.java | 1203 | /*
* Perfclispe
*
*
* Copyright (c) 2013 Jakub Knetl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.perfclipse.core.commands;
import org.eclipse.gef.commands.Command;
import org.perfcake.model.Header;
import org.perfclipse.core.model.MessageModel;
/**
* @author Jakub Knetl
*
*/
/**
 * GEF command that attaches a {@link Header} to a {@link MessageModel}.
 * Undo detaches the same header again.
 *
 * @author Jakub Knetl
 */
public class AddHeaderCommand extends Command {

    /** Message the header is attached to. */
    private final MessageModel targetMessage;

    /** Header being added (and removed on undo). */
    private final Header addedHeader;

    public AddHeaderCommand(MessageModel message, Header header) {
        super("Add header");
        this.targetMessage = message;
        this.addedHeader = header;
    }

    @Override
    public void execute() {
        targetMessage.addHeader(addedHeader);
    }

    @Override
    public void undo() {
        targetMessage.removeHeader(addedHeader);
    }
}
| apache-2.0 |
eddumelendez/spring-security | web/src/main/java/org/springframework/security/web/authentication/preauth/j2ee/J2eeBasedPreAuthenticatedWebAuthenticationDetailsSource.java | 4758 | /*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.web.authentication.preauth.j2ee;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.security.authentication.AuthenticationDetailsSource;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.mapping.Attributes2GrantedAuthoritiesMapper;
import org.springframework.security.core.authority.mapping.MappableAttributesRetriever;
import org.springframework.security.core.authority.mapping.SimpleAttributes2GrantedAuthoritiesMapper;
import org.springframework.security.web.authentication.preauth.PreAuthenticatedGrantedAuthoritiesWebAuthenticationDetails;
import org.springframework.util.Assert;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
/**
* Implementation of AuthenticationDetailsSource which converts the user's J2EE roles (as
* obtained by calling {@link HttpServletRequest#isUserInRole(String)}) into
* {@code GrantedAuthority}s and stores these in the authentication details object.
*
* @author Ruud Senden
* @since 2.0
*/
public class J2eeBasedPreAuthenticatedWebAuthenticationDetailsSource
        implements
        AuthenticationDetailsSource<HttpServletRequest, PreAuthenticatedGrantedAuthoritiesWebAuthenticationDetails>,
        InitializingBean {

    protected final Log logger = LogFactory.getLog(getClass());

    /** The role attributes returned by the configured {@code MappableAttributesRetriever} */
    protected Set<String> j2eeMappableRoles;

    protected Attributes2GrantedAuthoritiesMapper j2eeUserRoles2GrantedAuthoritiesMapper = new SimpleAttributes2GrantedAuthoritiesMapper();

    /**
     * Verifies that the mappable roles and the roles-to-authorities mapper
     * have both been configured.
     */
    public void afterPropertiesSet() throws Exception {
        Assert.notNull(this.j2eeMappableRoles, "No mappable roles available");
        Assert.notNull(this.j2eeUserRoles2GrantedAuthoritiesMapper,
                "Roles to granted authorities mapper not set");
    }

    /**
     * Determines which of the configured mappable roles the current user
     * actually holds, by probing
     * {@link javax.servlet.http.HttpServletRequest#isUserInRole(String)} for
     * each candidate.
     *
     * @param request the request which should be used to extract the user's roles.
     * @return The subset of {@code j2eeMappableRoles} which applies to the current user
     * making the request.
     */
    protected Collection<String> getUserRoles(HttpServletRequest request) {
        List<String> rolesHeldByUser = new ArrayList<>();
        for (String mappableRole : this.j2eeMappableRoles) {
            if (request.isUserInRole(mappableRole)) {
                rolesHeldByUser.add(mappableRole);
            }
        }
        return rolesHeldByUser;
    }

    /**
     * Builds the authentication details object: maps the user's JEE roles to
     * granted authorities and wraps them together with the request.
     *
     * @see org.springframework.security.authentication.AuthenticationDetailsSource#buildDetails(Object)
     */
    public PreAuthenticatedGrantedAuthoritiesWebAuthenticationDetails buildDetails(
            HttpServletRequest context) {
        Collection<String> rolesHeldByUser = getUserRoles(context);
        Collection<? extends GrantedAuthority> authorities =
                this.j2eeUserRoles2GrantedAuthoritiesMapper.getGrantedAuthorities(rolesHeldByUser);

        if (logger.isDebugEnabled()) {
            logger.debug("J2EE roles [" + rolesHeldByUser
                    + "] mapped to Granted Authorities: [" + authorities + "]");
        }

        return new PreAuthenticatedGrantedAuthoritiesWebAuthenticationDetails(
                context, authorities);
    }

    /**
     * @param aJ2eeMappableRolesRetriever The MappableAttributesRetriever to use
     */
    public void setMappableRolesRetriever(
            MappableAttributesRetriever aJ2eeMappableRolesRetriever) {
        this.j2eeMappableRoles = Collections.unmodifiableSet(
                aJ2eeMappableRolesRetriever.getMappableAttributes());
    }

    /**
     * @param mapper The Attributes2GrantedAuthoritiesMapper to use
     */
    public void setUserRoles2GrantedAuthoritiesMapper(
            Attributes2GrantedAuthoritiesMapper mapper) {
        this.j2eeUserRoles2GrantedAuthoritiesMapper = mapper;
    }
}
| apache-2.0 |
Haixing-Hu/commons | src/main/java/com/github/haixing_hu/util/buffer/FloatBufferBinarySerializer.java | 2512 | /*
* Copyright (c) 2014 Haixing Hu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.haixing_hu.util.buffer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.github.haixing_hu.io.exception.InvalidFormatException;
import com.github.haixing_hu.io.exception.SerializationException;
import com.github.haixing_hu.io.serialize.BinarySerializer;
import static com.github.haixing_hu.CommonsMessages.UNEXPECTED_NULL_VALUE;
import static com.github.haixing_hu.io.InputUtils.*;
import static com.github.haixing_hu.io.OutputUtils.*;
/**
* The {@link BinarySerializer} for the {@link FloatBuffer} class.
*
* @author Haixing Hu
*/
@Immutable
public final class FloatBufferBinarySerializer implements BinarySerializer {

  /** Shared stateless instance; the class carries no state and is thread-safe. */
  public static final FloatBufferBinarySerializer INSTANCE = new FloatBufferBinarySerializer();

  /**
   * Reads a {@link FloatBuffer} from {@code in}: a null mark, then a var-int
   * element count, then that many floats.
   *
   * @param in the stream to read from
   * @param allowNull whether a serialized null is acceptable
   * @return the deserialized buffer, or {@code null} when a null mark was
   *         read and {@code allowNull} is {@code true}
   * @throws IOException on read failure, or {@link InvalidFormatException}
   *         when a null mark was read but nulls are not allowed
   */
  @Override
  public FloatBuffer deserialize(final InputStream in, final boolean allowNull)
      throws IOException {
    if (readNullMark(in)) {
      if (! allowNull) {
        throw new InvalidFormatException(UNEXPECTED_NULL_VALUE);
      }
      return null;
    }
    final FloatBuffer result = new FloatBuffer();
    final int count = readVarInt(in);
    if (count > 0) {
      final float[] values = new float[count];
      for (int i = 0; i < count; ++i) {
        values[i] = readFloat(in);
      }
      result.buffer = values;
      result.length = count;
    }
    return result;
  }

  /**
   * Writes {@code obj} (a {@link FloatBuffer} or {@code null}) to {@code out}
   * in the format read back by {@link #deserialize}.
   *
   * @throws IOException on write failure, or {@link SerializationException}
   *         when {@code obj} is not a {@link FloatBuffer}
   */
  @Override
  public void serialize(final OutputStream out, @Nullable final Object obj)
      throws IOException {
    if (writeNullMark(out, obj)) {
      return; // the null mark fully encodes a null object
    }
    final FloatBuffer buffer;
    try {
      buffer = (FloatBuffer) obj;
    } catch (final ClassCastException e) {
      throw new SerializationException(e);
    }
    writeVarInt(out, buffer.length);
    for (int i = 0; i < buffer.length; ++i) {
      writeFloat(out, buffer.buffer[i]);
    }
  }
}
| apache-2.0 |
aerogear/aerogear-crypto-java | src/test/java/org/jboss/aerogear/crypto/AlgorithmTest.java | 1176 | /**
* JBoss, Home of Professional Open Source
* Copyright Red Hat, Inc., and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.crypto;
import org.junit.Test;
import static org.jboss.aerogear.AeroGearCrypto.Algorithm.AES;
import static org.junit.Assert.assertEquals;
/** Unit tests for the {@code AeroGearCrypto.Algorithm.AES} enum constant. */
public class AlgorithmTest {

    @Test
    public void testAESToString() throws Exception {
        // The enum's string form is the JCA algorithm name.
        final String algorithmName = AES.toString();
        assertEquals("Should return the correct algorithm name", "AES", algorithmName);
    }

    @Test
    public void testGetAESKeySize() throws Exception {
        final int keySizeInBits = AES.getKeySize();
        assertEquals("Should return the correct key size", 256, keySizeInBits);
    }
}
| apache-2.0 |
OpenVnmrJ/OpenVnmrJ | src/vnmrj/src/vnmr/lc/VPdaPlot.java | 4784 | /*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.lc;
import vnmr.bo.*;
import vnmr.ui.*;
import vnmr.util.*;
/**
* This VObj object displays a plot of a PDA spectrum.
*/
public class VPdaPlot extends VPlot {

    /** CORBA client for the PDA detector; NOTE(review): not currently used here. */
    private PdaCorbaClient m_corbaClient = null;
    /** True until the first scan has been displayed. */
    private boolean firsttime = true;
    /** X range (wavelength) of the previously plotted scan; -1 until the first plot. */
    private double m_xFirst = -1;
    private double m_xLast = -1;
    private int m_itr = 0; // For testing
    //private VContainer m_lcGraphPanel;

    /** Most recently received wavelength/intensity arrays. */
    PdaScan m_scan = new PdaScan();

    /**
     * Creates the plot widget and configures its axes and title.
     *
     * @param sshare the session share
     * @param vif the button callback interface
     * @param typ the widget type string
     */
    public VPdaPlot(SessionShare sshare, ButtonIF vif, String typ) {
        super(sshare, vif, typ);
        try {
            m_plot.setXAxis(true);
            m_plot.setXLabel("Wavelength (nm)");
            m_plot.setYAxis(true);
            m_plot.setYLabel("Absorption (AU)");
            m_plot.setTitle("UV Spectrum");
            //m_plot.setGrid(false);
            //m_plot.setImpulses(true);
            //m_lcGraphPanel= (vif).getLcPanel();
        } catch (Exception e) {
            Messages.postError("Error creating PDA plot");
            Messages.writeStackTrace(e);
        }
    }

    /**
     * Plots interleaved (x, y) pairs; x values arrive as fixed-point integers
     * scaled by 256.  NOTE(review): appears unused within this class.
     */
    private void drawPlot(int[] data, Plot plot) {
        int nPts = data.length / 2;
        plot.clear(false);
        for (int i = 0; i < nPts; i++) {
            plot.addPoint(0, (double) data[2 * i] / 256.0,
                          (double) data[2 * i + 1], false);
        }
    }

    /**
     * Displays the given scan in the plot, rescaling the X axis only when the
     * wavelength range differs from the previously plotted scan.
     *
     * @param plot the plot to draw into
     * @param scan the scan (wavelengths in {@code scan.x}, intensities in {@code scan.y})
     */
    public void showCurrentPdaData(Plot plot, PdaScan scan) {
        // BUGFIX: the original checked "scan == null" only AFTER reading
        // scan.x.length, so the check could never fire.  Validate everything
        // before the first dereference instead.
        if (scan == null || scan.x == null || scan.y == null) {
            return;
        }
        if (firsttime) {
            firsttime = false;
            //corbaClient.setScanOn(true);
        }
        int npts = scan.x.length;
        int len = scan.y.length;
        if (len != npts) {
            Messages.postDebug("showCurrentPdaData: "
                               + "have " + len + " data points for "
                               + npts + " wavelengths");
            len = Math.min(len, npts);
        }
        if (len == 0) {
            return; // nothing to plot; also guards scan.x[0] below
        }
        Messages.postDebug("PdaData", "PDA Data: len=" + len);
        float firstLambda = scan.x[0];
        float lastLambda = scan.x[npts - 1];
        double xFirst = firstLambda;
        double xLast = lastLambda;
        plot.setXFillRange(xFirst, xLast);
        if (xFirst != m_xFirst || xLast != m_xLast) {
            // Wavelength range changed: reconfigure the trace and rescale X.
            plot.setBars(false);
            plot.setConnected(true);
            plot.setXRange(xFirst, xLast);
            m_xFirst = xFirst;
            m_xLast = xLast;
        }
        // CONSISTENCY FIX: plot the scan that was passed in rather than the
        // m_scan field (all current callers pass m_scan, so behavior is
        // unchanged, but the method now works for any scan argument).
        plot.setPoints(0, scan.dX, scan.dY, len, true, true);
    }

    /** Sets the plot title. */
    public void setTitle(String title) {
        m_plot.setTitle(title);
    }

    /**
     * Stores a new intensity array and redraws the plot.  Call
     * {@link #setXArray} first so the wavelengths are in place.
     */
    public void setYArray(float y[]) {
        m_scan.setY(y);
        showCurrentPdaData(m_plot, m_scan);
    }

    /** Stores a new wavelength array (does not redraw by itself). */
    public void setXArray(float x[]) {
        m_scan.setX(x);
    }

    /**
     * Holder for one PDA scan: the float arrays as received, plus double
     * copies (dX/dY) in the form required by {@code Plot.setPoints()}.
     */
    public class PdaScan {
        public float x[] = null;  // wavelengths (nm)
        public float y[] = null;  // intensities
        public double dX[] = null;
        public double dY[] = new double[0];

        public PdaScan() {
        }

        /** Stores the wavelength array and refreshes its double copy. */
        public void setX(float[] newX) {
            x = newX;
            int len = x.length;
            dX = new double[len];
            for (int i = 0; i < len; i++) {
                dX[i] = x[i];
            }
        }

        /**
         * Stores the intensity array and refreshes its double copy, reusing
         * the existing buffer when the length is unchanged.
         */
        public void setY(float[] newY) {
            y = newY;
            int len = y.length;
            if (dY.length != len) {
                dY = new double[len];
            }
            for (int i = 0; i < len; i++) {
                dY[i] = y[i];
            }
        }
    }
}
| apache-2.0 |
Hevelian/hevelian-olastic | olastic-core/src/main/java/com/hevelian/olastic/core/api/uri/queryoption/expression/member/impl/BaseMember.java | 4792 | package com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl;
import static com.hevelian.olastic.core.elastic.ElasticConstants.WILDCARD_CHAR;
import static com.hevelian.olastic.core.elastic.utils.ElasticUtils.addKeywordIfNeeded;
import static com.hevelian.olastic.core.utils.ProcessorUtils.throwNotImplemented;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
import org.apache.olingo.server.api.ODataApplicationException;
import org.elasticsearch.index.query.QueryBuilder;
import com.hevelian.olastic.core.api.uri.queryoption.expression.member.ExpressionMember;
/**
* Base common class for any expression member.
*
* @author Taras Kohut
*/
public abstract class BaseMember implements ExpressionMember {

    // Every OData filter operation below defaults to "not implemented";
    // concrete subclasses override only the operations they support.

    @Override
    public ExpressionMember any() throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember all() throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember and(ExpressionMember expressionMember)
            throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember or(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember not() throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember eq(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember ne(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember ge(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember gt(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember le(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember lt(ExpressionMember expressionMember) throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember contains(ExpressionMember expressionMember)
            throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember startsWith(ExpressionMember expressionMember)
            throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember endsWith(ExpressionMember expressionMember)
            throws ODataApplicationException {
        return throwNotImplemented();
    }

    @Override
    public ExpressionMember date() throws ODataApplicationException {
        return throwNotImplemented();
    }

    /**
     * Builds contains query based on {@link AnnotatedMember} and value:
     * an Elasticsearch wildcard query of the form {@code *value*}.
     *
     * @param member
     *            annotated member with field and annotations
     * @param value
     *            contains value
     * @return query builder instance
     */
    protected QueryBuilder buildContainsQuery(AnnotatedMember member, Object value) {
        // addKeywordIfNeeded() may redirect to the field's keyword sub-field.
        return wildcardQuery(addKeywordIfNeeded(member.getField(), member.getAnnotations()),
                WILDCARD_CHAR + value + WILDCARD_CHAR);
    }

    /**
     * Builds starts with query based on {@link AnnotatedMember} and value,
     * using an Elasticsearch prefix query.
     *
     * @param member
     *            annotated member with field and annotations
     * @param value
     *            starts with value
     * @return query builder instance
     */
    protected QueryBuilder buildStartsWithQuery(AnnotatedMember member, String value) {
        return prefixQuery(addKeywordIfNeeded(member.getField(), member.getAnnotations()), value);
    }

    /**
     * Builds ends with query based on {@link AnnotatedMember} and value:
     * a wildcard query of the form {@code *value}.
     *
     * @param member
     *            annotated member with field and annotations
     * @param value
     *            ends with value
     * @return query builder instance
     */
    protected QueryBuilder buildEndsWithQuery(AnnotatedMember member, String value) {
        return wildcardQuery(addKeywordIfNeeded(member.getField(), member.getAnnotations()),
                WILDCARD_CHAR + value);
    }
}
| apache-2.0 |
syershov/omim | android/src/com/mapswithme/maps/MwmApplication.java | 14642 | package com.mapswithme.maps;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.annotation.UiThread;
import android.support.multidex.MultiDex;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.util.List;
import com.crashlytics.android.Crashlytics;
import com.crashlytics.android.ndk.CrashlyticsNdk;
import com.mapswithme.maps.background.AppBackgroundTracker;
import com.mapswithme.maps.background.Notifier;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.downloader.CountryItem;
import com.mapswithme.maps.downloader.MapManager;
import com.mapswithme.maps.editor.Editor;
import com.mapswithme.maps.location.LocationHelper;
import com.mapswithme.maps.location.TrackRecorder;
import com.mapswithme.maps.routing.RoutingController;
import com.mapswithme.maps.settings.StoragePathManager;
import com.mapswithme.maps.sound.TtsPlayer;
import com.mapswithme.maps.traffic.TrafficManager;
import com.mapswithme.util.Config;
import com.mapswithme.util.Constants;
import com.mapswithme.util.Counters;
import com.mapswithme.util.CrashlyticsUtils;
import com.mapswithme.util.PermissionsUtils;
import com.mapswithme.util.ThemeSwitcher;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.Utils;
import com.mapswithme.util.log.Logger;
import com.mapswithme.util.log.LoggerFactory;
import com.mapswithme.util.statistics.PushwooshHelper;
import com.mapswithme.util.statistics.Statistics;
import com.my.tracker.MyTracker;
import com.my.tracker.MyTrackerParams;
import com.pushwoosh.PushManager;
import io.fabric.sdk.android.Fabric;
public class MwmApplication extends Application
{
private Logger mLogger;
private final static String TAG = "MwmApplication";
// Pushwoosh app id placeholder used by builds without push support.
private static final String PW_EMPTY_APP_ID = "XXXXX";

// Application singleton, assigned in the constructor; see get().
private static MwmApplication sSelf;
private SharedPreferences mPrefs;
private AppBackgroundTracker mBackgroundTracker;

// Guards against repeating the one-time native initialization steps.
private boolean mFrameworkInitialized;
private boolean mPlatformInitialized;
private boolean mCrashlyticsInitialized;

// Handler bound to the main looper; used to forward native functors (see
// forwardToMainThread()).
private Handler mMainLoopHandler;
private final Object mMainQueueToken = new Object();

// Notifies the user about the first failed leaf-map download, then stops
// scanning the status list.
private final MapManager.StorageCallback mStorageCallbacks = new MapManager.StorageCallback()
{
  @Override
  public void onStatusChanged(List<MapManager.StorageCallbackData> data)
  {
    for (MapManager.StorageCallbackData item : data)
      if (item.isLeafNode && item.newStatus == CountryItem.STATUS_FAILED)
      {
        if (MapManager.nativeIsAutoretryFailed())
        {
          Notifier.cancelDownloadSuggest();
          Notifier.notifyDownloadFailed(item.countryId, MapManager.nativeGetName(item.countryId));
          MapManager.sendErrorStat(Statistics.EventName.DOWNLOADER_ERROR, MapManager.nativeGetError(item.countryId));
        }
        return;
      }
  }

  @Override
  public void onProgress(String countryId, long localSize, long remoteSize) {}
};

// Zips the log files when the app transitions to the background (only when
// file logging is enabled).
@NonNull
private final AppBackgroundTracker.OnTransitionListener mBackgroundListener =
    new AppBackgroundTracker.OnTransitionListener()
{
  @Override
  public void onTransit(boolean foreground)
  {
    if (!foreground && LoggerFactory.INSTANCE.isFileLoggingEnabled())
    {
      Log.i(TAG, "The app goes to background. All logs are going to be zipped.");
      LoggerFactory.INSTANCE.zipLogs(null);
    }
  }
};

// Reports cold-startup statistics each time the app becomes visible.
@NonNull
private final AppBackgroundTracker.OnVisibleAppLaunchListener mVisibleAppLaunchListener =
    new AppBackgroundTracker.OnVisibleAppLaunchListener()
{
  @Override
  public void onVisibleAppLaunch()
  {
    Statistics.INSTANCE.trackColdStartupInfo();
  }
};
public MwmApplication()
{
  super();
  sSelf = this; // publish the singleton handle used by get()
}

/** @return the application singleton (valid once the constructor has run). */
public static MwmApplication get()
{
  return sSelf;
}

/** @return the tracker of foreground/background transitions. */
public static AppBackgroundTracker backgroundTracker()
{
  return sSelf.mBackgroundTracker;
}

/**
 * Lazily opens the app's shared-preferences file; synchronized so the
 * instance is created only once when called from multiple threads.
 */
public synchronized static SharedPreferences prefs()
{
  if (sSelf.mPrefs == null)
    sSelf.mPrefs = sSelf.getSharedPreferences(sSelf.getString(R.string.pref_file_name), MODE_PRIVATE);

  return sSelf.mPrefs;
}

/** Crashlytics is considered disabled when the API key is a "0000…" placeholder. */
public static boolean isCrashlyticsEnabled()
{
  return !BuildConfig.FABRIC_API_KEY.startsWith("0000");
}

@Override
protected void attachBaseContext(Context base)
{
  super.attachBaseContext(base);
  MultiDex.install(this); // enable multidex support before anything else runs
}
@SuppressWarnings("ResultOfMethodCallIgnored")
@Override
public void onCreate()
{
  super.onCreate();
  mLogger = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.MISC);
  mLogger.d(TAG, "Application is created");
  mMainLoopHandler = new Handler(getMainLooper());
  initCrashlytics();
  initPushWoosh();
  mPrefs = getSharedPreferences(getString(R.string.pref_file_name), MODE_PRIVATE);
  mBackgroundTracker = new AppBackgroundTracker();
  mBackgroundTracker.addListener(mVisibleAppLaunchListener);
  // NOTE: the native platform/core are NOT initialized here; callers invoke
  // initPlatformAndCore() separately.
}

/** Initializes the native platform layer and then the native core (idempotent). */
public void initPlatformAndCore(){
  initNativePlatform();
  initNativeCore();
}
/**
 * One-time initialization of the native platform layer: paths, directories,
 * trackers and editors.  The call order below is significant — paths and
 * platform must be ready before anything touches settings.
 */
private void initNativePlatform()
{
  if (mPlatformInitialized)
    return;

  final boolean isInstallationIdFound = setInstallationIdToCrashlytics();

  initTracker();

  String settingsPath = getSettingsPath();
  mLogger.d(TAG, "onCreate(), setting path = " + settingsPath);
  String tempPath = getTempPath();
  mLogger.d(TAG, "onCreate(), temp path = " + tempPath);
  createPlatformDirectories(settingsPath, tempPath);

  // First we need initialize paths and platform to have access to settings and other components.
  nativePreparePlatform(settingsPath);
  nativeInitPlatform(getApkPath(), getStoragePath(settingsPath), getTempPath(), getObbGooglePath(),
                     BuildConfig.FLAVOR, BuildConfig.BUILD_TYPE, UiUtils.isTablet());

  // Touch the Statistics singleton so it is constructed at this point.
  @SuppressWarnings("unused")
  Statistics s = Statistics.INSTANCE;

  // Retry once: the installation id may only become available after the
  // platform (alohalytics) has been initialized above.
  if (!isInstallationIdFound)
    setInstallationIdToCrashlytics();

  mBackgroundTracker.addListener(mBackgroundListener);
  TrackRecorder.init();
  Editor.init();
  mPlatformInitialized = true;
}

/** Ensures both platform directories exist (creating them when missing). */
private void createPlatformDirectories(@NonNull String settingsPath, @NonNull String tempPath)
{
  createPlatformDirectory(settingsPath);
  createPlatformDirectory(tempPath);
}
/**
 * Creates {@code path} (including missing parents) if it does not exist yet.
 * On failure the problem is logged and reported to Crashlytics but NOT
 * thrown, so a missing directory does not crash the app during startup.
 */
private void createPlatformDirectory(@NonNull String path)
{
  File directory = new File(path);
  if (!directory.exists() && !directory.mkdirs())
  {
    boolean isPermissionGranted = PermissionsUtils.isExternalStorageGranted();
    // Build the diagnostic message once so the logged text and the reported
    // exception cannot diverge (the original built it twice, and
    // getExternalStorageState() may change between the two calls).
    String message = "Can't create directories for: " + path
                     + " state = " + Environment.getExternalStorageState()
                     + " isPermissionGranted = " + isPermissionGranted;
    LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.STORAGE).e(TAG, message);
    CrashlyticsUtils.logException(new IllegalStateException(message));
  }
}
/**
 * One-time initialization of the native framework (core): loads bookmarks,
 * TTS, theme, location, routing and traffic.  Requires
 * {@link #initNativePlatform()} to have run first.
 */
private void initNativeCore()
{
  if (mFrameworkInitialized)
    return;

  nativeInitFramework();
  MapManager.nativeSubscribe(mStorageCallbacks);
  initNativeStrings();
  BookmarkManager.nativeLoadBookmarks();
  TtsPlayer.INSTANCE.init(this);
  ThemeSwitcher.restart(false);
  LocationHelper.INSTANCE.initialize();
  RoutingController.get().initialize();
  TrafficManager.INSTANCE.initialize();
  mFrameworkInitialized = true;
}

/** Passes localized UI strings down to the native layer, keyed by string id. */
private void initNativeStrings()
{
  nativeAddLocalization("country_status_added_to_queue", getString(R.string.country_status_added_to_queue));
  nativeAddLocalization("country_status_downloading", getString(R.string.country_status_downloading));
  nativeAddLocalization("country_status_download", getString(R.string.country_status_download));
  nativeAddLocalization("country_status_download_without_routing", getString(R.string.country_status_download_without_routing));
  nativeAddLocalization("country_status_download_failed", getString(R.string.country_status_download_failed));
  nativeAddLocalization("try_again", getString(R.string.try_again));
  nativeAddLocalization("not_enough_free_space_on_sdcard", getString(R.string.not_enough_free_space_on_sdcard));
  nativeAddLocalization("placepage_unknown_place", getString(R.string.placepage_unknown_place));
  nativeAddLocalization("my_places", getString(R.string.my_places));
  nativeAddLocalization("my_position", getString(R.string.my_position));
  nativeAddLocalization("routes", getString(R.string.routes));
  nativeAddLocalization("cancel", getString(R.string.cancel));
  nativeAddLocalization("wifi", getString(R.string.wifi));

  nativeAddLocalization("routing_failed_unknown_my_position", getString(R.string.routing_failed_unknown_my_position));
  nativeAddLocalization("routing_failed_has_no_routing_file", getString(R.string.routing_failed_has_no_routing_file));
  nativeAddLocalization("routing_failed_start_point_not_found", getString(R.string.routing_failed_start_point_not_found));
  nativeAddLocalization("routing_failed_dst_point_not_found", getString(R.string.routing_failed_dst_point_not_found));
  nativeAddLocalization("routing_failed_cross_mwm_building", getString(R.string.routing_failed_cross_mwm_building));
  nativeAddLocalization("routing_failed_route_not_found", getString(R.string.routing_failed_route_not_found));
  nativeAddLocalization("routing_failed_internal_error", getString(R.string.routing_failed_internal_error));
  nativeAddLocalization("place_page_booking_rating", getString(R.string.place_page_booking_rating));
}
/** Initializes Fabric/Crashlytics (with the NDK handler) once, if enabled for this build. */
public void initCrashlytics()
{
  if (!isCrashlyticsEnabled())
    return;

  if (isCrashlyticsInitialized())
    return;

  Fabric.with(this, new Crashlytics(), new CrashlyticsNdk());
  nativeInitCrashlytics();
  mCrashlyticsInitialized = true;
}

public boolean isCrashlyticsInitialized()
{
  return mCrashlyticsInitialized;
}

/**
 * Attaches the alohalytics installation id to Crashlytics reports.
 *
 * @return {@code false} when Crashlytics is disabled or the id is not
 *         available yet (i.e. on a first run before alohalytics generates it)
 */
private static boolean setInstallationIdToCrashlytics()
{
  if (!isCrashlyticsEnabled())
    return false;

  final String installationId = Utils.getInstallationId();
  // If installation id is not found this means id was not
  // generated by alohalytics yet and it is a first run.
  if (TextUtils.isEmpty(installationId))
    return false;

  Crashlytics.setString("AlohalyticsInstallationId", installationId);
  return true;
}

/** @return true when both the native platform and the native core are initialized. */
public boolean arePlatformAndCoreInitialized()
{
  return mFrameworkInitialized && mPlatformInitialized;
}
public String getApkPath()
{
try
{
return getPackageManager().getApplicationInfo(BuildConfig.APPLICATION_ID, 0).sourceDir;
} catch (final NameNotFoundException e)
{
mLogger.e(TAG, "Can't get apk path from PackageManager", e);
return "";
}
}
public static String getSettingsPath()
{
return Environment.getExternalStorageDirectory().getAbsolutePath() + Constants.MWM_DIR_POSTFIX;
}
private static String getStoragePath(String settingsPath)
{
String path = Config.getStoragePath();
if (!TextUtils.isEmpty(path))
{
File f = new File(path);
if (f.exists() && f.isDirectory())
return path;
path = new StoragePathManager().findMapsMeStorage(settingsPath);
Config.setStoragePath(path);
return path;
}
return settingsPath;
}
/**
 * Resolves the directory for temporary files: the external cache dir when available,
 * otherwise a cache folder built from Constants under external storage.
 */
public String getTempPath()
{
  final File cacheDir = getExternalCacheDir();
  return (cacheDir == null)
         ? Environment.getExternalStorageDirectory().getAbsolutePath() +
             String.format(Constants.STORAGE_PATH, BuildConfig.APPLICATION_ID, Constants.CACHE_DIR)
         : cacheDir.getAbsolutePath();
}
/**
 * @return the Google Play OBB expansion-file directory for this application id,
 *         rooted at external storage.
 */
private static String getObbGooglePath()
{
  final String obbSuffix = String.format(Constants.OBB_PATH, BuildConfig.APPLICATION_ID);
  return Environment.getExternalStorageDirectory().getAbsolutePath() + obbSuffix;
}
// Load the native core library before any JNI call can be made.
static
{
System.loadLibrary("mapswithme");
}
/**
 * Initializes the Pushwoosh push-notification SDK and registers this device.
 * Skipped entirely when the build carries no Pushwoosh application id.
 * Any failure is logged and swallowed — push support is best-effort.
 */
private void initPushWoosh()
{
try
{
if (BuildConfig.PW_APPID.equals(PW_EMPTY_APP_ID))
return;
PushManager pushManager = PushManager.getInstance(this);
pushManager.onStartup(this);
pushManager.registerForPushNotifications();
PushwooshHelper.get().setContext(this);
PushwooshHelper.get().synchronize();
}
catch(Exception e)
{
mLogger.e("Pushwoosh", "Failed to init Pushwoosh", e);
}
}
/**
 * Called from native code: forwards tag values to Pushwoosh.
 * A single value is sent as a scalar tag; any other count is sent as an array tag.
 * Failures are logged and swallowed.
 */
@SuppressWarnings("unused")
void sendPushWooshTags(String tag, String[] values)
{
  try
  {
    if (values.length != 1)
      PushwooshHelper.get().sendTag(tag, values);
    else
      PushwooshHelper.get().sendTag(tag, values[0]);
  }
  catch(Exception e)
  {
    mLogger.e("Pushwoosh", "Failed to send pushwoosh tags", e);
  }
}
/**
 * Initializes the myTracker analytics SDK; debug mode follows the build type.
 */
private void initTracker()
{
MyTracker.setDebugMode(BuildConfig.DEBUG);
MyTracker.createTracker(PrivateVariables.myTrackerKey(), this);
final MyTrackerParams myParams = MyTracker.getTrackerParams();
myParams.setDefaultVendorAppPackage();
MyTracker.initTracker();
}
/** Invoked on application upgrade; resets per-session counters. */
public static void onUpgrade()
{
Counters.resetAppSessionCounters();
}
/**
 * Called from native code: schedules the given native functor to run on the
 * Android main (UI) thread via {@code mMainLoopHandler}.
 *
 * @param functorPointer native pointer later consumed by {@code nativeProcessFunctor()}
 */
@SuppressWarnings("unused")
void forwardToMainThread(final long functorPointer)
{
Message m = Message.obtain(mMainLoopHandler, new Runnable()
{
@Override
public void run()
{
nativeProcessFunctor(functorPointer);
}
});
// Tag the message with the queue token (presumably so pending functors can be
// identified/removed later — TODO confirm against mMainLoopHandler usage).
m.obj = mMainQueueToken;
mMainLoopHandler.sendMessage(m);
}
// JNI bridge into the native core:
private static native void nativePreparePlatform(String settingsPath);
private native void nativeInitPlatform(String apkPath, String storagePath, String tmpPath, String obbGooglePath,
String flavorName, String buildType, boolean isTablet);
private static native void nativeInitFramework();
// Runs a native functor previously forwarded via forwardToMainThread().
private static native void nativeProcessFunctor(long functorPointer);
// Registers a localized string with the native side under the given key.
private static native void nativeAddLocalization(String name, String value);
@UiThread
private static native void nativeInitCrashlytics();
}
| apache-2.0 |
AaronChanSunny/DoubanMoview | app/src/main/java/com/aaron/doubanmovie/AppComponent.java | 406 | package com.aaron.doubanmovie;
import android.content.Context;
import javax.inject.Singleton;
import dagger.Component;
import me.aaron.dao.api.Api;
import me.aaron.dao.api.DaoModule;
/**
* Created by Chenll on 2016/10/13.
*/
/**
 * Application-wide Dagger component, scoped as a singleton.
 * Wires {@code AppModule} (application context) together with {@code DaoModule}
 * (data-access API).
 */
@Singleton
@Component(modules = {
AppModule.class,
DaoModule.class
})
public interface AppComponent {
/** @return the application context provided by {@code AppModule}. */
Context provideContext();
/** @return the data-access API provided by {@code DaoModule}. */
Api provideApi();
}
| apache-2.0 |
Edwin-Ran/es_source_read | src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java | 10708 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.IntObjectOpenHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.ReaderContextAware;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.index.cache.fixedbitset.FixedBitSetFilter;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
/**
*
*/
public class NestedAggregator extends SingleBucketAggregator implements ReaderContextAware {
// Path of the nested object field this aggregator collects children for.
private final String nestedPath;
private final Aggregator parentAggregator;
// Resolved lazily on the first collect() call — see the comment there for why.
private FixedBitSetFilter parentFilter;
// Matches the nested (child) documents of nestedPath.
private final Filter childFilter;
// Per-segment state, reset in setNextReader():
private DocIdSetIterator childDocs;
private FixedBitSet parentDocs;
private AtomicReaderContext reader;
// Bit set of root (non-nested) documents in the current segment.
private FixedBitSet rootDocs;
private int currentRootDoc = -1;
// Child doc-id buffers keyed by parent doc id; only valid within the scope of the
// current root doc (see getChildren()).
private final IntObjectOpenHashMap<IntArrayList> childDocIdBuffers = new IntObjectOpenHashMap<>();
/**
 * Creates a nested aggregator joining parent documents to the nested documents
 * under {@code nestedPath}.
 *
 * @throws AggregationExecutionException if the path is unknown or not mapped as nested
 */
public NestedAggregator(String name, AggregatorFactories factories, String nestedPath, AggregationContext aggregationContext, Aggregator parentAggregator) {
super(name, factories, aggregationContext, parentAggregator);
this.nestedPath = nestedPath;
this.parentAggregator = parentAggregator;
MapperService.SmartNameObjectMapper mapper = aggregationContext.searchContext().smartNameObjectMapper(nestedPath);
if (mapper == null) {
throw new AggregationExecutionException("[nested] nested path [" + nestedPath + "] not found");
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
throw new AggregationExecutionException("[nested] nested path [" + nestedPath + "] not found");
}
if (!objectMapper.nested().isNested()) {
throw new AggregationExecutionException("[nested] nested path [" + nestedPath + "] is not nested");
}
// TODO: Revise the cache usage for childFilter
// Typical usage of the childFilter in this agg is that not all parent docs match and because this agg executes
// in order we are maybe better off not caching? We can then iterate over the posting list and benefit from skip pointers.
// Even if caching does make sense it is likely that it shouldn't be forced as is today, but based on heuristics that
// the filter cache maintains that the childFilter should be cached.
// By caching the childFilter we're consistent with other features and previous versions.
childFilter = aggregationContext.searchContext().filterCache().cache(objectMapper.nestedTypeFilter());
// The childDocs need to be consumed in docId order, this ensures that:
aggregationContext.ensureScoreDocsInOrder();
}
/**
 * Prepares per-segment state: child doc iterator, root-doc bit set, and clears the
 * per-root-doc child buffers.
 */
@Override
public void setNextReader(AtomicReaderContext reader) {
// Reset parentFilter, so we resolve the parentDocs for each new segment being searched
this.parentFilter = null;
this.reader = reader;
try {
// In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here.
DocIdSet childDocIdSet = childFilter.getDocIdSet(reader, null);
if (DocIdSets.isEmpty(childDocIdSet)) {
childDocs = null;
} else {
childDocs = childDocIdSet.iterator();
}
rootDocs = context.searchContext().fixedBitSetFilterCache().getFixedBitSetFilter(NonNestedDocsFilter.INSTANCE).getDocIdSet(reader, null);
// We need to reset the current root doc, otherwise we may emit incorrect child docs if the next segment happen to start with the same root doc id value
currentRootDoc = -1;
childDocIdBuffers.clear();
} catch (IOException ioe) {
throw new AggregationExecutionException("Failed to aggregate [" + name + "]", ioe);
}
}
/**
 * Translates a parent doc into its nested child docs and collects each child into
 * the given bucket, incrementing the bucket doc count by the number of children.
 */
@Override
public void collect(int parentDoc, long bucketOrd) throws IOException {
// here we translate the parent doc to a list of its nested docs, and then call super.collect for evey one of them so they'll be collected
// if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent doc), so we can skip:
if (parentDoc == 0 || childDocs == null) {
return;
}
if (parentFilter == null) {
// The aggs are instantiated in reverse, first the most inner nested aggs and lastly the top level aggs
// So at the time a nested 'nested' aggs is parsed its closest parent nested aggs hasn't been constructed.
// So the trick is to set at the last moment just before needed and we can use its child filter as the
// parent filter.
// Additional NOTE: Before this logic was performed in the setNextReader(...) method, but the the assumption
// that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
// aggs execution
Filter parentFilterNotCached = findClosestNestedPath(parentAggregator);
if (parentFilterNotCached == null) {
parentFilterNotCached = NonNestedDocsFilter.INSTANCE;
}
parentFilter = context.searchContext().fixedBitSetFilterCache().getFixedBitSetFilter(parentFilterNotCached);
parentDocs = parentFilter.getDocIdSet(reader, null);
}
int numChildren = 0;
IntArrayList iterator = getChildren(parentDoc);
final int[] buffer = iterator.buffer;
final int size = iterator.size();
for (int i = 0; i < size; i++) {
numChildren++;
collectBucketNoCounts(buffer[i], bucketOrd);
}
incrementBucketDocCount(bucketOrd, numChildren);
}
@Override
protected void doClose() {
childDocIdBuffers.clear();
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal));
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalNested(name, 0, buildEmptySubAggregations());
}
public String getNestedPath() {
return nestedPath;
}
/**
 * Walks up the aggregator tree and returns the child/parent filter of the closest
 * enclosing (reverse-)nested aggregator, or null when there is none.
 */
private static Filter findClosestNestedPath(Aggregator parent) {
for (; parent != null; parent = parent.parent()) {
if (parent instanceof NestedAggregator) {
return ((NestedAggregator) parent).childFilter;
} else if (parent instanceof ReverseNestedAggregator) {
return ((ReverseNestedAggregator) parent).getParentFilter();
}
}
return null;
}
/** Factory creating {@link NestedAggregator} instances for a given nested path. */
public static class Factory extends AggregatorFactory {
private final String path;
public Factory(String name, String path) {
super(name, InternalNested.TYPE.name());
this.path = path;
}
@Override
public Aggregator create(AggregationContext context, Aggregator parent, long expectedBucketsCount) {
return new NestedAggregator(name, factories, path, context, parent);
}
}
// The aggs framework can collect buckets for the same parent doc id more than once and because the children docs
// can only be consumed once we need to buffer the child docs. We only need to buffer child docs in the scope
// of the current root doc.
// Examples:
// 1) nested agg wrapped is by terms agg and multiple buckets per document are emitted
// 2) Multiple nested fields are defined. A nested agg joins back to another nested agg via the reverse_nested agg.
// For each child in the first nested agg the second nested agg gets invoked with the same buckets / docids
private IntArrayList getChildren(final int parentDocId) throws IOException {
int rootDocId = rootDocs.nextSetBit(parentDocId);
if (currentRootDoc == rootDocId) {
final IntArrayList childDocIdBuffer = childDocIdBuffers.get(parentDocId);
if (childDocIdBuffer != null) {
return childDocIdBuffer;
} else {
// here we translate the parent doc to a list of its nested docs,
// and then collect buckets for every one of them so they'll be collected
final IntArrayList newChildDocIdBuffer = new IntArrayList();
childDocIdBuffers.put(parentDocId, newChildDocIdBuffer);
// Children of this parent live strictly between the previous parent doc and parentDocId.
int prevParentDoc = parentDocs.prevSetBit(parentDocId - 1);
int childDocId;
if (childDocs.docID() > prevParentDoc) {
childDocId = childDocs.docID();
} else {
childDocId = childDocs.advance(prevParentDoc + 1);
}
for (; childDocId < parentDocId; childDocId = childDocs.nextDoc()) {
newChildDocIdBuffer.add(childDocId);
}
return newChildDocIdBuffer;
}
} else {
// New root doc: invalidate buffers from the previous root scope and retry.
this.currentRootDoc = rootDocId;
childDocIdBuffers.clear();
return getChildren(parentDocId);
}
}
}
| apache-2.0 |
TamirTian/DaDaSDK | src/main/java/top/inject/dada/model/out/DaDaCity.java | 849 | package top.inject.dada.model.out;
/**
 * A city supported by the DaDa delivery service.
 * NOTE: the original field docs were swapped; the field names are authoritative.
 */
public class DaDaCity {

    /** City code. */
    private String cityCode;

    /** City name. */
    private String cityName;

    public String getCityCode() {
        return cityCode;
    }

    public String getCityName() {
        return cityName;
    }

    /** Fluent setter; returns this instance for chaining. */
    public DaDaCity setCityCode(String cityCode) {
        this.cityCode = cityCode;
        return this;
    }

    /** Fluent setter; returns this instance for chaining. */
    public DaDaCity setCityName(String cityName) {
        this.cityName = cityName;
        return this;
    }

    @Override
    public String toString() {
        // Produces exactly the same format as the previous StringBuilder version.
        return "DaDaCity{"
                + "cityCode='" + cityCode + '\''
                + ", cityName='" + cityName + '\''
                + '}';
    }
}
| apache-2.0 |
akquinet/osgi-deployment-admin | maven-dp-plugin-it/src/test/java/de/akquinet/gomobile/deployment/tests/BadProcessorTest.java | 1592 | package de.akquinet.gomobile.deployment.tests;
import java.io.File;
import java.io.IOException;
import org.apache.maven.it.VerificationException;
import org.apache.maven.it.Verifier;
import org.junit.Before;
import org.junit.Test;
/**
 * Integration test: packaging a deployment package with a bad processor
 * configuration must fail the Maven build.
 */
public class BadProcessorTest {
// Project under test, bundled as a test resource.
File testDir = new File("src/test/resources/bad-processor");
@Before
public void setUp() throws VerificationException, IOException {
Verifier verifier;
/*
 * We must first make sure that any artifact created
 * by this test has been removed from the local
 * repository. Failing to do this could cause
 * unstable test results. Fortunately, the verifier
 * makes it easy to do this.
 */
verifier = new Verifier( testDir.getAbsolutePath() );
verifier.deleteArtifact( Helper.TEST_GROUP_ID, "test-bad-processor", Helper.TEST_VERSION, "dp" );
verifier.executeGoal( "clean" );
}
@Test(expected=VerificationException.class)
public void testPackage() throws IOException, VerificationException {
Verifier verifier = new Verifier( testDir.getAbsolutePath() );
/*
 * Run the 'package' goal on the bad-processor project. The build is expected
 * to fail, which the verifier surfaces as a VerificationException — the
 * exception this test asserts via @Test(expected=...).
 */
verifier.executeGoal( "package" );
verifier.verifyErrorFreeLog();
}
}
| apache-2.0 |
salviof/SuperBits_FrameWork | SB_FRAMEWORK/SBWebPaginasSemTagLib/src/main/java/com/super_bits/modulosSB/webPaginas/JSFManagedBeans/formularios/interfaces/ItfB_Pagina.java | 8154 | /*
* Super-Bits.com CODE CNPJ 20.019.971/0001-90
*/
package com.super_bits.modulosSB.webPaginas.JSFManagedBeans.formularios.interfaces;
import com.super_bits.modulosSB.SBCore.modulos.Controller.Interfaces.ItfParametroRequisicao;
import com.super_bits.modulosSB.SBCore.modulos.Controller.Interfaces.ItfParametroRequisicaoInstanciado;
import com.super_bits.modulosSB.SBCore.modulos.Controller.Interfaces.acoes.ItfAcaoDoSistema;
import com.super_bits.modulosSB.SBCore.modulos.Controller.Interfaces.permissoes.ItfAcaoGerenciarEntidade;
import com.super_bits.modulosSB.SBCore.modulos.comunicacao.ComunicacaoAcaoSistema;
import com.super_bits.modulosSB.SBCore.modulos.comunicacao.FabTipoRespostaComunicacao;
import com.super_bits.modulosSB.SBCore.modulos.comunicacao.ItfComunicacao;
import com.super_bits.modulosSB.SBCore.modulos.comunicacao.ItfTipoRespostaComunicacao;
import com.super_bits.modulosSB.SBCore.modulos.objetos.InfoCampos.campoInstanciado.ItfCampoInstanciado;
import com.super_bits.modulosSB.SBCore.modulos.objetos.registro.Interfaces.basico.ItfBeanSimples;
import com.super_bits.modulosSB.webPaginas.JSFManagedBeans.formularios.B_Pagina;
import com.super_bits.modulosSB.webPaginas.JSFManagedBeans.formularios.InfoDesignFormulario;
import com.super_bits.modulosSB.webPaginas.controller.servletes.servletWebPaginas.ConfiguracoesDeFormularioPorUrl;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import org.primefaces.event.SelectEvent;
/**
*
* @author Salvio
*/
public interface ItfB_Pagina extends ItfB_PaginaSimples {
/** Closes the page. */
public void fecharPagina();
/**
 * Closes the page and kills its view scope.
 */
public void fecharPaginaMatandoViewScoped();
/** Opens the page. */
public void abrePagina();
/** Selects the controller action for the last form used. */
public void selAcaoDeCtrParaUltimoFrm();
/**
 * A management page (PgManagedBean) carries tags associated with its function.
 * These tags are used to build page links and to support SEO features.
 *
 * @return tags bound to this page
 */
public List<String> getTags();
/**
 * A short name eases URL access to a given management page.
 *
 * @return the page's short name
 */
public String getNomeCurto();
/**
 * Returns the name used for building links to this page.
 *
 * @deprecated practically unnecessary today and slated for removal: action-button
 * components such as {@code <sbComp:botaoDeAcao>} do this job more effectively.
 */
@Deprecated
public String getLinkRotulo();
/**
 * Returns the URL tag (from {@link #getTags()}) that was used to load this page.
 *
 * @see ItfB_Pagina#getTags()
 *
 * @return tag used to load the page
 */
public String getTagUsada();
/**
 * @deprecated poorly named: returns the URL according to the selected parameter,
 * not according to the default.
 * @return default URL for accessing this page
 */
@Deprecated
public String getUrlPadrao();
/**
 * URL parameters are parameters that can be configured through a URL call.
 *
 * Since page actions can be triggered directly from the URL with a trailing
 * parameter, this feature is nearly obsolete, but its removal has not been
 * decided — see wiki.superbits.org.br.
 *
 * @return list of every parameter required by this page
 */
public List<ItfParametroRequisicaoInstanciado> getParametrosURL();
/**
 * @see ItfB_Pagina#getParametrosURL()
 *
 * @param pNome parameter name
 * @return the specific parameter matching the given name
 */
public ItfParametroRequisicaoInstanciado getParametroByNome(String pNome);
/**
 * @see ItfB_Pagina#getParametrosURL()
 *
 * @param pId integer id bound to the parameter
 * @return the name of the parameter with the given id
 */
public String getNomeParametroById(int pId);
/**
 * Changes the tag in use. Should have been protected and will be removed.
 *
 * @param tagUsada the tag to set
 */
@Deprecated
public void setTagUsada(String tagUsada);
/**
 * Returns the parameter bound to the entity with the given name.
 *
 * @see ItfB_Pagina#getParametrosURL()
 *
 * @param nomeEntidade name of the entity bound to the parameter
 * @return the parameter bound to that entity
 */
public ItfParametroRequisicaoInstanciado getParametrobyTipoEntidade(String nomeEntidade);
/** @return the instantiated counterpart of the given request parameter. */
public ItfParametroRequisicaoInstanciado getParametroInstanciado(ItfParametroRequisicao pParametro);
/**
 * @return whether the page is freely accessible
 * @deprecated access control is now configured through the linked action
 */
@Deprecated
public boolean isAcessoLivre();
/**
 * Every Pg* page is bound to a management action that determines the page's
 * available actions, its name and its security configuration. This method
 * returns that management action.
 *
 * @return management action bound to this page
 */
public ItfAcaoGerenciarEntidade getAcaoVinculada();
/**
 * Applies URL-passed parameter values to the page managed bean.
 *
 * Should be protected.
 *
 * @param valorStringPorParametro string value per parameter name
 */
@Deprecated
public void aplicaValoresDeParametrosModoDesenvolvimento(Map<String, String> valorStringPorParametro);
/**
 * @return every action declared in the managed bean
 */
public List<ItfAcaoDoSistema> getAcoesDaPagina();
/**
 * @return the currently selected action
 */
public ItfAcaoDoSistema getAcaoSelecionada();
/** @return this page viewed as a page with a bound step. */
public ItfB_PaginaComEtapaVinculada getComoPaginaComEtapa();
/**
 * Returns a bean declared in the managed bean.
 *
 * A declared bean carries icon display information.
 *
 * @param nomeBean the declared bean's name
 * @return the declared bean
 */
public B_Pagina.BeanDeclarado getBeanDeclarado(String nomeBean);
/** Applies the given URL-access configuration to this page. */
public void aplicarUrlDeAcesso(ConfiguracoesDeFormularioPorUrl pConfig);
/** @return the currently active modal. */
public ItfB_Modal getModalAtual();
/** Sets the currently active modal. */
public void setModalAtual(ItfB_Modal pModal);
/** @return the managed bean's name. */
public String getNomeMB();
/** @return the currently selected bean. */
public ItfBeanSimples getBeanSelecionado();
/** Sets the currently selected bean. */
public void setBeanSelecionado(ItfBeanSimples pBeanSimples);
/** Sets the currently selected instantiated field. */
public void setCampoInstSelecionado(ItfCampoInstanciado pCampoInstanciado);
/** @return the currently selected instantiated field. */
public ItfCampoInstanciado getCampoInstSelecionado();
/** @return the response type of the current action. */
public FabTipoRespostaComunicacao getRespostaAcaoAtual();
/** Sets the response type for the current action. */
public void setTipoRespostaParaAcaoAtual(ItfTipoRespostaComunicacao pTipoResp);
/** @return the transient action communication identified by the given modal id. */
public ComunicacaoAcaoSistema getComunicacaoTransientAcaoByIdModal(String pIdModal);
/** Registers a conversation code. (Method name has a typo: "Coversa".) */
public void adicionarCodigoCoversa(String pCodigoConversa);
/**
 * --> The event carries set/get object accessors for sending and capturing objects.
 * --> Disabled because of failures in specific situations that could not be
 * debugged; needs support from the PrimeFaces community, or a release that makes
 * this feature functional.
 *
 * @param event event capturing the modal's response
 */
public void metodoRespostaModalPrimefaces(SelectEvent event);
/**
 * Receives parameters produced by the modal, e.g. as a response to a transient
 * communication.
 *
 * The default implementation records the response and then runs the selected
 * action.
 *
 * @param pParametros modal response parameters
 */
public void metodoRespostaModal(Object... pParametros);
/** @return the page's JPA entity manager. */
public EntityManager getEMPagina();
/** @return the communication awaiting a response. (Name has a typo: "Comunincacao".) */
public ItfComunicacao getComunincacaoAguardandoResposta();
/** Clears all modal-related state. */
public void zerarDadosModal();
/** @return the page's layout/design information. */
public InfoDesignFormulario getInfoLayout();
}
| apache-2.0 |
aleksandrTmk/TaskList | app/src/main/java/com/list/task/activity/BaseActivity.java | 5681 | package com.list.task.activity;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import com.list.task.model.UserItem;
import com.list.task.util.Blog;
import com.list.task.util.Constants;
import com.list.task.R;
import io.realm.Realm;
import io.realm.RealmResults;
/**
* Base Activity that provides access to Realm DB
*
* @author aleksandrTmk
*/
public abstract class BaseActivity extends AppCompatActivity {
// Lazily (re)acquired default Realm instance; see setupRealmDB().
private Realm realm;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setupRealmDB();
if (!didAppRunBefore()){
handleFirstAppRun();
}
}
//region DB methods
/**
 * Adds (or updates, matching on primary key) the item in the Realm DB for
 * persistent storage. A null item is logged and ignored.
 *
 * @param item the item to persist
 */
public void addItemToRealm(UserItem item){
if (item == null){
Blog.e(BaseActivity.class, "Attempt to add null item to DB");
return;
}
if (realm == null){
setupRealmDB();
}
realm.beginTransaction();
realm.copyToRealmOrUpdate(item);
realm.commitTransaction();
}
/**
 * Updates the original item in Realm with the non-null fields of the update item;
 * the checked flag is always copied.
 *
 * Note: Realm requires all object updates to happen inside a transaction, hence why we have to pass
 * all the updated object into the method.
 *
 * NOTE(review): method name has a typo ("updatetemInRealm") — renaming would break
 * callers, so it is only flagged here.
 *
 * @param originalItem the managed item to mutate
 * @param updateItem carrier of the new field values
 */
public void updatetemInRealm(UserItem originalItem, UserItem updateItem){
if (originalItem == null || updateItem == null){
Blog.e(BaseActivity.class, "Attempt to update null item or updated values are null");
return;
}
if (realm == null){
setupRealmDB();
}
realm.beginTransaction();
if (updateItem.getName() != null) originalItem.setName(updateItem.getName());
if (updateItem.getDescription() != null) originalItem.setDescription(updateItem.getDescription());
if (updateItem.getQuantity() != null) originalItem.setQuantity(updateItem.getQuantity());
originalItem.setChecked(updateItem.isChecked());
realm.copyToRealmOrUpdate(originalItem);
realm.commitTransaction();
Blog.d(BaseActivity.class, "Item: " + originalItem.getName() + " added to DB");
}
/**
 * Deletes the item at the given position of the results list from Realm.
 * The success log is emitted unconditionally, even when results were null.
 *
 * @param pos index into {@link #getAllRealmItems()}
 */
public void deleteItemFromRealm(final int pos){
if (realm == null){
setupRealmDB();
}
realm.beginTransaction(); // Realm docs say to get results inside the transaction for sync safety
RealmResults<UserItem> allResult = getAllRealmItems();
if (allResult != null){
allResult.get(pos).deleteFromRealm();
}
realm.commitTransaction();
Blog.d(BaseActivity.class, "Item at: " + pos + " removed from DB");
}
/**
 * Returns the item at the given position of the results list.
 * NOTE(review): an out-of-range position is not guarded against and would throw.
 *
 * @param pos index into {@link #getAllRealmItems()}
 * @return the item, or null when results could not be obtained
 */
public UserItem getItemFromRealm(int pos){
RealmResults<UserItem> allResult = getAllRealmItems();
if (allResult == null){
Blog.e(BaseActivity.class, "Failed to get item at position: " + pos + " from DB");
return null;
}
Blog.d(BaseActivity.class, "Item at: " + pos + " returned from DB");
return allResult.get(pos);
}
/**
 * Returns all shopping items stored in the DB.
 *
 * @return live Realm results of every {@code UserItem}
 */
public RealmResults<UserItem> getAllRealmItems(){
if (realm == null){
setupRealmDB();
}
// TODO maybe sort the items on timestamp of creation?
Blog.d(BaseActivity.class, "Returning all DB items");
return realm.where(UserItem.class).findAll();
}
//endregion
//region First App run methods
/**
 * Check if this application has ran before. If it has, the Constants#PREF_APP_RAN_BEFORE flag will
 * be true, otherwise we return false.
 *
 * @return True if app ran before, false otherwise
 */
private boolean didAppRunBefore(){
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (prefs == null){
return false;
}
return prefs.getBoolean(Constants.PREF_APP_RAN_BEFORE, false);
}
// Marks the first-run flag and seeds the DB with a sample item.
private void handleFirstAppRun(){
Blog.d(BaseActivity.class, "App running first time!");
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
SharedPreferences.Editor editor = prefs.edit();
if (editor != null ){
editor.putBoolean(Constants.PREF_APP_RAN_BEFORE, true);
editor.apply();
}
addSampleItemToDB();
}
/**
 * Creates a sample list item as an example for the user and adds it to DB. This only happens the first time the
 * application runs.
 */
private void addSampleItemToDB(){
Blog.d(BaseActivity.class, "Create sample item");
UserItem sampleItem = new UserItem();
sampleItem.setName(getString(R.string.sample_item_name));
sampleItem.setDescription(getString(R.string.sample_item_description));
sampleItem.setQuantity(getString(R.string.sample_item_quantity));
sampleItem.setChecked(true);
addItemToRealm(sampleItem);
}
// Acquires the default Realm instance if not yet held.
private void setupRealmDB(){
if (realm == null){
realm = Realm.getDefaultInstance();
}
}
//endregion
}
| apache-2.0 |
mvs5465/jpo-ode | jpo-ode-plugins/src/test/java/us/dot/its/jpo/ode/plugin/j2735/oss/OssObstacleDetectionTest.java | 22584 | package us.dot.its.jpo.ode.plugin.j2735.oss;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import us.dot.its.jpo.ode.j2735.dsrc.DDateTime;
import us.dot.its.jpo.ode.j2735.dsrc.DDay;
import us.dot.its.jpo.ode.j2735.dsrc.DHour;
import us.dot.its.jpo.ode.j2735.dsrc.DMinute;
import us.dot.its.jpo.ode.j2735.dsrc.DMonth;
import us.dot.its.jpo.ode.j2735.dsrc.DOffset;
import us.dot.its.jpo.ode.j2735.dsrc.DSecond;
import us.dot.its.jpo.ode.j2735.dsrc.DYear;
import us.dot.its.jpo.ode.j2735.dsrc.ObstacleDetection;
import us.dot.its.jpo.ode.j2735.dsrc.ObstacleDirection;
import us.dot.its.jpo.ode.j2735.dsrc.ObstacleDistance;
import us.dot.its.jpo.ode.j2735.dsrc.VerticalAccelerationThreshold;
import us.dot.its.jpo.ode.plugin.j2735.J2735ObstacleDetection;
import us.dot.its.jpo.ode.plugin.j2735.J2735VertEvent;
/**
* -- Summary --
* JUnit test class for OssObstacleDetection
*
* Verifies correct conversion from generic ObstacleDetection to compliant J2735ObstactleDetection
*
* Notes:
* Tested elements:
* - obDist
* - vertEvent
*
* Untested elements: (handled by other testing classes)
* - description is a list of ITIS codes and are not part of the ASN specifications
* - obDirect is tested by OssAngleTest
* - locationDetails is tested by OssNamedNumberTest
* - dateTime is tested by OssDDateTimeTest
*
* -- Documentation --
* Data Frame: DF_ObstacleDetection
* Use: The DF_ObstacleDetection data frame is used to relate basic location information about a detect obstacle or
* a road hazard in a vehicles path.
* ASN.1 Representation:
* ObstacleDetection ::= SEQUENCE {
* obDist ObstacleDistance, -- Obstacle Distance
* obDirect ObstacleDirection, -- Obstacle Direction
* description ITIS.ITIScodes(523..541) OPTIONAL,
* -- Uses a limited set of ITIS codes
* locationDetails ITIS.GenericLocations OPTIONAL,
* dateTime DDateTime, -- Time detected
* vertEvent VerticalAccelerationThreshold OPTIONAL,
* -- Any wheels which have
* -- exceeded the acceleration point
* ...
* }
*
* Data Element: DE_ObstacleDistance
* Use: This data element draws from the output of a forward sensing system to report the presence of an obstacle
* and its measured distance from the vehicle detecting and reporting the obstacle. This information can be used
* by road authorities to investigate and remove the obstacle, as well as by other vehicles in advising drivers
* or on-board systems of the obstacle location. Distance is expressed in meters.
* ASN.1 Representation:
* ObstacleDistance ::= INTEGER (0..32767) -- LSB units of meters
*
* Data Element: DE_VerticalAccelerationThreshold
* Use: A bit string enumerating when a preset threshold for vertical acceleration is exceeded at each wheel.
* The "Wheel that exceeded Vertical G Threshold" data element is intended to inform Probe Data Users which vehicle
* wheel has exceeded a pre-determined threshold of a percent change in vertical G acceleration at the time a Probe
* Data snapshot was taken. This element is primarily intended to be used in the detection of potholes and similar
* road abnormalities. This element only provides information for four-wheeled vehicles. The element informs the user
* if the vehicle is not equipped with accelerometers on its wheels or that the system is off. When a wheel does
* exceed the threshold, the element provides details on the particular wheel by specifying Left Front, Left Rear,
* Right Front and Right Rear.
* ASN.1 Representation:
* VerticalAccelerationThreshold ::= BIT STRING {
* notEquipped (0), -- Not equipped or off
* leftFront (1), -- Left Front Event
* leftRear (2), -- Left Rear Event
* rightFront (3), -- Right Front Event
* rightRear (4) -- Right Rear Event
* } (SIZE(5))
*
*/
public class OssObstacleDetectionTest {
// Zeroed-out date/time shared by all tests in this class.
private DDateTime mockDDateTime;
/**
 * Create a mock DDateTime object before tests are run. This is necessary to prevent null pointer exception
 * upon OssObstacleDetection.genericObstacleDetection() method call.
 */
@Before
public void setUpMockDDateTime() {
DYear year = new DYear(0);
DMonth month = new DMonth(0);
DDay day = new DDay(0);
DHour hour = new DHour(0);
DMinute minute = new DMinute(0);
DSecond second = new DSecond(0);
DOffset offset = new DOffset(0);
mockDDateTime = new DDateTime(year, month, day, hour, minute, second, offset);
}
// Distance tests
/**
 * The minimum obstacle distance (0) must pass through conversion unchanged.
 */
@Test
public void shouldReturnMinimumDistance() {

    Integer inputDistance = 0;
    Integer expectedDistance = 0;

    byte[] vertEventBytes = { (byte) 0b00000 };

    ObstacleDetection detection = new ObstacleDetection();
    detection.setObDist(new ObstacleDistance(inputDistance));
    detection.setObDirect(new ObstacleDirection(0));
    detection.setVertEvent(new VerticalAccelerationThreshold(vertEventBytes));
    detection.setDateTime(mockDDateTime);

    Integer actualDistance = OssObstacleDetection
            .genericObstacleDetection(detection)
            .obDist;

    assertEquals(expectedDistance, actualDistance);
}
/**
 * The corner-case minimum obstacle distance (1) must convert to 1 unchanged.
 */
@Test
public void shouldReturnCornerCaseMinimumDistance() {

    Integer inputDistance = 1;
    Integer expectedDistance = 1;

    byte[] vertEventBytes = { (byte) 0b00000 };

    ObstacleDetection detection = new ObstacleDetection();
    detection.setObDist(new ObstacleDistance(inputDistance));
    detection.setObDirect(new ObstacleDirection(0));
    detection.setVertEvent(new VerticalAccelerationThreshold(vertEventBytes));
    detection.setDateTime(mockDDateTime);

    Integer actualDistance = OssObstacleDetection
            .genericObstacleDetection(detection)
            .obDist;

    assertEquals(expectedDistance, actualDistance);
}
/**
* Test known middle distance value (15012) returns (15012)
*/
@Test
public void shouldReturnMiddleDistance() {
Integer testInput = 15012;
Integer expectedValue = 15012;
Integer testVertEventBitString = 0b00000;
byte[] testVertEventBytes = {testVertEventBitString.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(testInput);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
Integer actualValue = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.obDist;
assertEquals(expectedValue, actualValue);
}
/**
* Test corner case maximum distance value (32766) returns (32766)
*/
@Test
public void shouldReturnCornerCaseMaximumDistance() {
Integer testInput = 32766;
Integer expectedValue = 32766;
Integer testVertEventBitString = 0b00000;
byte[] testVertEventBytes = {testVertEventBitString.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(testInput);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
Integer actualValue = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.obDist;
assertEquals(expectedValue, actualValue);
}
/**
* Test maximum distance value (32767) returns (32767)
*/
@Test
public void shouldReturnMaximumDistance() {
Integer testInput = 32767;
Integer expectedValue = 32767;
Integer testVertEventBitString = 0b00000;
byte[] testVertEventBytes = {testVertEventBitString.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(testInput);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
Integer actualValue = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.obDist;
assertEquals(expectedValue, actualValue);
}
/**
* Test that a distance value (-1) below the lower bound (0) throws IllegalArgumentException
*/
@Test
public void shouldThrowExceptionDistanceBelowLowerBound() {
Integer testInput = -1;
Integer testVertEventBitString = 0b00000;
byte[] testVertEventBytes = {testVertEventBitString.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(testInput);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
try {
J2735ObstacleDetection actualValue = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection);
fail("Expected IllegalArgumentException");
} catch (RuntimeException e) {
assertEquals(IllegalArgumentException.class, e.getClass());
}
}
/**
* Test that a distance value (32768) above the upper bound (32767) throws IllegalArgumentException
*/
@Test
public void shouldThrowExceptionDistanceAboveUpperBound() {
Integer testInput = 32768;
Integer testVertEventBitString = 0b00000;
byte[] testVertEventBytes = {testVertEventBitString.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(testInput);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
try {
J2735ObstacleDetection actualValue = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection);
fail("Expected IllegalArgumentException");
} catch (RuntimeException e) {
assertEquals(IllegalArgumentException.class, e.getClass());
}
}
// VerticalAccelerationEvent tests
/**
* Test bitstring value "00000" returns "false" for all vert event flags
*/
@Test
public void shouldCreateAllOffVertEvent() {
Integer testInput = 0b00000;
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
/**
* Test bitstring input "11111" returns "true" for all vert event flags
*/
@Test
public void shouldCreateAllOnVertEvent() {
Integer testInput = 0b11111;
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
}
}
/**
* Test input bitstring value "00001" returns true for "notEquipped" only
*/
@Test
public void shouldCreateNotEquippedVertEvent() {
Integer testInput = 0b00001;
String elementTested = "notEquipped";
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
if(curVal.getKey() == elementTested) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
} else {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
}
/**
* Test input bitstring "00010" returns "true" for "leftFront" only
*/
@Test
public void shouldCreateLeftFrontVertEvent() {
Integer testInput = 0b00010;
String elementTested = "leftFront";
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
if(curVal.getKey() == elementTested) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
} else {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
}
/**
* Test input bitstring value "01000" returns "true" for "rightFront" only
*/
@Test
public void shouldCreateRightFrontVertEvent() {
Integer testInput = 0b01000;
String elementTested = "rightFront";
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
if(curVal.getKey() == elementTested) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
} else {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
}
/**
* Test input bitstring value "10000" returns "true" for "rightRear" only
*/
@Test
public void shouldCreateRightRearVertEvent() {
Integer testInput = 0b10000;
String elementTested = "rightRear";
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
if(curVal.getKey() == elementTested) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
} else {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
}
/**
* Test input bitstring value "01010" returns "true" for "leftFront" and "rightFront" only
*/
@Test
public void shouldCreateTwoVertEvents() {
Integer testInput = 0b01010;
String elementTested1 = "leftFront";
String elementTested2 = "rightFront";
byte[] testVertEventBytes = {testInput.byteValue()};
ObstacleDistance testDist = new ObstacleDistance(0);
ObstacleDirection testDirect = new ObstacleDirection(0);
VerticalAccelerationThreshold testVertEvent = new VerticalAccelerationThreshold(testVertEventBytes);
ObstacleDetection testObstacleDetection = new ObstacleDetection();
testObstacleDetection.setObDist(testDist);
testObstacleDetection.setObDirect(testDirect);
testObstacleDetection.setVertEvent(testVertEvent);
testObstacleDetection.setDateTime(mockDDateTime);
J2735VertEvent actualVertEvent = OssObstacleDetection
.genericObstacleDetection(testObstacleDetection)
.vertEvent;
for (Map.Entry<String, Boolean> curVal : actualVertEvent.entrySet()) {
if(curVal.getKey() == elementTested1 || curVal.getKey() == elementTested2) {
assertTrue("Expected " + curVal.getKey() + " to be true", curVal.getValue());
} else {
assertFalse("Expected " + curVal.getKey() + " to be false", curVal.getValue());
}
}
}
}
| apache-2.0 |
tomkren/pikater | test/org/pikater/core/experiments/SearchOnly.java | 2676 | package org.pikater.core.experiments;
import org.pikater.core.agents.experiment.computing.Agent_WekaRBFNetworkCA;
import org.pikater.core.ontology.subtrees.batchdescription.*;
import org.pikater.core.ontology.subtrees.batchdescription.evaluationmethod.CrossValidation;
import org.pikater.core.ontology.subtrees.newoption.base.NewOption;
import org.pikater.core.ontology.subtrees.newoption.values.DoubleValue;
import org.pikater.core.ontology.subtrees.newoption.values.QuestionMarkRange;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Martin Pilat on 28.12.13.
*/
public class SearchOnly implements ITestExperiment {
    /**
     * Builds the experiment's computation description: a Weka RBF-network computing
     * agent trained, tested and validated on iris.arff, wrapped in a
     * search/recommendation complex, with the trained model written out by a file saver.
     */
    public ComputationDescription createDescription() {
        // Single data source (iris.arff) reused for training, testing and validation.
        FileDataProvider fdp = new FileDataProvider();
        fdp.setFileURI("iris.arff");
        DataSourceDescription dsd = new DataSourceDescription();
        dsd.setDataProvider(fdp);
        // Computing agent: RBF network evaluated with cross-validation.
        ComputingAgent ca = new ComputingAgent();
        ca.setTrainingData(dsd);
        ca.setTestingData(dsd);
        ca.setValidationData(dsd);
        ca.setAgentType(Agent_WekaRBFNetworkCA.class.getName());
        ca.setEvaluationMethod(new EvaluationMethod(CrossValidation.class.getName()));
        //"whatever.mlp.is.in.MLP"
        // Agent options: "L" is a question-mark range (to be filled in by the search),
        // "H" is fixed at 4.
        List<NewOption> options = new ArrayList<NewOption>();
        NewOption lr = new NewOption("L",
                new QuestionMarkRange(
                        new DoubleValue(0.0), new DoubleValue(0.0)));
        NewOption hr = new NewOption( "H",4);
        options.add(lr);
        options.add(hr);
        ca.setOptions(options);
        // Search wrapper: minimizes the "mse" error produced by the computing agent.
        CARecSearchComplex crsc = new CARecSearchComplex();
        crsc.setComputingAgent(ca);
        ErrorSourceDescription ed = new ErrorSourceDescription();
        ed.setProvider(ca);
        ed.setOutputType("mse");
        List<ErrorSourceDescription> eds = new ArrayList<ErrorSourceDescription>();
        eds.add(ed);
        crsc.setErrors(eds);
        // Evolutionary-algorithm search agent.
        Search sa = new Search();
        sa.setAgentType("whatever.ea.is.in.EA");
        // NOTE(review): searchParameters is populated below but never attached to 'sa'
        // (no sa.setOptions(searchParameters) call) — confirm whether this is missing.
        List<NewOption> searchParameters = new ArrayList<NewOption>();
        NewOption pr = new NewOption(
                "ea.popSize",50);
        NewOption ear = new NewOption( "ea.mutationRate",0.03);
        searchParameters.add(pr);
        searchParameters.add(ear);
        crsc.setSearch(sa);
        // Saver reads the "trained" output of the computing agent directly.
        // NOTE(review): 'crsc' is configured but not referenced from the saved data
        // source or the root element — verify the search complex is actually wired
        // into the description graph as intended.
        DataSourceDescription cAds = new DataSourceDescription();
        cAds.setDataProvider(ca);
        cAds.setOutputType("trained");
        FileDataSaver fds = new FileDataSaver();
        fds.setDataSource(cAds);
        // The file saver is the single root of the computation description.
        ComputationDescription cd = new ComputationDescription();
        cd.addRootElement(fds);
        return cd;
    }
}
| apache-2.0 |
Panda-Programming-Language/Panda | panda-framework/src/test/java/org/panda_lang/panda/framework/language/interpreter/token/distributors/MatchableDistributorTest.java | 1632 | /*
* Copyright (c) 2015-2019 Dzikoysk
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.panda_lang.panda.framework.language.interpreter.token.distributors;
import org.junit.jupiter.api.Test;
import org.panda_lang.panda.framework.design.interpreter.lexer.Lexer;
import org.panda_lang.panda.framework.design.interpreter.source.Source;
import org.panda_lang.panda.framework.language.interpreter.lexer.PandaLexer;
import org.panda_lang.panda.framework.language.interpreter.source.PandaSource;
import org.panda_lang.panda.framework.language.resource.PandaSyntax;
public class MatchableDistributorTest {

    /** Sample source containing nested bracket/brace sections to exercise matching. */
    private static final Source SOURCE = new PandaSource("MatchableDistributorTest", "test [ a { b } c ] element");

    /**
     * Lexes the sample source and walks it with a MatchableDistributor,
     * printing each verified token together with its current matchable state.
     */
    @Test
    public void testMatchable() {
        Lexer lexer = PandaLexer.of(new PandaSyntax()).build();
        TokenDistributor tokenDistributor = new TokenDistributor(lexer.convert(SOURCE));
        MatchableDistributor matchableDistributor = new MatchableDistributor(tokenDistributor);

        while (matchableDistributor.hasNext()) {
            Object token = matchableDistributor.nextVerified();
            System.out.println(token + " isMatchable=" + matchableDistributor.isMatchable());
        }
    }
}
| apache-2.0 |
project-zerus/cassandra | src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java | 4661 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.EnumSet;
import java.util.Map;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.metadata.*;
/**
* Shows the contents of sstable metadata
*/
public class SSTableMetadataViewer
{
    /**
     * Prints the validation, stats and compaction metadata components of each
     * sstable named on the command line. Missing files are reported; missing
     * metadata components are silently skipped.
     *
     * @param args a list of sstables whose metadata we're interested in
     */
    public static void main(String[] args) throws IOException
    {
        PrintStream out = System.out;
        if (args.length == 0)
        {
            out.println("Usage: sstablemetadata <sstable filenames>");
            System.exit(1);
        }

        for (String fname : args)
        {
            if (new File(fname).exists())
            {
                Descriptor descriptor = Descriptor.fromFilename(fname);
                // Deserialize every metadata component at once; absent components map to null.
                Map<MetadataType, MetadataComponent> metadata = descriptor.getMetadataSerializer().deserialize(descriptor, EnumSet.allOf(MetadataType.class));
                ValidationMetadata validation = (ValidationMetadata) metadata.get(MetadataType.VALIDATION);
                StatsMetadata stats = (StatsMetadata) metadata.get(MetadataType.STATS);
                CompactionMetadata compaction = (CompactionMetadata) metadata.get(MetadataType.COMPACTION);

                out.printf("SSTable: %s%n", descriptor);
                if (validation != null)
                {
                    out.printf("Partitioner: %s%n", validation.partitioner);
                    out.printf("Bloom Filter FP chance: %f%n", validation.bloomFilterFPChance);
                }
                if (stats != null)
                {
                    out.printf("Maximum timestamp: %s%n", stats.maxTimestamp);
                    out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime);
                    out.printf("Compression ratio: %s%n", stats.compressionRatio);
                    out.printf("Estimated droppable tombstones: %s%n", stats.getEstimatedDroppableTombstoneRatio((int) (System.currentTimeMillis() / 1000)));
                    out.printf("SSTable Level: %d%n", stats.sstableLevel);
                    out.printf("Repaired at: %d%n", stats.repairedAt);
                    out.println(stats.replayPosition);
                    // BUG FIX: was println("...%n") — println does not interpret format
                    // specifiers, so a literal "%n" was printed after the heading.
                    out.println("Estimated tombstone drop times:");
                    for (Map.Entry<Double, Long> entry : stats.estimatedTombstoneDropTime.getAsMap().entrySet())
                    {
                        out.printf("%-10s:%10s%n", entry.getKey().intValue(), entry.getValue());
                    }
                    printHistograms(stats, out);
                }
                if (compaction != null)
                {
                    // %s formats a null ancestors set as "null" instead of throwing NPE
                    // (previously called ancestors.toString() explicitly).
                    out.printf("Ancestors: %s%n", compaction.ancestors);
                    out.printf("Estimated cardinality: %s%n", compaction.cardinalityEstimator.cardinality());
                }
            }
            else
            {
                out.println("No such file: " + fname);
            }
        }
    }

    /**
     * Prints the estimated row-size and cell-count histograms side by side,
     * one line per bucket offset. Buckets beyond a histogram's length render blank.
     */
    private static void printHistograms(StatsMetadata metadata, PrintStream out)
    {
        long[] offsets = metadata.estimatedRowSize.getBucketOffsets();
        long[] ersh = metadata.estimatedRowSize.getBuckets(false);
        long[] ecch = metadata.estimatedColumnCount.getBuckets(false);

        out.printf("%-10s%18s%18s%n", "Count", "Row Size", "Cell Count");
        for (int i = 0; i < offsets.length; i++)
        {
            out.printf("%-10d%18s%18s%n",
                       offsets[i],
                       (i < ersh.length ? ersh[i] : ""),
                       (i < ecch.length ? ecch[i] : ""));
        }
    }
}
| apache-2.0 |
krasserm/ipf | commons/ihe/xds/src/main/java/org/openehealth/ipf/commons/ihe/xds/iti62/Iti62ServerAuditStrategy.java | 2254 | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openehealth.ipf.commons.ihe.xds.iti62;
import org.openehealth.ipf.commons.ihe.core.atna.AuditorManager;
import org.openehealth.ipf.commons.ihe.xds.core.audit.XdsRemoveAuditDataset;
import org.openehealth.ipf.commons.ihe.xds.core.audit.XdsRemoveAuditStrategy30;
import org.openehealth.ipf.commons.ihe.xds.core.audit.XdsSubmitAuditDataset;
import org.openehealth.ipf.commons.ihe.xds.core.audit.XdsSubmitAuditStrategy30;
/**
* Server audit strategy for ITI-62.
* @author Boris Stanojevic
*/
public class Iti62ServerAuditStrategy extends XdsRemoveAuditStrategy30 {

    /** Audit-record fields that must be present for an ITI-62 server-side audit entry. */
    private static final String[] NECESSARY_AUDIT_FIELDS = new String[] {
            "EventOutcomeCode",
            "ClientIpAddress",
            "ServiceEndpointUrl"};

    /**
     * Constructs the audit strategy.
     * @param allowIncompleteAudit
     *      whether this strategy should allow incomplete audit records
     *      (parameter initially configurable via endpoint URL).
     */
    public Iti62ServerAuditStrategy(boolean allowIncompleteAudit) {
        super(true, allowIncompleteAudit);
    }

    /**
     * Sends the ITI-62 server-side audit record built from the given dataset
     * to the configured XDS auditor.
     */
    @Override
    public void doAudit(XdsRemoveAuditDataset auditDataset) {
        AuditorManager.getCustomXdsAuditor().auditServerIti62(
                auditDataset.getEventOutcomeCode(),
                auditDataset.getUserId(),
                auditDataset.getClientIpAddress(),
                auditDataset.getUserName(),
                auditDataset.getServiceEndpointUrl(),
                auditDataset.getPatientId());
    }

    @Override
    public String[] getNecessaryAuditFieldNames() {
        // Defensive copy: callers must not be able to mutate the shared constant array.
        return NECESSARY_AUDIT_FIELDS.clone();
    }
}
| apache-2.0 |
Bodo1981/appkit | sample-viewstate/src/main/java/com/christianbahl/appkit/sampleviewstate/fragment_recyclerview/FragmentActivityRecyclerView.java | 651 | package com.christianbahl.appkit.sampleviewstate.fragment_recyclerview;
import android.content.Context;
import android.content.Intent;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import com.christianbahl.appkit.core.activity.CBActivityFragment;
/**
* @author Christian Bahl
*/
public class FragmentActivityRecyclerView extends CBActivityFragment {

    /**
     * Builds an intent that starts this activity.
     *
     * @param context context used to construct the intent
     * @return intent targeting {@link FragmentActivityRecyclerView}
     */
    public static Intent getStartIntent(Context context) {
        Intent startIntent = new Intent(context, FragmentActivityRecyclerView.class);
        return startIntent;
    }

    /**
     * Supplies the fragment hosted by this activity.
     */
    @NonNull
    @Override
    protected Fragment createFragmentToDisplay() {
        return FragmentRecyclerViewFragment.newInstance();
    }
}
| apache-2.0 |
ptdunlap/cormac | webapp/src/main/java/org/bananaforscale/cormac/resource/DatabaseResource.java | 5039 | /*
* Copyright 2015 bananaforscale.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bananaforscale.cormac.resource;
import java.util.List;
import org.bananaforscale.cormac.dao.database.DatabaseDataService;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.bananaforscale.cormac.exception.datasource.DatasourceException;
import org.bananaforscale.cormac.exception.datasource.ExistsException;
import org.bananaforscale.cormac.exception.datasource.NotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Resource to handle Mongo Database operations
*
* @author Paul Dunlap
*/
@Path("database")
public class DatabaseResource {

    private static final Logger logger = LoggerFactory.getLogger(DatabaseResource.class);

    @Context
    HttpServletRequest request;

    DatabaseDataService dds;

    public DatabaseResource(DatabaseDataService dds) {
        this.dds = dds;
    }

    /** Builds a 500 response carrying the exception message as a JSON error document. */
    private static Response serverError(Exception ex) {
        return Response.status(500).entity(ResourceUtil.buildJson("error", ex.getMessage())).build();
    }

    /** Builds a 400 response carrying the exception message as a JSON error document. */
    private static Response badRequest(Exception ex) {
        return Response.status(400).entity(ResourceUtil.buildJson("error", ex.getMessage())).build();
    }

    /**
     * Returns a list of all database names present on the server.
     *
     * @return the names of databases present on this server
     */
    @GET
    @Path("")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getDatabases() {
        try {
            List<String> databaseList = dds.getDatabases();
            return Response.ok(databaseList).build();
        } catch (DatasourceException ex) {
            return serverError(ex);
        }
    }

    /**
     * Creates a new database explicitly. Because MongoDB creates a database
     * implicitly when the database is first referenced in a command, this
     * method is not required for usage of said database.
     *
     * @param databaseName the database to create
     * @return a status message with the outcome of the operation
     */
    @PUT
    @Path("{databaseName}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response addDatabase(@PathParam("databaseName") String databaseName) {
        try {
            boolean result = dds.addDatabase(databaseName);
            return Response.ok(ResourceUtil.buildJson("ok", String.valueOf(result))).build();
        } catch (DatasourceException ex) {
            return serverError(ex);
        } catch (ExistsException ex) {
            // Database already exists: client error, not a server failure.
            return badRequest(ex);
        }
    }

    /**
     * Removes a database with a given name.
     *
     * @param databaseName the database
     * @return a status message with the outcome of the operation
     */
    @DELETE
    @Path("{databaseName}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response removeDatabase(@PathParam("databaseName") String databaseName) {
        try {
            boolean result = dds.removeDatabase(databaseName);
            return Response.ok(ResourceUtil.buildJson("ok", String.valueOf(result))).build();
        } catch (DatasourceException ex) {
            return serverError(ex);
        } catch (NotFoundException ex) {
            // Unknown database name: client error, not a server failure.
            return badRequest(ex);
        }
    }

    /**
     * Returns statistics that reflect the use state of a single database.
     *
     * @param databaseName the database
     * @return A document with statistics reflecting the database system’s
     * state.
     */
    @GET
    @Path("{databaseName}/stats")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getDatabaseStats(@PathParam("databaseName") String databaseName) {
        try {
            String result = dds.getDatabaseStats(databaseName);
            return Response.ok(result).build();
        } catch (DatasourceException ex) {
            return serverError(ex);
        } catch (NotFoundException ex) {
            return badRequest(ex);
        }
    }
}
| apache-2.0 |
lucjasniegota/java_pft | addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/GroupCreationTests.java | 3428 | package ru.stqa.pft.addressbook.tests;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.thoughtworks.xstream.XStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.GroupData;
import ru.stqa.pft.addressbook.model.Groups;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class GroupCreationTests extends TestBase {

    /**
     * Reads the whole file at the given path into a single string, concatenating
     * lines without separators (matches the original loaders' behavior).
     * The reader is closed even when reading fails.
     */
    private static String readAll(String filePath) throws IOException {
        StringBuilder content = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new FileReader(new File(filePath)))) {
            String line = reader.readLine();
            while (line != null) {
                content.append(line);
                line = reader.readLine();
            }
        }
        return content.toString();
    }

    /** Supplies valid groups parsed from the configured JSON fixture file. */
    @DataProvider
    public Iterator<Object[]> validGroupsfromJSON() throws IOException {
        String json = readAll(app.properties.getProperty("web.groupFileJSON"));
        Gson gson = new Gson();
        List<GroupData> groups = gson.fromJson(json, new TypeToken<List<GroupData>>() {
        }.getType());
        return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }

    /** Supplies valid groups parsed from the configured XML fixture file. */
    @DataProvider
    public Iterator<Object[]> validGroupsfromXML() throws IOException {
        String xml = readAll(app.properties.getProperty("web.groupFileXML"));
        XStream xstream = new XStream();
        xstream.processAnnotations(GroupData.class);
        List<GroupData> groups = (List<GroupData>) xstream.fromXML(xml);
        return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }

    /** Supplies valid groups parsed from the configured CSV fixture file (";"-separated). */
    @DataProvider
    public Iterator<Object[]> validGroupsfromCSV() throws IOException {
        List<Object[]> list = new ArrayList<Object[]>();
        try (BufferedReader reader = new BufferedReader(
                new FileReader(new File(app.properties.getProperty("web.groupFileCSV"))))) {
            String line = reader.readLine();
            while (line != null) {
                // Column order in the fixture: name;header;footer
                String[] split = line.split(";");
                list.add(new Object[]{new GroupData().withName(split[0]).withHeader(split[1]).withFooter(split[2])});
                line = reader.readLine();
            }
        }
        return list.iterator();
    }

    /**
     * Creates each provided group through the UI and verifies that both the UI
     * count and the database contents reflect the addition.
     */
    @Test(dataProvider = "validGroupsfromXML")
    public void testGroupCreation(GroupData group) {
        app.goTo().groupPage();
        Groups before = app.db().groups();
        app.group().create(group);
        assertThat(app.group().count(), equalTo(before.size() + 1));
        Groups after = app.db().groups();
        // The created group's id is the maximum id present after creation.
        assertThat(after, equalTo(before.withAdded(group.withId(after.stream().mapToInt((g) -> g.getId()).max().getAsInt()))));
    }

    /**
     * Attempts to create a group with an invalid name and verifies that neither
     * the UI count nor the database contents change.
     */
    @Test
    public void testBadGroupCreation() {
        app.goTo().groupPage();
        Groups before = app.db().groups();
        GroupData group = new GroupData().withName(app.properties.getProperty("web.badGroupName"));
        app.group().create(group);
        assertThat(app.group().count(), equalTo(before.size()));
        Groups after = app.db().groups();
        assertThat(after, equalTo(before));
    }
}
| apache-2.0 |
AsuraTeam/asura | asura-sms/src/main/java/com/asrua/framework/sms/send/SmsSendWorker.java | 3591 | /**
* @FileName: SmsSendWorker.java
* @Package: com.asrua.framework.sms.send
* @author sence
* @created 10/10/2015 9:22 PM
* <p/>
* Copyright 2015 ziroom
*/
package com.asrua.framework.sms.send;
import com.asrua.framework.sms.conf.AbstractSmsSenderConfig;
import com.asrua.framework.sms.log.ISmsLogger;
import com.asrua.framework.sms.log.LogInfoBean;
import com.asura.framework.base.entity.DataTransferObject;
import com.asura.framework.base.util.Check;
import com.asura.framework.utils.LogUtil;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.Callable;
/**
* <p></p>
* <p/>
* <PRE>
* <BR> 修改记录
* <BR>-----------------------------------------------
* <BR> 修改日期 修改人 修改内容
* </PRE>
*
* @author sence
* @version 1.0
* @since 1.0
*/
/**
 * Sends one SMS over HTTP and returns the provider response wrapped in a
 * {@link DataTransferObject}. Designed to run on an executor (implements
 * {@link Callable}); each worker owns its own {@link CloseableHttpClient}
 * because HTTP connections are stateful and not thread-safe.
 */
public class SmsSendWorker implements Callable<DataTransferObject> {

    // Fixed: the logger was previously created for AbstractHttpSmsSender.class,
    // so log lines were attributed to the wrong class. Also made static final
    // per SLF4J convention (one logger instance per class).
    private static final Logger LOGGER = LoggerFactory.getLogger(SmsSendWorker.class);

    /** Sender configuration; only {@code getSendUrl()} is used here (for the audit log). */
    private AbstractSmsSenderConfig config;
    /** Fully prepared request (URL and payload are set by the caller). */
    private HttpPost httpPost;
    /** Message being sent; recorded in the audit log. */
    private SmsMessage smsMessage;
    /** Audit logger that persists a LogInfoBean per attempt. */
    private ISmsLogger logger;

    public SmsSendWorker(SmsMessage smsMessage, HttpPost httpPost, ISmsLogger logger, AbstractSmsSenderConfig config) {
        this.smsMessage = smsMessage;
        this.httpPost = httpPost;
        this.logger = logger;
        this.config = config;
    }

    /**
     * Executes the HTTP request and records the outcome.
     *
     * @return DTO carrying the provider response body under key "data" on
     *         success, or errCode=1 plus a message on I/O failure
     * @throws Exception if closing the HTTP resources fails
     */
    @Override
    public DataTransferObject call() throws Exception {
        CloseableHttpClient httpClient = null;
        DataTransferObject dto = new DataTransferObject();
        CloseableHttpResponse response = null;
        LogInfoBean logInfoBean = new LogInfoBean();
        try {
            /**
             * HTTP connections are complex, stateful, thread-unsafe objects which need to be properly managed to function correctly.
             * HTTP connections can only be used by one execution thread at a time
             */
            httpClient = HttpClients.createDefault();
            logInfoBean.setSmsMessage(smsMessage);
            logInfoBean.setUrl(config.getSendUrl());
            response = httpClient.execute(httpPost);
            String result = handlerRespone(response, logInfoBean);
            logger.saveLog(logInfoBean);
            dto.putValue("data", result);
            return dto;
        } catch (IOException e) {
            // Failure path: report via the DTO (errCode=1) rather than
            // propagating, so the executor's Future still yields a result.
            dto.setErrCode(1);
            dto.setMsg("发送短信失败:" + e.getMessage());
            LogUtil.error(LOGGER, "send error:{}", e);
            logInfoBean.setReturnInfo(e.getMessage());
            logger.saveLog(logInfoBean);
            return dto;
        } finally {
            // Fixed close order: the response must be released before the
            // client that owns its underlying connection is shut down.
            if (!Check.NuNObj(response)) {
                response.close();
            }
            if (!Check.NuNObj(httpClient)) {
                httpClient.close();
            }
        }
    }

    /**
     * Extracts the response body (only when HTTP status is 200) and records
     * status + body into the audit bean.
     * NOTE(review): method name has a typo ("handlerRespone"); kept as-is
     * because it is public API and renaming could break external callers.
     *
     * @param response    executed HTTP response
     * @param logInfoBean audit bean to populate with status code and body
     * @return response body for HTTP 200, otherwise {@code null}
     * @throws IOException if reading the entity fails
     */
    public String handlerRespone(CloseableHttpResponse response, LogInfoBean logInfoBean) throws IOException {
        int status = response.getStatusLine().getStatusCode();
        String resultStr = null;
        if (status == 200) {
            resultStr = EntityUtils.toString(response.getEntity());
        }
        logInfoBean.setReturnCode(String.valueOf(status));
        logInfoBean.setReturnInfo(resultStr);
        return resultStr;
    }
}
| apache-2.0 |
playaround88/sizzler | src/main/java/com/ai/sizzler/scan/redis/RedisImporter.java | 1747 | package com.ai.sizzler.scan.redis;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ai.sizzler.commons.JsonUtil;
import com.ai.sizzler.scan.component.AbstractImporter;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;
/**
 * Importer that drains records from a Redis list used as a work queue.
 * Configuration comes from a map: a JSON "props" string containing the
 * queue name, and a "dataSource" sub-map with Redis connection settings.
 */
public class RedisImporter extends AbstractImporter{

    private static Logger LOG=LoggerFactory.getLogger(RedisImporter.class);

    // Redis connection settings (host/port/password), loaded from the "dataSource" sub-map.
    private RedisDataSource ds;
    // Name of the Redis list to pop records from.
    private String queue;
    // Single client instance created in init(); NOTE(review): Jedis is not
    // thread-safe, so this importer presumably runs single-threaded -- confirm.
    private Jedis jedis;

    /**
     * Parses configuration: reads the queue name from the JSON properties
     * string and initialises the Redis data-source settings.
     */
    @Override
    public void load(Map map) {
        super.load(map);
        // getProps() is assumed to return the JSON properties string populated
        // by super.load() -- TODO confirm against AbstractImporter.
        Map prop=JsonUtil.fromJson(getProps(), HashMap.class);
        setQueue((String)prop.get("queue"));
        this.ds=new RedisDataSource();
        this.ds.load((Map)map.get("dataSource"));
    }

    /** Creates the Jedis client and authenticates when a password is configured. */
    @Override
    public void init() {
        jedis=new Jedis(ds.getHost(),ds.getPort());
        if(ds.getPassword()!=null && !"".equals(ds.getPassword())){
            jedis.auth(ds.getPassword());
        }
    }

    /** Closes and releases the Jedis client; safe to call when already destroyed. */
    @Override
    public void destroy() {
        if(jedis!=null){
            jedis.close();
            jedis=null;
        }
    }

    /**
     * Pops up to {@code size} records from the tail of the queue in one
     * pipelined round-trip.
     *
     * @param size maximum number of records to fetch
     * @return pipeline results (one entry per rpop; entries are presumably
     *         null when the queue runs empty -- confirm Jedis rpop semantics),
     *         or null if the pipeline failed
     */
    @Override
    public List<Object> scan(int size) {
        // Reconnect if a previous scan() disconnected us (see finally below).
        if(!jedis.isConnected()){
            jedis.connect();
        }
        List<Object> result = null;
        try {
            // Pipeline batches all rpop commands into a single round-trip.
            Pipeline pipe = jedis.pipelined();
            for (int i = 0; i < size; i++) {
                pipe.rpop(queue);
            }
            result = pipe.syncAndReturnAll();
        } catch (Exception e) {
            // Log message means "error fetching data"; errors yield a null result.
            LOG.error("获取数据异常:",e);
        } finally {
            // NOTE(review): disconnects after every scan, forcing a reconnect
            // on the next call -- possibly intentional to avoid idle
            // connections, but worth confirming.
            if(jedis!=null){
                jedis.disconnect();
            }
        }
        return result;
    }

    /** Records popped from the queue need no state update; always reports success. */
    @Override
    public int updateState(Object record) {
        return 1;
    }

    public String getQueue() {
        return queue;
    }

    public void setQueue(String queue) {
        this.queue = queue;
    }
}
| apache-2.0 |
sflpro/ms_payment | services/services_core/src/main/java/com/sfl/pms/services/payment/customer/method/adyen/CustomerPaymentMethodAdyenInformationService.java | 1638 | package com.sfl.pms.services.payment.customer.method.adyen;
import com.sfl.pms.services.payment.customer.method.AbstractCustomerPaymentMethodProviderInformationService;
import com.sfl.pms.services.payment.customer.method.model.adyen.CustomerPaymentMethodAdyenInformation;
import javax.annotation.Nonnull;
/**
* User: Ruben Dilanyan
* Company: SFL LLC
* Date: 1/16/15
* Time: 11:23 AM
*/
/**
 * Service for managing Adyen-specific payment method information, keyed by
 * Adyen's recurring detail reference (the token Adyen assigns to a stored
 * payment method).
 */
public interface CustomerPaymentMethodAdyenInformationService extends AbstractCustomerPaymentMethodProviderInformationService<CustomerPaymentMethodAdyenInformation> {

    /**
     * Checks if payment method information exists for recurring details reference
     *
     * @param recurringDetailReference Adyen recurring detail reference to look up; must not be null
     * @return true if a payment method information record exists for the reference, false otherwise
     */
    boolean checkIfPaymentMethodInformationExistsForRecurringDetailReference(@Nonnull final String recurringDetailReference);

    /**
     * Finds customer payment method Adyen information by recurring details reference
     *
     * @param recurringDetailReference Adyen recurring detail reference to look up; must not be null
     * @return the matching customer payment method Adyen information; never null
     *         (behavior when no record exists is defined by the implementation -- presumably throws; confirm)
     */
    @Nonnull
    CustomerPaymentMethodAdyenInformation getPaymentMethodInformationByRecurringDetailReference(@Nonnull final String recurringDetailReference);

    /**
     * Updates recurring details reference for Adyen payment method information
     *
     * @param informationId            id of the payment method information record to update; must not be null
     * @param recurringDetailReference new Adyen recurring detail reference to store; must not be null
     * @return the updated customer payment method Adyen information; never null
     */
    @Nonnull
    CustomerPaymentMethodAdyenInformation updatePaymentMethodInformationRecurringDetailReference(@Nonnull final Long informationId, @Nonnull final String recurringDetailReference);
}
| apache-2.0 |