/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.genie.core.jpa.specifications;
import com.google.common.collect.Sets;
import com.netflix.genie.common.dto.JobStatus;
import com.netflix.genie.core.jpa.entities.ClusterEntity;
import com.netflix.genie.core.jpa.entities.CommandEntity;
import com.netflix.genie.core.jpa.entities.JobEntity;
import com.netflix.genie.core.jpa.entities.JobEntity_;
import com.netflix.genie.test.categories.UnitTest;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.Date;
import java.util.Set;
import java.util.UUID;
/**
* Test the specifications generated by JobSpecs.
*
* @author tgianos
*/
@Category(UnitTest.class)
public class JpaJobSpecsUnitTests {
// Random job id used by every test; "%"-suffixed variants exercise LIKE matching.
private static final String ID = UUID.randomUUID().toString();
private static final String JOB_NAME = "jobName";
private static final String USER_NAME = "tgianos";
private static final String CLUSTER_NAME = "hprod2";
// Mocked entities stand in for real cluster/command rows when filtering by id.
private static final ClusterEntity CLUSTER = Mockito.mock(ClusterEntity.class);
private static final String COMMAND_NAME = "pig";
private static final CommandEntity COMMAND = Mockito.mock(CommandEntity.class);
// Mutable sets: cleared and repopulated in setup() so tests can add to them safely.
private static final Set<String> TAGS = Sets.newHashSet();
private static final Set<JobStatus> STATUSES = Sets.newHashSet();
private static final String TAG = UUID.randomUUID().toString();
// Strictly increasing instants so the started/finished range bounds are distinct.
private static final Date MIN_STARTED = new Date();
private static final Date MAX_STARTED = new Date(MIN_STARTED.getTime() + 10);
private static final Date MIN_FINISHED = new Date(MAX_STARTED.getTime() + 10);
private static final Date MAX_FINISHED = new Date(MIN_FINISHED.getTime() + 10);
// Criteria mocks recreated before each test in setup().
private Root<JobEntity> root;
private CriteriaBuilder cb;
// LIKE pattern JpaJobSpecs is expected to build from TAGS.
private String tagLikeStatement;
/**
 * Setup the mocks used by every test.
 *
 * <p>For each attribute of {@code JobEntity} a mocked {@link Path} is returned from the mocked
 * {@link Root} and a distinct mocked {@link Predicate} is stubbed on the mocked
 * {@link CriteriaBuilder}, so each test can verify exactly which restrictions
 * {@code JpaJobSpecs.getFindPredicate} created.
 */
@Before
@SuppressWarnings("unchecked")
public void setup() {
    // Reset the shared mutable sets so tests that add extra entries don't leak state.
    TAGS.clear();
    TAGS.add(TAG);
    STATUSES.clear();
    STATUSES.add(JobStatus.INIT);
    STATUSES.add(JobStatus.FAILED);
    this.root = (Root<JobEntity>) Mockito.mock(Root.class);
    this.cb = Mockito.mock(CriteriaBuilder.class);

    // id: both LIKE (wildcard search) and equality are stubbed.
    final Path<String> idPath = (Path<String>) Mockito.mock(Path.class);
    final Predicate likeIdPredicate = Mockito.mock(Predicate.class);
    final Predicate equalIdPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.id)).thenReturn(idPath);
    Mockito.when(this.cb.like(idPath, ID)).thenReturn(likeIdPredicate);
    Mockito.when(this.cb.equal(idPath, ID)).thenReturn(equalIdPredicate);

    // job name
    final Path<String> jobNamePath = (Path<String>) Mockito.mock(Path.class);
    final Predicate likeJobNamePredicate = Mockito.mock(Predicate.class);
    final Predicate equalJobNamePredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.name)).thenReturn(jobNamePath);
    Mockito.when(this.cb.like(jobNamePath, JOB_NAME)).thenReturn(likeJobNamePredicate);
    Mockito.when(this.cb.equal(jobNamePath, JOB_NAME)).thenReturn(equalJobNamePredicate);

    // user name
    final Path<String> userNamePath = (Path<String>) Mockito.mock(Path.class);
    final Predicate equalUserNamePredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.user)).thenReturn(userNamePath);
    Mockito.when(this.cb.equal(userNamePath, USER_NAME)).thenReturn(equalUserNamePredicate);

    // status: any status value maps to the same predicate mock.
    final Path<JobStatus> statusPath = (Path<JobStatus>) Mockito.mock(Path.class);
    final Predicate equalStatusPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.status)).thenReturn(statusPath);
    Mockito.when(this.cb.equal(Mockito.eq(statusPath), Mockito.any(JobStatus.class)))
        .thenReturn(equalStatusPredicate);

    // cluster name
    final Path<String> clusterNamePath = (Path<String>) Mockito.mock(Path.class);
    final Predicate equalClusterNamePredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.clusterName)).thenReturn(clusterNamePath);
    Mockito.when(this.cb.equal(clusterNamePath, CLUSTER_NAME)).thenReturn(equalClusterNamePredicate);

    // cluster entity
    final Path<ClusterEntity> clusterIdPath = (Path<ClusterEntity>) Mockito.mock(Path.class);
    final Predicate equalClusterIdPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.cluster)).thenReturn(clusterIdPath);
    Mockito.when(this.cb.equal(clusterIdPath, CLUSTER)).thenReturn(equalClusterIdPredicate);

    // command name
    final Path<String> commandNamePath = (Path<String>) Mockito.mock(Path.class);
    final Predicate equalCommandNamePredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.commandName)).thenReturn(commandNamePath);
    Mockito.when(this.cb.equal(commandNamePath, COMMAND_NAME)).thenReturn(equalCommandNamePredicate);

    // command entity
    final Path<CommandEntity> commandIdPath = (Path<CommandEntity>) Mockito.mock(Path.class);
    final Predicate equalCommandIdPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.command)).thenReturn(commandIdPath);
    // Bug fix: this stub was previously registered against clusterIdPath, leaving the
    // command equality interaction un-stubbed (it returned null instead of the predicate).
    Mockito.when(this.cb.equal(commandIdPath, COMMAND)).thenReturn(equalCommandIdPredicate);

    // tags: the spec builds a LIKE statement from the tag set.
    final Path<String> tagPath = (Path<String>) Mockito.mock(Path.class);
    final Predicate likeTagPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.tags)).thenReturn(tagPath);
    Mockito.when(this.cb.like(Mockito.eq(tagPath), Mockito.any(String.class))).thenReturn(likeTagPredicate);
    this.tagLikeStatement = JpaSpecificationUtils.getTagLikeString(TAGS);

    // started range: [MIN_STARTED, MAX_STARTED)
    final Path<Date> startedPath = (Path<Date>) Mockito.mock(Path.class);
    final Predicate minStartedPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.started)).thenReturn(startedPath);
    Mockito
        .when(this.cb.greaterThanOrEqualTo(Mockito.eq(startedPath), Mockito.eq(MIN_STARTED)))
        .thenReturn(minStartedPredicate);
    final Predicate maxStartedPredicate = Mockito.mock(Predicate.class);
    Mockito
        .when(this.cb.lessThan(Mockito.eq(startedPath), Mockito.eq(MAX_STARTED)))
        .thenReturn(maxStartedPredicate);

    // finished range: [MIN_FINISHED, MAX_FINISHED)
    final Path<Date> finishedPath = (Path<Date>) Mockito.mock(Path.class);
    final Predicate minFinishedPredicate = Mockito.mock(Predicate.class);
    Mockito.when(this.root.get(JobEntity_.finished)).thenReturn(finishedPath);
    Mockito
        .when(this.cb.greaterThanOrEqualTo(Mockito.eq(finishedPath), Mockito.eq(MIN_FINISHED)))
        .thenReturn(minFinishedPredicate);
    final Predicate maxFinishedPredicate = Mockito.mock(Predicate.class);
    Mockito
        .when(this.cb.lessThan(Mockito.eq(finishedPath), Mockito.eq(MAX_FINISHED)))
        .thenReturn(maxFinishedPredicate);
}
/**
 * Make sure every restriction is applied exactly once when all search parameters are supplied.
 */
@Test
public void testFindWithAll() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure string parameters containing a SQL wildcard are matched with LIKE instead of equality.
 */
@Test
public void testFindWithAllLikes() {
    final String newId = ID + "%";
    final String newName = JOB_NAME + "%";
    final String newUserName = USER_NAME + "%";
    final String newClusterName = CLUSTER_NAME + "%";
    final String newCommandName = COMMAND_NAME + "%";
    JpaJobSpecs.getFindPredicate(
        root, cb, newId, newName, newUserName, STATUSES, TAGS, newClusterName, CLUSTER,
        newCommandName, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).like(root.get(JobEntity_.id), newId);
    Mockito.verify(cb).like(root.get(JobEntity_.name), newName);
    Mockito.verify(cb).like(root.get(JobEntity_.user), newUserName);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).like(root.get(JobEntity_.clusterName), newClusterName);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).like(root.get(JobEntity_.commandName), newCommandName);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no id restriction (LIKE or equality) is added when the id parameter is null.
 */
@Test
public void testFindWithOutId() {
    JpaJobSpecs.getFindPredicate(
        root, cb, null, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.id), ID);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.id), ID);
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no name restriction (LIKE or equality) is added when the job name is null.
 */
@Test
public void testFindWithOutJobName() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, null, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no user restriction (equality or LIKE) is added when the user name is null.
 */
@Test
public void testFindWithOutUserName() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, null, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.user), USER_NAME);
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no status restrictions are added when the status set is null.
 */
@Test
public void testFindWithOutStatus() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, null, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no status restrictions are added when the status set is empty.
 */
@Test
public void testFindWithEmptyStatus() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, Sets.newHashSet(), TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no cluster name restriction (equality or LIKE) is added when it is null.
 */
@Test
public void testFindWithOutClusterName() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, null, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no cluster entity restriction is added when the cluster is null.
 */
@Test
public void testFindWithOutClusterId() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, null,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no command name restriction (equality or LIKE) is added when it is null.
 */
@Test
public void testFindWithOutCommandName() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        null, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no command entity restriction is added when the command is null.
 */
@Test
public void testFindWithOutCommandId() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, null, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb, Mockito.never()).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no tag LIKE restriction is added when the tag set is null.
 */
@Test
public void testFindWithOutTags() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, null, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb, Mockito.never()).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no lower started-time bound is added when minStarted is null.
 */
@Test
public void testFindWithOutMinStarted() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, null, MAX_STARTED, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb, Mockito.never()).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no upper started-time bound is added when maxStarted is null.
 */
@Test
public void testFindWithOutMaxStarted() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, null, MIN_FINISHED, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb, Mockito.never()).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no lower finished-time bound is added when minFinished is null.
 */
@Test
public void testFindWithOutMinFinished() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, null, MAX_FINISHED
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb, Mockito.never()).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
 * Make sure no upper finished-time bound is added when maxFinished is null.
 */
@Test
public void testFindWithOutMaxFinished() {
    JpaJobSpecs.getFindPredicate(
        root, cb, ID, JOB_NAME, USER_NAME, STATUSES, TAGS, CLUSTER_NAME, CLUSTER,
        COMMAND_NAME, COMMAND, MIN_STARTED, MAX_STARTED, MIN_FINISHED, null
    );
    // verify(mock) is shorthand for verify(mock, times(1))
    Mockito.verify(cb).equal(root.get(JobEntity_.id), ID);
    Mockito.verify(cb).equal(root.get(JobEntity_.name), JOB_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.user), USER_NAME);
    for (final JobStatus jobStatus : STATUSES) {
        Mockito.verify(cb).equal(root.get(JobEntity_.status), jobStatus);
    }
    Mockito.verify(cb).equal(root.get(JobEntity_.clusterName), CLUSTER_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.cluster), CLUSTER);
    Mockito.verify(cb).equal(root.get(JobEntity_.commandName), COMMAND_NAME);
    Mockito.verify(cb).equal(root.get(JobEntity_.command), COMMAND);
    Mockito.verify(cb).like(root.get(JobEntity_.tags), tagLikeStatement);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.started), MIN_STARTED);
    Mockito.verify(cb).lessThan(root.get(JobEntity_.started), MAX_STARTED);
    Mockito.verify(cb).greaterThanOrEqualTo(root.get(JobEntity_.finished), MIN_FINISHED);
    Mockito.verify(cb, Mockito.never()).lessThan(root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
* Test the find specification.
*/
@Test
public void testFindWithEmptyTag() {
TAGS.add("");
JpaJobSpecs.getFindPredicate(
this.root,
this.cb,
ID,
JOB_NAME,
USER_NAME,
STATUSES,
TAGS,
CLUSTER_NAME,
CLUSTER,
COMMAND_NAME,
COMMAND,
MIN_STARTED,
MAX_STARTED,
MIN_FINISHED,
MAX_FINISHED
);
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.id), ID);
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.name), JOB_NAME);
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.user), USER_NAME);
for (final JobStatus status : STATUSES) {
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.status), status);
}
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.clusterName), CLUSTER_NAME);
Mockito.verify(this.cb, Mockito.times(1)).equal(this.root.get(JobEntity_.cluster), CLUSTER);
Mockito.verify(this.cb, Mockito.times(1)).like(this.root.get(JobEntity_.tags), this.tagLikeStatement);
Mockito.verify(this.cb, Mockito.times(1)).greaterThanOrEqualTo(this.root.get(JobEntity_.started), MIN_STARTED);
Mockito.verify(this.cb, Mockito.times(1)).lessThan(this.root.get(JobEntity_.started), MAX_STARTED);
Mockito
.verify(this.cb, Mockito.times(1))
.greaterThanOrEqualTo(this.root.get(JobEntity_.finished), MIN_FINISHED);
Mockito.verify(this.cb, Mockito.times(1)).lessThan(this.root.get(JobEntity_.finished), MAX_FINISHED);
}
/**
* Just for completing coverage.
*/
@Test
public void testProtectedConstructor() {
Assert.assertNotNull(new JpaJobSpecs());
}
}
|
/*
* Copyright 2016 Karl Bennett
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shiver.me.timbers.http.mock;
import org.junit.Test;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
public class NullHttpMockMethodCallTest {

    /**
     * A null method call must always produce a {@code null} response, whatever
     * argument it is invoked with.
     */
    @Test
    public void Can_invoke_a_null_http_mock_method_call() {

        // Given
        final NullHttpMockMethodCall methodCall = new NullHttpMockMethodCall();

        // When
        final HttpMockResponse actual = methodCall.invoke(new Object());

        // Then
        assertThat(actual, nullValue());
    }
}
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.directconnect.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Container for the parameters to the AllocateConnectionOnInterconnect
* operation.
* </p>
*/
/**
 * Container for the parameters to the AllocateConnectionOnInterconnect operation.
 */
public class AllocateConnectionOnInterconnectRequest extends
        AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Bandwidth of the connection.
     * <p>
     * Example: "<i>500Mbps</i>". Default: None. Values: 50M, 100M, 200M, 300M,
     * 400M, or 500M.
     */
    private String bandwidth;

    /**
     * Name of the provisioned connection.
     * <p>
     * Example: "<i>500M Connection to AWS</i>". Default: None.
     */
    private String connectionName;

    /**
     * Numeric account Id of the customer for whom the connection will be
     * provisioned.
     * <p>
     * Example: 123443215678. Default: None.
     */
    private String ownerAccount;

    /**
     * ID of the interconnect on which the connection will be provisioned.
     * <p>
     * Example: dxcon-456abc78. Default: None.
     */
    private String interconnectId;

    /**
     * The dedicated VLAN provisioned to the connection.
     * <p>
     * Example: 101. Default: None.
     */
    private Integer vlan;

    /**
     * Sets the bandwidth of the connection.
     *
     * @param bandwidth
     *        Bandwidth of the connection, e.g. "<i>500Mbps</i>". Valid values:
     *        50M, 100M, 200M, 300M, 400M, or 500M.
     */
    public void setBandwidth(String bandwidth) {
        this.bandwidth = bandwidth;
    }

    /**
     * @return Bandwidth of the connection, e.g. "<i>500Mbps</i>". Valid values:
     *         50M, 100M, 200M, 300M, 400M, or 500M.
     */
    public String getBandwidth() {
        return this.bandwidth;
    }

    /**
     * Fluent variant of {@link #setBandwidth(String)}.
     *
     * @param bandwidth
     *        Bandwidth of the connection, e.g. "<i>500Mbps</i>".
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public AllocateConnectionOnInterconnectRequest withBandwidth(
            String bandwidth) {
        setBandwidth(bandwidth);
        return this;
    }

    /**
     * Sets the name of the provisioned connection.
     *
     * @param connectionName
     *        Name of the provisioned connection, e.g.
     *        "<i>500M Connection to AWS</i>".
     */
    public void setConnectionName(String connectionName) {
        this.connectionName = connectionName;
    }

    /**
     * @return Name of the provisioned connection, e.g.
     *         "<i>500M Connection to AWS</i>".
     */
    public String getConnectionName() {
        return this.connectionName;
    }

    /**
     * Fluent variant of {@link #setConnectionName(String)}.
     *
     * @param connectionName
     *        Name of the provisioned connection.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public AllocateConnectionOnInterconnectRequest withConnectionName(
            String connectionName) {
        setConnectionName(connectionName);
        return this;
    }

    /**
     * Sets the account Id of the customer for whom the connection will be
     * provisioned.
     *
     * @param ownerAccount
     *        Numeric account Id, e.g. 123443215678.
     */
    public void setOwnerAccount(String ownerAccount) {
        this.ownerAccount = ownerAccount;
    }

    /**
     * @return Numeric account Id of the customer for whom the connection will
     *         be provisioned, e.g. 123443215678.
     */
    public String getOwnerAccount() {
        return this.ownerAccount;
    }

    /**
     * Fluent variant of {@link #setOwnerAccount(String)}.
     *
     * @param ownerAccount
     *        Numeric account Id, e.g. 123443215678.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public AllocateConnectionOnInterconnectRequest withOwnerAccount(
            String ownerAccount) {
        setOwnerAccount(ownerAccount);
        return this;
    }

    /**
     * Sets the ID of the interconnect on which the connection will be
     * provisioned.
     *
     * @param interconnectId
     *        Interconnect ID, e.g. dxcon-456abc78.
     */
    public void setInterconnectId(String interconnectId) {
        this.interconnectId = interconnectId;
    }

    /**
     * @return ID of the interconnect on which the connection will be
     *         provisioned, e.g. dxcon-456abc78.
     */
    public String getInterconnectId() {
        return this.interconnectId;
    }

    /**
     * Fluent variant of {@link #setInterconnectId(String)}.
     *
     * @param interconnectId
     *        Interconnect ID, e.g. dxcon-456abc78.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public AllocateConnectionOnInterconnectRequest withInterconnectId(
            String interconnectId) {
        setInterconnectId(interconnectId);
        return this;
    }

    /**
     * Sets the dedicated VLAN provisioned to the connection.
     *
     * @param vlan
     *        VLAN number, e.g. 101.
     */
    public void setVlan(Integer vlan) {
        this.vlan = vlan;
    }

    /**
     * @return The dedicated VLAN provisioned to the connection, e.g. 101.
     */
    public Integer getVlan() {
        return this.vlan;
    }

    /**
     * Fluent variant of {@link #setVlan(Integer)}.
     *
     * @param vlan
     *        VLAN number, e.g. 101.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public AllocateConnectionOnInterconnectRequest withVlan(Integer vlan) {
        setVlan(vlan);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included, and fields are separated by
     * commas without a dangling trailing comma.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Collect the non-null parts first so we can join them cleanly; the
        // previous implementation left a trailing comma when the last field
        // (Vlan) was null but earlier fields were set.
        List<String> parts = new ArrayList<String>();
        if (getBandwidth() != null)
            parts.add("Bandwidth: " + getBandwidth());
        if (getConnectionName() != null)
            parts.add("ConnectionName: " + getConnectionName());
        if (getOwnerAccount() != null)
            parts.add("OwnerAccount: " + getOwnerAccount());
        if (getInterconnectId() != null)
            parts.add("InterconnectId: " + getInterconnectId());
        if (getVlan() != null)
            parts.add("Vlan: " + getVlan());
        StringBuilder sb = new StringBuilder("{");
        for (int i = 0; i < parts.size(); i++) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append(parts.get(i));
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is required.
        if (!(obj instanceof AllocateConnectionOnInterconnectRequest))
            return false;
        AllocateConnectionOnInterconnectRequest other = (AllocateConnectionOnInterconnectRequest) obj;
        // Objects.equals is null-safe and replaces the previous xor/equals dance.
        return Objects.equals(getBandwidth(), other.getBandwidth())
                && Objects.equals(getConnectionName(), other.getConnectionName())
                && Objects.equals(getOwnerAccount(), other.getOwnerAccount())
                && Objects.equals(getInterconnectId(), other.getInterconnectId())
                && Objects.equals(getVlan(), other.getVlan());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the
        // previous hand-rolled implementation, so hash values are unchanged.
        return Objects.hash(getBandwidth(), getConnectionName(),
                getOwnerAccount(), getInterconnectId(), getVlan());
    }

    @Override
    public AllocateConnectionOnInterconnectRequest clone() {
        return (AllocateConnectionOnInterconnectRequest) super.clone();
    }
}
|
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.internal.matchers;
import org.junit.Test;
import org.mockitoutil.TestBase;
import java.math.BigDecimal;
public class ComparableMatchersTest extends TestBase {

    @Test
    public void testLessThan() {
        check(new LessThan<String>("b"), true, false, false, "lt");
    }

    @Test
    public void testGreaterThan() {
        check(new GreaterThan<String>("b"), false, true, false, "gt");
    }

    @Test
    public void testLessOrEqual() {
        check(new LessOrEqual<String>("b"), true, false, true, "leq");
    }

    @Test
    public void testGreaterOrEqual() {
        check(new GreaterOrEqual<String>("b"), false, true, true, "geq");
    }

    @Test
    public void testCompareEqual() {
        check(new CompareEqual<String>("b"), false, false, true, "cmpEq");

        // compareTo() can report equality where equals() does not (e.g. BigDecimal
        // ignores scale); CompareEqual must follow compareTo().
        final CompareEqual<BigDecimal> fiveMatcher = new CompareEqual<BigDecimal>(new BigDecimal("5.00"));
        assertTrue(fiveMatcher.matches(new BigDecimal("5")));
    }

    /**
     * Verifies a matcher anchored at "b" against values below, equal to and
     * above the anchor, plus its rendered name.
     */
    private void check(CompareTo<String> matcher, boolean matchesLower,
            boolean matchesHigher, boolean matchesEqual, String name) {
        assertEquals(matchesLower, matcher.matches("a"));
        assertEquals(matchesEqual, matcher.matches("b"));
        assertEquals(matchesHigher, matcher.matches("c"));
        assertEquals(name + "(b)", matcher.toString());
    }
}
|
/*
* Copyright 2011-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.appng.formtags;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Base type for objects that own a list of {@link FormElement}s on behalf of a
 * {@link Form}.
 */
abstract class FormElementOwner {

	private Form form;
	protected List<FormElement> elements;

	FormElementOwner(Form form) {
		this.form = form;
		this.elements = new ArrayList<>();
	}

	/** Adds a new {@link FormElement} to this owner; construction details are implementation-specific. */
	abstract FormElement addFormElement();

	/**
	 * @return {@code true} if at least one owned element is invalid
	 */
	public boolean hasErrors() {
		// anyMatch short-circuits exactly like the equivalent early-return loop.
		return getElements().stream().anyMatch(element -> !element.isValid());
	}

	public Form getForm() {
		return form;
	}

	public void setForm(Form form) {
		this.form = form;
	}

	/**
	 * @return a read-only view of the owned elements
	 */
	public List<FormElement> getElements() {
		return Collections.unmodifiableList(elements);
	}
}
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.skylarkbuildapi.cpp.CcCompilationOutputsApi;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/** A structured representation of the compilation outputs of a C++ rule. */
/** A structured representation of the compilation outputs of a C++ rule. */
public class CcCompilationOutputs implements CcCompilationOutputsApi {
  /** All .o files built by the target. */
  private final ImmutableList<Artifact> objectFiles;

  /** All .pic.o files built by the target. */
  private final ImmutableList<Artifact> picObjectFiles;

  /**
   * Maps all .o bitcode files coming from a ThinLTO C(++) compilation under our control to the
   * corresponding minimized bitcode files that can be used for the LTO indexing step.
   */
  private final ImmutableMap<Artifact, Artifact> ltoBitcodeFiles;

  /** All .dwo files built by the target, corresponding to .o outputs. */
  private final ImmutableList<Artifact> dwoFiles;

  /** All .pic.dwo files built by the target, corresponding to .pic.o outputs. */
  private final ImmutableList<Artifact> picDwoFiles;

  /** All artifacts that are created if "--save_temps" is true. */
  private final NestedSet<Artifact> temps;

  /** All token .h.processed files created when preprocessing or parsing headers. */
  private final ImmutableList<Artifact> headerTokenFiles;

  private final List<IncludeScannable> lipoScannables;

  private CcCompilationOutputs(
      ImmutableList<Artifact> objectFiles,
      ImmutableList<Artifact> picObjectFiles,
      ImmutableMap<Artifact, Artifact> ltoBitcodeFiles,
      ImmutableList<Artifact> dwoFiles,
      ImmutableList<Artifact> picDwoFiles,
      NestedSet<Artifact> temps,
      ImmutableList<Artifact> headerTokenFiles,
      ImmutableList<IncludeScannable> lipoScannables) {
    this.objectFiles = objectFiles;
    this.picObjectFiles = picObjectFiles;
    this.ltoBitcodeFiles = ltoBitcodeFiles;
    this.dwoFiles = dwoFiles;
    this.picDwoFiles = picDwoFiles;
    this.temps = temps;
    this.headerTokenFiles = headerTokenFiles;
    this.lipoScannables = lipoScannables;
  }

  /**
   * Returns whether this set of outputs has any object or .pic object files.
   */
  public boolean isEmpty() {
    return picObjectFiles.isEmpty() && objectFiles.isEmpty();
  }

  /**
   * Returns an unmodifiable view of the .o or .pic.o files set.
   *
   * @param usePic whether to return .pic.o files
   */
  public ImmutableList<Artifact> getObjectFiles(boolean usePic) {
    return usePic ? picObjectFiles : objectFiles;
  }

  /** Returns unmodifiable map of bitcode object files resulting from compilation. */
  public ImmutableMap<Artifact, Artifact> getLtoBitcodeFiles() {
    return ltoBitcodeFiles;
  }

  /**
   * Returns an unmodifiable view of the .dwo files set.
   */
  public ImmutableList<Artifact> getDwoFiles() {
    return dwoFiles;
  }

  /**
   * Returns an unmodifiable view of the .pic.dwo files set.
   */
  public ImmutableList<Artifact> getPicDwoFiles() {
    return picDwoFiles;
  }

  /**
   * Returns an unmodifiable view of the temp files set.
   */
  public NestedSet<Artifact> getTemps() {
    return temps;
  }

  /**
   * Returns an unmodifiable view of the .h.processed files.
   */
  public Iterable<Artifact> getHeaderTokenFiles() {
    return headerTokenFiles;
  }

  /**
   * Returns the {@link IncludeScannable} objects this C++ compile action contributes to a
   * LIPO context collector.
   */
  public List<IncludeScannable> getLipoScannables() {
    return lipoScannables;
  }

  /**
   * Returns the output files that are considered "compiled" by this C++ compile action.
   */
  NestedSet<Artifact> getFilesToCompile(
      boolean isLipoContextCollector, boolean parseHeaders, boolean usePic) {
    // A LIPO context collector contributes no compiled outputs of its own.
    if (isLipoContextCollector) {
      return NestedSetBuilder.<Artifact>emptySet(Order.STABLE_ORDER);
    }
    NestedSetBuilder<Artifact> files = NestedSetBuilder.stableOrder();
    files.addAll(getObjectFiles(usePic));
    if (parseHeaders) {
      files.addAll(getHeaderTokenFiles());
    }
    return files.build();
  }

  /** Builder for CcCompilationOutputs. */
  public static final class Builder {
    private final Set<Artifact> objectFiles = new LinkedHashSet<>();
    private final Set<Artifact> picObjectFiles = new LinkedHashSet<>();
    private final ImmutableMap.Builder<Artifact, Artifact> ltoBitcodeFiles = ImmutableMap.builder();
    private final Set<Artifact> dwoFiles = new LinkedHashSet<>();
    private final Set<Artifact> picDwoFiles = new LinkedHashSet<>();
    private final NestedSetBuilder<Artifact> temps = NestedSetBuilder.stableOrder();
    private final Set<Artifact> headerTokenFiles = new LinkedHashSet<>();
    private final List<IncludeScannable> lipoScannables = new ArrayList<>();

    /**
     * Checks that the artifact has an object-file extension. Tree artifacts are exempt because
     * they represent directories and carry no file extension.
     */
    private static void checkObjectFileType(Artifact artifact) {
      Preconditions.checkArgument(
          artifact.isTreeArtifact() || Link.OBJECT_FILETYPES.matches(artifact.getFilename()));
    }

    public CcCompilationOutputs build() {
      return new CcCompilationOutputs(
          ImmutableList.copyOf(objectFiles),
          ImmutableList.copyOf(picObjectFiles),
          ltoBitcodeFiles.build(),
          ImmutableList.copyOf(dwoFiles),
          ImmutableList.copyOf(picDwoFiles),
          temps.build(),
          ImmutableList.copyOf(headerTokenFiles),
          ImmutableList.copyOf(lipoScannables));
    }

    /** Merges all outputs of another instance into this builder. */
    public Builder merge(CcCompilationOutputs outputs) {
      this.objectFiles.addAll(outputs.objectFiles);
      this.picObjectFiles.addAll(outputs.picObjectFiles);
      this.dwoFiles.addAll(outputs.dwoFiles);
      this.picDwoFiles.addAll(outputs.picDwoFiles);
      this.temps.addTransitive(outputs.temps);
      this.headerTokenFiles.addAll(outputs.headerTokenFiles);
      this.lipoScannables.addAll(outputs.lipoScannables);
      this.ltoBitcodeFiles.putAll(outputs.ltoBitcodeFiles);
      return this;
    }

    /** Adds an object file. */
    public Builder addObjectFile(Artifact artifact) {
      checkObjectFileType(artifact);
      objectFiles.add(artifact);
      return this;
    }

    /**
     * Adds object files. The tree-artifact exemption now matches {@link #addObjectFile}; all
     * artifacts are validated before any is added.
     */
    public Builder addObjectFiles(Iterable<Artifact> artifacts) {
      for (Artifact artifact : artifacts) {
        checkObjectFileType(artifact);
      }
      Iterables.addAll(objectFiles, artifacts);
      return this;
    }

    /** Adds a pic object file. Note: unlike addObjectFile, no file-type check is performed. */
    public Builder addPicObjectFile(Artifact artifact) {
      picObjectFiles.add(artifact);
      return this;
    }

    /** Registers a full bitcode file together with its minimized LTO-indexing counterpart. */
    public Builder addLtoBitcodeFile(Artifact fullBitcode, Artifact ltoIndexingBitcode) {
      ltoBitcodeFiles.put(fullBitcode, ltoIndexingBitcode);
      return this;
    }

    /**
     * Adds pic object files. The tree-artifact exemption now matches {@link #addObjectFile}; all
     * artifacts are validated before any is added.
     */
    public Builder addPicObjectFiles(Iterable<Artifact> artifacts) {
      for (Artifact artifact : artifacts) {
        checkObjectFileType(artifact);
      }
      Iterables.addAll(picObjectFiles, artifacts);
      return this;
    }

    public Builder addDwoFile(Artifact artifact) {
      dwoFiles.add(artifact);
      return this;
    }

    public Builder addPicDwoFile(Artifact artifact) {
      picDwoFiles.add(artifact);
      return this;
    }

    /**
     * Adds temp files.
     */
    public Builder addTemps(Iterable<Artifact> artifacts) {
      temps.addAll(artifacts);
      return this;
    }

    public Builder addHeaderTokenFile(Artifact artifact) {
      headerTokenFiles.add(artifact);
      return this;
    }

    /**
     * Adds an {@link IncludeScannable} that this compilation output object contributes to a
     * LIPO context collector.
     */
    public Builder addLipoScannable(IncludeScannable scannable) {
      lipoScannables.add(scannable);
      return this;
    }
  }
}
|
package com.sam_chordas.android.stockhawk.touch_helper;
/**
 * Created by sam_chordas on 10/6/15.
 * credit to Paul Burke (ipaulpro)
 * Interface to enable swipe to delete.
 */
public interface ItemTouchHelperAdapter {

    /**
     * Called when an item has been dismissed via swipe; implementations should
     * remove the item at {@code position} from the backing data and notify the
     * adapter.
     *
     * @param position adapter position of the dismissed item
     */
    void onItemDismiss(int position);
}
|
package org.odk.getin.android.formentry;
import android.Manifest;
import androidx.test.espresso.intent.rule.IntentsTestRule;
import androidx.test.rule.GrantPermissionRule;
import androidx.test.runner.AndroidJUnit4;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.runner.RunWith;
import org.odk.getin.android.activities.FormEntryActivity;
import org.odk.getin.android.espressoutils.FormEntry;
import org.odk.getin.android.support.CopyFormRule;
import org.odk.getin.android.support.ResetStateRule;
import org.odk.getin.android.test.FormLoadingUtils;
@RunWith(AndroidJUnit4.class)
public class SelectMinimalTest {

    private static final String SELECT_MINIMAL_FORM = "select_minimal.xml";

    // The two long choice labels from the form, extracted so each literal
    // appears exactly once.
    private static final String LONG_LABEL_ONE =
            "Integer a eleifend libero, sit amet tincidunt lacus. Donec orci tellus, facilisis et ultricies vel";
    private static final String LONG_LABEL_TWO =
            "Nam varius, lectus non consectetur tincidunt, augue augue dapibus dolor, nec convallis ligula erat eget";

    @Rule
    public IntentsTestRule<FormEntryActivity> activityTestRule =
            FormLoadingUtils.getFormActivityTestRuleFor(SELECT_MINIMAL_FORM);

    @Rule
    public RuleChain copyFormChain = RuleChain
            .outerRule(GrantPermissionRule.grant(
                    Manifest.permission.READ_EXTERNAL_STORAGE,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE)
            )
            .around(new ResetStateRule())
            .around(new CopyFormRule(SELECT_MINIMAL_FORM));

    /**
     * Long choice labels must be shown in full both in the "select one" widget
     * and in the spinner-backed "select multiple" dialog.
     */
    @Test
    public void longLabelsShouldBeDisplayed() {
        FormEntry.clickOnText("Select One Answer");
        FormEntry.checkIsTextDisplayed(LONG_LABEL_ONE);
        FormEntry.checkIsTextDisplayed(LONG_LABEL_TWO);
        FormEntry.clickOnText(LONG_LABEL_ONE);
        FormEntry.checkIsTextDisplayed(LONG_LABEL_ONE);

        FormEntry.swipeToNextQuestion();
        FormEntry.showSpinnerMultipleDialog();
        FormEntry.checkIsTextDisplayed(LONG_LABEL_ONE);
        FormEntry.checkIsTextDisplayed(LONG_LABEL_TWO);
        FormEntry.clickOnText(LONG_LABEL_ONE);
        FormEntry.clickOk();
        FormEntry.checkIsTextDisplayed("Selected: " + LONG_LABEL_ONE);
    }
}
|
/**
* @FileName:
* @Package: com.asura.framework.base.util
*
* @author sence
* @created 11/13/2014 11:29 AM
*
* Copyright 2011-2015 Asura
*/
package com.asura.framework.base.util;
import java.util.Random;
/**
 * Utility for generating random numeric strings.
 *
 * @author sence
 * @since 1.0
 * @version 1.0
 */
public class RandomUtil {

    /**
     * Generates a random numeric string of the requested length.
     * <p>
     * Each character is a uniformly distributed decimal digit ('0'-'9').
     *
     * @param pwd_len number of digits to generate; a value {@code <= 0} yields
     *                an empty string (same as the original behavior)
     * @return a string of exactly {@code pwd_len} random digits
     */
    public static String genRandomNum(int pwd_len) {
        final Random random = new Random();
        final StringBuilder digits = new StringBuilder(Math.max(pwd_len, 0));
        for (int i = 0; i < pwd_len; i++) {
            // nextInt(10) is already uniform over 0..9 and never negative, so the
            // old nextInt(1000) % 10 detour and the redundant Math.abs are gone.
            digits.append((char) ('0' + random.nextInt(10)));
        }
        return digits.toString();
    }
}
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.gemstone.gemfire.GemFireIOException;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.internal.shared.NativeCalls;
public class LinuxProcFsStatistics {
  // Columns of the aggregate "cpu " line in /proc/stat, in the order they
  // appear in the file.
  private enum CPU {
    USER, NICE, SYSTEM, IDLE, IOWAIT, IRQ, SOFTIRQ,
    /** stands for aggregation of all columns not present in the enum list*/
    OTHER
  }
  // Fallback memory page size (bytes) used when pageSizeProperty is not set.
  private static final int DEFAULT_PAGESIZE = 4 * 1024;
  private static final int OneMeg = 1024 * 1024;
  // System property that overrides the assumed memory page size.
  private static final String pageSizeProperty = "gemfire.statistics.linux.pageSize";
  private static CpuStat cpuStatSingleton;
  // Memory page size in bytes; see pageSizeProperty. Initialized in init().
  private static int pageSize;
  // Number of available processors, captured in init().
  private static int sys_cpus;
  // Whether /proc/vmstat exists on this kernel (checked in init()).
  private static boolean hasProcVmStat;
  // Whether /proc/diskstats exists on this kernel (checked in init()).
  private static boolean hasDiskStats;
  static SpaceTokenizer st;
  /** The number of non-process files in /proc */
  private static int nonPidFilesInProc;
  /** /proc/stat tokens */
  private static final String CPU_TOKEN = "cpu ";
  private static final String PAGE = "page ";
  private static final String SWAP = "swap ";
  private static final String CTXT = "ctxt ";
  private static final String PROCESSES = "processes ";
  /** /proc/vmstat tokens */
  private static final String PGPGIN = "pgpgin ";
  private static final String PGPGOUT = "pgpgout ";
  private static final String PSWPIN = "pswpin ";
  private static final String PSWPOUT = "pswpout ";
  // Handle to this process's /proc/<pid>/stat, opened once in init() and kept
  // open for the lifetime of the statistics sampler.
  private static File procFile;
  private static FileChannel fchannel;
  private static Reader procFileReader ;
  // Do not create instances of this class: all state and behavior is static.
  private LinuxProcFsStatistics() {
  }
  /**
   * Initializes the sampler: caches system facts (cpu count, page size, which
   * /proc files exist on this kernel) and opens this process' /proc/&lt;pid&gt;/stat.
   *
   * @return always 0
   * @throws GemFireIOException if /proc/&lt;pid&gt;/stat cannot be opened
   */
  static int init() {
    nonPidFilesInProc = getNumberOfNonProcessProcFiles();
    sys_cpus = Runtime.getRuntime().availableProcessors();
    pageSize = Integer.getInteger(pageSizeProperty, DEFAULT_PAGESIZE);
    cpuStatSingleton = new CpuStat();
    hasProcVmStat = new File("/proc/vmstat").exists();
    hasDiskStats = new File("/proc/diskstats").exists();
    st = new SpaceTokenizer();
    procFile = new File( "/proc/" + NativeCalls.getInstance().getProcessId() + "/stat" );
    try {
      fchannel = new FileInputStream(procFile).getChannel();
    } catch (FileNotFoundException e) {
      throw new GemFireIOException(e.getMessage(), e);
    }
    // NOTE(review): this shared reader is never repositioned and is closed by
    // any code that closes a wrapper around it — confirm it remains usable
    // across repeated refreshProcess() calls.
    procFileReader = Channels.newReader(fchannel, Charset.defaultCharset().newDecoder(), -1);
    return 0;
  }
  /**
   * Releases the resources opened by {@code init()}. Close failures are
   * deliberately ignored since the sampler is being shut down anyway.
   */
  static void close() {
    cpuStatSingleton = null;
    st = null;
    try {
      procFileReader.close();
    } catch (IOException e) {
      // ignore
    }
    try {
      fchannel.close();
    } catch (IOException e) {
      // ignore
    }
  }
  /** No-op: this sampler needs no per-refresh preparation. */
  static void readyRefresh() {
  }
/* get the statistics for the specified process.
* ( pid_rssSize, pid_imageSize )
* vsize is assumed to be in units of kbytes
* System property gemfire.statistics.pagesSize can be used to configure
* pageSize. This is the mem_unit member of the struct returned by sysinfo()
*
*/
static void refreshProcess(int pid, int[] ints, long[] longs, double[] doubles) {
//Just incase a pid is not available
if(pid == 0) return;
BufferedReader br = null;
try {
br = new BufferedReader(procFileReader, 2048);
String line = br.readLine();
if ( line == null ) {
return;
}
st.setString(line);
st.skipTokens(22);
ints[LinuxProcessStats.imageSizeINT] = (int) (st.nextTokenAsLong() / OneMeg);
ints[LinuxProcessStats.rssSizeINT] = (int) ((st.nextTokenAsLong()*pageSize)/OneMeg);
} catch ( NoSuchElementException nsee ) {
// It might just be a case of the process going away while we
// where trying to get its stats.
// So for now lets just ignore the failure and leave the stats
// as they are.
} catch ( IOException ioe ) {
// It might just be a case of the process going away while we
// where trying to get its stats.
// So for now lets just ignore the failure and leave the stats
// as they are.
} finally {
st.releaseResources();
if(br != null) try { br.close(); } catch(IOException ignore) {}
}
}
  /**
   * Refreshes all machine-wide statistics: process count, the CPU usage split
   * parsed from the "cpu " line of /proc/stat, paging/swap/context-switch
   * counters, then load averages, memory, disk and network counters via the
   * helper methods below. The arrays are the stat storage updated in place.
   */
  static void refreshSystem(int[] ints, long[] longs, double[] doubles) {
    ints[LinuxSystemStats.processesINT] = getProcessCount();
    ints[LinuxSystemStats.cpusINT] = sys_cpus;
    InputStreamReader isr = null;
    BufferedReader br = null;
    try {
      isr = new InputStreamReader( new FileInputStream( "/proc/stat" ));
      br = new BufferedReader(isr);
      String line = null;
      while ( ( line = br.readLine() ) != null ) {
        try {
          if (line.startsWith(CPU_TOKEN)) {
            // Aggregate "cpu " line: convert the raw jiffy counters into
            // percentages of the interval since the previous refresh.
            int[] cpuData = cpuStatSingleton.calculateStats(line);
            ints[LinuxSystemStats.cpuIdleINT] = cpuData[CPU.IDLE.ordinal()];
            ints[LinuxSystemStats.cpuNiceINT] = cpuData[CPU.NICE.ordinal()];
            ints[LinuxSystemStats.cpuSystemINT] = cpuData[CPU.SYSTEM.ordinal()];
            ints[LinuxSystemStats.cpuUserINT] = cpuData[CPU.USER.ordinal()];
            ints[LinuxSystemStats.iowaitINT] = cpuData[CPU.IOWAIT.ordinal()];
            ints[LinuxSystemStats.irqINT] = cpuData[CPU.IRQ.ordinal()];
            ints[LinuxSystemStats.softirqINT] = cpuData[CPU.SOFTIRQ.ordinal()];
            ints[LinuxSystemStats.cpuActiveINT] = 100 - cpuData[CPU.IDLE.ordinal()];
            // "non user" = everything except USER/NICE/IDLE.
            ints[LinuxSystemStats.cpuNonUserINT] = cpuData[CPU.OTHER.ordinal()]
                + cpuData[CPU.SYSTEM.ordinal()]
                + cpuData[CPU.IOWAIT.ordinal()]
                + cpuData[CPU.IRQ.ordinal()]
                + cpuData[CPU.SOFTIRQ.ordinal()];
          } else if ( !hasProcVmStat && line.startsWith(PAGE)) {
            // 2.4 kernels report paging in /proc/stat; 2.6+ use /proc/vmstat.
            int secondIndex = line.indexOf(" ", PAGE.length());
            longs[LinuxSystemStats.pagesPagedInLONG] = SpaceTokenizer.parseAsLong(line.substring(PAGE.length(), secondIndex));
            longs[LinuxSystemStats.pagesPagedOutLONG] = SpaceTokenizer.parseAsLong(line.substring(secondIndex+1));
          } else if ( !hasProcVmStat && line.startsWith(SWAP)) {
            int secondIndex = line.indexOf(" ", SWAP.length());
            longs[LinuxSystemStats.pagesSwappedInLONG] = SpaceTokenizer.parseAsLong(line.substring(SWAP.length(), secondIndex));
            longs[LinuxSystemStats.pagesSwappedOutLONG] = SpaceTokenizer.parseAsLong(line.substring(secondIndex+1));
          } else if ( line.startsWith(CTXT)) {
            longs[LinuxSystemStats.contextSwitchesLONG] = SpaceTokenizer.parseAsLong(line.substring(CTXT.length()));
          } else if ( line.startsWith(PROCESSES)) {
            // NOTE(review): parseAsInt into a long slot — works, but
            // parseAsLong would match the destination type; confirm intent.
            longs[LinuxSystemStats.processCreatesLONG] = SpaceTokenizer.parseAsInt(line.substring(PROCESSES.length()));
          }
        } catch ( NoSuchElementException nsee ) {
          //this is the result of reading a partially formed file
          //just do not update what ever entry had the problem
        }
      }
    } catch ( IOException ioe ) {
    } finally {
      if(br != null) try { br.close(); } catch(IOException ignore) {}
    }
    getLoadAvg(doubles);
    getMemInfo(ints);
    getDiskStats(longs);
    getNetStats(longs);
    if(hasProcVmStat) {
      getVmStats(longs);
    }
    st.releaseResources();
  }
// Example of /proc/loadavg
// 0.00 0.00 0.07 1/218 7907
  /**
   * Parses the 1, 5 and 15 minute load averages from /proc/loadavg.
   * @param doubles stat storage updated in place
   */
  private static void getLoadAvg(double[] doubles) {
    InputStreamReader isr = null;
    BufferedReader br = null;
    try {
      isr = new InputStreamReader( new FileInputStream( "/proc/loadavg" ));
      br = new BufferedReader(isr, 512);
      String line = br.readLine();
      if ( line == null ) {
        return;
      }
      st.setString(line);
      doubles[LinuxSystemStats.loadAverage1DOUBLE] = st.nextTokenAsDouble();
      doubles[LinuxSystemStats.loadAverage5DOUBLE] = st.nextTokenAsDouble();
      doubles[LinuxSystemStats.loadAverage15DOUBLE] = st.nextTokenAsDouble();
    } catch ( NoSuchElementException nsee ) {
      // partially formed line; keep the previous values
    } catch (IOException ioe) {
      // transient read failure; keep the previous values
    } finally {
      st.releaseResources();
      if(br != null) try { br.close(); } catch(IOException ignore) {}
    }
  }
/**
* Returns the available system memory (free + cached).
* @param logger the logger
* @return the available memory in bytes
*/
public static long getAvailableMemory(LogWriter logger) {
try {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("/proc/meminfo")));
try {
long free = 0;
Pattern p = Pattern.compile("(.*)?:\\s+(\\d+)( kB)?");
String line;
while ((line = br.readLine()) != null) {
Matcher m = p.matcher(line);
if (m.matches() && ("MemFree".equals(m.group(1)) || "Cached".equals(m.group(1)))) {
free += Long.parseLong(m.group(2));
}
}
// convert to bytes
return 1024 * free;
} finally {
br.close();
}
} catch (IOException e) {
logger.warning("Error determining free memory", e);
return Long.MAX_VALUE;
}
}
// Example of /proc/meminfo
// total: used: free: shared: buffers: cached:
//Mem: 4118380544 3816050688 302329856 0 109404160 3060326400
//Swap: 4194881536 127942656 4066938880
private static void getMemInfo(int[] ints) {
InputStreamReader isr = null;
BufferedReader br = null;
try {
isr = new InputStreamReader( new FileInputStream( "/proc/meminfo" ));
br = new BufferedReader(isr);
//Assume all values read in are in kB, convert to MB
String line = null;
while ( (line = br.readLine()) != null) {
try {
if ( line.startsWith("MemTotal: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.physicalMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("MemFree: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.freeMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("SharedMem: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.sharedMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("Buffers: ")) {
st.setString(line);
st.nextToken(); //Burn initial token
ints[LinuxSystemStats.bufferMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("SwapTotal: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.allocatedSwapINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("SwapFree: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.unallocatedSwapINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("Cached: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.cachedMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("Dirty: ")) {
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.dirtyMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
} else if ( line.startsWith("Inact_dirty: ")) { // 2.4 kernels
st.setString(line);
st.skipToken(); //Burn initial token
ints[LinuxSystemStats.dirtyMemoryINT] = (int) (st.nextTokenAsLong() / 1024);
}
} catch(NoSuchElementException nsee) {
//ignore and let that stat not to be updated this time
}
}
} catch ( IOException ioe ) {
} finally {
st.releaseResources();
if(br != null) try { br.close(); } catch(IOException ignore) {}
}
}
/*
Inter-| Receive | Transmit
face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
lo:1908275823 326949246 0 0 0 0 0 0 1908275823 326949246 0 0 0 0 0 0
*/
  /**
   * Parses /proc/net/dev and aggregates network counters across interfaces.
   * Loopback packet/byte counts are tracked separately from the other
   * interfaces so local traffic does not inflate the NIC stats (bug 43860).
   * @param longs stat storage updated in place
   */
  private static void getNetStats(long[] longs) {
    InputStreamReader isr = null;
    BufferedReader br = null;
    try {
      isr = new InputStreamReader( new FileInputStream( "/proc/net/dev" ));
      br = new BufferedReader(isr);
      br.readLine(); // Discard header info
      br.readLine(); // Discard header info
      long lo_recv_packets = 0, lo_recv_bytes = 0;
      long other_recv_packets = 0, other_recv_bytes = 0;
      long other_recv_errs = 0, other_recv_drop = 0;
      long other_xmit_packets = 0, other_xmit_bytes = 0;
      long other_xmit_errs = 0, other_xmit_drop = 0, other_xmit_colls = 0;
      String line = null;
      while ( (line = br.readLine()) != null) {
        // Each line is "<iface>: <16 whitespace-separated counters>".
        int index = line.indexOf(":");
        boolean isloopback = (line.indexOf("lo:") != -1);
        st.setString(line.substring(index+1));
        long recv_bytes = st.nextTokenAsLong();
        long recv_packets = st.nextTokenAsLong();
        long recv_errs = st.nextTokenAsLong();
        long recv_drop = st.nextTokenAsLong();
        st.skipTokens(4); //fifo, frame, compressed, multicast
        long xmit_bytes = st.nextTokenAsLong();
        long xmit_packets = st.nextTokenAsLong();
        long xmit_errs = st.nextTokenAsLong();
        long xmit_drop = st.nextTokenAsLong();
        st.skipToken(); //fifo
        long xmit_colls = st.nextTokenAsLong();
        if (isloopback) {
          lo_recv_packets = recv_packets;
          lo_recv_bytes = recv_bytes;
        }
        else {
          other_recv_packets += recv_packets;
          other_recv_bytes += recv_bytes;
        }
        // NOTE(review): error/drop/collision counters accumulate the loopback
        // interface too, unlike packets/bytes above — confirm intended.
        other_recv_errs += recv_errs;
        other_recv_drop += recv_drop;
        if (isloopback) {
          /* loopback_xmit_packets = xmit_packets; */
        } else {
          other_xmit_packets += xmit_packets;
          other_xmit_bytes += xmit_bytes;
        }
        other_xmit_errs += xmit_errs;
        other_xmit_drop += xmit_drop;
        other_xmit_colls += xmit_colls;
      }
      // fix for bug 43860
      longs[LinuxSystemStats.loopbackPacketsLONG] = lo_recv_packets;
      longs[LinuxSystemStats.loopbackBytesLONG] = lo_recv_bytes;
      longs[LinuxSystemStats.recvPacketsLONG] = other_recv_packets;
      longs[LinuxSystemStats.recvBytesLONG] = other_recv_bytes;
      longs[LinuxSystemStats.recvErrorsLONG] = other_recv_errs;
      longs[LinuxSystemStats.recvDropsLONG] = other_recv_drop;
      longs[LinuxSystemStats.xmitPacketsLONG] = other_xmit_packets;
      longs[LinuxSystemStats.xmitBytesLONG] = other_xmit_bytes;
      longs[LinuxSystemStats.xmitErrorsLONG] = other_xmit_errs;
      longs[LinuxSystemStats.xmitDropsLONG] = other_xmit_drop;
      longs[LinuxSystemStats.xmitCollisionsLONG] = other_xmit_colls;
    } catch (NoSuchElementException nsee) {
    } catch (IOException ioe) {
    } finally {
      st.releaseResources();
      if(br != null) try { br.close(); } catch(IOException ignore) {}
    }
  }
// example of /proc/diskstats
// 1 0 ram0 0 0 0 0 0 0 0 0 0 0 0
// 1 1 ram1 0 0 0 0 0 0 0 0 0 0 0
// 1 2 ram2 0 0 0 0 0 0 0 0 0 0 0
// 1 3 ram3 0 0 0 0 0 0 0 0 0 0 0
// 1 4 ram4 0 0 0 0 0 0 0 0 0 0 0
// 1 5 ram5 0 0 0 0 0 0 0 0 0 0 0
// 1 6 ram6 0 0 0 0 0 0 0 0 0 0 0
// 1 7 ram7 0 0 0 0 0 0 0 0 0 0 0
// 1 8 ram8 0 0 0 0 0 0 0 0 0 0 0
// 1 9 ram9 0 0 0 0 0 0 0 0 0 0 0
// 1 10 ram10 0 0 0 0 0 0 0 0 0 0 0
// 1 11 ram11 0 0 0 0 0 0 0 0 0 0 0
// 1 12 ram12 0 0 0 0 0 0 0 0 0 0 0
// 1 13 ram13 0 0 0 0 0 0 0 0 0 0 0
// 1 14 ram14 0 0 0 0 0 0 0 0 0 0 0
// 1 15 ram15 0 0 0 0 0 0 0 0 0 0 0
// 8 0 sda 1628761 56603 37715982 5690640 6073889 34091137 330349716 279787924 0 25235208 285650572
// 8 1 sda1 151 638 45 360
// 8 2 sda2 674840 11202608 8591346 68716852
// 8 3 sda3 1010409 26512312 31733575 253868616
// 8 16 sdb 12550386 47814 213085738 60429448 5529812 210792345 1731459040 1962038752 0 33797176 2024138028
// 8 17 sdb1 12601113 213085114 216407197 1731257800
// 3 0 hda 0 0 0 0 0 0 0 0 0 0 0
  /**
   * Aggregates block-device I/O counters across all whole devices (partitions
   * are skipped). Reads /proc/diskstats on 2.6+ kernels, falling back to
   * /proc/partitions on 2.4 kernels, whose format carries one extra leading
   * numeric column and two header lines.
   * @param longs stat storage updated in place
   */
  private static void getDiskStats(long[] longs) {
    InputStreamReader isr = null;
    BufferedReader br = null;
    String line = null;
    try {
      if (hasDiskStats) {
        // 2.6 kernel
        isr = new InputStreamReader( new FileInputStream( "/proc/diskstats" ));
      } else {
        // 2.4 kernel
        isr = new InputStreamReader( new FileInputStream( "/proc/partitions" ));
      }
      br = new BufferedReader(isr);
      long readsCompleted = 0, readsMerged = 0;
      long sectorsRead = 0, timeReading = 0;
      long writesCompleted = 0, writesMerged = 0;
      long sectorsWritten = 0, timeWriting = 0;
      long iosInProgress = 0;
      long timeIosInProgress = 0;
      long ioTime = 0;
      if (!hasDiskStats) {
        br.readLine(); // Discard header info
        br.readLine(); // Discard header info
      }
      while ( (line = br.readLine()) != null) {
        st.setString(line);
        {
          // " 8 1 sdb" on 2.6
          // " 8 1 452145145 sdb" on 2.4
          String tok = st.nextToken();
          if (tok.length() == 0 || Character.isWhitespace(tok.charAt(0))) {
            // skip over first token since it is whitespace
            tok = st.nextToken();
          }
          // skip first token it is some number (major device number)
          tok = st.nextToken();
          // skip second token it is some number (minor device number)
          tok = st.nextToken();
          if (!hasDiskStats) {
            // skip third token it is some number (2.4 block count)
            tok = st.nextToken();
          }
          // Now tok should be the device name.
          if (Character.isDigit(tok.charAt(tok.length()-1))) {
            // If the last char is a digit
            // skip this line since it is a partition of a device; not a device.
            continue;
          }
        }
        long tmp_readsCompleted = st.nextTokenAsLong();
        long tmp_readsMerged = st.nextTokenAsLong();
        long tmp_sectorsRead = st.nextTokenAsLong();
        long tmp_timeReading = st.nextTokenAsLong();
        if (st.hasMoreTokens()) {
          // If we are on 2.6 then we might only have 4 longs; if so ignore this line
          // Otherwise we should have 11 long tokens.
          long tmp_writesCompleted = st.nextTokenAsLong();
          long tmp_writesMerged = st.nextTokenAsLong();
          long tmp_sectorsWritten = st.nextTokenAsLong();
          long tmp_timeWriting = st.nextTokenAsLong();
          long tmp_iosInProgress = st.nextTokenAsLong();
          long tmp_timeIosInProgress = st.nextTokenAsLong();
          long tmp_ioTime = st.nextTokenAsLong();
          // Only fold the counters in once the full row parsed successfully,
          // so a short row cannot partially pollute the totals.
          readsCompleted += tmp_readsCompleted;
          readsMerged += tmp_readsMerged;
          sectorsRead += tmp_sectorsRead;
          timeReading += tmp_timeReading;
          writesCompleted += tmp_writesCompleted;
          writesMerged += tmp_writesMerged;
          sectorsWritten += tmp_sectorsWritten;
          timeWriting += tmp_timeWriting;
          iosInProgress += tmp_iosInProgress;
          timeIosInProgress += tmp_timeIosInProgress;
          ioTime += tmp_ioTime;
        }
      } // while
      // Kernel reports sectors; a sector is 512 bytes.
      final int SECTOR_SIZE = 512;
      longs[LinuxSystemStats.readsCompletedLONG] = readsCompleted;
      longs[LinuxSystemStats.readsMergedLONG] = readsMerged;
      longs[LinuxSystemStats.bytesReadLONG] = sectorsRead * SECTOR_SIZE;
      longs[LinuxSystemStats.timeReadingLONG] = timeReading;
      longs[LinuxSystemStats.writesCompletedLONG] = writesCompleted;
      longs[LinuxSystemStats.writesMergedLONG] = writesMerged;
      longs[LinuxSystemStats.bytesWrittenLONG] = sectorsWritten * SECTOR_SIZE;
      longs[LinuxSystemStats.timeWritingLONG] = timeWriting;
      longs[LinuxSystemStats.iosInProgressLONG] = iosInProgress;
      longs[LinuxSystemStats.timeIosInProgressLONG] = timeIosInProgress;
      longs[LinuxSystemStats.ioTimeLONG] = ioTime;
    } catch (NoSuchElementException nsee) {
      // com.gemstone.gemfire.distributed.internal.InternalDistributedSystem.getAnyInstance().getLoggerI18n().fine("unexpected NoSuchElementException line=" + line, nsee);
    } catch (IOException ioe) {
    } finally {
      st.releaseResources();
      if(br != null) try { br.close(); } catch(IOException ignore) {}
    }
  }
//Example of /proc/vmstat
//...
//pgpgin 294333738
//pgpgout 1057420300
//pswpin 19422
//pswpout 14495
private static void getVmStats(long[] longs) {
assert hasProcVmStat != false : "getVmStats called when hasVmStat was false";
InputStreamReader isr = null;
BufferedReader br = null;
try {
isr = new InputStreamReader( new FileInputStream( "/proc/vmstat" ));
br = new BufferedReader(isr);
String line = null;
while((line = br.readLine()) != null) {
if(line.startsWith(PGPGIN)) {
longs[LinuxSystemStats.pagesPagedInLONG]
= SpaceTokenizer.parseAsLong(line.substring(PGPGIN.length()));
} else if(line.startsWith(PGPGOUT)) {
longs[LinuxSystemStats.pagesPagedOutLONG]
= SpaceTokenizer.parseAsLong(line.substring(PGPGOUT.length()));
} else if(line.startsWith(PSWPIN)) {
longs[LinuxSystemStats.pagesSwappedInLONG]
= SpaceTokenizer.parseAsLong(line.substring(PSWPIN.length()));
} else if(line.startsWith(PSWPOUT)) {
longs[LinuxSystemStats.pagesSwappedOutLONG]
= SpaceTokenizer.parseAsLong(line.substring(PSWPOUT.length()));
}
}
} catch (NoSuchElementException nsee) {
} catch (IOException ioe) {
} finally {
if(br != null) try { br.close(); } catch(IOException ignore) {}
}
}
/**
* Count the number of files in /proc that do not represent processes.
* This value is cached to make counting the number of running process a
* cheap operation. The assumption is that the contents of /proc will not
* change on a running system.
* @return the files in /proc that do NOT match /proc/[0-9]*
*/
private static int getNumberOfNonProcessProcFiles() {
File proc = new File("/proc");
String[] procFiles = proc.list();
int count = 0;
if(procFiles != null) {
for(String filename : procFiles) {
char c = filename.charAt(0);
if(! Character.isDigit(c)) {
if (c == '.') {
// see if the next char is a digit
if (filename.length() > 1) {
char c2 = filename.charAt(1);
if (Character.isDigit(c2)) {
// for bug 42091 do not count files that begin with a '.' followed by digits
continue;
}
}
}
count++;
}
}
}
return count;
}
/**
* @return the number of running processes on the system
*/
private static int getProcessCount() {
File proc = new File("/proc");
String[] procFiles = proc.list();
if(procFiles == null) {
//unknown error, continue without this stat
return 0;
}
return procFiles.length - nonPidFilesInProc;
}
//The array indices must be ordered as they appear in /proc/stat
// (user) (nice) (system) (idle) (iowait) (irq) (softirq)
// cpu 42813766 10844 8889075 1450764512 49963779 808244 3084872
//
private static class CpuStat {
private static boolean lastCpuStatsInvalid;
private static List<Long> lastCpuStats;
public CpuStat() {
lastCpuStatsInvalid = true;
}
public int[] calculateStats( String newStatLine ) {
st.setString(newStatLine);
st.skipToken(); //cpu name
final int MAX_CPU_STATS = CPU.values().length;
/* newer kernels now have 8 columns for cpu in
* /proc/stat (up from 7). This number may increase
* even further, hence we now use List in place of long[].
* We add up entries from all columns after 7 into CPU.OTHER
*/
List<Long> newStats = new ArrayList<Long>(8);
List<Long> diffs = new ArrayList<Long>(8);
long total_change = 0;
int actualCpuStats = 0;
long unaccountedCpuUtilization = 0;
while (st.hasMoreTokens()) {
newStats.add(st.nextTokenAsLong());
actualCpuStats++;
}
if ( lastCpuStatsInvalid ) {
lastCpuStats = newStats;
lastCpuStatsInvalid = false;
for (int i=0; i<MAX_CPU_STATS; i++) {
diffs.add(0L);
}
diffs.set(CPU.IDLE.ordinal(), 100L);
} else {
for (int i=0; i<actualCpuStats; i++) {
diffs.add(newStats.get(i) - lastCpuStats.get(i));
total_change += diffs.get(i);
lastCpuStats.set(i, newStats.get(i));
}
if(total_change == 0) {
//avoid divide by zero
total_change = 1;
}
for (int i = 0; i < MAX_CPU_STATS; i++) {
if (i < actualCpuStats) {
diffs.set(i, (diffs.get(i) * 100) / total_change);
}
}
for (int i=MAX_CPU_STATS; i<actualCpuStats; i++) {
unaccountedCpuUtilization += (diffs.get(i) * 100) / total_change;
}
}
int[] ret = new int[MAX_CPU_STATS];
for (int i = 0; i < MAX_CPU_STATS; i++) {
if (i < actualCpuStats) {
ret[i] = diffs.get(i).intValue();
}
}
ret[CPU.OTHER.ordinal()] += (int)unaccountedCpuUtilization;
return ret;
}
}
  /**
   * Minimal whitespace tokenizer, reused across samples to avoid the per-line
   * allocation cost of java.util.StringTokenizer. Not thread safe; the class
   * shares the single static instance {@code st}.
   *
   * Invariants while a line is installed:
   *   beginIdx - start index of the current token,
   *   endIdx   - exclusive end of the current token, or -1 when exhausted,
   *   nextIdx  - start index of the following token, or -1 when none remains.
   */
  private static class SpaceTokenizer {
    private String str;      // the line currently being tokenized
    private char[] rawChars; // str copied out once for cheap char scanning
    private int beginIdx;
    private int endIdx;
    private int nextIdx;
    protected SpaceTokenizer() {
      endIdx = -1;
      nextIdx = -1;
    }
    /** Drops references to the current line so it can be garbage collected. */
    protected void releaseResources() {
      str = null;
      rawChars = null;
      endIdx = -1;
      nextIdx = -1;
    }
    /**
     * Advances past the current token: scans forward from the old nextIdx for
     * the next run of delimiters (which ends the token) and for the first
     * non-delimiter after it (which starts the following token).
     */
    private void nextIdx() {
      int origin = nextIdx;
      if(endIdx == rawChars.length || beginIdx == -1) {
        // The final token has already been consumed.
        endIdx = -1;
        nextIdx = -1;
        return;
      }
      endIdx = -1;
      nextIdx = -1;
      for(int i = origin+1; i < rawChars.length; i++) {
        char c = rawChars[i];
        //Add all delimiters here
        if(c == ' ' || c == '\t' || c == '\n' || c == '\r') {
          if(endIdx == -1) {
            endIdx = i;
          }
        } else {
          //this handles multiple consecutive delimiters
          if(endIdx != -1) {
            nextIdx = i;
            return;
          }
        }
      }
      if( endIdx == -1 ) {
        //indicates we were still reading white space at the end of the string
        endIdx = rawChars.length;
      }
    }
    /** @return true when another token can be read from the current line */
    protected boolean hasMoreTokens() {
      return endIdx != -1;
    }
    /** Installs a new line to tokenize and positions on its first token. */
    protected void setString(String data) {
      str = data;
      rawChars = new char[str.length()];
      str.getChars(0, str.length(), rawChars, 0);
      beginIdx = 0;
      endIdx = -1;
      nextIdx = -1;
      nextIdx();
    }
    /**
     * Consumes the current token without materializing it.
     * @return true if a token was skipped, false when already exhausted
     */
    protected boolean skipToken() {
      if(hasMoreTokens()) {
        beginIdx = nextIdx;
        nextIdx();
        return true;
      }
      return false;
    }
    /**
     * @return the current token, advancing to the next one
     * @throws NoSuchElementException when exhausted
     */
    protected String nextToken() {
      if(hasMoreTokens()) {
        final String ret = str.substring(beginIdx, endIdx);
        beginIdx = nextIdx;
        nextIdx();
        return ret;
      }
      throw new NoSuchElementException();
    }
    /**
     * @return the current token without advancing
     * @throws NoSuchElementException when exhausted
     */
    protected String peekToken() {
      if(hasMoreTokens()) {
        return str.substring(beginIdx, endIdx);
      }
      throw new NoSuchElementException();
    }
    /** Skips up to numberToSkip tokens, stopping early if the line runs out. */
    protected void skipTokens(int numberToSkip) {
      int remaining = numberToSkip + 1;
      while (--remaining > 0 && skipToken());
    }
    /** Parses number as a long, returning 0 on malformed input. */
    protected static long parseAsLong(String number) {
      long l = 0L;
      try {
        l = Long.parseLong(number);
      } catch(NumberFormatException nfe) {}
      return l;
    }
    /** Parses number as an int, returning 0 on malformed input. */
    protected static int parseAsInt(String number) {
      int i = 0;
      try {
        i = Integer.parseInt(number);
      } catch(NumberFormatException nfe) {}
      return i;
    }
    /** Consumes the current token as an int; 0 when it is not a valid int. */
    protected int nextTokenAsInt() {
      int i = 0;
      try {
        i = Integer.parseInt(nextToken());
      } catch(NumberFormatException nfe) {}
      return i;
    }
    /** Consumes the current token as a long; 0 when it is not a valid long. */
    protected long nextTokenAsLong() {
      long l = 0L;
      try {
        l = Long.parseLong(nextToken());
      } catch(NumberFormatException nfe) {}
      return l;
    }
    /** Consumes the current token as a double; 0 when it is not a valid double. */
    protected double nextTokenAsDouble() {
      double d = 0;
      try {
        d = Double.parseDouble(nextToken());
      } catch(NumberFormatException nfe) {}
      return d;
    }
  }
}
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.prometheus;
import javax.annotation.Generated;
import com.amazonaws.services.prometheus.model.*;
import com.amazonaws.*;
import com.amazonaws.services.prometheus.waiters.AmazonPrometheusWaiters;
/**
 * Abstract implementation of {@code AmazonPrometheus}. Convenient method forms pass through to the corresponding
 * overload that takes a request object, which throws an {@code UnsupportedOperationException}.
 * <p>
 * Generated code: every operation below intentionally throws
 * {@code UnsupportedOperationException}; subclasses override only the
 * operations they support. Do not edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonPrometheus implements AmazonPrometheus {

    protected AbstractAmazonPrometheus() {
    }

    @Override
    public CreateAlertManagerDefinitionResult createAlertManagerDefinition(CreateAlertManagerDefinitionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateRuleGroupsNamespaceResult createRuleGroupsNamespace(CreateRuleGroupsNamespaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateWorkspaceResult createWorkspace(CreateWorkspaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteAlertManagerDefinitionResult deleteAlertManagerDefinition(DeleteAlertManagerDefinitionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteRuleGroupsNamespaceResult deleteRuleGroupsNamespace(DeleteRuleGroupsNamespaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteWorkspaceResult deleteWorkspace(DeleteWorkspaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeAlertManagerDefinitionResult describeAlertManagerDefinition(DescribeAlertManagerDefinitionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeRuleGroupsNamespaceResult describeRuleGroupsNamespace(DescribeRuleGroupsNamespaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeWorkspaceResult describeWorkspace(DescribeWorkspaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListRuleGroupsNamespacesResult listRuleGroupsNamespaces(ListRuleGroupsNamespacesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListWorkspacesResult listWorkspaces(ListWorkspacesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public PutAlertManagerDefinitionResult putAlertManagerDefinition(PutAlertManagerDefinitionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public PutRuleGroupsNamespaceResult putRuleGroupsNamespace(PutRuleGroupsNamespaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public TagResourceResult tagResource(TagResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UntagResourceResult untagResource(UntagResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateWorkspaceAliasResult updateWorkspaceAlias(UpdateWorkspaceAliasRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public void shutdown() {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AmazonPrometheusWaiters waiters() {
        throw new java.lang.UnsupportedOperationException();
    }

}
|
package com.triple.common;
import java.util.Objects;
/**
* @author 杨帆玉
 * @date 2019/11/1 10:14 AM
*/
public class RDFTriple {
public String subject;
public String predicate;
public String object;
public RDFTriple() {
}
public RDFTriple(String subject, String predicate, String object) {
this.subject = subject;
this.predicate = predicate;
this.object = object;
}
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
public String getPredicate() {
return predicate;
}
public void setPredicate(String predicate) {
this.predicate = predicate;
}
public String getObject() {
return object;
}
public void setObject(String object) {
this.object = object;
}
@Override
public String toString() {
return "com.triple.common.RDFTriple{" +
"subject='" + subject + '\'' +
", predicate='" + predicate + '\'' +
", object='" + object + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RDFTriple rdfTriple = (RDFTriple) o;
return subject.equals(rdfTriple.subject) &&
predicate.equals(rdfTriple.predicate) &&
object.equals(rdfTriple.object);
}
@Override
public int hashCode() {
return Objects.hash(subject, predicate, object);
}
}
|
package org.openstack4j.openstack.gbp.internal;
import org.openstack4j.api.Apis;
import org.openstack4j.api.gbp.*;
import org.openstack4j.openstack.networking.internal.BaseNetworkingServices;
/**
 * Group Based Policy (GBP) service facade. Each accessor resolves the
 * corresponding GBP sub-API implementation through the {@link Apis}
 * registry; no state is kept here.
 *
 * @author vinod borole
 */
public class GbpServiceImpl extends BaseNetworkingServices implements GbpService {
    /**
     * {@inheritDoc}
     */
    @Override
    public ExternalPolicyService externalPolicy() {
        return Apis.get(ExternalPolicyService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public ExternalSegmentService externalSegment() {
        return Apis.get(ExternalSegmentService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public GroupService group() {
        return Apis.get(GroupService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public L2policyService l2Policy() {
        return Apis.get(L2policyService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public L3policyService l3Policy() {
        return Apis.get(L3policyService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NatPoolService natPool() {
        return Apis.get(NatPoolService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NetworkPolicyService networkPolicyService() {
        return Apis.get(NetworkPolicyService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public PolicyActionService policyAction() {
        return Apis.get(PolicyActionService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public PolicyRuleService policyRule() {
        return Apis.get(PolicyRuleService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public PolicyRuleSetService policyRuleSet() {
        return Apis.get(PolicyRuleSetService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public PolicyTargetService policyTarget() {
        return Apis.get(PolicyTargetService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public PolicyClassifierService policyClassifier() {
        return Apis.get(PolicyClassifierService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public ServicechainService servicechain() {
        return Apis.get(ServicechainService.class);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public ServiceProfileService serviceProfile() {
        return Apis.get(ServiceProfileService.class);
    }
}
|
package study;
/**
 * Demonstrates swapping the values of two int variables.
 */
public class B12_SwapValue {
    public static void main(String[] args) {
        int a = 10, b = 20;
        System.out.printf("a : %d, b : %d\n", a, b);
        // Swap via a temporary. The previous code did "a=b; b=a;", which
        // overwrote a's original value first and left both variables at 20.
        int temp = a;
        a = b;
        b = temp;
        System.out.printf("a : %d, b : %d\n", a, b);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.felix.shell.remote;
import java.io.BufferedReader;
import java.io.IOException;
import java.net.Socket;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.felix.service.command.CommandProcessor;
import org.apache.felix.service.command.CommandSession;
import org.apache.felix.shell.ShellService;
/**
* Implements the shell.
* <p>
* This class is instantiated by the {@link Listener} thread to handle a single
* remote connection in its own thread. The connection handler thread either
* terminates on request by the remote end or by the Remote Shell bundle being
* stopped. In the latter case, the {@link #terminate()} method is called, which
* closes the Socket used to handle the remote console. This causes a
* <code>SocketException</code> in the handler thread reading from the socket
* which in turn causes the {@link #run()} method to terminate and thus to
* end the handler thread.
*/
class Shell implements Runnable
{
    private final Listener m_owner;
    private final Socket m_socket;
    // Counter of active sessions shared with the owning Listener;
    // presumably decremented when this session ends — confirm in exit().
    private final AtomicInteger m_useCounter;
    // Volatile: written by this handler thread, read during termination.
    private volatile TerminalPrintStream m_out;

    /**
     * Creates a handler for a single remote console connection.
     *
     * @param owner   the listener that accepted the connection
     * @param s       the connected client socket
     * @param counter shared counter of active connections
     */
    public Shell(Listener owner, Socket s, AtomicInteger counter)
    {
        m_owner = owner;
        m_socket = s;
        m_useCounter = counter;
    }//constructor
void terminate()
{
// called by Listener.deactivate() to terminate this session
exit("\r\nFelix Remote Shell Console Terminating");
}//terminate
/**
* Runs the shell.
*/
public void run()
{
m_owner.registerConnection(this);
String msg = null;
try
{
m_out = new TerminalPrintStream(
m_owner.getServices(), m_socket.getOutputStream());
Object obj = null;
if ((obj = m_owner.getServices().getCommandProcessor(ServiceMediator.NO_WAIT))
!= null)
{
CommandProcessor cp = (CommandProcessor) obj;
CommandSession session =
cp.createSession(m_socket.getInputStream(), m_out, m_out);
startGogoShell(session);
}
else if ((obj = m_owner.getServices().getShellService(ServiceMediator.NO_WAIT))
!= null)
{
startFelixShell();
}
else
{
msg = "No shell services available...exiting.";
}
}
catch (IOException ex)
{
m_owner.getServices().error("Shell::run()", ex);
}
finally
{
// no need to clean up in/out, since exit does it all
exit(msg);
}
}//run
private void startGogoShell(CommandSession session)
{
try
{
session.execute("gosh --login --noshutdown");
}
catch (Exception e)
{
e.printStackTrace();
}
finally
{
session.close();
}
}
private void startFelixShell() throws IOException
{
BufferedReader in = new BufferedReader(
new TerminalReader(m_socket.getInputStream(), m_out));
ReentrantLock lock = new ReentrantLock();
// Print welcome banner.
m_out.println();
m_out.println("Felix Remote Shell Console:");
m_out.println("============================");
m_out.println("");
do
{
String line = "";
try
{
m_out.print("-> ");
line = in.readLine();
//make sure to capture end of stream
if (line == null)
{
m_out.println("exit");
return;
}
}
catch (Exception ex)
{
return;
}
line = line.trim();
if (line.equalsIgnoreCase("exit") || line.equalsIgnoreCase("disconnect"))
{
return;
}
ShellService shs = (ShellService)
m_owner.getServices().getShellService(ServiceMediator.NO_WAIT);
try
{
lock.acquire();
shs.executeCommand(line, m_out, m_out);
}
catch (Exception ex)
{
m_owner.getServices().error("Shell::run()", ex);
}
finally
{
lock.release();
}
}
while (true);
}
private void exit(String message)
{
// farewell message
if (message != null)
{
m_out.println(message);
}
m_out.println("Good Bye!");
m_out.close();
try
{
m_socket.close();
}
catch (IOException ex)
{
m_owner.getServices().error("Shell::exit()", ex);
}
m_owner.unregisterConnection(this);
m_useCounter.decrement();
}//exit
}//class Shell
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.emrcontainers.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.emrcontainers.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * Protocol marshaller for {@link DeleteManagedEndpointRequest}: binds the request
 * to the REST-JSON DELETE operation and delegates field marshalling to
 * {@link DeleteManagedEndpointRequestMarshaller}.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class DeleteManagedEndpointRequestProtocolMarshaller implements Marshaller<Request<DeleteManagedEndpointRequest>, DeleteManagedEndpointRequest> {

    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
            .requestUri("/virtualclusters/{virtualClusterId}/endpoints/{endpointId}").httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false)
            .hasPayloadMembers(false).serviceName("AmazonEMRContainers").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public DeleteManagedEndpointRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP {@link Request}.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public Request<DeleteManagedEndpointRequest> marshall(DeleteManagedEndpointRequest deleteManagedEndpointRequest) {
        if (deleteManagedEndpointRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind the request to the operation, then let the per-field
            // marshaller populate it before finishing.
            final ProtocolRequestMarshaller<DeleteManagedEndpointRequest> marshaller =
                    protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING, deleteManagedEndpointRequest);
            marshaller.startMarshalling();
            DeleteManagedEndpointRequestMarshaller.getInstance().marshall(deleteManagedEndpointRequest, marshaller);
            return marshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
/*
* Copyright 2017 Esri.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.esri.samples.na.service_area_task;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
public class ServiceAreaTaskSample extends Application {

    // Kept so stop() can release the controller's resources on shutdown.
    private static ServiceAreaTaskController controller;

    @Override
    public void start(Stage stage) throws Exception {
        // Load the UI definition and keep a handle on its controller.
        FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/service_area_task.fxml"));
        Parent root = loader.load();
        controller = loader.getController();
        // Configure the primary stage and show it.
        stage.setTitle("Service Area Task Sample");
        stage.setWidth(800);
        stage.setHeight(700);
        stage.setScene(new Scene(root));
        stage.show();
    }

    /**
     * Stops and releases all resources used in application.
     */
    @Override
    public void stop() {
        controller.terminate();
    }

    /**
     * Opens and runs application.
     *
     * @param args arguments passed to this application
     */
    public static void main(String[] args) {
        Application.launch(args);
    }
}
|
package com.sika.code.batch.listener.step;
import com.sika.code.common.log.util.LogUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.ItemProcessListener;
/**
 * Item-process listener that logs the item (and, after processing, the result)
 * around each processor invocation, and logs the exception on failure.
 *
 * @author daiqi
 * @create 2019-10-04 22:31
 */
@Slf4j
public class DefaultItemProcessListener<I, O> implements ItemProcessListener<I, O> {

    @Override
    public void beforeProcess(I item) {
        LogUtil.info("beforeProcess:item", item, log);
    }

    @Override
    public void afterProcess(I item, O result) {
        LogUtil.info("afterProcess:item", item, log);
        LogUtil.info("afterProcess:result", result, log);
    }

    @Override
    public void onProcessError(I item, Exception e) {
        LogUtil.info("onProcessError:item", item, log);
        LogUtil.error("onProcessError:Exception", e, log);
    }
}
|
/**
* Copyright (C) 2010-2015 Morgner UG (haftungsbeschränkt)
*
* This file is part of Structr <http://structr.org>.
*
* Structr is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Structr is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Structr. If not, see <http://www.gnu.org/licenses/>.
*/
package org.structr.common.error;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import java.util.Locale;
import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;
/**
 * Semantic error token that wraps a javax.tools compiler {@link Diagnostic},
 * exposing its kind and English message as JSON.
 *
 * @author Christian Morgner
 */
public class DiagnosticErrorToken extends SemanticErrorToken {
	// The compiler diagnostic wrapped by this token; supplies kind and message.
	private Diagnostic<? extends JavaFileObject> diagnostic = null;
	public DiagnosticErrorToken(final Diagnostic<? extends JavaFileObject> diagnostic) {
		// NOTE(review): 'base' is not declared anywhere in this file — presumably
		// a static or inherited field of SemanticErrorToken; confirm it resolves,
		// otherwise this does not compile.
		super(base);
		this.diagnostic = diagnostic;
	}
	@Override
	public JsonElement getContent() {
		// { "<diagnostic kind>": "<message>" }, e.g. { "ERROR": "..." }
		final JsonObject obj = new JsonObject();
		obj.add(diagnostic.getKind().name(), new JsonPrimitive(getErrorToken()));
		return obj;
	}
	@Override
	public String getErrorToken() {
		// Always uses the English locale for a stable message text.
		return diagnostic.getMessage(Locale.ENGLISH);
	}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache.xmlcache;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.naming.Context;
import com.gemstone.gemfire.CancelCriterion;
import com.gemstone.gemfire.GemFireIOException;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheTransactionManager;
import com.gemstone.gemfire.cache.CacheWriterException;
import com.gemstone.gemfire.cache.Declarable;
import com.gemstone.gemfire.cache.DiskStore;
import com.gemstone.gemfire.cache.DiskStoreFactory;
import com.gemstone.gemfire.cache.DynamicRegionFactory;
import com.gemstone.gemfire.cache.GatewayException;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.RegionExistsException;
import com.gemstone.gemfire.cache.RegionFactory;
import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.cache.asyncqueue.AsyncEventQueue;
import com.gemstone.gemfire.cache.asyncqueue.AsyncEventQueueFactory;
import com.gemstone.gemfire.cache.asyncqueue.internal.AsyncEventQueueFactoryImpl;
import com.gemstone.gemfire.cache.client.Pool;
import com.gemstone.gemfire.cache.client.PoolFactory;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import com.gemstone.gemfire.cache.execute.FunctionService;
import com.gemstone.gemfire.cache.query.CqAttributes;
import com.gemstone.gemfire.cache.query.CqException;
import com.gemstone.gemfire.cache.query.CqExistsException;
import com.gemstone.gemfire.cache.query.CqQuery;
import com.gemstone.gemfire.cache.query.CqServiceStatistics;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.IndexExistsException;
import com.gemstone.gemfire.cache.query.IndexInvalidException;
import com.gemstone.gemfire.cache.query.IndexNameConflictException;
import com.gemstone.gemfire.cache.query.IndexType;
import com.gemstone.gemfire.cache.query.MultiIndexCreationException;
import com.gemstone.gemfire.cache.query.Query;
import com.gemstone.gemfire.cache.query.QueryInvalidException;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.RegionNotFoundException;
import com.gemstone.gemfire.cache.query.internal.cq.CqService;
import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache.snapshot.CacheSnapshotService;
import com.gemstone.gemfire.cache.util.GatewayConflictResolver;
import com.gemstone.gemfire.cache.wan.GatewayReceiver;
import com.gemstone.gemfire.cache.wan.GatewayReceiverFactory;
import com.gemstone.gemfire.cache.wan.GatewaySender;
import com.gemstone.gemfire.cache.wan.GatewaySenderFactory;
import com.gemstone.gemfire.cache.wan.GatewayTransportFilter;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.i18n.LogWriterI18n;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.cache.hdfs.HDFSStoreFactory;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSIntegrationUtil;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreCreation;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreFactoryImpl;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.internal.cache.CacheConfig;
import com.gemstone.gemfire.internal.cache.CacheServerLauncher;
import com.gemstone.gemfire.internal.cache.CacheService;
import com.gemstone.gemfire.internal.cache.DiskStoreFactoryImpl;
import com.gemstone.gemfire.internal.cache.DiskStoreImpl;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.InternalCache;
import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.internal.cache.PoolManagerImpl;
import com.gemstone.gemfire.internal.cache.extension.Extensible;
import com.gemstone.gemfire.internal.cache.extension.ExtensionPoint;
import com.gemstone.gemfire.internal.cache.extension.SimpleExtensionPoint;
import com.gemstone.gemfire.internal.cache.ha.HARegionQueue;
import com.gemstone.gemfire.internal.cache.wan.AbstractGatewaySender;
import com.gemstone.gemfire.internal.cache.wan.WANServiceProvider;
import com.gemstone.gemfire.internal.cache.wan.InternalGatewaySenderFactory;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.jndi.JNDIInvoker;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
import com.gemstone.gemfire.internal.logging.LogWriterFactory;
import com.gemstone.gemfire.pdx.PdxInstance;
import com.gemstone.gemfire.pdx.PdxInstanceFactory;
import com.gemstone.gemfire.pdx.PdxSerializer;
import com.gemstone.gemfire.pdx.internal.TypeRegistry;
/**
* Represents a {@link Cache} that is created declaratively. Notice
* that it implements the {@link Cache} interface so that this class
* must be updated when {@link Cache} is modified. This class is
* public for testing purposes.
*
* @author David Whitlock
*
* @since 3.0
*/
public class CacheCreation implements InternalCache {
/** The amount of time to wait for a distributed lock */
private int lockTimeout = GemFireCacheImpl.DEFAULT_LOCK_TIMEOUT;
/** True once setLockTimeout() has been called explicitly. */
private boolean hasLockTimeout = false;
/** The duration of a lease on a distributed lock */
private int lockLease = GemFireCacheImpl.DEFAULT_LOCK_LEASE;
/** True once setLockLease() has been called explicitly. */
private boolean hasLockLease = false;
/** The amount of time to wait for a <code>netSearch</code> */
private int searchTimeout = GemFireCacheImpl.DEFAULT_SEARCH_TIMEOUT;
/** True once setSearchTimeout() has been called explicitly. */
private boolean hasSearchTimeout = false;
/** True once setMessageSyncInterval() has been called explicitly. */
private boolean hasMessageSyncInterval = false;
/** This cache's roots keyed on name; linked so iteration is in declaration order. */
protected final Map roots = new LinkedHashMap();
/** Are dynamic regions enabled in this cache? */
private DynamicRegionFactory.Config dynamicRegionFactoryConfig = null;
private boolean hasDynamicRegionFactory = false;
/** Is this a cache server? */
private boolean isServer = false;
private boolean hasServer = false;
/** The bridge servers configured for this cache */
private final List bridgeServers = new ArrayList();
// Stores the properties used to initialize declarables.
private final Map<Declarable, Properties> declarablePropertiesMap = new HashMap<Declarable, Properties>();
/** Gateway senders, receivers, and async event queues declared for this cache. */
private Set<GatewaySender> gatewaySenders = new HashSet<GatewaySender>();
private Set<GatewayReceiver> gatewayReceivers = new HashSet<GatewayReceiver>();
private Set<AsyncEventQueue> asyncEventQueues = new HashSet<AsyncEventQueue>();
private GatewayConflictResolver gatewayConflictResolver;
/** The copyOnRead attribute */
private boolean copyOnRead = GemFireCacheImpl.DEFAULT_COPY_ON_READ;
private boolean hasCopyOnRead = false;
/** The CacheTransactionManager representative for this Cache */
protected CacheTransactionManagerCreation txMgrCreation = null;
/** JNDI Context associated with the Gemfire */
// private static Context ctx;
/** The named region attributes associated with this cache */
private final Map namedRegionAttributes = new HashMap();
/** The names of the region attributes in the order in which they
 * were added. Keeping track of this ensures that named region
 * attributes are processed in the correct order. That is, "parent"
 * named region attributes will be processed before "children" named
 * region attributes. */
protected final List regionAttributesNames = new ArrayList();
/** The named disk store attributes associated with this cache.
 * Made this linked so its iteration would be in insert order.
 * This is important for unit testing 44914.
 */
protected final Map diskStores = new LinkedHashMap();
/** Named HDFS stores; linked so iteration is in insert order. */
protected final Map hdfsStores = new LinkedHashMap();
/** Backup files to pass along to the real cache in create(). */
private final List<File> backups = new ArrayList<File>();
private CacheConfig cacheConfig = new CacheConfig();
/** A logger that is used in debugging */
private InternalLogWriter logWriter =
    new LocalLogWriter(InternalLogWriter.ALL_LEVEL, System.out);
private InternalLogWriter securityLogWriter =
    LogWriterFactory.toSecurityLogWriter(logWriter);
/**
 * {@link ExtensionPoint} support.
 * @since 8.1
 */
private SimpleExtensionPoint<Cache> extensionPoint = new SimpleExtensionPoint<Cache>(this, this);
//////////////////////// Constructors ////////////////////////
/**
 * Creates a new <code>CacheCreation</code> with no root regions.
 * Delegates to {@code CacheCreation(false)}, i.e. xml-generation mode.
 */
public CacheCreation() {
  this(false);
}
/** clear thread locals that may have been set by previous uses of CacheCreation */
public static void clearThreadLocals() {
  // Replacing the ThreadLocal object discards values registered by all threads.
  createInProgress = new ThreadLocal<>();
}
/**
 * @param forParsing if true then this creation is used for parsing xml;
 *   if false then it is used for generating xml.
 * @since 5.7
 */
public CacheCreation(boolean forParsing) {
  initializeRegionShortcuts();
  if (!forParsing) {
    // Publish this creation so PoolManager defers pool creation to it
    // while xml is being generated.
    // NOTE(review): 'this.pm' is declared elsewhere in this class (not
    // visible in this chunk); presumably the PoolManagerImpl returned by
    // getCurrentPoolManager() — confirm.
    createInProgress.set(this.pm);
  }
}
/**
 * Clears the thread-local create-in-progress marker so that pool creation
 * no longer defers to this CacheCreation once xml generation starts.
 * @since 5.7
 */
public void startingGenerate() {
  createInProgress.set(null);
}
////////////////////// Instance Methods //////////////////////
/** Shared default region attributes; created once and never mutated. */
static final private RegionAttributes defaults = new AttributesFactory().create();
/** Returns the shared default region attributes. */
RegionAttributes getDefaultAttributes() {
  return defaults;
}
/** Registers the standard region-shortcut attribute mappings on this cache. */
protected void initializeRegionShortcuts() {
  GemFireCacheImpl.initializeRegionShortcuts(this);
}
/**
 * Registers a root region on this cache creation.
 *
 * @throws RegionExistsException
 *         If this cache already contains a region with the same
 *         name as <code>root</code>.
 */
void addRootRegion(RegionCreation root)
    throws RegionExistsException {
  final String name = root.getName();
  final RegionCreation previous = (RegionCreation) this.roots.get(name);
  if (previous != null) {
    throw new RegionExistsException(previous);
  }
  this.roots.put(name, root);
}
/** Returns the distributed-lock timeout in seconds. */
public int getLockTimeout() {
  return this.lockTimeout;
}
/** Sets the distributed-lock timeout and records that it was set explicitly. */
public void setLockTimeout(int seconds) {
  this.lockTimeout = seconds;
  this.hasLockTimeout = true;
}
/** True if setLockTimeout() was called (value came from the declaration). */
boolean hasLockTimeout() {
  return this.hasLockTimeout;
}
/** Returns the distributed-lock lease duration in seconds. */
public int getLockLease() {
  return this.lockLease;
}
/** Sets the distributed-lock lease and records that it was set explicitly. */
public void setLockLease(int seconds) {
  this.lockLease = seconds;
  this.hasLockLease = true;
}
/** True if setLockLease() was called (value came from the declaration). */
boolean hasLockLease() {
  return this.hasLockLease;
}
/** Returns the netSearch timeout in seconds. */
public int getSearchTimeout() {
  return this.searchTimeout;
}
/** Sets the netSearch timeout and records that it was set explicitly. */
public void setSearchTimeout(int seconds) {
  this.searchTimeout = seconds;
  this.hasSearchTimeout = true;
}
/** True if setSearchTimeout() was called (value came from the declaration). */
boolean hasSearchTimeout() {
  return this.hasSearchTimeout;
}
/** Delegates to the process-wide HARegionQueue message sync interval. */
public int getMessageSyncInterval()
{
  return HARegionQueue.getMessageSyncInterval();
}
/**
 * Sets the process-wide HARegionQueue message sync interval.
 * @throws IllegalArgumentException if seconds is negative
 */
public void setMessageSyncInterval(int seconds)
{
  if (seconds < 0) {
    throw new IllegalArgumentException(LocalizedStrings.CacheCreation_THE_MESSAGESYNCINTERVAL_PROPERTY_FOR_CACHE_CANNOT_BE_NEGATIVE.toLocalizedString());
  }
  HARegionQueue.setMessageSyncInterval(seconds);
  this.hasMessageSyncInterval = true;
}
/** True if setMessageSyncInterval() was called (value came from the declaration). */
boolean hasMessageSyncInterval()
{
  return this.hasMessageSyncInterval;
}
/**
 * Returns an unmodifiable snapshot of this cache's root regions,
 * in the order they were declared.
 */
public Set rootRegions() {
  Set regions = new LinkedHashSet();
  regions.addAll(this.roots.values());
  return Collections.unmodifiableSet(regions);
}
/**
 * create diskstore factory
 *
 * @since prPersistSprint2
 */
public DiskStoreFactory createDiskStoreFactory() {
  // Factory is bound to this creation so stores register with it, not a real cache.
  return new DiskStoreFactoryImpl(this);
}
/**
 * Store the current CacheCreation that is doing a create.
 * Used from PoolManager to defer to CacheCreation as a manager of pools.
 * @since 5.7
 */
private static ThreadLocal createInProgress = new ThreadLocal();
/**
 * Returns null if the current thread is not doing a CacheCreation create.
 * Otherwise returns the PoolManagerImpl of the CacheCreation of the
 * create being invoked.
 * @since 5.7
 */
public static final PoolManagerImpl getCurrentPoolManager() {
  return (PoolManagerImpl)createInProgress.get();
}
/**
 * Fills in the contents of a {@link Cache} based on this creation
 * object's state: applies cache-wide settings, creates pools, disk stores,
 * gateway senders/receivers, async event queues, HDFS stores, named region
 * attributes, root regions, and finally cache servers and extensions.
 * Ordering of these steps is deliberate (see inline bug references).
 *
 * @throws TimeoutException
 * @throws CacheWriterException
 * @throws RegionExistsException
 * @throws GatewayException
 */
void create(GemFireCacheImpl cache)
    throws TimeoutException, CacheWriterException,
    GatewayException,
    RegionExistsException {
  cache.setDeclarativeCacheConfig(cacheConfig);
  if (cache.isClient()) {
    throw new IllegalStateException("You must use client-cache in the cache.xml when ClientCacheFactory is used.");
  }
  // Apply cache-wide settings only when they were explicitly declared.
  if (this.hasLockLease()) {
    cache.setLockLease(this.lockLease);
  }
  if (this.hasLockTimeout()) {
    cache.setLockTimeout(this.lockTimeout);
  }
  if (this.hasSearchTimeout()) {
    cache.setSearchTimeout(this.searchTimeout);
  }
  if (this.hasMessageSyncInterval()) {
    cache.setMessageSyncInterval(this.getMessageSyncInterval());
  }
  if (this.gatewayConflictResolver != null) {
    cache.setGatewayConflictResolver(this.gatewayConflictResolver);
  }
  // if (this.hasCopyOnRead()) {
  //   cache.setCopyOnRead(this.copyOnRead);
  // }
  { // create connection pools
    Map m = getPools();
    if (!m.isEmpty()) {
      Iterator it = m.values().iterator();
      while (it.hasNext()) {
        Pool cp = (Pool)it.next();
        PoolFactoryImpl f;
        f = (PoolFactoryImpl)PoolManager.createFactory();
        f.init(cp);
        PoolImpl p = (PoolImpl)f.create(cp.getName());
      }
    }
  }
  if (hasResourceManager()) {
    // moved this up to fix bug 42128
    getResourceManager().configure(cache.getResourceManager());
  }
  // Create the pdx registry's disk store before the others (bug 44271).
  DiskStoreAttributesCreation pdxRegDSC = initializePdxDiskStore(cache);
  cache.initializePdxRegistry();
  for (Iterator iter = this.diskStores.values().iterator(); iter.hasNext();) {
    DiskStoreAttributesCreation creation = (DiskStoreAttributesCreation) iter.next();
    // Skip the pdx disk store — it was already created above.
    if (creation != pdxRegDSC) {
      createDiskStore(creation, cache);
    }
  }
  if (this.hasDynamicRegionFactory()) {
    DynamicRegionFactory.get().open(this.getDynamicRegionFactoryConfig());
  }
  if (this.hasServer()) {
    cache.setIsServer(this.isServer);
  }
  if (this.hasCopyOnRead()) {
    cache.setCopyOnRead(this.copyOnRead);
  }
  // Wire declared transaction listeners/writer into the real tx manager.
  if (this.txMgrCreation != null &&
      this.txMgrCreation.getListeners().length > 0 &&
      cache.getCacheTransactionManager()!=null) {
    cache.getCacheTransactionManager().initListeners(this.txMgrCreation.getListeners());
  }
  if (this.txMgrCreation != null &&
      cache.getCacheTransactionManager()!=null) {
    cache.getCacheTransactionManager().setWriter(this.txMgrCreation.getWriter());
  }
  for (GatewaySender senderCreation : this.getGatewaySenders()) {
    GatewaySenderFactory factory = (GatewaySenderFactory)cache
        .createGatewaySenderFactory();
    ((InternalGatewaySenderFactory)factory).configureGatewaySender(senderCreation);
    GatewaySender gatewaySender = factory.create(senderCreation.getId(),
        senderCreation.getRemoteDSId());
    // Log that a manual-start sender is not being auto-started here.
    if (gatewaySender.isManualStart()) {
      cache
          .getLoggerI18n()
          .info(
              LocalizedStrings.CacheCreation_0_IS_NOT_BEING_STARTED_SINCE_IT_IS_CONFIGURED_FOR_MANUAL_START,
              gatewaySender);
    }
  }
  for (AsyncEventQueue asyncEventQueueCreation : this.getAsyncEventQueues()) {
    AsyncEventQueueFactoryImpl asyncQueueFactory =
        (AsyncEventQueueFactoryImpl) cache.createAsyncEventQueueFactory();
    asyncQueueFactory.configureAsyncEventQueue(asyncEventQueueCreation);
    AsyncEventQueue asyncEventQueue = cache.getAsyncEventQueue(asyncEventQueueCreation.getId());
    // AsyncEventQueue asyncEventQueue =
    // asyncQueueFactory.create(asyncEventQueueCreation.getId(), asyncEventQueueCreation.getAsyncEventListener());
    // Only create the queue when one with this id does not exist already.
    if (asyncEventQueue == null) {
      asyncQueueFactory.create(asyncEventQueueCreation.getId(), asyncEventQueueCreation.getAsyncEventListener());
    }
  }
  for (GatewayReceiver receiverCreation : this.getGatewayReceivers()) {
    GatewayReceiverFactory factory = cache.createGatewayReceiverFactory();
    factory.setBindAddress(receiverCreation.getBindAddress());
    factory.setMaximumTimeBetweenPings(receiverCreation
        .getMaximumTimeBetweenPings());
    factory.setStartPort(receiverCreation.getStartPort());
    factory.setEndPort(receiverCreation.getEndPort());
    factory.setSocketBufferSize(receiverCreation.getSocketBufferSize());
    factory.setManualStart(receiverCreation.isManualStart());
    for (GatewayTransportFilter filter : receiverCreation
        .getGatewayTransportFilters()) {
      factory.addGatewayTransportFilter(filter);
    }
    factory.setHostnameForSenders(receiverCreation.getHost());
    GatewayReceiver receiver = factory.create();
    // Log that a manual-start receiver is not being auto-started here.
    if (receiver.isManualStart()) {
      cache
          .getLoggerI18n()
          .info(
              LocalizedStrings.CacheCreation_0_IS_NOT_BEING_STARTED_SINCE_IT_IS_CONFIGURED_FOR_MANUAL_START,
              receiver);
    }
  }
  for(Iterator iter = this.hdfsStores.entrySet().iterator(); iter.hasNext(); ) {
    Entry entry = (Entry) iter.next();
    HDFSStoreCreation hdfsStoreCreation = (HDFSStoreCreation) entry.getValue();
    HDFSStoreFactory storefactory = cache.createHDFSStoreFactory(hdfsStoreCreation);
    storefactory.create((String) entry.getKey());
  }
  cache.initializePdxRegistry();
  // Named region attributes must be processed in declaration order so that
  // "parent" attributes exist before "children" that inherit from them.
  for (Iterator iter = this.regionAttributesNames.iterator();
      iter.hasNext(); ) {
    String id = (String) iter.next();
    RegionAttributesCreation creation =
        (RegionAttributesCreation) getRegionAttributes(id);
    creation.inheritAttributes(cache, false);
    // TODO: HDFS: HDFS store/queue will be mapped against region path and not
    // the attribute id; don't really understand what this is trying to do
    if (creation.getHDFSStoreName() != null)
    {
      HDFSStoreImpl store = cache.findHDFSStore(creation.getHDFSStoreName());
      if(store == null) {
        HDFSIntegrationUtil.createDefaultAsyncQueueForHDFS((Cache)cache, creation.getHDFSWriteOnly(), id);
      }
    }
    if (creation.getHDFSStoreName() != null && creation.getPartitionAttributes().getColocatedWith() == null) {
      creation.addAsyncEventQueueId(HDFSStoreFactoryImpl.getEventQueueName(id));
    }
    RegionAttributes attrs;
    // Don't let the RegionAttributesCreation escape to the user
    AttributesFactory factory = new AttributesFactory(creation);
    attrs = factory.create();
    cache.setRegionAttributes(id, attrs);
  }
  initializeRegions(this.roots, cache);
  cache.readyDynamicRegionFactory();
  // Create and start the BridgeServers. This code was moved from
  // before region initialization to after it to fix bug 33587.
  // Create and start the CacheServers after the gateways have been intialized
  // to fix bug 39736.
  Integer serverPort = CacheServerLauncher.getServerPort();
  String serverBindAdd = CacheServerLauncher.getServerBindAddress();
  Boolean disableDefaultServer = CacheServerLauncher.disableDefaultServer.get();
  startCacheServers(this.getCacheServers(), cache, serverPort, serverBindAdd, disableDefaultServer);
  cache.setBackupFiles(this.backups);
  cache.addDeclarableProperties(this.declarablePropertiesMap);
  runInitializer();
  cache.setInitializer(getInitializer(), getInitializerProps());
  // UnitTest CacheXml81Test.testCacheExtension
  // Create all extensions
  extensionPoint.fireCreate(cache);
}
/** Creates each declared root region (and its subregions) on the real cache. */
protected void initializeRegions(Map declarativeRegions, Cache cache) {
  for (Object value : declarativeRegions.values()) {
    ((RegionCreation) value).createRoot(cache);
  }
}
/**
 * starts declarative cache servers if a server is not running on the port already.
 * Also adds a default server to the param declarativeCacheServers if a serverPort is specified.
 *
 * @param declarativeCacheServers servers declared in cache.xml (may be mutated here)
 * @param cache the cache the servers belong to
 * @param serverPort launcher-supplied port override, or null
 * @param serverBindAdd launcher-supplied bind-address override, or null
 * @param disableDefaultServer if true, never add a default server
 */
protected void startCacheServers(List declarativeCacheServers, Cache cache, Integer serverPort, String serverBindAdd, Boolean disableDefaultServer) {
  // A launcher port/bind-address override is ambiguous with multiple declared servers.
  if (declarativeCacheServers.size() > 1
      && (serverPort != null || serverBindAdd != null)) {
    throw new RuntimeException(
        LocalizedStrings.CacheServerLauncher_SERVER_PORT_MORE_THAN_ONE_CACHE_SERVER
            .toLocalizedString());
  }
  if (declarativeCacheServers.isEmpty()
      && (serverPort != null || serverBindAdd != null)
      && (disableDefaultServer == null || !disableDefaultServer)) {
    boolean existingCacheServer = false;
    List<CacheServer> cacheServers = cache.getCacheServers();
    if (cacheServers != null) {
      for (CacheServer cacheServer : cacheServers) {
        // BUG FIX: guard against unboxing a null serverPort. This branch is
        // reachable with serverPort == null when only serverBindAdd was
        // supplied, and `serverPort == cacheServer.getPort()` would then
        // throw a NullPointerException on auto-unboxing.
        if (serverPort != null && serverPort.intValue() == cacheServer.getPort()) {
          existingCacheServer = true;
        }
      }
    }
    if (!existingCacheServer) {
      declarativeCacheServers.add(new CacheServerCreation((GemFireCacheImpl)cache, false));
    }
  }
  for (Iterator iter = declarativeCacheServers.iterator(); iter.hasNext();) {
    CacheServerCreation declaredCacheServer = (CacheServerCreation)iter.next();
    // Skip declared servers whose port is already served by a running server.
    boolean startServer = true;
    List<CacheServer> cacheServers = cache.getCacheServers();
    if (cacheServers != null) {
      for (CacheServer cacheServer : cacheServers) {
        if (declaredCacheServer.getPort() == cacheServer.getPort()) {
          startServer = false;
        }
      }
    }
    if (!startServer) {
      continue;
    }
    CacheServerImpl impl = (CacheServerImpl)cache.addCacheServer();
    impl.configureFrom(declaredCacheServer);
    // Launcher overrides take precedence over the declared configuration.
    if (serverPort != null && serverPort != CacheServer.DEFAULT_PORT) {
      impl.setPort(serverPort);
    }
    if (serverBindAdd != null) {
      impl.setBindAddress(serverBindAdd.trim());
    }
    try {
      if (!impl.isRunning()) {
        impl.start();
      }
    }
    catch (IOException ex) {
      throw new GemFireIOException(
          LocalizedStrings.CacheCreation_WHILE_STARTING_CACHE_SERVER_0
              .toLocalizedString(impl), ex);
    }
  }
}
/**
 * Returns a description of the disk store used by the pdx registry,
 * creating that store up front when necessary. May return null when no
 * disk store is needed or the default store is used.
 */
protected DiskStoreAttributesCreation initializePdxDiskStore(GemFireCacheImpl cache) {
  // to fix bug 44271 create the disk store used by the pdx registry first.
  // If it is using the default disk store we need to create it now.
  // If the cache has a pool then no need to create disk store.
  DiskStoreAttributesCreation pdxRegDSC = null;
  if (TypeRegistry.mayNeedDiskStore(cache)) {
    String pdxRegDsName = cache.getPdxDiskStore();
    if (pdxRegDsName == null) {
      // No store declared for pdx: fall back to the default disk store name.
      pdxRegDsName = DiskStoreFactory.DEFAULT_DISK_STORE_NAME;
    }
    // make sure pdxRegDSC gets set to fix for bug 44914
    pdxRegDSC = (DiskStoreAttributesCreation)this.diskStores.get(pdxRegDsName);
    if (pdxRegDSC == null) {
      if (pdxRegDsName.equals(DiskStoreFactory.DEFAULT_DISK_STORE_NAME)) {
        // need to create default disk store
        cache.getOrCreateDefaultDiskStore();
      }
    } else {
      createDiskStore(pdxRegDSC, cache);
    }
  }
  return pdxRegDSC;
}
/** Materializes a declared disk store on the real cache. */
protected void createDiskStore(DiskStoreAttributesCreation creation, GemFireCacheImpl cache) {
  // Don't let the DiskStoreAttributesCreation escape to the user:
  // hand it to a real factory and create the store by name.
  cache.createDiskStoreFactory(creation).create(creation.getName());
}
/**
 * Returns whether or not this <code>CacheCreation</code> is
 * equivalent to another <code>Cache</code>.
 */
// NOTE(review): most mismatches are reported by throwing RuntimeException
// rather than by returning false; only the dynamic-region-factory checks
// return false. Callers appear to rely on the exception text for
// diagnostics -- confirm before changing this reporting style.
public boolean sameAs(Cache other) {
// Compare the scalar cache-level settings first.
boolean sameConfig =
other.getLockLease() == this.getLockLease() &&
other.getLockTimeout() == this.getLockTimeout() &&
other.getSearchTimeout() == this.getSearchTimeout() &&
other.getMessageSyncInterval() == this.getMessageSyncInterval() &&
other.getCopyOnRead() == this.getCopyOnRead() &&
other.isServer() == this.isServer();
if (!sameConfig) {
throw new RuntimeException(LocalizedStrings.CacheCreation_SAMECONFIG.toLocalizedString());
} else {
// Compare dynamic region factory configurations.
DynamicRegionFactory.Config drc1 = this.getDynamicRegionFactoryConfig();
if (drc1 != null) {
// we have a dynamic region factory
DynamicRegionFactory.Config drc2 = null;
if (other instanceof CacheCreation) {
drc2 = ((CacheCreation)other).getDynamicRegionFactoryConfig();
} else {
drc2 = DynamicRegionFactory.get().getConfig();
}
if (drc2 == null) {
return false;
}
if (!drc1.equals(drc2)) {
return false;
}
} else {
// we have no dynamic region factory; how about other?
if (other instanceof CacheCreation) {
if (((CacheCreation)other).getDynamicRegionFactoryConfig() != null) {
return false;
}
} else {
// other must be real cache in which case we compare to DynamicRegionFactory
if (DynamicRegionFactory.get().isOpen()) {
return false;
}
}
}
// Compare cache servers ("bridges"): every declared server here must have
// a matching server in other.
Collection myBridges = this.getCacheServers();
Collection otherBridges = other.getCacheServers();
if (myBridges.size() != otherBridges.size()) {
throw new RuntimeException(LocalizedStrings.CacheCreation_CACHESERVERS_SIZE.toLocalizedString());
}
for (Iterator myIter = myBridges.iterator(); myIter.hasNext(); ) {
CacheServerCreation myBridge =
(CacheServerCreation) myIter.next();
boolean found = false;
for (Iterator otherIter = otherBridges.iterator();
otherIter.hasNext(); ) {
CacheServer otherBridge = (CacheServer) otherIter.next();
if (myBridge.sameAs(otherBridge)) {
found = true;
break;
}
}
if (!found) {
throw new RuntimeException(LocalizedStrings.CacheCreation_CACHE_SERVER_0_NOT_FOUND.toLocalizedString(myBridge));
}
}
{ // compare connection pools
Map m1 = getPools();
Map m2 = (other instanceof CacheCreation)
? ((CacheCreation)other).getPools()
: PoolManager.getAll();
int m1Size = m1.size();
{
// ignore any gateway instances
Iterator it1 = m1.values().iterator();
while (it1.hasNext()) {
Pool cp = (Pool)it1.next();
if (((PoolImpl)cp).isUsedByGateway()) {
m1Size--;
}
}
}
int m2Size = m2.size();
{
// ignore any gateway instances
Iterator it2 = m2.values().iterator();
while (it2.hasNext()) {
Pool cp = (Pool)it2.next();
if (((PoolImpl)cp).isUsedByGateway()) {
m2Size--;
}
}
}
if (m2Size == 1) {
// if it is just the DEFAULT pool then ignore it
Pool p = (Pool)m2.values().iterator().next();
if (p.getName().equals("DEFAULT")) {
m2Size = 0;
}
}
if (m1Size != m2Size) {
throw new RuntimeException("pool sizes differ m1Size=" + m1Size
+ " m2Size=" + m2Size
+ " m1=" + m1.values()
+ " m2=" + m2.values());
}
if (m1Size > 0) {
Iterator it1 = m1.values().iterator();
while (it1.hasNext()) {
PoolImpl cp = (PoolImpl)it1.next();
// ignore any gateway instances
// NOTE(review): the boolean result of cp.sameAs(...) is discarded here;
// presumably PoolImpl.sameAs throws on mismatch -- confirm.
if (!(cp).isUsedByGateway()) {
cp.sameAs(m2.get(cp.getName()));
}
}
}
}
// compare disk stores
for (Iterator myIter = diskStores.values().iterator(); myIter.hasNext(); ) {
DiskStoreAttributesCreation dsac = (DiskStoreAttributesCreation)myIter.next();
String name = dsac.getName();
DiskStore ds = other.findDiskStore(name);
if (ds == null) {
getLogger().fine("Disk store " + name+" not found.");
throw new RuntimeException(LocalizedStrings.CacheCreation_DISKSTORE_NOTFOUND_0.toLocalizedString(name));
} else {
if (!dsac.sameAs(ds)) {
getLogger().fine("Attributes for disk store " + name + " do not match");
throw new RuntimeException(LocalizedStrings.CacheCreation_ATTRIBUTES_FOR_DISKSTORE_0_DO_NOT_MATCH.toLocalizedString(name));
}
}
}
// Compare the named (shared) region attributes by id.
Map myNamedAttributes = this.listRegionAttributes();
Map otherNamedAttributes = other.listRegionAttributes();
if (myNamedAttributes.size() != otherNamedAttributes.size()) {
throw new RuntimeException(LocalizedStrings.CacheCreation_NAMEDATTRIBUTES_SIZE.toLocalizedString());
}
for (Iterator myIter = myNamedAttributes.entrySet().iterator();
myIter.hasNext(); ) {
Map.Entry myEntry = (Map.Entry) myIter.next();
String myId = (String) myEntry.getKey();
Assert.assertTrue(myEntry.getValue() instanceof RegionAttributesCreation,
"Entry value is a " + myEntry.getValue().getClass().getName());
RegionAttributesCreation myAttrs =
(RegionAttributesCreation) myEntry.getValue();
RegionAttributes otherAttrs = other.getRegionAttributes(myId);
if (otherAttrs == null) {
getLogger().fine("No attributes for " + myId);
throw new RuntimeException(LocalizedStrings.CacheCreation_NO_ATTRIBUTES_FOR_0.toLocalizedString(myId));
} else {
if (!myAttrs.sameAs(otherAttrs)) {
getLogger().fine("Attributes for " + myId +
" do not match");
throw new RuntimeException(LocalizedStrings.CacheCreation_ATTRIBUTES_FOR_0_DO_NOT_MATCH.toLocalizedString(myId));
}
}
}
// Compare root regions by name, recursing via RegionCreation.sameAs.
Collection myRoots = this.roots.values();
Collection otherRoots = other.rootRegions();
if (myRoots.size() != otherRoots.size()) {
throw new RuntimeException(LocalizedStrings.CacheCreation_ROOTS_SIZE.toLocalizedString());
}
Iterator it = myRoots.iterator();
while (it.hasNext()) {
RegionCreation r = (RegionCreation)it.next();
Region r2 = other.getRegion(r.getName());
if (r2 == null) {
throw new RuntimeException(LocalizedStrings.CacheCreation_NO_ROOT_0.toLocalizedString(r.getName()));
} else if (!r.sameAs(r2)) {
throw new RuntimeException(LocalizedStrings.CacheCreation_REGIONS_DIFFER.toLocalizedString());
}
}
// If both have a listener, make sure they are equal.
if (getCacheTransactionManager() != null) {
// Currently the GemFireCache always has a CacheTransactionManager,
// whereas that is not true for CacheTransactionManagerCreation.
List otherTxListeners =
Arrays.asList(other.getCacheTransactionManager().getListeners());
List thisTxListeners =
Arrays.asList(getCacheTransactionManager().getListeners());
if (!thisTxListeners.equals(otherTxListeners)) {
throw new RuntimeException(LocalizedStrings.CacheCreation_TXLISTENER.toLocalizedString());
}
}
}
if (hasResourceManager()) {
// NOTE(review): return value of sameAs is discarded; presumably
// ResourceManagerCreation.sameAs throws on mismatch -- confirm.
getResourceManager().sameAs(other.getResourceManager());
}
return true;
}
////////// Inherited methods that don't do anything //////////
// A CacheCreation is a build-time description of a cache parsed from
// cache.xml; runtime Cache operations below are deliberately unsupported.
public void close() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
public void close(boolean keepalive) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
// public Region createRootRegion(RegionAttributes aRegionAttributes)
// throws RegionExistsException, TimeoutException {
//
// throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
// }
// see Cache.isReconnecting()
public boolean isReconnecting() {
throw new UnsupportedOperationException();
}
// see Cache.waitUntilReconnected(long, TimeUnit)
public boolean waitUntilReconnected(long time, TimeUnit units) throws InterruptedException {
throw new UnsupportedOperationException();
}
// see Cache.stopReconnecting()
public void stopReconnecting() {
throw new UnsupportedOperationException();
}
// see Cache.getReconnectedCache()
public Cache getReconnectedCache() {
throw new UnsupportedOperationException();
}
// Loggers are available even on a declarative cache.
public LogWriter getLogger() {
return this.logWriter;
}
public LogWriter getSecurityLogger() {
return this.securityLogWriter;
}
public LogWriterI18n getLoggerI18n() {
return this.logWriter.convertToLogWriterI18n();
}
public LogWriterI18n getSecurityLoggerI18n() {
return this.securityLogWriter.convertToLogWriterI18n();
}
public DistributedSystem getDistributedSystem() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
public boolean isClosed(){
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
public String getName() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
public CancelCriterion getCancelCriterion() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
// Returns the stub QueryService defined near the bottom of this class,
// which only records index-creation requests.
public com.gemstone.gemfire.cache.query.QueryService getQueryService() {
return queryService;
}
/**
 * @since 6.5
 */
public <K,V> RegionFactory<K,V> createRegionFactory(RegionShortcut atts) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
/**
 * @since 6.5
 */
public <K,V> RegionFactory<K,V> createRegionFactory() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
/**
 * @since 6.5
 */
public <K,V> RegionFactory<K,V> createRegionFactory(String regionAttributesId) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
/**
 * @since 6.5
 */
public <K,V> RegionFactory<K,V> createRegionFactory(RegionAttributes<K,V> regionAttributes) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
// Delegates to createRegion; both declare a root region on this creation.
public Region createVMRegion(String name, RegionAttributes attrs) throws RegionExistsException, TimeoutException {
return createRegion(name, attrs);
}
// Declares a root region with the given attributes. Declarative attributes
// inherit from this cache and are prepared before validation; the order of
// inheritAttributes/prepareForValidation/validateAttributes matters.
public Region createRegion(String name, RegionAttributes attrs) throws RegionExistsException, TimeoutException {
if (attrs instanceof RegionAttributesCreation) {
((RegionAttributesCreation) attrs).inheritAttributes(this);
((RegionAttributesCreation) attrs).prepareForValidation();
}
AttributesFactory.validateAttributes(attrs);
RegionCreation region = new RegionCreation(this, name, null);
region.setAttributes(attrs);
this.addRootRegion(region);
return region;
}
/**
 * Declares a new root region whose attributes are inherited from the named
 * region-attributes id.
 */
public Region createRegion(String name, String refid) throws RegionExistsException, TimeoutException {
  final RegionCreation root = new RegionCreation(this, name, refid);
  addRootRegion(root);
  return root;
}
/**
 * Returns the declared root region with the given name, or null if none.
 * Only root-level names are supported; sub-region paths (containing '/')
 * are rejected.
 */
public Region getRegion(String path) {
  if (path.contains("/")) {
    throw new UnsupportedOperationException();
  }
  return (Region) this.roots.get(path);
}
// Declares a new (non-gateway-receiver) cache server.
public CacheServer addCacheServer() {
return addCacheServer(false);
}
// NOTE(review): the isGatewayReceiver argument is ignored -- the creation is
// always constructed with false. Confirm whether this is intentional before
// relying on this method for gateway receivers.
public CacheServer addCacheServer(boolean isGatewayReceiver) {
CacheServer bridge = new CacheServerCreation(this, false);
this.bridgeServers.add(bridge);
return bridge;
}
// Records the properties used to initialize a Declarable from cache.xml.
public void addDeclarableProperties(final Declarable declarable, final Properties properties) {
this.declarablePropertiesMap.put(declarable, properties);
}
// Returns the live (mutable) list of declared cache servers.
public List getCacheServers() {
return this.bridgeServers;
}
// Records a declared gateway sender and returns it for chaining.
public GatewaySender addGatewaySender(GatewaySender sender){
this.gatewaySenders.add(sender);
return sender;
}
// Records a declared gateway receiver and returns it for chaining.
public GatewayReceiver addGatewayReceiver(GatewayReceiver receiver){
this.gatewayReceivers.add(receiver);
return receiver;
}
// Records a declared async event queue and returns it for chaining.
public AsyncEventQueue addAsyncEventQueue(AsyncEventQueue asyncEventQueue) {
this.asyncEventQueues.add(asyncEventQueue);
return asyncEventQueue;
}
/**
 * Returns a snapshot of the declared gateway senders, excluding any that
 * exist only for internal use.
 */
public Set<GatewaySender> getGatewaySenders() {
  final Set<GatewaySender> visible = new HashSet<GatewaySender>();
  for (GatewaySender sender : this.gatewaySenders) {
    if (!((AbstractGatewaySender) sender).isForInternalUse()) {
      visible.add(sender);
    }
  }
  return visible;
}
/**
 * Returns the declared gateway sender with the given id, or null when no
 * sender with that id has been added.
 */
public GatewaySender getGatewaySender(String senderId) {
  GatewaySender match = null;
  for (GatewaySender candidate : this.gatewaySenders) {
    if (candidate.getId().equals(senderId)) {
      match = candidate;
      break;
    }
  }
  return match;
}
// public GatewayReceiver addGatewayReceiver(){
// GatewayReceiverCreation receiver = new GatewayReceiverCreation();
// this.gatewayReceivers.add(receiver);
// return receiver;
// }
//
// Returns the live (mutable) set of declared gateway receivers.
public Set<GatewayReceiver> getGatewayReceivers(){
return this.gatewayReceivers;
}
// Returns the live (mutable) set of declared async event queues.
public Set<AsyncEventQueue> getAsyncEventQueues() {
return this.asyncEventQueues;
}
// Returns the declared async event queue with the given id, or null.
public AsyncEventQueue getAsyncEventQueue(String id) {
for (AsyncEventQueue asyncEventQueue : this.asyncEventQueues) {
if (asyncEventQueue.getId().equals(id)) {
return asyncEventQueue;
}
}
return null;
}
// Records an explicit is-server declaration; hasServer distinguishes an
// explicit setting from the default.
public void setIsServer(boolean isServer) {
this.isServer = isServer;
this.hasServer = true;
}
/**
 * Returns true when this cache was explicitly declared a server, or when
 * at least one cache server has been added to it.
 */
public boolean isServer() {
  return this.isServer || !this.bridgeServers.isEmpty();
}
// True only when setIsServer was explicitly called.
boolean hasServer() {
return this.hasServer;
}
// Records the declared dynamic-region-factory configuration.
public void setDynamicRegionFactoryConfig(DynamicRegionFactory.Config v) {
this.dynamicRegionFactoryConfig = v;
this.hasDynamicRegionFactory = true;
}
boolean hasDynamicRegionFactory() {
return this.hasDynamicRegionFactory;
}
public DynamicRegionFactory.Config getDynamicRegionFactoryConfig() {
return this.dynamicRegionFactoryConfig;
}
// May be null: unlike GemFireCache, a creation only has a transaction
// manager when one was declared (see addCacheTransactionManagerCreation).
public CacheTransactionManager getCacheTransactionManager() {
return this.txMgrCreation;
}
/**
 * Implementation of {@link Cache#setCopyOnRead}
 * @since 4.0
 */
public void setCopyOnRead(boolean copyOnRead) {
this.copyOnRead = copyOnRead;
this.hasCopyOnRead = true;
}
/**
 * Implementation of {@link Cache#getCopyOnRead}
 * @since 4.0
 */
public boolean getCopyOnRead() {
return this.copyOnRead;
}
// True only when setCopyOnRead was explicitly called.
boolean hasCopyOnRead() {
return this.hasCopyOnRead;
}
/**
 * Adds a CacheTransactionManagerCreation for this Cache (really just a
 * placeholder since a CacheTransactionManager is really a Cache singleton)
 * @since 4.0
 * @see GemFireCacheImpl
 */
public void
addCacheTransactionManagerCreation(CacheTransactionManagerCreation txm) {
this.txMgrCreation = txm;
}
/**
 * @return Context jndi context associated with the Cache.
 */
public Context getJNDIContext() {
return JNDIInvoker.getJNDIContext();
}
// It's not used
/**
 * Returns the declared disk store with the given name, or the default disk
 * store's entry when storeName is null. Returns null when no matching
 * store was declared.
 */
public DiskStore findDiskStore(String storeName) {
  final String key =
      (storeName != null) ? storeName : GemFireCacheImpl.getDefaultDiskStoreName();
  return (DiskStore) this.diskStores.get(key);
}
// Records a declared disk store, keyed by its name.
public void addDiskStore(DiskStore ds) {
this.diskStores.put(ds.getName(), ds);
}
/**
 * Returns the DiskStore list
 *
 * @since prPersistSprint2
 */
public Collection<DiskStoreImpl> listDiskStores() {
return this.diskStores.values();
}
// Registers disk store attributes under an explicit name (may differ from
// dsac.getName()).
public void setDiskStore(String name, DiskStoreAttributesCreation dsac) {
// Assert.assertTrue(ds instanceof DiskStoreAttributesCreation,
// "Attributes are a " + ds.getClass().getName());
this.diskStores.put(name, dsac);
}
// Returns the named region attributes registered under the given id.
public RegionAttributes getRegionAttributes(String id) {
return (RegionAttributes) this.namedRegionAttributes.get(id);
}
// Registers named region attributes, wrapping non-creation attributes so
// everything stored is a RegionAttributesCreation.
public void setRegionAttributes(String id, RegionAttributes attrs) {
RegionAttributes a = attrs;
if (!(a instanceof RegionAttributesCreation)) {
a = new RegionAttributesCreation(this, a, false);
}
this.namedRegionAttributes.put(id, a);
this.regionAttributesNames.add(id);
}
// Read-only view of the named region attributes.
public Map listRegionAttributes() {
return Collections.unmodifiableMap(this.namedRegionAttributes);
}
// Runtime-only operations, unsupported on a declarative cache.
public void loadCacheXml(InputStream is)
throws TimeoutException, CacheWriterException,
RegionExistsException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
public void readyForEvents() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
// Private pool manager for pools declared in cache.xml (not the global one).
private final PoolManagerImpl pm = new PoolManagerImpl(false);
private volatile FunctionServiceCreation functionServiceCreation;
// Returns the declared pools, keyed by name.
public Map getPools() {
return this.pm.getMap();
}
// Pool factories created here start disabled; the pools are only started
// when applied to a real cache.
public PoolFactory createPoolFactory() {
return (new PoolFactoryImpl(this.pm)).setStartDisabled(true);
}
public Pool findPool(String name) {
return this.pm.find(name);
}
public void setFunctionServiceCreation(FunctionServiceCreation f) {
this.functionServiceCreation = f;
}
public FunctionServiceCreation getFunctionServiceCreation() {
return this.functionServiceCreation;
}
// hasResourceManager distinguishes "never declared" from a null creation.
private volatile boolean hasResourceManager = false;
private volatile ResourceManagerCreation resourceManagerCreation;
public void setResourceManagerCreation(ResourceManagerCreation rmc) {
this.hasResourceManager = true;
this.resourceManagerCreation = rmc;
}
public ResourceManagerCreation getResourceManager() {
return this.resourceManagerCreation;
}
public boolean hasResourceManager() {
return this.hasResourceManager;
}
// Serializer registration declared in cache.xml, if any.
private volatile boolean hasSerializerRegistration = false;
private volatile SerializerCreation serializerCreation;
public void setSerializerCreation(SerializerCreation sc) {
this.hasSerializerRegistration = true;
this.serializerCreation = sc;
}
public SerializerCreation getSerializerCreation() {
return this.serializerCreation;
}
public boolean hasSerializerCreation() {
return this.hasSerializerRegistration;
}
// Not available on a declarative cache.
public FunctionService getFunctionService(){
throw new UnsupportedOperationException();
}
// Declared backup target files.
public void addBackup(File backup) {
this.backups.add(backup);
}
public List<File> getBackupFiles() {
return Collections.unmodifiableList(this.backups);
}
// WAN factories are provided by the (optionally present) WAN service.
public GatewaySenderFactory createGatewaySenderFactory(){
return WANServiceProvider.createGatewaySenderFactory(this);
}
public GatewayReceiverFactory createGatewayReceiverFactory() {
return WANServiceProvider.createGatewayReceiverFactory(this);
}
public AsyncEventQueueFactory createAsyncEventQueueFactory() {
return new AsyncEventQueueFactoryImpl(this);
}
// PDX settings are all delegated to the shared CacheConfig instance.
public void setPdxReadSerialized(boolean readSerialized) {
cacheConfig.setPdxReadSerialized(readSerialized);
}
public void setPdxIgnoreUnreadFields(boolean ignore) {
cacheConfig.setPdxIgnoreUnreadFields(ignore);
}
public void setPdxSerializer(PdxSerializer serializer) {
cacheConfig.setPdxSerializer(serializer);
}
public void setPdxDiskStore(String diskStore) {
cacheConfig.setPdxDiskStore(diskStore);
}
public void setPdxPersistent(boolean persistent) {
cacheConfig.setPdxPersistent(persistent);
}
/**
 * Returns whether PdxInstance is preferred for PDX types instead of Java object.
 * @see com.gemstone.gemfire.cache.CacheFactory#setPdxReadSerialized(boolean)
 *
 * @since 6.6
 */
public boolean getPdxReadSerialized() {
return cacheConfig.isPdxReadSerialized();
}
public PdxSerializer getPdxSerializer() {
return cacheConfig.getPdxSerializer();
}
public String getPdxDiskStore() {
return cacheConfig.getPdxDiskStore();
}
public boolean getPdxPersistent() {
return cacheConfig.isPdxPersistent();
}
public boolean getPdxIgnoreUnreadFields() {
return cacheConfig.getPdxIgnoreUnreadFields();
}
public CacheConfig getCacheConfig() {
return cacheConfig;
}
/* (non-Javadoc)
 * @see com.gemstone.gemfire.cache.Cache#getMembers()
 */
public Set<DistributedMember> getMembers() {
  // A declarative cache has no distributed system, so membership is always
  // empty. Collections.emptySet() is the type-safe replacement for the raw
  // EMPTY_SET constant (identical behavior, no unchecked conversion).
  return Collections.emptySet();
}
/* (non-Javadoc)
 * @see com.gemstone.gemfire.cache.Cache#getAdminMembers()
 */
public Set<DistributedMember> getAdminMembers() {
  return Collections.emptySet();
}
/* (non-Javadoc)
 * @see com.gemstone.gemfire.cache.Cache#getMembers(com.gemstone.gemfire.cache.Region)
 */
public Set<DistributedMember> getMembers(Region r) {
  return Collections.emptySet();
}
// Optional cache initializer declared in cache.xml and its init properties.
private Declarable initializer = null;
private Properties initializerProps = null;
public Declarable getInitializer() {
return this.initializer;
}
public Properties getInitializerProps() {
return this.initializerProps;
}
public void setInitializer(Declarable d, Properties props) {
this.initializer = d;
this.initializerProps = props;
}
// Invokes the declared initializer, if any, with its declared properties.
protected void runInitializer() {
if (getInitializer() != null) {
getInitializer().init(getInitializerProps());
}
}
public void setGatewayConflictResolver(GatewayConflictResolver g) {
this.gatewayConflictResolver = g;
}
public GatewayConflictResolver getGatewayConflictResolver() {
return this.gatewayConflictResolver;
}
// PDX instance/enum construction requires a live cache.
public PdxInstanceFactory createPdxInstanceFactory(String className) {
throw new UnsupportedOperationException();
}
public PdxInstanceFactory createPdxInstanceFactory(String className, boolean b) {
throw new UnsupportedOperationException();
}
public PdxInstance createPdxEnum(String className, String enumName, int enumOrdinal) {
throw new UnsupportedOperationException();
}
public CacheSnapshotService getSnapshotService() {
throw new UnsupportedOperationException();
}
/**
 * @see Extensible#getExtensionPoint()
 * @since 8.1
 */
@Override
public ExtensionPoint<Cache> getExtensionPoint() {
return extensionPoint;
}
@Override
public Collection<HDFSStoreImpl> getHDFSStores() {
return this.hdfsStores.values();
}
public void addHDFSStore(String name, HDFSStoreCreation hs) {
this.hdfsStores.put(name, hs);
}
// The remaining overrides have no meaningful declarative equivalent and
// simply return null.
@Override
public DistributedMember getMyId() {
return null;
}
@Override
public Collection<DiskStoreImpl> listDiskStoresIncludingDefault() {
return null;
}
@Override
public Collection<DiskStoreImpl> listDiskStoresIncludingRegionOwned() {
return null;
}
@Override
public CqService getCqService() {
return null;
}
/**
 * Minimal QueryService used while parsing cache.xml: index-creation calls
 * are recorded as IndexCreationData grouped by their fromClause; every
 * other operation is unsupported.
 */
// NOTE(review): public mutable field -- confirm nothing assigns it
// externally before reducing visibility.
public QueryService queryService = new com.gemstone.gemfire.cache.query.QueryService() {
// Recorded index definitions, keyed by fromClause (see createIndex below).
private Map<String, List> indexes = new HashMap<String, List>();
@Override
public Query newQuery(String queryString) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public Index createHashIndex(String indexName, String indexedExpression,
String regionPath) throws IndexInvalidException,
IndexNameConflictException, IndexExistsException,
RegionNotFoundException, UnsupportedOperationException {
return createHashIndex(indexName, indexedExpression, regionPath, "");
}
@Override
public Index createHashIndex(String indexName, String indexedExpression,
String regionPath, String imports) throws IndexInvalidException,
IndexNameConflictException, IndexExistsException,
RegionNotFoundException, UnsupportedOperationException {
return createIndex(indexName, IndexType.HASH, indexedExpression, regionPath, imports);
}
@Override
public Index createIndex(String indexName, IndexType indexType,
String indexedExpression, String fromClause)
throws IndexInvalidException, IndexNameConflictException,
IndexExistsException, RegionNotFoundException,
UnsupportedOperationException {
return createIndex(indexName, indexType, indexedExpression, fromClause, "");
}
@Override
/**
 * Due to not having the full implementation to determine region names etc
 * this implementation will only match a single region with no alias at this time
 */
public Index createIndex(String indexName, IndexType indexType,
String indexedExpression, String fromClause, String imports)
throws IndexInvalidException, IndexNameConflictException,
IndexExistsException, RegionNotFoundException,
UnsupportedOperationException {
// Record the definition rather than building a real index; the returned
// IndexCreationData doubles as the Index handle.
IndexCreationData indexData = new IndexCreationData(indexName);
indexData.setFunctionalIndexData(fromClause, indexedExpression, imports);
indexData.setIndexType(indexType.toString());
List indexesForRegion = indexes.get(fromClause);
if (indexesForRegion == null) {
indexesForRegion = new ArrayList();
indexes.put(fromClause, indexesForRegion);
}
indexesForRegion.add(indexData);
return indexData;
}
@Override
public Index createIndex(String indexName, String indexedExpression,
String regionPath) throws IndexInvalidException,
IndexNameConflictException, IndexExistsException,
RegionNotFoundException, UnsupportedOperationException {
return createIndex(indexName, indexedExpression, regionPath, "");
}
@Override
public Index createIndex(String indexName, String indexedExpression,
String regionPath, String imports) throws IndexInvalidException,
IndexNameConflictException, IndexExistsException,
RegionNotFoundException, UnsupportedOperationException {
return createIndex(indexName, IndexType.FUNCTIONAL, indexedExpression, regionPath, imports);
}
@Override
public Index createKeyIndex(String indexName, String indexedExpression,
String regionPath) throws IndexInvalidException,
IndexNameConflictException, IndexExistsException,
RegionNotFoundException, UnsupportedOperationException {
return createIndex(indexName, IndexType.PRIMARY_KEY, indexedExpression, regionPath, "");
}
@Override
public Index getIndex(Region<?, ?> region, String indexName) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public Collection<Index> getIndexes() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
// Lookup works only when the fromClause used at creation time equals the
// region's full path (no aliases) -- see the note on createIndex above.
public Collection<Index> getIndexes(Region<?, ?> region) {
return indexes.get(region.getFullPath());
}
@Override
public Collection<Index> getIndexes(Region<?, ?> region,
IndexType indexType) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void removeIndex(Index index) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void removeIndexes() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void removeIndexes(Region<?, ?> region) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery newCq(String queryString, CqAttributes cqAttr)
throws QueryInvalidException, CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery newCq(String queryString, CqAttributes cqAttr,
boolean isDurable) throws QueryInvalidException, CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery newCq(String name, String queryString, CqAttributes cqAttr)
throws QueryInvalidException, CqExistsException, CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery newCq(String name, String queryString,
CqAttributes cqAttr, boolean isDurable) throws QueryInvalidException,
CqExistsException, CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void closeCqs() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery[] getCqs() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery[] getCqs(String regionName) throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqQuery getCq(String cqName) {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void executeCqs() throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void stopCqs() throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void executeCqs(String regionName) throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void stopCqs(String regionName) throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public List<String> getAllDurableCqsFromServer() throws CqException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public CqServiceStatistics getCqStatistics() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void defineKeyIndex(String indexName, String indexedExpression,
String fromClause) throws RegionNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void defineHashIndex(String indexName, String indexedExpression,
String regionPath) throws RegionNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void defineHashIndex(String indexName, String indexedExpression,
String regionPath, String imports) throws RegionNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void defineIndex(String indexName, String indexedExpression,
String regionPath) throws RegionNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public void defineIndex(String indexName, String indexedExpression,
String regionPath, String imports) throws RegionNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public List<Index> createDefinedIndexes() throws MultiIndexCreationException {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
@Override
public boolean clearDefinedIndexes() {
throw new UnsupportedOperationException(LocalizedStrings.SHOULDNT_INVOKE.toLocalizedString());
}
};
// Cache extension services are not available on a declarative cache.
@Override
public <T extends CacheService> T getService(Class<T> clazz) {
throw new UnsupportedOperationException();
}
}
|
/*
* Copyright (c) 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.imageio.plugins.jpeg;
import javax.imageio.IIOException;
import javax.imageio.metadata.IIOInvalidTreeException;
import javax.imageio.metadata.IIOMetadataNode;
import javax.imageio.stream.ImageOutputStream;
import java.io.IOException;
import org.w3c.dom.Node;
import org.w3c.dom.NamedNodeMap;
/**
 * An Adobe APP14 (Application-Specific) marker segment.
 */
class AdobeMarkerSegment extends MarkerSegment {
    int version;
    int flags0;
    int flags1;
    int transform;
    private static final int ID_SIZE = 5;

    /** Builds a segment with default version (101) and flags, and the given transform. */
    AdobeMarkerSegment(int transform) {
        super(JPEG.APP14);
        this.version = 101;
        this.flags0 = 0;
        this.flags1 = 0;
        this.transform = transform;
    }

    /** Parses a segment from the buffer, consuming the segment's payload. */
    AdobeMarkerSegment(JPEGBuffer buffer) throws IOException {
        super(buffer);
        buffer.bufPtr += ID_SIZE; // Skip the id
        version = readUnsignedShort(buffer);
        flags0 = readUnsignedShort(buffer);
        flags1 = readUnsignedShort(buffer);
        transform = buffer.buf[buffer.bufPtr++] & 0xff;
        buffer.bufAvail -= length;
    }

    /** Reads a big-endian unsigned 16-bit value, advancing the buffer pointer by 2. */
    private static int readUnsignedShort(JPEGBuffer buffer) {
        final int hi = buffer.buf[buffer.bufPtr++] & 0xff;
        final int lo = buffer.buf[buffer.bufPtr++] & 0xff;
        return (hi << 8) | lo;
    }

    /** Builds a segment from a native metadata tree node. */
    AdobeMarkerSegment(Node node) throws IIOInvalidTreeException {
        this(0); // default transform will be changed
        updateFromNativeNode(node, true);
    }

    /** Returns the native metadata node ("app14Adobe") describing this segment. */
    IIOMetadataNode getNativeNode() {
        final IIOMetadataNode node = new IIOMetadataNode("app14Adobe");
        node.setAttribute("version", Integer.toString(version));
        node.setAttribute("flags0", Integer.toString(flags0));
        node.setAttribute("flags1", Integer.toString(flags1));
        node.setAttribute("transform", Integer.toString(transform));
        return node;
    }

    /** Updates this segment from a native metadata node; only "transform" is required. */
    void updateFromNativeNode(Node node, boolean fromScratch)
            throws IIOInvalidTreeException {
        // Only the transform is required
        final NamedNodeMap attrs = node.getAttributes();
        transform = getAttributeValue(node, attrs, "transform", 0, 2, true);
        final int count = attrs.getLength();
        if (count > 4) {
            throw new IIOInvalidTreeException
                ("Adobe APP14 node cannot have > 4 attributes", node);
        }
        if (count > 1) {
            // Optional attributes: -1 means "not present"; keep the old value.
            int value = getAttributeValue(node, attrs, "version", 100, 255, false);
            if (value != -1) {
                version = value;
            }
            value = getAttributeValue(node, attrs, "flags0", 0, 65535, false);
            if (value != -1) {
                flags0 = value;
            }
            value = getAttributeValue(node, attrs, "flags1", 0, 65535, false);
            if (value != -1) {
                flags1 = value;
            }
        }
    }

    /**
     * Writes the data for this segment to the stream in
     * valid JPEG format.
     */
    void write(ImageOutputStream ios) throws IOException {
        length = 14;
        writeTag(ios);
        final byte[] id = {0x41, 0x64, 0x6F, 0x62, 0x65}; // "Adobe"
        ios.write(id);
        write2bytes(ios, version);
        write2bytes(ios, flags0);
        write2bytes(ios, flags1);
        ios.write(transform);
    }

    /** Convenience helper: writes a fresh Adobe segment with the given transform. */
    static void writeAdobeSegment(ImageOutputStream ios, int transform)
            throws IOException {
        new AdobeMarkerSegment(transform).write(ios);
    }

    /** Debug dump of this segment's fields to standard output. */
    void print() {
        printTag("Adobe APP14");
        System.out.print("Version: ");
        System.out.println(version);
        System.out.print("Flags0: 0x");
        System.out.println(Integer.toHexString(flags0));
        System.out.print("Flags1: 0x");
        System.out.println(Integer.toHexString(flags1));
        System.out.print("Transform: ");
        System.out.println(transform);
    }
}
|
package org.coredb.portal.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.*;
/**
* ServiceAccess
*/
@Validated
public class ServiceAccess {
  // Twelve independent feature toggles; each has a Jackson-bound field,
  // a fluent setter, a getter, and a plain setter. null means "unset".

  @JsonProperty("enableShow")
  private Boolean enableShow = null;

  @JsonProperty("enableIdentity")
  private Boolean enableIdentity = null;

  @JsonProperty("enableProfile")
  private Boolean enableProfile = null;

  @JsonProperty("enableGroup")
  private Boolean enableGroup = null;

  @JsonProperty("enableShare")
  private Boolean enableShare = null;

  @JsonProperty("enablePrompt")
  private Boolean enablePrompt = null;

  @JsonProperty("enableService")
  private Boolean enableService = null;

  @JsonProperty("enableIndex")
  private Boolean enableIndex = null;

  @JsonProperty("enableUser")
  private Boolean enableUser = null;

  @JsonProperty("enableAccess")
  private Boolean enableAccess = null;

  @JsonProperty("enableAccount")
  private Boolean enableAccount = null;

  @JsonProperty("enableConversation")
  private Boolean enableConversation = null;

  /** Fluent setter for {@code enableShow}; returns this for chaining. */
  public ServiceAccess enableShow(Boolean enableShow) { this.enableShow = enableShow; return this; }

  /** @return enableShow */
  @ApiModelProperty(value = "")
  public Boolean isEnableShow() { return enableShow; }

  public void setEnableShow(Boolean enableShow) { this.enableShow = enableShow; }

  /** Fluent setter for {@code enableIdentity}; returns this for chaining. */
  public ServiceAccess enableIdentity(Boolean enableIdentity) { this.enableIdentity = enableIdentity; return this; }

  /** @return enableIdentity */
  @ApiModelProperty(value = "")
  public Boolean isEnableIdentity() { return enableIdentity; }

  public void setEnableIdentity(Boolean enableIdentity) { this.enableIdentity = enableIdentity; }

  /** Fluent setter for {@code enableProfile}; returns this for chaining. */
  public ServiceAccess enableProfile(Boolean enableProfile) { this.enableProfile = enableProfile; return this; }

  /** @return enableProfile */
  @ApiModelProperty(value = "")
  public Boolean isEnableProfile() { return enableProfile; }

  public void setEnableProfile(Boolean enableProfile) { this.enableProfile = enableProfile; }

  /** Fluent setter for {@code enableGroup}; returns this for chaining. */
  public ServiceAccess enableGroup(Boolean enableGroup) { this.enableGroup = enableGroup; return this; }

  /** @return enableGroup */
  @ApiModelProperty(value = "")
  public Boolean isEnableGroup() { return enableGroup; }

  public void setEnableGroup(Boolean enableGroup) { this.enableGroup = enableGroup; }

  /** Fluent setter for {@code enableShare}; returns this for chaining. */
  public ServiceAccess enableShare(Boolean enableShare) { this.enableShare = enableShare; return this; }

  /** @return enableShare */
  @ApiModelProperty(value = "")
  public Boolean isEnableShare() { return enableShare; }

  public void setEnableShare(Boolean enableShare) { this.enableShare = enableShare; }

  /** Fluent setter for {@code enablePrompt}; returns this for chaining. */
  public ServiceAccess enablePrompt(Boolean enablePrompt) { this.enablePrompt = enablePrompt; return this; }

  /** @return enablePrompt */
  @ApiModelProperty(value = "")
  public Boolean isEnablePrompt() { return enablePrompt; }

  public void setEnablePrompt(Boolean enablePrompt) { this.enablePrompt = enablePrompt; }

  /** Fluent setter for {@code enableService}; returns this for chaining. */
  public ServiceAccess enableService(Boolean enableService) { this.enableService = enableService; return this; }

  /** @return enableService */
  @ApiModelProperty(value = "")
  public Boolean isEnableService() { return enableService; }

  public void setEnableService(Boolean enableService) { this.enableService = enableService; }

  /** Fluent setter for {@code enableIndex}; returns this for chaining. */
  public ServiceAccess enableIndex(Boolean enableIndex) { this.enableIndex = enableIndex; return this; }

  /** @return enableIndex */
  @ApiModelProperty(value = "")
  public Boolean isEnableIndex() { return enableIndex; }

  public void setEnableIndex(Boolean enableIndex) { this.enableIndex = enableIndex; }

  /** Fluent setter for {@code enableUser}; returns this for chaining. */
  public ServiceAccess enableUser(Boolean enableUser) { this.enableUser = enableUser; return this; }

  /** @return enableUser */
  @ApiModelProperty(value = "")
  public Boolean isEnableUser() { return enableUser; }

  public void setEnableUser(Boolean enableUser) { this.enableUser = enableUser; }

  /** Fluent setter for {@code enableAccess}; returns this for chaining. */
  public ServiceAccess enableAccess(Boolean enableAccess) { this.enableAccess = enableAccess; return this; }

  /** @return enableAccess */
  @ApiModelProperty(value = "")
  public Boolean isEnableAccess() { return enableAccess; }

  public void setEnableAccess(Boolean enableAccess) { this.enableAccess = enableAccess; }

  /** Fluent setter for {@code enableAccount}; returns this for chaining. */
  public ServiceAccess enableAccount(Boolean enableAccount) { this.enableAccount = enableAccount; return this; }

  /** @return enableAccount */
  @ApiModelProperty(value = "")
  public Boolean isEnableAccount() { return enableAccount; }

  public void setEnableAccount(Boolean enableAccount) { this.enableAccount = enableAccount; }

  /** Fluent setter for {@code enableConversation}; returns this for chaining. */
  public ServiceAccess enableConversation(Boolean enableConversation) { this.enableConversation = enableConversation; return this; }

  /** @return enableConversation */
  @ApiModelProperty(value = "")
  public Boolean isEnableConversation() { return enableConversation; }

  public void setEnableConversation(Boolean enableConversation) { this.enableConversation = enableConversation; }

  /** Value equality over all twelve flags; requires the exact same runtime class. */
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final ServiceAccess that = (ServiceAccess) o;
    return Objects.equals(this.enableShow, that.enableShow)
        && Objects.equals(this.enableIdentity, that.enableIdentity)
        && Objects.equals(this.enableProfile, that.enableProfile)
        && Objects.equals(this.enableGroup, that.enableGroup)
        && Objects.equals(this.enableShare, that.enableShare)
        && Objects.equals(this.enablePrompt, that.enablePrompt)
        && Objects.equals(this.enableService, that.enableService)
        && Objects.equals(this.enableIndex, that.enableIndex)
        && Objects.equals(this.enableUser, that.enableUser)
        && Objects.equals(this.enableAccess, that.enableAccess)
        && Objects.equals(this.enableAccount, that.enableAccount)
        && Objects.equals(this.enableConversation, that.enableConversation);
  }

  @Override
  public int hashCode() {
    // Must hash the same fields, in the same order, as equals compares.
    return Objects.hash(enableShow, enableIdentity, enableProfile, enableGroup, enableShare, enablePrompt, enableService, enableIndex, enableUser, enableAccess, enableAccount, enableConversation);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("class ServiceAccess {\n");
    appendField(sb, "enableShow", enableShow);
    appendField(sb, "enableIdentity", enableIdentity);
    appendField(sb, "enableProfile", enableProfile);
    appendField(sb, "enableGroup", enableGroup);
    appendField(sb, "enableShare", enableShare);
    appendField(sb, "enablePrompt", enablePrompt);
    appendField(sb, "enableService", enableService);
    appendField(sb, "enableIndex", enableIndex);
    appendField(sb, "enableUser", enableUser);
    appendField(sb, "enableAccess", enableAccess);
    appendField(sb, "enableAccount", enableAccount);
    appendField(sb, "enableConversation", enableConversation);
    return sb.append("}").toString();
  }

  /** Appends one "    name: value\n" line in the generated-model format. */
  private void appendField(StringBuilder sb, String name, java.lang.Object value) {
    sb.append("    ").append(name).append(": ").append(toIndentedString(value)).append("\n");
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
|
package uk.joshiejack.husbandry.entity.traits.happiness;
import com.mojang.blaze3d.matrix.MatrixStack;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.widget.Widget;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import uk.joshiejack.husbandry.Husbandry;
import uk.joshiejack.husbandry.api.IMobStats;
import uk.joshiejack.husbandry.api.trait.IIconTrait;
import uk.joshiejack.husbandry.api.trait.IRenderTrait;
import uk.joshiejack.penguinlib.util.icon.Icon;
import uk.joshiejack.penguinlib.util.icon.TextureIcon;
public class AbstractLoveableTrait implements IRenderTrait, IIconTrait {
    // Vanilla icon sheet, used for heart backgrounds within the mob's max hearts.
    private static final ResourceLocation MINECRAFT_ICONS = new ResourceLocation("minecraft", "textures/gui/icons.png");
    // Husbandry's own icon sheet; also used for slots beyond max hearts.
    public static final ResourceLocation HUSBANDRY_ICONS = new ResourceLocation(Husbandry.MODID, "textures/gui/icons.png");
    // Trait icon taken from (32, 0) on the Husbandry sheet.
    public static final Icon ICON = new TextureIcon(HUSBANDRY_ICONS, 32, 0);

    /** Returns the trait icon, rendered shadowed when the mob is unloved. */
    @Override
    public Icon getIcon(IMobStats<?> stats) {
        return stats.isUnloved() ? ICON.shadowed() : ICON;
    }

    /**
     * Draws a row of 10 heart slots starting at (x+24, y+6), 10px apart.
     * Slot backgrounds come from the Husbandry sheet for slots past the
     * mob's max hearts and from the vanilla sheet otherwise (presumably a
     * "locked" vs. empty heart background — confirm against the textures);
     * a filled-heart overlay is drawn for slots below the current count.
     */
    @OnlyIn(Dist.CLIENT)
    @Override
    public void render(MatrixStack matrix, Widget widget, int x, int y, IMobStats<?> stats) {
        for (int i = 0; i < 10; i++) {
            // Bind the sheet for this slot's background before blitting.
            Minecraft.getInstance().getTextureManager().bind(i >= stats.getMaxHearts() ? HUSBANDRY_ICONS : MINECRAFT_ICONS);
            widget.blit(matrix, x + 24 + 10 * i, y + 6, 16, 0, 9, 9);
            if (i < stats.getHearts())
                widget.blit(matrix, x + 24 + 10 * i, y + 6, 52, 0, 9, 9);
        }
    }
}
|
package org.kafkainaction.serde;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.kafkainaction.model.Alert;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
 * Kafka key (de)serializer for {@code Alert} records.
 *
 * <p>Serialization uses only the alert's stage id, encoded as UTF-8 bytes,
 * so that all alerts for the same stage hash to the same partition.
 * Deserialization is intentionally not implemented yet and returns
 * {@code null}.
 */
public class AlertKeySerde implements Serializer<Alert>,
    Deserializer<Alert> {

  /**
   * Serializes the alert key as the UTF-8 bytes of its stage id.
   *
   * @param topic topic the record is destined for (unused)
   * @param key   alert to serialize; may be null (Kafka passes null for
   *              records without a key)
   * @return UTF-8 bytes of the stage id, or null when {@code key} is null
   */
  @Override
  public byte[] serialize(String topic, Alert key) {
    if (key == null) {
      return null;
    }
    return key.getStageId().getBytes(StandardCharsets.UTF_8);
  }

  /**
   * Not implemented yet; always returns {@code null}.
   */
  @Override
  public Alert deserialize(String topic, byte[] value) {
    // We will leave this part for later
    return null;
  }

  // Serializer and Deserializer both declare default configure/close with the
  // same signatures, so the compiler requires an explicit override here to
  // resolve the conflict; both defaults are no-ops, and we delegate to the
  // Serializer side arbitrarily.
  @Override
  public void configure(final Map<String, ?> configs, final boolean isKey) {
    Serializer.super.configure(configs, isKey);
  }

  @Override
  public void close() {
    Serializer.super.close();
  }
}
|
package com.example.a41448.huawu.base.comment;
/**
 * Display modes controlling whether a numeric value is shown or hidden.
 *
 * <p>Constant names keep their original lower-camel spelling for
 * compatibility with existing callers and serialized values; do not rename.
 */
public enum ShowType {
    /** The number is displayed. */
    showNumber,
    /** The number is hidden. */
    hideNumber
}
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.*;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.codeInsight.completion.scope.CompletionElement;
import com.intellij.codeInsight.completion.scope.JavaCompletionProcessor;
import com.intellij.codeInsight.completion.util.ParenthesesInsertHandler;
import com.intellij.codeInsight.daemon.impl.analysis.LambdaHighlightingUtil;
import com.intellij.codeInsight.editorActions.TabOutScopesTracker;
import com.intellij.codeInsight.guess.GuessManager;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInspection.java15api.Java15APIUsageInspection;
import com.intellij.lang.StdLanguages;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.PsiJavaPatterns;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.filters.ElementFilter;
import com.intellij.psi.impl.FakePsiElement;
import com.intellij.psi.impl.light.LightVariableBuilder;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.PsiImmediateClassType;
import com.intellij.psi.scope.ElementClassHint;
import com.intellij.psi.scope.NameHint;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.scope.util.PsiScopesUtil;
import com.intellij.psi.util.*;
import com.intellij.psi.util.proximity.ReferenceListWeigher;
import com.intellij.ui.JBColor;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PairFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.siyeh.ig.psiutils.SideEffectChecker;
import gnu.trove.THashSet;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.codeInsight.completion.ReferenceExpressionCompletionContributor.findConstantsUsedInSwitch;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.psi.util.proximity.ReferenceListWeigher.ReferenceListApplicability.inapplicable;
public class JavaCompletionUtil {
// Marks lookup items whose full method signature should always be shown.
public static final Key<Boolean> FORCE_SHOW_SIGNATURE_ATTR = Key.create("forceShowSignature");
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.JavaCompletionUtil");
// Hook allowing a file to supply a runtime-type evaluator for qualifier expressions.
public static final Key<PairFunction<PsiExpression, CompletionParameters, PsiType>> DYNAMIC_TYPE_EVALUATOR = Key.create("DYNAMIC_TYPE_EVALUATOR");
private static final Key<PsiType> QUALIFIER_TYPE_ATTR = Key.create("qualifierType"); // SmartPsiElementPointer to PsiType of "qualifier"
// Lazily computed expected types at the completion location; forced empty
// directly before a "." since no expected type is meaningful there.
static final NullableLazyKey<ExpectedTypeInfo[], CompletionLocation> EXPECTED_TYPES = NullableLazyKey.create("expectedTypes",
  location -> {
    if (PsiJavaPatterns.psiElement().beforeLeaf(PsiJavaPatterns.psiElement().withText("."))
      .accepts(location.getCompletionParameters().getPosition())) {
      return ExpectedTypeInfo.EMPTY_ARRAY;
    }
    return JavaSmartCompletionContributor.getExpectedTypes(location.getCompletionParameters());
  });
public static final Key<Boolean> SUPER_METHOD_PARAMETERS = Key.create("SUPER_METHOD_PARAMETERS");
/**
 * Collects the distinct expected types at the completion position, or
 * {@code null} when the position is not inside an expression context.
 */
@Nullable
public static Set<PsiType> getExpectedTypes(final CompletionParameters parameters) {
  final PsiExpression expression = PsiTreeUtil.getContextOfType(parameters.getPosition(), PsiExpression.class, true);
  if (expression == null) {
    return null;
  }
  final Set<PsiType> result = new THashSet<>();
  for (ExpectedTypeInfo info : JavaSmartCompletionContributor.getExpectedTypes(parameters)) {
    result.add(info.getType());
  }
  return result;
}
// Per-item cache of all method overloads shown for a lookup element, stored
// as smart pointers so the methods survive PSI reparses.
private static final Key<List<SmartPsiElementPointer<PsiMethod>>> ALL_METHODS_ATTRIBUTE = Key.create("allMethods");
/**
 * Returns the qualifier type recorded on {@code item} during completion
 * (see QUALIFIER_TYPE_ATTR), or null if none was stored.
 */
public static PsiType getQualifierType(LookupElement item) {
  return item.getUserData(QUALIFIER_TYPE_ATTR);
}
/**
 * Adds refactoring name suggestions for a variable of the given type and
 * kind to {@code set}, filtering candidates by camel-hump matching
 * against {@code prefix}.
 */
public static void completeVariableNameForRefactoring(Project project, Set<LookupElement> set, String prefix, PsiType varType, VariableKind varKind) {
  JavaMemberNameCompletionContributor
    .completeVariableNameForRefactoring(project, set, new CamelHumpMatcher(prefix), varType, varKind, true, false);
}
/**
 * Caches all overloads for {@code item} as smart pointers so they can be
 * retrieved later via {@code getAllMethods} even after a reparse.
 */
public static void putAllMethods(LookupElement item, List<? extends PsiMethod> methods) {
  List<SmartPsiElementPointer<PsiMethod>> pointers = ContainerUtil.map(
    methods,
    method -> SmartPointerManager.getInstance(method.getProject()).createSmartPsiElementPointer(method));
  item.putUserData(ALL_METHODS_ATTRIBUTE, pointers);
}
/**
 * Restores the overload list cached by {@code putAllMethods}, dropping
 * pointers whose elements are no longer valid; null when nothing was cached.
 */
public static List<PsiMethod> getAllMethods(LookupElement item) {
  final List<SmartPsiElementPointer<PsiMethod>> pointers = item.getUserData(ALL_METHODS_ATTRIBUTE);
  return pointers == null ? null : ContainerUtil.mapNotNull(pointers, SmartPsiElementPointer::getElement);
}
/**
 * Returns refactoring name suggestions for a variable of the given type and
 * kind, seeded with {@code suggestedNameInfo} and using an empty prefix
 * (i.e. all candidates match); delegates to the member-name contributor.
 */
public static String[] completeVariableNameForRefactoring(JavaCodeStyleManager codeStyleManager, @Nullable final PsiType varType,
                                                          final VariableKind varKind,
                                                          SuggestedNameInfo suggestedNameInfo) {
  return JavaMemberNameCompletionContributor
    .completeVariableNameForRefactoring(codeStyleManager, new CamelHumpMatcher(""), varType, varKind, suggestedNameInfo, true, false);
}
/**
 * Checks whether {@code member} belongs to a package the user has excluded
 * from completion. Instance methods/fields (and optionally inner classes
 * with an enclosing instance) are never treated as excluded, since they are
 * reached through a qualifier rather than an import.
 */
public static boolean isInExcludedPackage(@NotNull final PsiMember member, boolean allowInstanceInnerClasses) {
  final String qualifiedName = PsiUtil.getMemberQualifiedName(member);
  if (qualifiedName == null) {
    return false;
  }
  final boolean instanceMember = !member.hasModifierProperty(PsiModifier.STATIC);
  if (instanceMember) {
    if (member instanceof PsiMethod || member instanceof PsiField) {
      return false;
    }
    if (allowInstanceInnerClasses && member instanceof PsiClass && member.getContainingClass() != null) {
      return false;
    }
  }
  return JavaProjectCodeInsightSettings.getSettings(member.getProject()).isExcluded(qualifiedName);
}
/**
 * Maps a type from a completion-time file copy back to the corresponding
 * type over the original ("physical") PSI, so class references inside the
 * type point at real classes rather than in-memory copies.
 *
 * @throws AssertionError if the type mapper unexpectedly yields null
 */
@NotNull
public static <T extends PsiType> T originalize(@NotNull T type) {
  if (!type.isValid()) {
    return type;
  }
  T result = new PsiTypeMapper() {
    // Guards against infinite recursion on self-referential class types.
    private final Set<PsiClassType> myVisited = ContainerUtil.newIdentityTroveSet();

    @Override
    public PsiType visitClassType(final PsiClassType classType) {
      if (!myVisited.add(classType)) return classType;
      final PsiClassType.ClassResolveResult classResolveResult = classType.resolveGenerics();
      final PsiClass psiClass = classResolveResult.getElement();
      final PsiSubstitutor substitutor = classResolveResult.getSubstitutor();
      if (psiClass == null) return classType;

      // Rebuild the type around the original class with an originalized substitutor.
      return new PsiImmediateClassType(CompletionUtil.getOriginalOrSelf(psiClass), originalizeSubstitutor(substitutor));
    }

    // Originalizes every type-parameter binding in the substitutor.
    private PsiSubstitutor originalizeSubstitutor(final PsiSubstitutor substitutor) {
      PsiSubstitutor originalSubstitutor = PsiSubstitutor.EMPTY;
      for (final Map.Entry<PsiTypeParameter, PsiType> entry : substitutor.getSubstitutionMap().entrySet()) {
        final PsiType value = entry.getValue();
        originalSubstitutor = originalSubstitutor.put(CompletionUtil.getOriginalOrSelf(entry.getKey()),
                                                      value == null ? null : mapType(value));
      }
      return originalSubstitutor;
    }

    @Override
    public PsiType visitType(PsiType type) {
      return type;
    }
  }.mapType(type);
  if (result == null) {
    throw new AssertionError("Null result for type " + type + " of class " + type.getClass());
  }
  return result;
}
/**
 * Returns the PSI elements behind a lookup item: the cached overload list
 * when present, otherwise the item's own PSI object as a singleton list,
 * or null when the item wraps no PSI element.
 */
@Nullable
public static List<? extends PsiElement> getAllPsiElements(final LookupElement item) {
  final List<PsiMethod> cachedOverloads = getAllMethods(item);
  if (cachedOverloads != null) {
    return cachedOverloads;
  }
  final Object object = item.getObject();
  return object instanceof PsiElement ? Collections.singletonList((PsiElement)object) : null;
}
/**
 * Returns the declared type of the lookup element when it is a
 * {@code TypedLookupItem}; null otherwise.
 */
@Nullable
public static PsiType getLookupElementType(final LookupElement element) {
  final TypedLookupItem typedItem = element.as(TypedLookupItem.CLASS_CONDITION_KEY);
  if (typedItem == null) {
    return null;
  }
  return typedItem.getType();
}
/**
 * Computes the type of {@code member} as seen through {@code qualifierType}:
 * processes the qualifier's type declarations to capture the substitutor in
 * effect for the member, then applies it to the member's unsubstituted type.
 */
@Nullable
public static PsiType getQualifiedMemberReferenceType(@Nullable PsiType qualifierType, @NotNull final PsiMember member) {
  final Ref<PsiSubstitutor> subst = Ref.create(PsiSubstitutor.EMPTY);
  // Scope processor that only reacts to the exact member we are after and
  // records the substitutor the resolve machinery hands us for it.
  class MyProcessor implements PsiScopeProcessor, NameHint, ElementClassHint {
    @Override
    public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) {
      if (element == member) {
        subst.set(state.get(PsiSubstitutor.KEY));
      }
      return true;
    }

    @Override
    public String getName(@NotNull ResolveState state) {
      return member.getName();
    }

    @Override
    public boolean shouldProcess(@NotNull DeclarationKind kind) {
      // Restrict processing to the declaration kind matching the member.
      return member instanceof PsiEnumConstant ? kind == DeclarationKind.ENUM_CONST :
             member instanceof PsiField ? kind == DeclarationKind.FIELD :
             kind == DeclarationKind.METHOD;
    }

    @Override
    public <T> T getHint(@NotNull Key<T> hintKey) {
      //noinspection unchecked
      return hintKey == NameHint.KEY || hintKey == ElementClassHint.KEY ? (T)this : null;
    }
  }
  PsiScopesUtil.processTypeDeclarations(qualifierType, member, new MyProcessor());
  // Unsubstituted type: field type, method return type, or the class itself.
  PsiType rawType = member instanceof PsiField ? ((PsiField) member).getType() :
                    member instanceof PsiMethod ? ((PsiMethod) member).getReturnType() :
                    JavaPsiFacade.getElementFactory(member.getProject()).createType((PsiClass)member);
  return subst.get().substitute(rawType);
}
/**
 * Entry point for gathering completion variants for a Java reference.
 *
 * <p>Special case: when the reference's qualifier is a lambda parameter
 * whose type cannot be inferred from context (PsiLambdaParameterType),
 * variants are collected against every candidate parameter type obtained
 * from the lambda's parent overloads. Otherwise delegates directly to
 * {@code processJavaQualifiedReference}.
 */
public static Set<LookupElement> processJavaReference(final PsiElement element,
                                                      final PsiJavaReference javaReference,
                                                      final ElementFilter elementFilter,
                                                      final JavaCompletionProcessor.Options options,
                                                      final PrefixMatcher matcher,
                                                      final CompletionParameters parameters) {
  PsiElement elementParent = element.getContext();
  if (elementParent instanceof PsiReferenceExpression) {
    final PsiExpression qualifierExpression = ((PsiReferenceExpression)elementParent).getQualifierExpression();
    if (qualifierExpression instanceof PsiReferenceExpression) {
      final PsiElement resolve = ((PsiReferenceExpression)qualifierExpression).resolve();
      if (resolve instanceof PsiParameter) {
        final PsiElement declarationScope = ((PsiParameter)resolve).getDeclarationScope();
        // PsiLambdaParameterType means the lambda's target type is unknown.
        if (((PsiParameter)resolve).getType() instanceof PsiLambdaParameterType) {
          final PsiLambdaExpression lambdaExpression = (PsiLambdaExpression)declarationScope;
          if (PsiTypesUtil.getExpectedTypeByParent(lambdaExpression) == null) {
            final int parameterIndex = lambdaExpression.getParameterList().getParameterIndex((PsiParameter)resolve);
            final Set<LookupElement> set = new LinkedHashSet<>();
            // Try the functional interface type of each parent overload.
            final boolean overloadsFound = LambdaUtil.processParentOverloads(lambdaExpression, functionalInterfaceType -> {
              PsiType qualifierType = LambdaUtil.getLambdaParameterFromType(functionalInterfaceType, parameterIndex);
              if (qualifierType instanceof PsiWildcardType) {
                qualifierType = ((PsiWildcardType)qualifierType).getBound();
              }
              if (qualifierType == null) return;

              // Complete against a synthetic "xxx.xxx" reference whose
              // qualifier variable has the candidate type.
              PsiReferenceExpression fakeRef = createReference("xxx.xxx", createContextWithXxxVariable(element, qualifierType));
              set.addAll(processJavaQualifiedReference(fakeRef.getReferenceNameElement(), fakeRef, elementFilter, options, matcher, parameters));
            });
            if (overloadsFound) return set;
          }
        }
      }
    }
  }
  return processJavaQualifiedReference(element, javaReference, elementFilter, options, matcher, parameters);
}
/**
 * Collects completion variants for a (possibly qualified) reference:
 * resolves variants through a JavaCompletionProcessor, optionally widens
 * the qualifier to an intersection with runtime-guessed types (offering
 * "cast qualifier" items where that exposes more members), filters
 * excluded/inaccessible results, and finally adds importable static
 * members or qualified-super variants where applicable.
 */
private static Set<LookupElement> processJavaQualifiedReference(PsiElement element, PsiJavaReference javaReference, ElementFilter elementFilter,
                                                                JavaCompletionProcessor.Options options,
                                                                final PrefixMatcher matcher, CompletionParameters parameters) {
  final Set<LookupElement> set = new LinkedHashSet<>();
  final Condition<String> nameCondition = matcher::prefixMatches;
  final JavaCompletionProcessor processor = new JavaCompletionProcessor(element, elementFilter, options, nameCondition);
  final PsiType plainQualifier = processor.getQualifierType();

  // Runtime types guessed from control flow / dynamic-type evaluators.
  List<PsiType> runtimeQualifiers = getQualifierCastTypes(javaReference, parameters);
  if (!runtimeQualifiers.isEmpty()) {
    // Re-resolve against an intersection of declared + runtime types via a
    // synthetic "xxx.xxx" reference so members of all conjuncts are visible.
    PsiType[] conjuncts = JBIterable.of(plainQualifier).append(runtimeQualifiers).toArray(PsiType.EMPTY_ARRAY);
    PsiType composite = PsiIntersectionType.createIntersection(false, conjuncts);
    PsiElement ctx = createContextWithXxxVariable(element, composite);
    javaReference = createReference("xxx.xxx", ctx);
    processor.setQualifierType(composite);
  }
  javaReference.processVariants(processor);
  List<PsiTypeLookupItem> castItems = ContainerUtil.map(runtimeQualifiers, q -> PsiTypeLookupItem.createLookupItem(q, element));

  final boolean pkgContext = inSomePackage(element);
  PsiClass qualifierClass = PsiUtil.resolveClassInClassTypeOnly(plainQualifier);
  // If the qualifier class itself is excluded, don't apply exclude filtering
  // to its members (the user is already working with the excluded type).
  final boolean honorExcludes = qualifierClass == null || !isInExcludedPackage(qualifierClass, false);

  Set<PsiType> expectedTypes = ObjectUtils.coalesce(getExpectedTypes(parameters), Collections.emptySet());

  final Set<PsiMember> mentioned = new THashSet<>();
  for (CompletionElement completionElement : processor.getResults()) {
    for (LookupElement item : createLookupElements(completionElement, javaReference)) {
      item.putUserData(QUALIFIER_TYPE_ATTR, plainQualifier);
      final Object o = item.getObject();
      if (o instanceof PsiClass && !isSourceLevelAccessible(element, (PsiClass)o, pkgContext)) {
        continue;
      }
      if (o instanceof PsiMember) {
        if (honorExcludes && isInExcludedPackage((PsiMember)o, true)) {
          continue;
        }
        mentioned.add(CompletionUtil.getOriginalOrSelf((PsiMember)o));
      }
      // Wrap the item in a qualifier-cast decoration when a runtime type
      // would expose it (or a better-typed overload of it).
      PsiTypeLookupItem qualifierCast = findQualifierCast(item, castItems, plainQualifier, processor, expectedTypes);
      if (qualifierCast != null) item = castQualifier(item, qualifierCast);
      set.add(highlightIfNeeded(qualifierCast != null ? qualifierCast.getType() : plainQualifier, item, o, element));
    }
  }

  if (javaReference instanceof PsiJavaCodeReferenceElement) {
    PsiElement refQualifier = ((PsiJavaCodeReferenceElement)javaReference).getQualifier();
    if (refQualifier == null && PsiTreeUtil.getParentOfType(element, PsiPackageStatement.class, PsiImportStatementBase.class) == null) {
      // Unqualified reference outside package/import statements: offer
      // importable static members not already present in the result set.
      final StaticMemberProcessor memberProcessor = new JavaStaticMemberProcessor(parameters);
      memberProcessor.processMembersOfRegisteredClasses(matcher, (member, psiClass) -> {
        if (!mentioned.contains(member) && processor.satisfies(member, ResolveState.initial())) {
          ContainerUtil.addIfNotNull(set, memberProcessor.createLookupElement(member, psiClass, true));
        }
      });
    }
    else if (refQualifier instanceof PsiSuperExpression && ((PsiSuperExpression)refQualifier).getQualifier() == null) {
      // Plain "super." qualifier: also suggest Interface.super.* calls.
      set.addAll(SuperCalls.suggestQualifyingSuperCalls(element, javaReference, elementFilter, options, nameCondition));
    }
  }
  return set;
}
/**
 * Parses {@code text} as an expression in {@code context} and returns it
 * as a reference expression (the text is expected to be a reference).
 */
@NotNull
static PsiReferenceExpression createReference(@NotNull String text, @NotNull PsiElement context) {
  return (PsiReferenceExpression) JavaPsiFacade.getElementFactory(context.getProject()).createExpressionFromText(text, context);
}
/**
 * Returns the possible runtime types of the reference's qualifier: first
 * from a registered dynamic-type evaluator on the file, otherwise from
 * control-flow analysis. Empty when the reference has no qualifier
 * expression or is not a reference expression at all.
 */
@NotNull
private static List<PsiType> getQualifierCastTypes(PsiJavaReference javaReference, CompletionParameters parameters) {
  if (!(javaReference instanceof PsiReferenceExpression)) {
    return Collections.emptyList();
  }
  final PsiReferenceExpression refExpr = (PsiReferenceExpression)javaReference;
  final PsiExpression qualifier = refExpr.getQualifierExpression();
  if (qualifier == null) {
    return Collections.emptyList();
  }
  final PairFunction<PsiExpression, CompletionParameters, PsiType> evaluator =
    refExpr.getContainingFile().getCopyableUserData(DYNAMIC_TYPE_EVALUATOR);
  if (evaluator != null) {
    final PsiType evaluated = evaluator.fun(qualifier, parameters);
    if (evaluated != null) {
      return Collections.singletonList(evaluated);
    }
  }
  // Fall back to control-flow guessing; deeper analysis on repeated invocation.
  return GuessManager.getInstance(qualifier.getProject())
    .getControlFlowExpressionTypeConjuncts(qualifier, parameters.getInvocationCount() > 1);
}
/**
 * Decides whether qualifying the item through a cast to
 * {@code castTypeItem} is worthwhile: either the cast changes a method's
 * substituted return type to something better matching the expected types
 * (or makes an inaccessible method accessible), or the member exists only
 * on the cast type and not on the plain qualifier.
 */
private static boolean shouldCast(@NotNull LookupElement item,
                                  @NotNull PsiTypeLookupItem castTypeItem,
                                  @Nullable PsiType plainQualifier,
                                  @NotNull JavaCompletionProcessor processor,
                                  @NotNull Set<? extends PsiType> expectedTypes) {
  PsiType castType = castTypeItem.getType();
  if (plainQualifier != null) {
    Object o = item.getObject();
    if (o instanceof PsiMethod) {
      if (plainQualifier instanceof PsiClassType && castType instanceof PsiClassType) {
        PsiMethod method = (PsiMethod)o;
        PsiClassType.ClassResolveResult plainResult = ((PsiClassType)plainQualifier).resolveGenerics();
        PsiClass plainClass = plainResult.getElement();
        // Find the same method (or a super signature of it) on the plain
        // qualifier class, to compare against the cast view.
        HierarchicalMethodSignature signature = method.getHierarchicalMethodSignature();
        PsiMethod plainMethod = plainClass == null ? null :
                                StreamEx.ofTree(signature, s -> StreamEx.of(s.getSuperSignatures()))
                                  .map(sig -> MethodSignatureUtil.findMethodBySignature(plainClass, sig, true))
                                  .filter(Objects::nonNull)
                                  .findFirst().orElse(null);
        if (plainMethod != null) {
          PsiClassType.ClassResolveResult castResult = ((PsiClassType)castType).resolveGenerics();
          PsiClass castClass = castResult.getElement();
          // A cast only makes sense to a subtype of the declared qualifier.
          if (castClass == null || !castClass.isInheritor(plainClass, true)) {
            return false;
          }
          // If the declared method isn't accessible, the cast is the only way in.
          if (!processor.isAccessible(plainMethod)) {
            return true;
          }
          // Cast is useful iff it changes the (raw) return type into one
          // that satisfies an expected type the declared type does not.
          PsiSubstitutor castSub = TypeConversionUtil.getSuperClassSubstitutor(plainClass, (PsiClassType)castType);
          PsiType typeAfterCast = toRaw(castSub.substitute(method.getReturnType()));
          PsiType typeDeclared = toRaw(plainResult.getSubstitutor().substitute(plainMethod.getReturnType()));
          return typeAfterCast != null && typeDeclared != null &&
                 !typeAfterCast.equals(typeDeclared) &&
                 expectedTypes.stream().anyMatch(et -> et.isAssignableFrom(typeAfterCast) && !et.isAssignableFrom(typeDeclared));
        }
      }
    }
    // Non-method (or unresolvable) member: cast only if the member lives on
    // the cast type but not on the plain qualifier.
    return containsMember(castType, o, true) && !containsMember(plainQualifier, o, true);
  }
  return false;
}
/**
 * Wraps {@code item} so that inserting it also surrounds the qualifier
 * expression with a parenthesized cast to {@code castTypeItem}'s type,
 * honoring the user's code-style spacing settings, and shows the cast
 * type in the item's tail text.
 */
@NotNull
private static LookupElement castQualifier(@NotNull LookupElement item, @NotNull PsiTypeLookupItem castTypeItem) {
  return new LookupElementDecorator<LookupElement>(item) {
    @Override
    public void handleInsert(@NotNull InsertionContext context) {
      final Document document = context.getEditor().getDocument();
      context.commitDocument();
      final PsiFile file = context.getFile();
      final PsiJavaCodeReferenceElement ref =
        PsiTreeUtil.findElementOfClassAtOffset(file, context.getStartOffset(), PsiJavaCodeReferenceElement.class, false);
      if (ref != null) {
        final PsiElement qualifier = ref.getQualifier();
        if (qualifier != null) {
          final CommonCodeStyleSettings settings = context.getCodeStyleSettings();
          // Close the outer paren after the qualifier first, then insert
          // the "((<type>) " prefix before it; the cast type itself is
          // inserted via emulateInsertion so imports etc. are handled.
          final String parenSpace = settings.SPACE_WITHIN_PARENTHESES ? " " : "";
          document.insertString(qualifier.getTextRange().getEndOffset(), parenSpace + ")");
          final String spaceWithin = settings.SPACE_WITHIN_CAST_PARENTHESES ? " " : "";
          final String prefix = "(" + parenSpace + "(" + spaceWithin;
          final String spaceAfter = settings.SPACE_AFTER_TYPE_CAST ? " " : "";
          final int exprStart = qualifier.getTextRange().getStartOffset();
          document.insertString(exprStart, prefix + spaceWithin + ")" + spaceAfter);
          CompletionUtil.emulateInsertion(context, exprStart + prefix.length(), castTypeItem);
          PsiDocumentManager.getInstance(file.getProject()).doPostponedOperationsAndUnblockDocument(document);
          context.getEditor().getCaretModel().moveToOffset(context.getTailOffset());
        }
      }
      super.handleInsert(context);
    }

    @Override
    public void renderElement(LookupElementPresentation presentation) {
      super.renderElement(presentation);
      // Advertise which cast this variant applies, e.g. "foo() on Bar".
      presentation.appendTailText(" on " + castTypeItem.getType().getPresentableText(), true);
    }
  };
}
/**
 * Returns the first runtime-cast candidate for which casting the qualifier
 * is worthwhile for this item (see shouldCast), or null when none applies.
 */
private static PsiTypeLookupItem findQualifierCast(@NotNull LookupElement item,
                                                   @NotNull List<? extends PsiTypeLookupItem> castTypeItems,
                                                   @Nullable PsiType plainQualifier, JavaCompletionProcessor processor, Set<? extends PsiType> expectedTypes) {
  return ContainerUtil.find(castTypeItems, c -> shouldCast(item, c, plainQualifier, processor, expectedTypes));
}
/**
 * Erases a class type to its raw form; any other type (including null)
 * is returned unchanged.
 */
@Nullable
private static PsiType toRaw(@Nullable PsiType type) {
  if (type instanceof PsiClassType) {
    return ((PsiClassType)type).rawType();
  }
  return type;
}
/**
 * Applies presentation tweaks to a lookup item: renders it red (with
 * lowered proximity) when it is problematic at {@code place} (see
 * shouldMarkRed), or bold (with raised proximity for non-fields) when the
 * member is declared directly on the qualifier type rather than inherited
 * from Object. Returns the original item otherwise.
 */
@NotNull
public static LookupElement highlightIfNeeded(@Nullable PsiType qualifierType,
                                              @NotNull LookupElement item,
                                              @NotNull Object object,
                                              @NotNull PsiElement place) {
  if (shouldMarkRed(object, place)) {
    return PrioritizedLookupElement.withExplicitProximity(LookupElementDecorator.withRenderer(item, new LookupElementRenderer<LookupElementDecorator<LookupElement>>() {
      @Override
      public void renderElement(LookupElementDecorator<LookupElement> element, LookupElementPresentation presentation) {
        element.getDelegate().renderElement(presentation);
        presentation.setItemTextForeground(JBColor.RED);
      }
    }), -1);
  }
  // Note: containsMember is contracted to return false for a null
  // qualifierType, so the equalsToText call below cannot NPE.
  if (containsMember(qualifierType, object, false) && !qualifierType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) {
    LookupElementDecorator<LookupElement> bold = LookupElementDecorator.withRenderer(item, new LookupElementRenderer<LookupElementDecorator<LookupElement>>() {
      @Override
      public void renderElement(LookupElementDecorator<LookupElement> element, LookupElementPresentation presentation) {
        element.getDelegate().renderElement(presentation);
        presentation.setItemTextBold(true);
      }
    });
    return object instanceof PsiField ? bold : PrioritizedLookupElement.withExplicitProximity(bold, 1);
  }
  return item;
}
/**
 * Decides whether a completion item should be shown in red, i.e. the member is
 * known to be unusable at {@code place}: it requires a newer language level,
 * it is an enum constant already covered by the surrounding switch, or it is a
 * class that is inapplicable in the current reference list.
 */
private static boolean shouldMarkRed(@NotNull Object object, @NotNull PsiElement place) {
  if (!(object instanceof PsiMember)) {
    return false;
  }
  PsiMember member = (PsiMember)object;
  // Member needs a higher language level than the one in effect at `place`.
  if (Java15APIUsageInspection.getLastIncompatibleLanguageLevel(member, PsiUtil.getLanguageLevel(place)) != null) {
    return true;
  }
  // Enum constant already used as a case label in the enclosing switch.
  if (member instanceof PsiEnumConstant) {
    return findConstantsUsedInSwitch(place).contains(CompletionUtil.getOriginalOrSelf((PsiEnumConstant)member));
  }
  // Class not applicable in the reference list (extends/implements/throws) being completed.
  return member instanceof PsiClass && ReferenceListWeigher.INSTANCE.getApplicability((PsiClass)member, place) == inapplicable;
}
/**
 * Checks whether the member represented by {@code object} is actually exposed by
 * {@code qualifierType}.
 *
 * @param qualifierType type of the qualifier, or {@code null} (always yields {@code false})
 * @param object        candidate completion object; only {@link PsiMember}s can match
 * @param checkBases    when true, also accept members inherited from base classes
 */
@Contract("null, _, _ -> false")
private static boolean containsMember(@Nullable PsiType qualifierType, @NotNull Object object, boolean checkBases) {
  if (!(object instanceof PsiMember)) return false;
  if (qualifierType instanceof PsiArrayType) { //length and clone()
    // Synthetic array members have no real containing file/virtual file.
    PsiFile file = ((PsiMember)object).getContainingFile();
    if (file == null || file.getVirtualFile() == null) { //yes, they're a bit dummy
      return true;
    }
  }
  else if (qualifierType instanceof PsiClassType) {
    PsiClass qualifierClass = ((PsiClassType)qualifierType).resolve();
    if (qualifierClass == null) return false;
    // Methods are matched by signature so overrides count when checkBases is set.
    if (object instanceof PsiMethod && qualifierClass.findMethodBySignature((PsiMethod)object, checkBases) != null) {
      return true;
    }
    // Fields/classes: compare declaring class, optionally walking the hierarchy.
    PsiClass memberClass = ((PsiMember)object).getContainingClass();
    return checkBases ? InheritanceUtil.isInheritorOrSelf(qualifierClass, memberClass, true) : qualifierClass.equals(memberClass);
  }
  return false;
}
/**
 * Converts a raw completion candidate (PSI class/method/variable/package) into
 * the lookup element(s) shown in the completion popup, choosing a specialized
 * element type per candidate kind and reference context.
 *
 * @param completionElement candidate plus its substitutor/qualifier information
 * @param reference         the reference being completed
 * @return one or more lookup elements representing the candidate
 */
static Iterable<? extends LookupElement> createLookupElements(CompletionElement completionElement, PsiJavaReference reference) {
  Object completion = completionElement.getElement();
  assert !(completion instanceof LookupElement);
  if (reference instanceof PsiJavaCodeReferenceElement) {
    // Static-import completion: offer the bare method, no call parentheses.
    if (completion instanceof PsiMethod &&
        ((PsiJavaCodeReferenceElement)reference).getParent() instanceof PsiImportStaticStatement) {
      return Collections.singletonList(JavaLookupElementBuilder.forMethod((PsiMethod)completion, PsiSubstitutor.EMPTY));
    }
    // Class completion: may expand into several items (e.g. one per constructor).
    if (completion instanceof PsiClass) {
      List<JavaPsiClassReferenceElement> classItems = JavaClassNameCompletionContributor.createClassLookupItems(
        CompletionUtil.getOriginalOrSelf((PsiClass)completion),
        JavaClassNameCompletionContributor.AFTER_NEW.accepts(reference),
        JavaClassNameInsertHandler.JAVA_CLASS_INSERT_HANDLER,
        Conditions.alwaysTrue());
      return JBIterable.from(classItems).flatMap(i -> JavaConstructorCallElement.wrap(i, reference.getElement()));
    }
  }
  // Constructor completed inside a method reference: present as `::new`.
  if (reference instanceof PsiMethodReferenceExpression && completion instanceof PsiMethod && ((PsiMethod)completion).isConstructor()) {
    return Collections.singletonList(JavaLookupElementBuilder.forMethod((PsiMethod)completion, "new", PsiSubstitutor.EMPTY, null));
  }
  PsiSubstitutor substitutor = completionElement.getSubstitutor();
  if (substitutor == null) substitutor = PsiSubstitutor.EMPTY;
  if (completion instanceof PsiClass) {
    JavaPsiClassReferenceElement classItem =
      JavaClassNameCompletionContributor.createClassLookupItem((PsiClass)completion, true).setSubstitutor(substitutor);
    return JavaConstructorCallElement.wrap(classItem, reference.getElement());
  }
  if (completion instanceof PsiMethod) {
    JavaMethodCallElement item = new JavaMethodCallElement((PsiMethod)completion).setQualifierSubstitutor(substitutor);
    item.setForcedQualifier(completionElement.getQualifierText());
    return Collections.singletonList(item);
  }
  if (completion instanceof PsiVariable) {
    return Collections.singletonList(new VariableLookupItem((PsiVariable)completion).setSubstitutor(substitutor));
  }
  if (completion instanceof PsiPackage) {
    return Collections.singletonList(new PackageLookupItem((PsiPackage)completion, reference.getElement()));
  }
  // Fallback for anything else (keywords, templates, ...).
  return Collections.singletonList(LookupItemUtil.objectToLookupItem(completion));
}
/**
 * Returns true when {@code type} can plausibly be instantiated from {@code place}:
 * it is an array type, or a non-enum non-annotation class with either no declared
 * constructors (implicit default) or at least one completable constructor.
 */
public static boolean hasAccessibleConstructor(@NotNull PsiType type, @NotNull PsiElement place) {
  if (type instanceof PsiArrayType) return true;
  PsiClass psiClass = PsiUtil.resolveClassInType(type);
  if (psiClass == null || psiClass.isEnum() || psiClass.isAnnotationType()) return false;
  PsiMethod[] constructors = psiClass.getConstructors();
  if (constructors.length == 0) {
    return true; // implicit default constructor
  }
  for (PsiMethod constructor : constructors) {
    if (isConstructorCompletable(constructor, place)) {
      return true;
    }
  }
  return false;
}
/**
 * Decides whether a constructor should be offered in completion from {@code place}.
 * Source constructors always qualify (a quick fix can widen access afterwards);
 * compiled private ones never do; compiled package-local ones only when accessible.
 */
private static boolean isConstructorCompletable(@NotNull PsiMethod constructor, @NotNull PsiElement place) {
  if (!(constructor instanceof PsiCompiledElement)) {
    return true; // it's possible to use a quick fix to make accessible after completion
  }
  if (constructor.hasModifierProperty(PsiModifier.PRIVATE)) {
    return false;
  }
  return !constructor.hasModifierProperty(PsiModifier.PACKAGE_LOCAL) || PsiUtil.isAccessible(constructor, place, null);
}
/**
 * Collects every string under which {@code member} can be looked up: its simple
 * name plus progressively longer dotted names prefixed by each named enclosing
 * class ("name", "Outer.name", "Top.Outer.name", ...), in that order.
 */
public static LinkedHashSet<String> getAllLookupStrings(@NotNull PsiMember member) {
  LinkedHashSet<String> result = new LinkedHashSet<>();
  String qualified = member.getName();
  result.add(qualified);
  PsiClass outer = member.getContainingClass();
  while (outer != null) {
    String outerName = outer.getName();
    if (outerName == null) {
      break; // anonymous/local class — stop qualifying
    }
    qualified = outerName + "." + qualified;
    result.add(qualified);
    PsiElement next = outer.getParent();
    outer = next instanceof PsiClass ? (PsiClass)next : null;
  }
  return result;
}
/**
 * Returns true when {@code element} is an expression that might have side effects;
 * non-expressions (and {@code null}) are side-effect free by definition.
 */
public static boolean mayHaveSideEffects(@Nullable final PsiElement element) {
  if (!(element instanceof PsiExpression)) {
    return false;
  }
  return SideEffectChecker.mayHaveSideEffects((PsiExpression)element);
}
/**
 * Inserts a reference to {@code psiClass} at {@code offset} without replacing any
 * existing text (zero-length range); delegates to the range-based overload.
 */
public static void insertClassReference(@NotNull PsiClass psiClass, @NotNull PsiFile file, int offset) {
  insertClassReference(psiClass, file, offset, offset);
}
/**
 * Replaces the text in [startOffset, endOffset) with a reference to {@code psiClass},
 * binding the resulting reference to the class (which may import it or qualify it
 * with the fully qualified name when binding fails to resolve correctly).
 *
 * @return the offset just after the inserted reference text (before any type
 *         parameter list), or {@code endOffset} when nothing was changed
 */
public static int insertClassReference(PsiClass psiClass, PsiFile file, int startOffset, int endOffset) {
  final Project project = file.getProject();
  PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project);
  documentManager.commitAllDocuments();
  final PsiManager manager = file.getManager();
  final Document document = FileDocumentManager.getInstance().getDocument(file.getViewProvider().getVirtualFile());
  PsiReference reference = file.findReferenceAt(startOffset);
  // Nothing to do if the reference already resolves to the desired class.
  if (reference != null && manager.areElementsEquivalent(psiClass, reference.resolve())) {
    return endOffset;
  }
  String name = psiClass.getName();
  if (name == null) {
    return endOffset; // anonymous/local class — cannot be referenced by name
  }
  if (reference != null && !psiClass.hasModifierProperty(PsiModifier.STATIC)) {
    // Inner class of a generic outer class cannot be referenced from a static
    // context outside that class; bail out without modifying the document.
    PsiClass containingClass = psiClass.getContainingClass();
    if (containingClass != null && containingClass.hasTypeParameters()) {
      PsiModifierListOwner enclosingStaticElement = PsiUtil.getEnclosingStaticElement(reference.getElement(), null);
      if (enclosingStaticElement != null && !PsiTreeUtil.isAncestor(enclosingStaticElement, psiClass, false)) {
        return endOffset;
      }
    }
  }
  assert document != null;
  document.replaceString(startOffset, endOffset, name);
  int newEndOffset = startOffset + name.length();
  // Temporary space keeps the inserted name from merging with a following identifier.
  final RangeMarker toDelete = insertTemporary(newEndOffset, document, " ");
  documentManager.commitAllDocuments();
  PsiElement element = file.findElementAt(startOffset);
  if (element instanceof PsiIdentifier) {
    PsiElement parent = element.getParent();
    if (parent instanceof PsiJavaCodeReferenceElement &&
        !((PsiJavaCodeReferenceElement)parent).isQualified() &&
        !(parent.getParent() instanceof PsiPackageStatement)) {
      PsiJavaCodeReferenceElement ref = (PsiJavaCodeReferenceElement)parent;
      if (psiClass.isValid() && !psiClass.getManager().areElementsEquivalent(psiClass, resolveReference(ref))) {
        final boolean staticImport = ref instanceof PsiImportStaticReferenceElement;
        PsiElement newElement;
        try {
          newElement = staticImport
                       ? ((PsiImportStaticReferenceElement)ref).bindToTargetClass(psiClass)
                       : ref.bindToElement(psiClass);
        }
        catch (IncorrectOperationException e) {
          return endOffset; // can happen if fqn contains reserved words, for example
        }
        // bindToElement may have restructured the PSI; re-locate the element via a
        // range marker after flushing postponed document operations.
        final RangeMarker rangeMarker = document.createRangeMarker(newElement.getTextRange());
        documentManager.doPostponedOperationsAndUnblockDocument(document);
        documentManager.commitDocument(document);
        newElement = CodeInsightUtilCore.findElementInRange(file, rangeMarker.getStartOffset(), rangeMarker.getEndOffset(),
                                                            PsiJavaCodeReferenceElement.class,
                                                            JavaLanguage.INSTANCE);
        rangeMarker.dispose();
        if (newElement != null) {
          newEndOffset = newElement.getTextRange().getEndOffset();
          if (!(newElement instanceof PsiReferenceExpression)) {
            // Report the offset before the <...> type parameter list, if any.
            PsiReferenceParameterList parameterList = ((PsiJavaCodeReferenceElement)newElement).getParameterList();
            if (parameterList != null) {
              newEndOffset = parameterList.getTextRange().getStartOffset();
            }
          }
          // Binding still failed (e.g. name clash): fall back to the fully qualified name.
          if (!staticImport &&
              !psiClass.getManager().areElementsEquivalent(psiClass, resolveReference((PsiReference)newElement)) &&
              !PsiUtil.isInnerClass(psiClass)) {
            final String qName = psiClass.getQualifiedName();
            if (qName != null) {
              document.replaceString(newElement.getTextRange().getStartOffset(), newEndOffset, qName);
              newEndOffset = newElement.getTextRange().getStartOffset() + qName.length();
            }
          }
        }
      }
    }
  }
  // Remove the temporary separator inserted above.
  if (toDelete != null && toDelete.isValid()) {
    document.deleteString(toDelete.getStartOffset(), toDelete.getEndOffset());
  }
  return newEndOffset;
}
/**
 * Resolves a reference, preferring the single candidate of a poly-variant
 * reference (with incomplete code allowed) when the resolution is unambiguous.
 */
@Nullable
static PsiElement resolveReference(final PsiReference psiReference) {
  if (psiReference instanceof PsiPolyVariantReference) {
    ResolveResult[] candidates = ((PsiPolyVariantReference)psiReference).multiResolve(true);
    if (candidates.length == 1) {
      return candidates[0].getElement();
    }
  }
  return psiReference.resolve();
}
/**
 * If the character at {@code endOffset} would merge with just-inserted completion
 * text (i.e. it is a Java identifier part), inserts {@code temporary} as a
 * separator so the following text is not glued to the completed identifier.
 *
 * @param endOffset offset right after the inserted completion text
 * @param document  document to modify
 * @param temporary separator text to insert (callers typically pass a single space)
 * @return a greedy range marker covering the inserted separator so the caller can
 *         delete it later, or {@code null} when no separator was needed
 */
@Nullable
public static RangeMarker insertTemporary(int endOffset, Document document, String temporary) {
  final CharSequence chars = document.getCharsSequence();
  if (endOffset < chars.length() && Character.isJavaIdentifierPart(chars.charAt(endOffset))){
    document.insertString(endOffset, temporary);
    // Cover the whole inserted separator. The previous hard-coded end offset of
    // endOffset + 1 silently truncated the marker (and thus the later deletion)
    // whenever a multi-character separator was passed in.
    RangeMarker toDelete = document.createRangeMarker(endOffset, endOffset + temporary.length());
    toDelete.setGreedyToLeft(true);
    toDelete.setGreedyToRight(true);
    return toDelete;
  }
  return null;
}
/**
 * Inserts call parentheses for a completed method without forcing the closing
 * parenthesis; delegates to the full overload.
 */
public static void insertParentheses(@NotNull InsertionContext context,
                                     @NotNull LookupElement item,
                                     boolean overloadsMatter,
                                     boolean hasParams) {
  insertParentheses(context, item, overloadsMatter, hasParams, false);
}
/**
 * Inserts call parentheses (and an optional tail such as {@code ;}) after a
 * completed method, honoring code style spacing, the typed completion character,
 * and method-reference contexts where no parentheses belong.
 *
 * @param context                  insertion context of the completion
 * @param item                     the completed lookup element
 * @param overloadsMatter          when true, the parameter-info popup shows all overloads
 * @param hasParams                whether the method takes parameters (caret goes inside parens)
 * @param forceClosingParenthesis  always insert the closing parenthesis regardless of settings
 */
public static void insertParentheses(@NotNull InsertionContext context,
                                     @NotNull LookupElement item,
                                     boolean overloadsMatter,
                                     boolean hasParams,
                                     final boolean forceClosingParenthesis) {
  final Editor editor = context.getEditor();
  final char completionChar = context.getCompletionChar();
  final PsiFile file = context.getFile();
  // Derive the tail ('(' means the user types the parens themselves; ':' is a ternary branch).
  final TailType tailType = completionChar == '(' ? TailType.NONE :
                            completionChar == ':' ? TailType.COND_EXPR_COLON :
                            LookupItem.handleCompletionChar(context.getEditor(), item, completionChar);
  final boolean hasTail = tailType != TailType.NONE && tailType != TailType.UNKNOWN;
  final boolean smart = completionChar == Lookup.COMPLETE_STATEMENT_SELECT_CHAR;
  // These characters are consumed by the insertion logic, not re-typed into the document.
  if (completionChar == '(' || completionChar == '.' || completionChar == ',' || completionChar == ';' || completionChar == ':' || completionChar == ' ') {
    context.setAddCompletionChar(false);
  }
  if (hasTail) {
    hasParams = false; // a tail right after the call means the caret skips the argument list
  }
  final boolean needRightParenth = forceClosingParenthesis ||
                                   !smart && (CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET ||
                                              !hasParams && completionChar != '(');
  context.commitDocument();
  final CommonCodeStyleSettings styleSettings = context.getCodeStyleSettings();
  final PsiElement elementAt = file.findElementAt(context.getStartOffset());
  // No parentheses inside a method reference (Foo::bar).
  if (elementAt == null || !(elementAt.getParent() instanceof PsiMethodReferenceExpression)) {
    final boolean hasParameters = hasParams;
    final boolean spaceBetweenParentheses = styleSettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES && hasParams;
    new ParenthesesInsertHandler<LookupElement>(styleSettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES, spaceBetweenParentheses,
                                                needRightParenth, styleSettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE) {
      @Override
      protected boolean placeCaretInsideParentheses(InsertionContext context1, LookupElement item1) {
        return hasParameters;
      }
      @Override
      protected PsiElement findExistingLeftParenthesis(@NotNull InsertionContext context) {
        // Don't reuse a '(' that actually belongs to a lambda parameter list.
        PsiElement token = super.findExistingLeftParenthesis(context);
        return isPartOfLambda(token) ? null : token;
      }
      private boolean isPartOfLambda(PsiElement token) {
        return token != null && token.getParent() instanceof PsiExpressionList &&
               PsiUtilCore.getElementType(PsiTreeUtil.nextVisibleLeaf(token.getParent())) == JavaTokenType.ARROW;
      }
    }.handleInsert(context, item);
  }
  if (hasParams) {
    // Invoke parameters popup
    AutoPopupController.getInstance(file.getProject()).autoPopupParameterInfo(editor, overloadsMatter ? null : (PsiElement)item.getObject());
  }
  if (smart || !needRightParenth || !insertTail(context, item, tailType, hasTail)) {
    return;
  }
  // Typed '.' or ',' after the call: immediately continue with member lookup / parameter info.
  if (completionChar == '.') {
    AutoPopupController.getInstance(file.getProject()).autoPopupMemberLookup(context.getEditor(), null);
  } else if (completionChar == ',') {
    AutoPopupController.getInstance(file.getProject()).autoPopupParameterInfo(context.getEditor(), null);
  }
}
/**
 * Inserts the tail (e.g. {@code ;}) after a completed item. For calls to
 * {@code void} methods used as statements it upgrades the tail to a semicolon,
 * unless the surrounding lambda/method-reference context forbids one.
 *
 * @return true when the caller should continue with post-tail processing
 *         (always true; false is returned only for the dot-chaining case)
 */
public static boolean insertTail(InsertionContext context, LookupElement item, TailType tailType, boolean hasTail) {
  TailType toInsert = tailType;
  LookupItem<?> lookupItem = item.as(LookupItem.CLASS_CONDITION_KEY);
  if (lookupItem == null || lookupItem.getAttribute(LookupItem.TAIL_TYPE_ATTR) != TailType.UNKNOWN) {
    if (!hasTail && item.getObject() instanceof PsiMethod && PsiType.VOID.equals(((PsiMethod)item.getObject()).getReturnType())) {
      PsiDocumentManager.getInstance(context.getProject()).commitAllDocuments();
      // A '.' right after the call means the user keeps chaining — no semicolon then.
      if (psiElement().beforeLeaf(psiElement().withText(".")).accepts(context.getFile().findElementAt(context.getTailOffset() - 1))) {
        return false;
      }
      boolean insertAdditionalSemicolon = true;
      PsiElement leaf = context.getFile().findElementAt(context.getStartOffset());
      PsiElement composite = leaf == null ? null : leaf.getParent();
      // Lambdas and method references may be expression bodies where a ';' is wrong.
      if (composite instanceof PsiMethodReferenceExpression && LambdaHighlightingUtil.insertSemicolon(composite.getParent())) {
        insertAdditionalSemicolon = false;
      }
      else if (composite instanceof PsiReferenceExpression) {
        PsiElement parent = composite.getParent();
        if (parent instanceof PsiMethodCallExpression) {
          parent = parent.getParent();
        }
        if (parent instanceof PsiLambdaExpression && !LambdaHighlightingUtil.insertSemicolonAfter((PsiLambdaExpression)parent)) {
          insertAdditionalSemicolon = false;
        }
        if (parent instanceof PsiMethodReferenceExpression && LambdaHighlightingUtil.insertSemicolon(parent.getParent())) {
          insertAdditionalSemicolon = false;
        }
      }
      if (insertAdditionalSemicolon) {
        toInsert = TailType.SEMICOLON;
      }
    }
  }
  Editor editor = context.getEditor();
  int tailOffset = context.getTailOffset();
  int afterTailOffset = toInsert.processTail(editor, tailOffset);
  int caretOffset = editor.getCaretModel().getOffset();
  // Keep tab-out scopes consistent: re-register the scope shifted past the new tail.
  if (afterTailOffset > tailOffset &&
      tailOffset > caretOffset &&
      TabOutScopesTracker.getInstance().removeScopeEndingAt(editor, caretOffset) > 0) {
    TabOutScopesTracker.getInstance().registerEmptyScope(editor, caretOffset, afterTailOffset);
  }
  return true;
}
//need to shorten references in type argument list
/**
 * Shortens the fully qualified class reference found at {@code offset} (adding
 * imports as necessary) and flushes the resulting document changes.
 */
public static void shortenReference(final PsiFile file, final int offset) throws IncorrectOperationException {
  Project project = file.getProject();
  final PsiDocumentManager manager = PsiDocumentManager.getInstance(project);
  Document document = manager.getDocument(file);
  if (document == null) {
    // Should not happen for a valid physical file; log for diagnostics and bail out.
    PsiUtilCore.ensureValid(file);
    LOG.error("No document for " + file);
    return;
  }
  manager.commitDocument(document);
  PsiReference ref = file.findReferenceAt(offset);
  if (ref != null) {
    JavaCodeStyleManager.getInstance(project).shortenClassReferences(ref.getElement());
    PsiDocumentManager.getInstance(project).doPostponedOperationsAndUnblockDocument(document);
  }
}
/**
 * Returns true when the file containing {@code context} declares a non-empty
 * package (i.e. it is not in the default package).
 */
public static boolean inSomePackage(PsiElement context) {
  PsiFile file = context.getContainingFile();
  if (!(file instanceof PsiClassOwner)) {
    return false;
  }
  return StringUtil.isNotEmpty(((PsiClassOwner)file).getPackageName());
}
/**
 * Checks that {@code psiClass} is accessible from {@code context} by resolve
 * rules and, when {@code pkgContext} is set, additionally rejects classes whose
 * top-level class lives in the default package (unreferenceable from named packages).
 */
public static boolean isSourceLevelAccessible(PsiElement context, PsiClass psiClass, final boolean pkgContext) {
  if (!JavaPsiFacade.getInstance(psiClass.getProject()).getResolveHelper().isAccessible(psiClass, context, null)) {
    return false;
  }
  if (!pkgContext) {
    return true;
  }
  PsiClass topLevel = PsiUtil.getTopLevelClass(psiClass);
  if (topLevel == null) {
    return true;
  }
  String fqName = topLevel.getQualifiedName();
  return fqName == null || !StringUtil.isEmpty(StringUtil.getPackageName(fqName));
}
/**
 * Opens a type-argument list at {@code offset}: inserts {@code <} (and a paired
 * {@code >} when auto-insert is enabled) and places the caret between them.
 * The offset is tracked across postponed formatting so it stays valid.
 *
 * @return true when the angle bracket was inserted, false if the offset was lost
 */
public static boolean promptTypeArgs(InsertionContext context, int offset) {
  if (offset < 0) {
    return false;
  }
  // Track the offset through formatting; it may move or become invalid (-1).
  OffsetKey key = context.trackOffset(offset, false);
  PostprocessReformattingAspect.getInstance(context.getProject()).doPostponedFormatting();
  offset = context.getOffset(key);
  if (offset < 0) {
    return false;
  }
  // In JSPX files the angle brackets must be XML-escaped.
  String open = escapeXmlIfNeeded(context, "<");
  context.getDocument().insertString(offset, open);
  context.getEditor().getCaretModel().moveToOffset(offset + open.length());
  if (CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET) {
    context.getDocument().insertString(offset + open.length(), escapeXmlIfNeeded(context, ">"));
  }
  if (context.getCompletionChar() != Lookup.COMPLETE_STATEMENT_SELECT_CHAR) {
    context.setAddCompletionChar(false);
  }
  return true;
}
/**
 * Creates a synthetic PSI context that behaves like {@code place} but additionally
 * declares a light variable named {@code xxx} of type {@code varType}, so code
 * fragments referring to "xxx" can be resolved against it.
 */
public static FakePsiElement createContextWithXxxVariable(@NotNull PsiElement place, @NotNull PsiType varType) {
  return new FakePsiElement() {
    @Override
    public boolean processDeclarations(@NotNull PsiScopeProcessor processor,
                                       @NotNull ResolveState state,
                                       PsiElement lastParent,
                                       @NotNull PsiElement place) {
      // Expose exactly one declaration: the synthetic "xxx" variable.
      return processor.execute(new LightVariableBuilder("xxx", varType, place), ResolveState.initial());
    }
    @Override
    public PsiElement getParent() {
      // Delegate scoping to the real context element.
      return place;
    }
  };
}
/**
 * XML-escapes {@code generics} when the file's base language is JSPX (where raw
 * angle brackets would break the markup); otherwise returns it unchanged.
 */
@NotNull
public static String escapeXmlIfNeeded(InsertionContext context, @NotNull String generics) {
  boolean isJspx = context.getFile().getViewProvider().getBaseLanguage() == StdLanguages.JSPX;
  return isJspx ? StringUtil.escapeXmlEntities(generics) : generics;
}
/**
 * Returns true when {@code member} is deprecated itself or is enclosed
 * (at any nesting depth) in a deprecated class.
 */
public static boolean isEffectivelyDeprecated(PsiDocCommentOwner member) {
  if (member.isDeprecated()) {
    return true;
  }
  for (PsiClass enclosing = member.getContainingClass(); enclosing != null; enclosing = enclosing.getContainingClass()) {
    if (enclosing.isDeprecated()) {
      return true;
    }
  }
  return false;
}
/**
 * Walks left from the tail offset over identifier characters and dots and
 * returns the offset where the (possibly qualified) name begins.
 */
public static int findQualifiedNameStart(@NotNull InsertionContext context) {
  CharSequence text = context.getDocument().getCharsSequence();
  int index = context.getTailOffset() - 1;
  for (; index >= 0; index--) {
    char ch = text.charAt(index);
    if (!Character.isJavaIdentifierPart(ch) && ch != '.') {
      break;
    }
  }
  return index + 1;
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution;
import java.io.IOException;
import scala.collection.Iterator;
import scala.math.Ordering;
import com.google.common.annotations.VisibleForTesting;
import org.apache.spark.SparkEnv;
import org.apache.spark.TaskContext;
import org.apache.spark.sql.catalyst.util.AbstractScalaRowIterator;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.UnsafeProjection;
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.util.collection.unsafe.sort.PrefixComparator;
import org.apache.spark.util.collection.unsafe.sort.RecordComparator;
import org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter;
import org.apache.spark.util.collection.unsafe.sort.UnsafeSorterIterator;
/**
 * Sorts {@link UnsafeRow}s with an {@link UnsafeExternalSorter}, spilling to disk
 * when memory runs out. Rows are ordered first by a computed prefix (cheap
 * comparison) and then by the full {@link RowComparator}.
 */
final class UnsafeExternalRowSorter {
  /**
   * If positive, forces records to be spilled to disk at the given frequency (measured in numbers
   * of records). This is only intended to be used in tests.
   */
  private int testSpillFrequency = 0;
  // Total rows inserted so far; drives the test-only periodic spill below.
  private long numRowsInserted = 0;
  private final StructType schema;
  private final PrefixComputer prefixComputer;
  private final UnsafeExternalSorter sorter;
  /** Computes the sort prefix (a long compared before the full row) for a row. */
  public static abstract class PrefixComputer {
    abstract long computePrefix(InternalRow row);
  }
  public UnsafeExternalRowSorter(
      StructType schema,
      Ordering<InternalRow> ordering,
      PrefixComparator prefixComparator,
      PrefixComputer prefixComputer,
      long pageSizeBytes) throws IOException {
    this.schema = schema;
    this.prefixComputer = prefixComputer;
    final SparkEnv sparkEnv = SparkEnv.get();
    final TaskContext taskContext = TaskContext.get();
    sorter = UnsafeExternalSorter.create(
      taskContext.taskMemoryManager(),
      sparkEnv.blockManager(),
      taskContext,
      new RowComparator(ordering, schema.length()),
      prefixComparator,
      /* initialSize */ 4096,
      pageSizeBytes
    );
  }
  /**
   * Forces spills to occur every `frequency` records. Only for use in tests.
   */
  @VisibleForTesting
  void setTestSpillFrequency(int frequency) {
    assert frequency > 0 : "Frequency must be positive";
    testSpillFrequency = frequency;
  }
  // Copies the row's backing bytes into the sorter along with its sort prefix.
  @VisibleForTesting
  void insertRow(UnsafeRow row) throws IOException {
    final long prefix = prefixComputer.computePrefix(row);
    sorter.insertRecord(
      row.getBaseObject(),
      row.getBaseOffset(),
      row.getSizeInBytes(),
      prefix
    );
    numRowsInserted++;
    if (testSpillFrequency > 0 && (numRowsInserted % testSpillFrequency) == 0) {
      sorter.spill();
    }
  }
  /**
   * Return the peak memory used so far, in bytes.
   */
  public long getPeakMemoryUsage() {
    return sorter.getPeakMemoryUsedBytes();
  }
  // Frees the sorter's in-memory and on-disk resources; must run exactly once.
  private void cleanupResources() {
    sorter.cleanupResources();
  }
  /**
   * Returns an iterator over the sorted rows. The returned rows point into the
   * sorter's memory pages, so each row is only valid until the next call to
   * next(); the final row is defensively copied before resources are freed.
   */
  @VisibleForTesting
  Iterator<UnsafeRow> sort() throws IOException {
    try {
      final UnsafeSorterIterator sortedIterator = sorter.getSortedIterator();
      if (!sortedIterator.hasNext()) {
        // Since we won't ever call next() on an empty iterator, we need to clean up resources
        // here in order to prevent memory leaks.
        cleanupResources();
      }
      return new AbstractScalaRowIterator<UnsafeRow>() {
        private final int numFields = schema.length();
        // Reused across next() calls; points into the sorter's current record.
        private UnsafeRow row = new UnsafeRow();
        @Override
        public boolean hasNext() {
          return sortedIterator.hasNext();
        }
        @Override
        public UnsafeRow next() {
          try {
            sortedIterator.loadNext();
            row.pointTo(
              sortedIterator.getBaseObject(),
              sortedIterator.getBaseOffset(),
              numFields,
              sortedIterator.getRecordLength());
            if (!hasNext()) {
              UnsafeRow copy = row.copy(); // so that we don't have dangling pointers to freed page
              row = null; // so that we don't keep references to the base object
              cleanupResources();
              return copy;
            } else {
              return row;
            }
          } catch (IOException e) {
            cleanupResources();
            // Scala iterators don't declare any checked exceptions, so we need to use this hack
            // to re-throw the exception:
            Platform.throwException(e);
          }
          throw new RuntimeException("Exception should have been re-thrown in next()");
        };
      };
    } catch (IOException e) {
      cleanupResources();
      throw e;
    }
  }
  /** Inserts every input row, then returns the sorted iterator over them. */
  public Iterator<UnsafeRow> sort(Iterator<UnsafeRow> inputIterator) throws IOException {
    while (inputIterator.hasNext()) {
      insertRow(inputIterator.next());
    }
    return sort();
  }
  /**
   * Return true if UnsafeExternalRowSorter can sort rows with the given schema, false otherwise.
   */
  public static boolean supportsSchema(StructType schema) {
    return UnsafeProjection.canSupport(schema);
  }
  /**
   * Full-row tiebreak comparator: wraps raw record bytes in reusable UnsafeRows
   * and delegates to the Catalyst row ordering.
   */
  private static final class RowComparator extends RecordComparator {
    private final Ordering<InternalRow> ordering;
    private final int numFields;
    private final UnsafeRow row1 = new UnsafeRow();
    private final UnsafeRow row2 = new UnsafeRow();
    public RowComparator(Ordering<InternalRow> ordering, int numFields) {
      this.numFields = numFields;
      this.ordering = ordering;
    }
    @Override
    public int compare(Object baseObj1, long baseOff1, Object baseObj2, long baseOff2) {
      // TODO: Why are the sizes -1?
      row1.pointTo(baseObj1, baseOff1, numFields, -1);
      row2.pointTo(baseObj2, baseOff2, numFields, -1);
      return ordering.compare(row1, row2);
    }
  }
}
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dyno;
import com.google.inject.Singleton;
import com.netflix.conductor.core.config.Configuration;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.dyno.connectionpool.exception.DynoException;
import com.netflix.dyno.jedis.DynoJedisClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.inject.Inject;
import redis.clients.jedis.JedisCommands;
import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.params.sortedset.ZAddParams;
/**
*
* @author Viren Proxy for the Dynomite client
*/
@Singleton
public class DynoProxy {
private static Logger logger = LoggerFactory.getLogger(DynoProxy.class);
protected DiscoveryClient dc;
protected JedisCommands dynoClient;
@Inject
public DynoProxy(JedisCommands dynoClient) {
this.dynoClient = dynoClient;
}
@Deprecated
/**
* @deprecated The preferred method of construction for this use case is via DynoProxyDiscoveryProvider.
*/
public DynoProxy(DiscoveryClient dc, Configuration config) throws DynoException, InterruptedException, ExecutionException {
this.dc = dc;
String cluster = config.getProperty("workflow.dynomite.cluster", null);
String applicationName = config.getAppId();
this.dynoClient = new DynoJedisClient.Builder()
.withApplicationName(applicationName)
.withDynomiteClusterName(cluster)
.withDiscoveryClient(dc)
.build();
}
public Set<String> zrange(String key, long start, long end) {
return dynoClient.zrange(key, start, end);
}
public Set<Tuple> zrangeByScoreWithScores(String key, double maxScore, int count) {
return dynoClient.zrangeByScoreWithScores(key, 0, maxScore, 0, count);
}
public Set<String> zrangeByScore(String key, double maxScore, int count) {
return dynoClient.zrangeByScore(key, 0, maxScore, 0, count);
}
public Set<String> zrangeByScore(String key, double minScore, double maxScore, int count) {
return dynoClient.zrangeByScore(key, minScore, maxScore, 0, count);
}
public ScanResult<Tuple> zscan(String key, int cursor) {
return dynoClient.zscan(key, "" + cursor);
}
public String get(String key) {
return dynoClient.get(key);
}
public Long zcard(String key) {
return dynoClient.zcard(key);
}
public Long del(String key) {
return dynoClient.del(key);
}
public Long zrem(String key, String member) {
return dynoClient.zrem(key, member);
}
public String set(String key, String value) {
String retVal = dynoClient.set(key, value);
return retVal;
}
public Long setnx(String key, String value) {
Long added = dynoClient.setnx(key, value);
return added;
}
public Long zadd(String key, double score, String member) {
Long retVal = dynoClient.zadd(key, score, member);
return retVal;
}
public Long zaddnx(String key, double score, String member) {
ZAddParams params = ZAddParams.zAddParams().nx();
Long retVal = dynoClient.zadd(key, score, member, params);
return retVal;
}
public Long hset(String key, String field, String value) {
Long retVal = dynoClient.hset(key, field, value);
return retVal;
}
public Long hsetnx(String key, String field, String value) {
Long retVal = dynoClient.hsetnx(key, field, value);
return retVal;
}
public Long hlen(String key) {
Long retVal = dynoClient.hlen(key);
return retVal;
}
public String hget(String key, String field) {
return dynoClient.hget(key, field);
}
public Optional<String> optionalHget(String key, String field) {
return Optional.ofNullable(dynoClient.hget(key, field));
}
public Map<String, String> hscan(String key, int count) {
Map<String, String> m = new HashMap<>();
int cursor = 0;
do {
ScanResult<Entry<String, String>> sr = dynoClient.hscan(key, "" + cursor);
cursor = Integer.parseInt(sr.getStringCursor());
for (Entry<String, String> r : sr.getResult()) {
m.put(r.getKey(), r.getValue());
}
if (m.size() > count) {
break;
}
} while (cursor > 0);
return m;
}
public Map<String, String> hgetAll(String key) {
Map<String, String> m = new HashMap<>();
JedisCommands dyno = dynoClient;
int cursor = 0;
do {
ScanResult<Entry<String, String>> sr = dyno.hscan(key, "" + cursor);
cursor = Integer.parseInt(sr.getStringCursor());
for (Entry<String, String> r : sr.getResult()) {
m.put(r.getKey(), r.getValue());
}
} while (cursor > 0);
return m;
}
public List<String> hvals(String key) {
logger.trace("hvals {}", key);
return dynoClient.hvals(key);
}
public Set<String> hkeys(String key) {
logger.trace("hkeys {}", key);
JedisCommands client = dynoClient;
Set<String> keys = new HashSet<>();
int cursor = 0;
do {
ScanResult<Entry<String, String>> sr = client.hscan(key, "" + cursor);
cursor = Integer.parseInt(sr.getStringCursor());
List<Entry<String, String>> result = sr.getResult();
for (Entry<String, String> e : result) {
keys.add(e.getKey());
}
} while (cursor > 0);
return keys;
}
public Long hdel(String key, String... fields) {
logger.trace("hdel {} {}", key, fields[0]);
return dynoClient.hdel(key, fields);
}
public Long expire(String key, int seconds) {
return dynoClient.expire(key, seconds);
}
public Boolean hexists(String key, String field) {
return dynoClient.hexists(key, field);
}
public Long sadd(String key, String value) {
logger.trace("sadd {} {}", key, value);
Long retVal = dynoClient.sadd(key, value);
return retVal;
}
public Long srem(String key, String member) {
logger.trace("srem {} {}", key, member);
Long retVal = dynoClient.srem(key, member);
return retVal;
}
public boolean sismember(String key, String member) {
return dynoClient.sismember(key, member);
}
public Set<String> smembers(String key) {
logger.trace("smembers {}", key);
JedisCommands client = dynoClient;
Set<String> r = new HashSet<>();
int cursor = 0;
ScanParams sp = new ScanParams();
sp.count(50);
do {
ScanResult<String> sr = client.sscan(key, "" + cursor, sp);
cursor = Integer.parseInt(sr.getStringCursor());
r.addAll(sr.getResult());
} while (cursor > 0);
return r;
}
public Long scard(String key) {
return dynoClient.scard(key);
}
}
|
package com.baeldung.spring.spel.examples;
import com.baeldung.spring.spel.entity.Car;
import com.baeldung.spring.spel.entity.CarPark;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.SpelParserConfiguration;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
/**
 * Demonstrates SpEL auto-grow: writing to {@code cars[0]} on a fresh
 * {@link CarPark} makes the parser create and grow the list automatically.
 */
public class SpelParser {
    public static void main(String[] args) {
        // autoGrowNullReferences=true, autoGrowCollections=true: missing
        // intermediate objects and list slots are created on demand.
        SpelParserConfiguration config = new SpelParserConfiguration(true, true);
        ExpressionParser expressionParser = new SpelExpressionParser(config);

        Car car = new Car();
        car.setMake("Good manufacturer");
        car.setModel("Model 3");
        car.setYearOfProduction(2014);

        CarPark carPark = new CarPark();
        StandardEvaluationContext context = new StandardEvaluationContext(carPark);

        // Assigning to cars[0] forces SpEL to instantiate the list and slot 0.
        expressionParser.parseExpression("cars[0]").setValue(context, car);

        Car result = carPark.getCars().get(0);
        System.out.println(result);
    }
}
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.errorreporting.v1beta1.stub;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.devtools.clouderrorreporting.v1beta1.ErrorGroup;
import com.google.devtools.clouderrorreporting.v1beta1.GetGroupRequest;
import com.google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* Settings class to configure an instance of {@link ErrorGroupServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (clouderrorreporting.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getGroup to 30 seconds:
*
* <pre>
* <code>
* ErrorGroupServiceStubSettings.Builder errorGroupServiceSettingsBuilder =
* ErrorGroupServiceStubSettings.newBuilder();
* errorGroupServiceSettingsBuilder.getGroupSettings().getRetrySettings().toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30));
* ErrorGroupServiceStubSettings errorGroupServiceSettings = errorGroupServiceSettingsBuilder.build();
* </code>
* </pre>
*/
@Generated("by gapic-generator")
@BetaApi
public class ErrorGroupServiceStubSettings extends StubSettings<ErrorGroupServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Per-RPC call settings, frozen at construction time from the builder.
  private final UnaryCallSettings<GetGroupRequest, ErrorGroup> getGroupSettings;
  private final UnaryCallSettings<UpdateGroupRequest, ErrorGroup> updateGroupSettings;

  /** Returns the object with the settings used for calls to getGroup. */
  public UnaryCallSettings<GetGroupRequest, ErrorGroup> getGroupSettings() {
    return getGroupSettings;
  }

  /** Returns the object with the settings used for calls to updateGroup. */
  public UnaryCallSettings<UpdateGroupRequest, ErrorGroup> updateGroupSettings() {
    return updateGroupSettings;
  }

  /**
   * Creates the transport-specific stub for these settings. Only the gRPC
   * transport is supported; any other configured transport fails fast.
   *
   * @throws IOException if the underlying channel cannot be created
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ErrorGroupServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcErrorGroupServiceStub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "clouderrorreporting.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ErrorGroupServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected ErrorGroupServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    // Snapshot the builder's mutable settings into immutable call settings.
    getGroupSettings = settingsBuilder.getGroupSettings().build();
    updateGroupSettings = settingsBuilder.updateGroupSettings().build();
  }

  /** Builder for ErrorGroupServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<ErrorGroupServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetGroupRequest, ErrorGroup> getGroupSettings;
    private final UnaryCallSettings.Builder<UpdateGroupRequest, ErrorGroup> updateGroupSettings;

    // Named retryable-status-code sets shared by initDefaults below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry-parameter presets shared by initDefaults below.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("default", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      getGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getGroupSettings, updateGroupSettings);
      initDefaults(this);
    }

    // Builds a builder preloaded with the service defaults (endpoint,
    // credentials, headers, transport) plus the retry defaults.
    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    // Applies the shared retryable-code and retry-parameter presets to every RPC.
    private static Builder initDefaults(Builder builder) {
      builder
          .getGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .updateGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));
      return builder;
    }

    // Copy constructor used by toBuilder().
    protected Builder(ErrorGroupServiceStubSettings settings) {
      super(settings);
      getGroupSettings = settings.getGroupSettings.toBuilder();
      updateGroupSettings = settings.updateGroupSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getGroupSettings, updateGroupSettings);
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getGroup. */
    public UnaryCallSettings.Builder<GetGroupRequest, ErrorGroup> getGroupSettings() {
      return getGroupSettings;
    }

    /** Returns the builder for the settings used for calls to updateGroup. */
    public UnaryCallSettings.Builder<UpdateGroupRequest, ErrorGroup> updateGroupSettings() {
      return updateGroupSettings;
    }

    @Override
    public ErrorGroupServiceStubSettings build() throws IOException {
      return new ErrorGroupServiceStubSettings(this);
    }
  }
}
|
/**
This file is part of a jTEM project.
All jTEM projects are licensed under the FreeBSD license
or 2-clause BSD license (see http://www.opensource.org/licenses/bsd-license.php).
Copyright (c) 2002-2010, Technische Universität Berlin, jTEM
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
OF SUCH DAMAGE.
**/
package de.jtem.halfedgetools.algorithm.adaptivesubdivision.interpolators;
import de.jtem.halfedgetools.algorithm.adaptivesubdivision.util.Calculator;
import de.jtem.halfedgetools.jreality.node.JREdge;
import de.jtem.halfedgetools.jreality.node.JRFace;
import de.jtem.halfedgetools.jreality.node.JRVertex;
import de.jtem.halfedgetools.symmetry.decoration.HasLengthSquared;
/** Linear interpolation for determining the coordinates of
 * new vertices at the midpoint of an edge.
 * @author Bernd Gonska
 */
public class LinearEdgeSubdivAlg extends Interpolator {
	/** Linear interpolation: assigns the given vertex the
	 * coordinates of the midpoint of the edge.
	 *
	 * FIX: the doc comment was previously placed after the
	 * {@code @Override} annotation, where the javadoc tool does not
	 * attach it to the method; it now precedes the annotation.
	 *
	 * @param e the edge whose midpoint is computed
	 * @param v the new vertex that receives the midpoint position
	 */
	@Override
	public <
		V extends JRVertex<V,E,F>,
		E extends JREdge<V,E,F> & HasLengthSquared,
		F extends JRFace<V,E,F>
	> void interpolate(E e, V v) {
		// Midpoint = 0.5 * target + 0.5 * start.
		v.position = Calculator.linearCombination(
				0.5, e.getTargetVertex().position,
				0.5, e.getStartVertex().position);
	}
}
|
/**
*
*/
package com.innovanon.simon.util.stream;
import java.util.Collection;
import java.util.Optional;
import java.util.function.Supplier;
/**
 * A {@link Supplier} decorator that delegates to a wrapped supplier and
 * converts configured exceptions into an empty {@link Optional} via the
 * {@code ErrorEater} base class.
 *
 * @author gouldbergstein
 */
public class ErrorEatingSupplier<T> extends ErrorEater<T, Throwable> implements Supplier<Optional<T>> {
	// FIX: made final — the delegate is assigned once in the constructor
	// and never reassigned.
	private final Supplier<T> supplier;

	/**
	 * @param exceptionClassesToCatch exception types to swallow
	 * @param rememberErrors whether swallowed errors are recorded
	 * @param supplier the delegate supplier whose failures are eaten
	 */
	public ErrorEatingSupplier(Collection<Class<? extends Throwable>> exceptionClassesToCatch, boolean rememberErrors,
			Supplier<T> supplier) {
		super(exceptionClassesToCatch, rememberErrors);
		this.supplier = supplier;
	}

	/**
	 * Invokes the delegate; returns its value, or an empty Optional if a
	 * configured exception was thrown (behavior defined by {@code ErrorEater.apply}).
	 */
	@Override
	public Optional<T> get() {
		return super.apply(supplier::get, x -> x);
	}
}
|
package net.loganford.noideaengine.shape.intersectionHandlers;
import net.loganford.noideaengine.shape.Circle;
import net.loganford.noideaengine.shape.Point;
import net.loganford.noideaengine.utils.math.MathUtils;
/**
 * Intersection test between a point and a circle: the point intersects when
 * its squared distance to the circle's center does not exceed the squared radius.
 */
public class PointCircleIntersectionHandler implements IntersectionHandler<Point, Circle> {
    @Override
    public boolean intersects(Point point, Circle circle) {
        // Compare squared quantities to avoid a square root.
        float radius = circle.getRadius();
        var distanceSquared =
                MathUtils.distanceSqr(circle.getX(), circle.getY(), point.getX(), point.getY());
        return distanceSquared <= radius * radius;
    }
}
|
package systems.reformcloud.reformcloud2.signs.application.packets.in;
import systems.reformcloud.reformcloud2.executor.api.common.network.channel.PacketSender;
import systems.reformcloud.reformcloud2.executor.api.common.network.channel.handler.NetworkHandler;
import systems.reformcloud.reformcloud2.executor.api.common.network.channel.manager.DefaultChannelManager;
import systems.reformcloud.reformcloud2.executor.api.common.network.packet.Packet;
import systems.reformcloud.reformcloud2.signs.application.ReformCloudApplication;
import systems.reformcloud.reformcloud2.signs.application.packets.out.PacketOutDeleteSign;
import systems.reformcloud.reformcloud2.signs.packets.PacketUtil;
import systems.reformcloud.reformcloud2.signs.util.sign.CloudSign;
import javax.annotation.Nonnull;
import java.util.function.Consumer;
/**
 * Handles the incoming "delete sign" packet: removes the sign from the
 * application state and broadcasts the deletion to all connected senders.
 */
public class PacketInDeleteSign implements NetworkHandler {
    @Override
    public int getHandlingPacketID() {
        return PacketUtil.SIGN_BUS + 1;
    }

    @Override
    public void handlePacket(@Nonnull PacketSender packetSender, @Nonnull Packet packet, @Nonnull Consumer<Packet> responses) {
        final CloudSign cloudSign = packet.content().get("sign", CloudSign.TYPE);
        // Silently ignore malformed packets that carry no sign payload.
        if (cloudSign != null) {
            ReformCloudApplication.delete(cloudSign);
            DefaultChannelManager.INSTANCE.getAllSender()
                    .forEach(sender -> sender.sendPacket(new PacketOutDeleteSign(cloudSign)));
        }
    }
}
|
package in.handyman.raven.lib;
import com.opencsv.CSVWriter;
import com.opencsv.ResultSetHelperService;
import com.zaxxer.hikari.HikariDataSource;
import in.handyman.raven.exception.HandymanException;
import in.handyman.raven.lambda.access.ResourceAccess;
import in.handyman.raven.lambda.action.ActionExecution;
import in.handyman.raven.lambda.action.IActionExecution;
import in.handyman.raven.lambda.doa.audit.ActionExecutionAudit;
import in.handyman.raven.lib.model.ExportCsv;
import in.handyman.raven.util.ExceptionUtil;
import org.apache.commons.text.StringEscapeUtils;
import org.jdbi.v3.core.Jdbi;
import org.slf4j.Logger;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
/**
* Auto Generated By Raven
*/
@ActionExecution(
actionName = ExportCsvAction.EXPORT_CSV
)
public class ExportCsvAction implements IActionExecution {
protected static final String EXPORT_CSV = "ExportCsv";
private final ActionExecutionAudit actionExecutionAudit;
private final Logger log;
private final ExportCsv exportCsv;
private final Marker aMarker;
public ExportCsvAction(final ActionExecutionAudit actionExecutionAudit, final Logger log, final Object exportCsv) {
this.exportCsv = (ExportCsv) exportCsv;
this.actionExecutionAudit = actionExecutionAudit;
this.log = log;
this.aMarker = MarkerFactory.getMarker(" ExportCsv:" + this.exportCsv.getName());
}
@Override
public void execute() throws Exception {
var dbSrc = exportCsv.getSource();
var executionSource = exportCsv.getExecutionSource();
var location = exportCsv.getTargetLocation();
var execStmt = StringEscapeUtils.unescapeJava(exportCsv.getStmt());
log.info(aMarker, "Starting the execution with id {} dbSrc {} execStmt {} location {} executionSource {}", actionExecutionAudit.getActionId(), dbSrc, execStmt, location, executionSource);
var sql = new HashMap<String, String>();
if (dbSrc != null) {
final HikariDataSource hikariDataSource = ResourceAccess.rdbmsConn(dbSrc);
log.info(aMarker, "Created a hikariDataSource for rdbms connection src {}", dbSrc);
try (var con = hikariDataSource.getConnection()) {
con.setAutoCommit(false);
try (var stmt = con.createStatement()) {
log.info(aMarker, "Executing sql statement {}", execStmt);
var result = stmt.executeQuery(execStmt);
while (result.next()) {
sql.put(result.getString(1), result.getString(2));
}
}
con.commit();
} catch (SQLException ex) {
log.error(aMarker, "Stopping execution, General Error executing sql for {} with for campaign {}", execStmt, ex);
log.info(aMarker, execStmt + ".exception", ExceptionUtil.toString(ex));
throw new HandymanException("Process failed", ex);
}
}
if (!sql.isEmpty()) {
getSqlExecution(sql, executionSource, location);
} else if (execStmt != null) {
getSqlExecution(Collections.singletonMap("output_" + UUID.randomUUID(), execStmt), executionSource, location);
} else {
log.error("Sql stmts are empty");
}
}
private void getSqlExecution(final Map<String, String> sql, final String executionSource, final String location) {
final int payloadSize = Optional.ofNullable(exportCsv.getPayloadSize()).map(Integer::valueOf).orElse(10000);
log.info("payload {}", payloadSize);
for (var execStmt : sql.entrySet()) {
final HikariDataSource hikariDataSource = ResourceAccess.rdbmsConn(executionSource);
log.info(aMarker, "Created a hikariDataSource for rdbms connection src {}", executionSource);
try (var con = hikariDataSource.getConnection()) {
try (var stmt = con.createStatement()) {
log.info(aMarker, "Executing sql statement {}", execStmt);
var result = stmt.executeQuery(execStmt.getValue());
result.setFetchSize(payloadSize);
performWriteCsv(result, location + execStmt.getKey() + ".csv");
}
} catch (SQLException ex) {
log.error(aMarker, "Stopping execution, General Error executing sql for {} with for campaign {}", execStmt, ex);
log.info(aMarker, execStmt.getValue() + ".exception", ExceptionUtil.toString(ex));
throw new HandymanException("Process failed", ex);
}
}
}
private void performWriteCsv(final ResultSet resultSet, final String fileName) {
var file = new File(fileName);
if (file.getParentFile().exists()) {
log.info(aMarker, "Filename {} has been built", fileName);
try (var writer = new CSVWriter(new FileWriter(fileName))) {
var resultService = new ResultSetHelperService();
resultService.setDateFormat("yyyy-MM-dd");
resultService.setDateTimeFormat("yyyy-MM-dd HH:mm:ss");
writer.setResultService(resultService);
writer.writeAll(resultSet, true);
} catch (IOException | SQLException ex) {
throw new HandymanException(ex.toString());
}
} else {
throw new HandymanException("Directory not found");
}
}
@Override
public boolean executeIf() throws Exception {
return exportCsv.getCondition();
}
}
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.compositor.layouts;
import android.content.Context;
import android.view.View;
import org.chromium.chrome.browser.compositor.TitleCache;
import org.chromium.chrome.browser.fullscreen.ChromeFullscreenManager;
import org.chromium.content.browser.ContentViewCore;
/**
 * This is the minimal interface of the host view from the layout side.
 * Any of these functions may be called on the GL thread.
 */
public interface LayoutManagerHost {
    /**
     * If set to true, the time it takes for ContentView to become ready will be
     * logged to the screen.
     */
    static final boolean LOG_CHROME_VIEW_SHOW_TIME = false;

    /**
     * Requests a refresh of the visuals.
     */
    void requestRender();

    /**
     * @return The Android context of the host view.
     */
    Context getContext();

    /**
     * @see View#getWidth()
     * @return The width of the host view.
     */
    int getWidth();

    /**
     * @see View#getHeight()
     * @return The height of the host view.
     */
    int getHeight();

    /**
     * @return The associated {@link LayoutRenderHost} to be used from the GL Thread.
     */
    LayoutRenderHost getLayoutRenderHost();

    /**
     * Sets the visibility of the content overlays.
     * @param show True if the content overlays should be shown.
     */
    void setContentOverlayVisibility(boolean show);

    /**
     * @return The {@link TitleCache} to use to store title bitmaps.
     */
    TitleCache getTitleCache();

    /**
     * @return The manager in charge of handling fullscreen changes.
     */
    ChromeFullscreenManager getFullscreenManager();

    /**
     * Called when a new {@link ContentViewCore} has been added to the list of current visible
     * {@link ContentViewCore}s. While this {@link ContentViewCore} might not be drawing its
     * contents at this time, it needs to be sized appropriately.
     * @param content The {@link ContentViewCore} that was added to the current list of visible
     *                {@link ContentViewCore}s.
     */
    void onContentViewCoreAdded(ContentViewCore content);

    /**
     * Called when the currently visible content has been changed.
     */
    void onContentChanged();

    /**
     * Hides the keyboard if it was opened for the ContentView.
     * @param postHideTask A task to run after the keyboard is done hiding and the view's
     *                     layout has been updated. If the keyboard was not shown, the task will run
     *                     immediately.
     */
    void hideKeyboard(Runnable postHideTask);
}
|
package module0655_internal.a;
import java.util.logging.*;
import java.util.zip.*;
import javax.annotation.processing.*;
/**
 * Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut
 * labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
 * Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
 *
 * @see javax.net.ssl.ExtendedSSLSession
 * @see javax.rmi.ssl.SslRMIClientSocketFactory
 * @see java.awt.datatransfer.DataFlavor
 */
@SuppressWarnings("all")
public abstract class Foo0<M> implements module0655_internal.a.IFoo0<M> {
    // Unused placeholder fields; presumably generated fixture padding — TODO confirm.
    java.beans.beancontext.BeanContext f0 = null;
    java.io.File f1 = null;
    java.rmi.Remote f2 = null;

    // Wrapped element; getName() dereferences it, so it must be set before use.
    public M element;

    // Shared singleton instance; never assigned in this file — presumably set
    // by external code. getInstance() returns null until then.
    public static Foo0 instance;

    public static Foo0 getInstance() {
        return instance;
    }

    public static <T> T create(java.util.List<T> input) {
        return null;
    }

    // NPEs if element is null; NOTE(review): no null guard — confirm intended.
    public String getName() {
        return element.toString();
    }

    // Intentionally a no-op.
    public void setName(String string) {
        return;
    }

    public M get() {
        return element;
    }

    // Unchecked cast; suppressed by the class-level @SuppressWarnings("all").
    public void set(Object element) {
        this.element = (M)element;
    }

    // NOTE(review): getInstance().call() re-enters this same method unless a
    // subclass overrides call() — likely infinite recursion; confirm generated intent.
    public M call() throws Exception {
        return (M)getInstance().call();
    }
}
|
package org.devgateway.ocds.persistence.mongo;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyDescription;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import java.util.ArrayList;
import java.util.List;
/**
 * Record
 * <p>
 * An OCDS record: the full history (releases) plus compiled and versioned
 * views of a single Open Contracting process.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
        "ocid",
        "releases",
        "compiledRelease",
        "versionedRelease"
})
public class Record {
    /**
     * Open Contracting ID
     * <p>
     * A unique identifier that identifies the unique Open Contracting Process. For more information see:
     * http://standard.open-contracting.org/latest/en/getting_started/contracting_process/
     * (Required)
     */
    @JsonProperty("ocid")
    @JsonPropertyDescription("A unique identifier that identifies the unique Open Contracting Process. For more "
            + "information see: http://standard.open-contracting.org/latest/en/getting_started/contracting_process/")
    private String ocid;

    /**
     * Releases
     * <p>
     * An array of linking identifiers or releases
     * (Required)
     */
    @JsonProperty("releases")
    @JsonPropertyDescription("An array of linking identifiers or releases")
    private List<Release> releases = new ArrayList<Release>();

    /**
     * Schema for an Open Contracting Release
     * <p>
     */
    @JsonProperty("compiledRelease")
    private Release compiledRelease;

    /**
     * Schema for a compiled, versioned Open Contracting Release.
     * <p>
     */
    @JsonProperty("versionedRelease")
    private Release versionedRelease;

    /**
     * Open Contracting ID
     * <p>
     * A unique identifier that identifies the unique Open Contracting Process. For more information see:
     * http://standard.open-contracting.org/latest/en/getting_started/contracting_process/
     * (Required)
     *
     * @return the open contracting process identifier
     */
    @JsonProperty("ocid")
    public String getOcid() {
        return ocid;
    }

    /**
     * Open Contracting ID
     * <p>
     * A unique identifier that identifies the unique Open Contracting Process. For more information see:
     * http://standard.open-contracting.org/latest/en/getting_started/contracting_process/
     * (Required)
     *
     * @param ocid the open contracting process identifier
     */
    @JsonProperty("ocid")
    public void setOcid(String ocid) {
        this.ocid = ocid;
    }

    /**
     * Releases
     * <p>
     * An array of linking identifiers or releases
     * (Required)
     *
     * @return the releases of this record
     */
    @JsonProperty("releases")
    public List<Release> getReleases() {
        return releases;
    }

    /**
     * Releases
     * <p>
     * An array of linking identifiers or releases
     * (Required)
     * @param releases the releases of this record
     */
    @JsonProperty("releases")
    public void setReleases(List<Release> releases) {
        this.releases = releases;
    }

    /**
     * Schema for an Open Contracting Release
     * <p>
     *
     * @return the compiled release
     */
    @JsonProperty("compiledRelease")
    public Release getCompiledRelease() {
        return compiledRelease;
    }

    /**
     * Schema for an Open Contracting Release
     * <p>
     *
     * @param compiledRelease the compiled release
     */
    @JsonProperty("compiledRelease")
    public void setCompiledRelease(Release compiledRelease) {
        this.compiledRelease = compiledRelease;
    }

    /**
     * Schema for a compiled, versioned Open Contracting Release.
     * <p>
     *
     * @return the versioned release
     */
    @JsonProperty("versionedRelease")
    public Release getVersionedRelease() {
        return versionedRelease;
    }

    /**
     * Schema for a compiled, versioned Open Contracting Release.
     * <p>
     *
     * @param versionedRelease the versioned release
     */
    @JsonProperty("versionedRelease")
    public void setVersionedRelease(Release versionedRelease) {
        this.versionedRelease = versionedRelease;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("ocid", ocid)
                .append("releases", releases)
                .append("compiledRelease", compiledRelease)
                .append("versionedRelease", versionedRelease)
                .toString();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(compiledRelease)
                .append(versionedRelease)
                .append(ocid)
                .append(releases)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Record)) {
            return false;
        }
        Record rhs = ((Record) other);
        return new EqualsBuilder().append(compiledRelease, rhs.compiledRelease)
                .append(versionedRelease, rhs.versionedRelease)
                .append(ocid, rhs.ocid)
                .append(releases, rhs.releases)
                .isEquals();
    }
}
|
package net.java.jsf.extjs.support.misc;
/**
 * Mark exception class with this interface
 * to indicate that the error must not be reported.
 *
 * @author anton.baukin@gmail.com
 */
public interface HiddenError
{}
|
package org.perfectable.introspection.query;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.perfectable.introspection.SimpleReflections.getConstructor;
import static org.perfectable.introspection.SimpleReflections.getField;
import static org.perfectable.introspection.SimpleReflections.getMethod;
public final class SubjectReflection {
static final String MESSAGE_METHOD_CALLED = "Test method should not be called";
static final String MESSAGE_CONSTRUCTOR_CALLED = "Test constructor should not be called";
public static final Field STRING_FIELD =
getField(Subject.class, "stringField");
// --- Field handles on Subject ---------------------------------------------
// Resolved eagerly so a missing/renamed member fails at class-initialization
// time rather than inside an individual test.
public static final Field OBJECT_FIELD =
getField(Subject.class, "objectField");
public static final Field STATIC_FIELD =
getField(Subject.class, "staticField");
public static final Field PROTECTED_NUMBER_FIELD =
getField(Subject.class, "protectedNumberField");
public static final Field NESTED_INTERFACE_FIELD =
getField(Subject.NestedInterface.class, "STATIC_FIELD");
// --- Constructor handles on Subject ---------------------------------------
public static final Constructor<Subject> CONSTRUCTOR_NO_ARGS =
getConstructor(Subject.class);
public static final Constructor<Subject> CONSTRUCTOR_STRING =
getConstructor(Subject.class, String.class);
public static final Constructor<Subject> CONSTRUCTOR_ANNOTATED =
getConstructor(Subject.class, Number.class);
public static final Constructor<Subject> CONSTRUCTOR_PROTECTED =
getConstructor(Subject.class, Object.class, Object.class);
// --- Method handles on Subject, grouped by result/arity/visibility ---------
public static final Method NO_RESULT_NO_ARGUMENT =
getMethod(Subject.class, "noResultNoArgument"); // SUPPRESS MultipleStringLiterals
public static final Method NO_RESULT_SINGLE_ARGUMENT =
getMethod(Subject.class, "noResultSingleArgument", Object.class);
public static final Method NO_RESULT_PRIMITIVE_ARGUMENT =
getMethod(Subject.class, "noResultPrimitiveArgument", int.class);
public static final Method NO_RESULT_STRING_ARGUMENT =
getMethod(Subject.class, "noResultStringArgument", String.class);
public static final Method NO_RESULT_DOUBLE_ARGUMENT =
getMethod(Subject.class, "noResultDoubleArgument", Object.class, Object.class);
public static final Method NO_RESULT_STRING_NUMBER_ARGUMENT =
getMethod(Subject.class, "noResultStringNumberArgument", String.class, Number.class);
public static final Method NO_RESULT_TRIPLE_ARGUMENT =
getMethod(Subject.class, "noResultTripleArgument",
Object.class, Object.class, Object.class);
public static final Method NO_RESULT_VARARGS_ARGUMENT =
getMethod(Subject.class, "noResultVarargsArgument", Object[].class);
public static final Method NO_RESULT_VARARGS_DOUBLE_ARGUMENT =
getMethod(Subject.class, "noResultVarargsDoubleArgument", Object.class, Object.class, Object[].class);
public static final Method WITH_RESULT_NO_ARGUMENT =
getMethod(Subject.class, "withResultNoArgument");
public static final Method WITH_RESULT_SINGLE_ARGUMENT =
getMethod(Subject.class, "withResultSingleArgument", Object.class);
public static final Method WITH_RESULT_DOUBLE_ARGUMENT =
getMethod(Subject.class, "withResultDoubleArgument", Object.class, Object.class);
public static final Method WITH_RESULT_TRIPLE_ARGUMENT =
getMethod(Subject.class, "withResultTripleArgument",
Object.class, Object.class, Object.class);
public static final Method WITH_RESULT_VARARGS_ARGUMENT =
getMethod(Subject.class, "withResultVarargsArgument", Object[].class);
public static final Method METHOD_PROTECTED =
getMethod(Subject.class, "methodProtected"); // SUPPRESS MultipleStringLiterals
public static final Method METHOD_PACKAGE =
getMethod(Subject.class, "methodPackage"); // SUPPRESS MultipleStringLiterals
public static final Method METHOD_PRIVATE =
getMethod(Subject.class, "methodPrivate"); // SUPPRESS MultipleStringLiterals
public static final Method ANNOTATED_METHOD =
getMethod(Subject.class, "annotatedMethod");
public static final Method TO_STRING =
getMethod(Subject.class, "toString");
// --- Annotation instances read off Subject ---------------------------------
// getAnnotation/getDeclaredAnnotationsByType are declared nullable, hence the
// Checker Framework suppressions on these assignments.
@SuppressWarnings("assignment.type.incompatible")
static final Subject.Special INSTANCE_SPECIAL =
Subject.class.getAnnotation(Subject.Special.class);
@SuppressWarnings("assignment.type.incompatible")
static final Subject.OtherAnnotation INSTANCE_OTHER =
Subject.class.getAnnotation(Subject.OtherAnnotation.class);
@SuppressWarnings("assignment.type.incompatible")
static final Subject.RepetitionContainer REPETITION_CONTAINER =
Subject.class.getAnnotation(Subject.RepetitionContainer.class);
@SuppressWarnings("assignment.type.incompatible")
static final Subject.Repetition[] REPETITIONS =
Subject.class.getDeclaredAnnotationsByType(Subject.Repetition.class);
@SuppressWarnings("assignment.type.incompatible")
static final Nullable INSTANCE_NULLABLE =
SubjectReflection.ANNOTATED_METHOD.getAnnotation(Nullable.class);
// Constants-only holder; never instantiated.
private SubjectReflection() {
// utility class
}
/**
 * Reflective handles to members declared on {@code Subject.Extension},
 * mirroring the corresponding constants above for the subclass.
 */
public static final class Extension {
public static final Method NO_RESULT_NO_ARGUMENT =
getMethod(Subject.Extension.class, "noResultNoArgument"); // SUPPRESS MultipleStringLiterals
public static final Method METHOD_PROTECTED =
getMethod(Subject.Extension.class, "methodProtected"); // SUPPRESS MultipleStringLiterals
public static final Method METHOD_PACKAGE =
getMethod(Subject.Extension.class, "methodPackage"); // SUPPRESS MultipleStringLiterals
public static final Method METHOD_PRIVATE =
getMethod(Subject.Extension.class, "methodPrivate"); // SUPPRESS MultipleStringLiterals
// Constants-only holder; never instantiated.
private Extension() {
// utility class
}
}
}
|
package org.spongycastle.crypto.params;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.digests.SkeinDigest;
import org.spongycastle.crypto.digests.SkeinEngine;
import org.spongycastle.crypto.macs.SkeinMac;
import org.spongycastle.util.Integers;
/**
 * Parameters for the Skein hash function - a series of byte[] strings identified by integer tags.
 * <p>
 * Parameterised Skein can be used for:
 * <ul>
 * <li>MAC generation, by providing a {@link SkeinParameters.Builder#setKey(byte[]) key}.</li>
 * <li>Randomised hashing, by providing a {@link SkeinParameters.Builder#setNonce(byte[]) nonce}.</li>
 * <li>A hash function for digital signatures, associating a
 * {@link SkeinParameters.Builder#setPublicKey(byte[]) public key} with the message digest.</li>
 * <li>A key derivation function, by providing a
 * {@link SkeinParameters.Builder#setKeyIdentifier(byte[]) key identifier}.</li>
 * <li>Personalised hashing, by providing a
 * {@link SkeinParameters.Builder#setPersonalisation(Date, String, String) recommended format} or
 * {@link SkeinParameters.Builder#setPersonalisation(byte[]) arbitrary} personalisation string.</li>
 * </ul>
 *
 * @see SkeinEngine
 * @see SkeinDigest
 * @see SkeinMac
 */
public class SkeinParameters
    implements CipherParameters
{
    /**
     * The parameter type for a secret key, supporting MAC or KDF functions: {@value
     * #PARAM_TYPE_KEY}.
     */
    public static final int PARAM_TYPE_KEY = 0;

    /**
     * The parameter type for the Skein configuration block: {@value #PARAM_TYPE_CONFIG}.
     */
    public static final int PARAM_TYPE_CONFIG = 4;

    /**
     * The parameter type for a personalisation string: {@value #PARAM_TYPE_PERSONALISATION}.
     */
    public static final int PARAM_TYPE_PERSONALISATION = 8;

    /**
     * The parameter type for a public key: {@value #PARAM_TYPE_PUBLIC_KEY}.
     */
    public static final int PARAM_TYPE_PUBLIC_KEY = 12;

    /**
     * The parameter type for a key identifier string: {@value #PARAM_TYPE_KEY_IDENTIFIER}.
     */
    public static final int PARAM_TYPE_KEY_IDENTIFIER = 16;

    /**
     * The parameter type for a nonce: {@value #PARAM_TYPE_NONCE}.
     */
    public static final int PARAM_TYPE_NONCE = 20;

    /**
     * The parameter type for the message: {@value #PARAM_TYPE_MESSAGE}.
     */
    public static final int PARAM_TYPE_MESSAGE = 48;

    /**
     * The parameter type for the output transformation: {@value #PARAM_TYPE_OUTPUT}.
     */
    public static final int PARAM_TYPE_OUTPUT = 63;

    // Map of parameter type (Integer) to parameter value (byte[]). Kept as a raw
    // Hashtable to preserve the public getParameters() contract.
    private final Hashtable parameters;

    public SkeinParameters()
    {
        this(new Hashtable());
    }

    private SkeinParameters(final Hashtable parameters)
    {
        this.parameters = parameters;
    }

    /**
     * Obtains a map of type (Integer) to value (byte[]) for the parameters tracked in this object.
     *
     * @return the backing table of parameters (not a copy).
     */
    public Hashtable getParameters()
    {
        return parameters;
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_KEY key parameter}, or <code>null</code> if not
     * set.
     *
     * @return the key bytes, or <code>null</code>.
     */
    public byte[] getKey()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_KEY));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_PERSONALISATION personalisation parameter}, or
     * <code>null</code> if not set.
     *
     * @return the personalisation bytes, or <code>null</code>.
     */
    public byte[] getPersonalisation()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_PERSONALISATION));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_PUBLIC_KEY public key parameter}, or
     * <code>null</code> if not set.
     *
     * @return the public key bytes, or <code>null</code>.
     */
    public byte[] getPublicKey()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_PUBLIC_KEY));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_KEY_IDENTIFIER key identifier parameter}, or
     * <code>null</code> if not set.
     *
     * @return the key identifier bytes, or <code>null</code>.
     */
    public byte[] getKeyIdentifier()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_KEY_IDENTIFIER));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_NONCE nonce parameter}, or <code>null</code> if
     * not set.
     *
     * @return the nonce bytes, or <code>null</code>.
     */
    public byte[] getNonce()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_NONCE));
    }

    /**
     * A builder for {@link SkeinParameters}.
     */
    public static class Builder
    {
        private Hashtable parameters = new Hashtable();

        public Builder()
        {
        }

        /**
         * Creates a builder pre-populated with a copy of the entries in the given table.
         */
        public Builder(Hashtable paramsMap)
        {
            Enumeration keys = paramsMap.keys();
            while (keys.hasMoreElements())
            {
                Integer key = (Integer)keys.nextElement();
                parameters.put(key, paramsMap.get(key));
            }
        }

        /**
         * Creates a builder pre-populated with a copy of the given parameters.
         */
        public Builder(SkeinParameters params)
        {
            Enumeration keys = params.parameters.keys();
            while (keys.hasMoreElements())
            {
                Integer key = (Integer)keys.nextElement();
                parameters.put(key, params.parameters.get(key));
            }
        }

        /**
         * Sets a parameter to apply to the Skein hash function.<br>
         * Parameter types must be in the range 0,5..62, and cannot use the value {@value
         * SkeinParameters#PARAM_TYPE_MESSAGE} (reserved for message body).
         * <p>
         * Parameters with type &lt; {@value SkeinParameters#PARAM_TYPE_MESSAGE} are processed before
         * the message content, parameters with type &gt; {@value SkeinParameters#PARAM_TYPE_MESSAGE}
         * are processed after the message and prior to output.
         *
         * @param type the type of the parameter, in the range 5..62.
         * @param value the byte sequence of the parameter.
         * @return the builder, for method chaining.
         * @throws IllegalArgumentException if the value is null or the type is out of range.
         */
        public Builder set(int type, byte[] value)
        {
            if (value == null)
            {
                throw new IllegalArgumentException("Parameter value must not be null.");
            }
            if ((type != PARAM_TYPE_KEY)
                && (type <= PARAM_TYPE_CONFIG || type >= PARAM_TYPE_OUTPUT || type == PARAM_TYPE_MESSAGE))
            {
                throw new IllegalArgumentException("Parameter types must be in the range 0,5..47,49..62.");
            }
            // Defensive: PARAM_TYPE_CONFIG (4) is already rejected by the range check
            // above; kept as an explicit guard in case the range constants change.
            if (type == PARAM_TYPE_CONFIG)
            {
                throw new IllegalArgumentException("Parameter type " + PARAM_TYPE_CONFIG
                    + " is reserved for internal use.");
            }
            this.parameters.put(Integers.valueOf(type), value);
            return this;
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_KEY} parameter.
         */
        public Builder setKey(byte[] key)
        {
            return set(PARAM_TYPE_KEY, key);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_PERSONALISATION} parameter.
         */
        public Builder setPersonalisation(byte[] personalisation)
        {
            return set(PARAM_TYPE_PERSONALISATION, personalisation);
        }

        /**
         * Implements the recommended personalisation format for Skein defined in Section 4.11 of
         * the Skein 1.3 specification.
         * <p>
         * The format is <code>YYYYMMDD email@address distinguisher</code>, encoded to a byte
         * sequence using UTF-8 encoding.
         *
         * @param date the date the personalised application of the Skein was defined.
         * @param emailAddress the email address of the creation of the personalised application.
         * @param distinguisher an arbitrary personalisation string distinguishing the application.
         * @return the builder, for method chaining.
         */
        public Builder setPersonalisation(Date date, String emailAddress, String distinguisher)
        {
            try
            {
                final ByteArrayOutputStream bout = new ByteArrayOutputStream();
                final OutputStreamWriter out = new OutputStreamWriter(bout, "UTF-8");
                // BUGFIX: the pattern was "YYYYMMDD"; in SimpleDateFormat 'Y' is
                // week-based year and 'D' is day-of-year, so e.g. 5 Jan 2015 rendered
                // as "2015005". "yyyyMMdd" gives the calendar year/month/day encoding
                // the Skein 1.3 spec intends.
                final DateFormat format = new SimpleDateFormat("yyyyMMdd");
                out.write(format.format(date));
                out.write(" ");
                out.write(emailAddress);
                out.write(" ");
                out.write(distinguisher);
                out.close();
                return set(PARAM_TYPE_PERSONALISATION, bout.toByteArray());
            }
            catch (IOException e)
            {
                // ByteArrayOutputStream never actually throws; rethrow as unchecked.
                throw new IllegalStateException("Byte I/O failed: " + e);
            }
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_PUBLIC_KEY} parameter.
         */
        public Builder setPublicKey(byte[] publicKey)
        {
            return set(PARAM_TYPE_PUBLIC_KEY, publicKey);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_KEY_IDENTIFIER} parameter.
         */
        public Builder setKeyIdentifier(byte[] keyIdentifier)
        {
            return set(PARAM_TYPE_KEY_IDENTIFIER, keyIdentifier);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_NONCE} parameter.
         */
        public Builder setNonce(byte[] nonce)
        {
            return set(PARAM_TYPE_NONCE, nonce);
        }

        /**
         * Constructs a new {@link SkeinParameters} instance with the parameters provided to this
         * builder.
         */
        public SkeinParameters build()
        {
            return new SkeinParameters(parameters);
        }
    }
}
|
package com.github.aha.poc.junit5.intro;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import com.github.aha.poc.junit.person.PersonService;
public class PrivateTest {

	@Test
	@DisplayName("check hidden private constructor")
	public void testPrivateConstructor() {
		// Class#getConstructor exposes only public constructors, so reflective
		// instantiation of a class whose constructor is private must fail with
		// NoSuchMethodException before newInstance is ever reached.
		final Class<PersonService> subject = PersonService.class;
		assertThrows(NoSuchMethodException.class, () -> subject.getConstructor().newInstance());
	}
}
|
package com.github.catalin.cretu.conference.api.event;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * API view model for a conference event.
 * <p>
 * NOTE: field order is significant — Lombok's {@code @AllArgsConstructor} and
 * {@code @Builder} derive their parameter order from it.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EventView {
// Identifier of the event.
private Long id;
// Time span of the event.
private PeriodView period;
private String title;
private String description;
private String location;
// View of the event's author/presenter.
private AuthorView author;
}
|
package ru.job4j.list;
import org.junit.Test;
import java.util.Iterator;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
public class DynamicListTest {
// Fresh list per test instance (JUnit 4 creates a new test-class instance per test).
private DynamicList<Object> list = new DynamicList<>();
@Test
public void testAdd() {
list.add(5);
list.add(8);
// get(1) must return the second element added.
assertThat(list.get(1), is(8));
}
@Test
public void testIterator() {
list.add(1);
list.add(2);
list.add(3);
list.add(4);
Iterator<Object> iterator = list.iterator();
assertThat(iterator.hasNext(), is(true));
assertThat(iterator.next(), is(1));
assertThat(iterator.hasNext(), is(true));
assertThat(iterator.next(), is(2));
assertThat(iterator.hasNext(), is(true));
assertThat(iterator.next(), is(3));
assertThat(iterator.hasNext(), is(true));
assertThat(iterator.next(), is(4));
// NOTE(review): after consuming all four added elements a standard Iterator
// would report hasNext() == false here. Asserting true and then calling a
// fifth next() suggests the iterator walks the backing array's spare
// capacity — confirm against DynamicList's iterator implementation.
assertThat(iterator.hasNext(), is(true));
iterator.next();
assertThat(iterator.hasNext(), is(false));
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.client;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.client.embedded.EmbeddedTimelineService;
import org.apache.hudi.client.utils.TransactionUtils;
import org.apache.hudi.common.engine.HoodieEngineContext;
import org.apache.hudi.common.fs.HoodieWrapperFileSystem;
import org.apache.hudi.common.metrics.Registry;
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieFailedWritesCleaningPolicy;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.model.HoodieReplaceCommitMetadata;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.model.TableServiceType;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTableVersion;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieClusteringException;
import org.apache.hudi.exception.HoodieCommitException;
import org.apache.hudi.exception.HoodieMetadataException;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.index.SparkHoodieIndex;
import org.apache.hudi.metadata.HoodieTableMetadataWriter;
import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter;
import org.apache.hudi.metrics.DistributedRegistry;
import org.apache.hudi.table.BulkInsertPartitioner;
import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.table.MarkerFiles;
import org.apache.hudi.table.action.HoodieWriteMetadata;
import org.apache.hudi.table.action.compact.SparkCompactHelpers;
import org.apache.hudi.table.upgrade.AbstractUpgradeDowngrade;
import org.apache.hudi.table.upgrade.SparkUpgradeDowngrade;
import com.codahale.metrics.Timer;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
@SuppressWarnings("checkstyle:LineLength")
public class SparkRDDWriteClient<T extends HoodieRecordPayload> extends
AbstractHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
private static final Logger LOG = LogManager.getLogger(SparkRDDWriteClient.class);
/**
 * Creates a write client from the engine context and write config.
 */
public SparkRDDWriteClient(HoodieEngineContext context, HoodieWriteConfig clientConfig) {
super(context, clientConfig);
}
/**
 * @deprecated the {@code rollbackPending} flag is ignored by this constructor;
 * it is not forwarded to the superclass.
 */
@Deprecated
public SparkRDDWriteClient(HoodieEngineContext context, HoodieWriteConfig writeConfig, boolean rollbackPending) {
super(context, writeConfig);
}
/**
 * @deprecated the {@code rollbackPending} flag is ignored by this constructor;
 * it is not forwarded to the superclass.
 */
@Deprecated
public SparkRDDWriteClient(HoodieEngineContext context, HoodieWriteConfig writeConfig, boolean rollbackPending,
Option<EmbeddedTimelineService> timelineService) {
super(context, writeConfig, timelineService);
}
/**
 * Creates a write client that reuses the given embedded timeline service, if present.
 */
public SparkRDDWriteClient(HoodieEngineContext context, HoodieWriteConfig writeConfig,
Option<EmbeddedTimelineService> timelineService) {
super(context, writeConfig, timelineService);
}
/**
 * Registers the Hudi write-path classes with Spark's Kryo serializer.
 *
 * @param conf instance of SparkConf
 * @return the same SparkConf, for call chaining
 */
public static SparkConf registerClasses(SparkConf conf) {
  Class<?>[] hudiClasses = {HoodieWriteConfig.class, HoodieRecord.class, HoodieKey.class};
  conf.registerKryoClasses(hudiClasses);
  return conf;
}
@Override
protected HoodieIndex<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> createIndex(HoodieWriteConfig writeConfig) {
// NOTE(review): the index is built from the client-level 'config' field and the
// 'writeConfig' argument is ignored — confirm this is intentional.
return SparkHoodieIndex.createIndex(config);
}
/**
 * Completes the changes performed at {@code instantTime} with the specified
 * commit action type, first collecting per-file write statistics from the RDD.
 *
 * @return whether the commit succeeded, as reported by {@code commitStats}
 */
@Override
public boolean commit(String instantTime, JavaRDD<WriteStatus> writeStatuses, Option<Map<String, String>> extraMetadata,
                      String commitActionType, Map<String, List<String>> partitionToReplacedFileIds) {
  // Bring the per-file stats back to the driver; the commit itself is driver-side.
  final List<HoodieWriteStat> stats = writeStatuses.map(WriteStatus::getStat).collect();
  return commitStats(instantTime, stats, extraMetadata, commitActionType, partitionToReplacedFileIds);
}
@Override
protected HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> createTable(HoodieWriteConfig config,
Configuration hadoopConf) {
// NOTE(review): 'hadoopConf' is unused; the table is created from the given
// config plus the engine context held by this client — confirm intentional.
return HoodieSparkTable.create(config, context);
}
/**
 * Filters out records that already exist in the table, by tagging the input
 * against the index and keeping only the records with no known location.
 */
@Override
public JavaRDD<HoodieRecord<T>> filterExists(JavaRDD<HoodieRecord<T>> hoodieRecords) {
  // Create a Hoodie table which encapsulates the commits and files visible.
  final HoodieSparkTable<T> hoodieTable = HoodieSparkTable.create(config, context);
  final Timer.Context indexTimerCtx = metrics.getIndexCtx();
  JavaRDD<HoodieRecord<T>> taggedRecords = getIndex().tagLocation(hoodieRecords, context, hoodieTable);
  metrics.updateIndexMetrics(LOOKUP_STR, metrics.getDurationInMs(indexTimerCtx == null ? 0L : indexTimerCtx.stop()));
  // Records the index could not locate are the ones that do not exist yet.
  return taggedRecords.filter(record -> !record.isCurrentLocationKnown());
}
/**
 * Main API to run bootstrap to hudi.
 */
@Override
public void bootstrap(Option<Map<String, String>> extraMetadata) {
// Runs the table's bootstrap action against the reserved metadata-bootstrap
// instant, initializing the write context as an UPSERT.
getTableAndInitCtx(WriteOperationType.UPSERT, HoodieTimeline.METADATA_BOOTSTRAP_INSTANT_TS).bootstrap(context, extraMetadata);
}
/**
 * Upserts the given records into the table at {@code instantTime}: validates the
 * write schema, runs pre-write hooks, executes the upsert, records index-lookup
 * metrics when available, and applies the standard post-write handling.
 */
@Override
public JavaRDD<WriteStatus> upsert(JavaRDD<HoodieRecord<T>> records, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.UPSERT, instantTime);
  hoodieTable.validateUpsertSchema();
  preWrite(instantTime, WriteOperationType.UPSERT, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata = hoodieTable.upsert(context, instantTime, records);
  // Report how long the index lookup took, when the action measured it.
  if (writeMetadata.getIndexLookupDuration().isPresent()) {
    metrics.updateIndexMetrics(LOOKUP_STR, writeMetadata.getIndexLookupDuration().get().toMillis());
  }
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Upserts records that have already been de-duplicated and tagged with locations
 * ("prepped"), skipping the index lookup performed by {@link #upsert}.
 */
@Override
public JavaRDD<WriteStatus> upsertPreppedRecords(JavaRDD<HoodieRecord<T>> preppedRecords, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.UPSERT_PREPPED, instantTime);
  hoodieTable.validateUpsertSchema();
  preWrite(instantTime, WriteOperationType.UPSERT_PREPPED, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata =
      hoodieTable.upsertPrepped(context, instantTime, preppedRecords);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Inserts the given records into the table at {@code instantTime} without
 * merging against existing records.
 */
@Override
public JavaRDD<WriteStatus> insert(JavaRDD<HoodieRecord<T>> records, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.INSERT, instantTime);
  hoodieTable.validateInsertSchema();
  preWrite(instantTime, WriteOperationType.INSERT, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata = hoodieTable.insert(context, instantTime, records);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Inserts records that have already been prepared (de-duplicated/tagged),
 * skipping the preparation work done by {@link #insert}.
 */
@Override
public JavaRDD<WriteStatus> insertPreppedRecords(JavaRDD<HoodieRecord<T>> preppedRecords, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.INSERT_PREPPED, instantTime);
  hoodieTable.validateInsertSchema();
  preWrite(instantTime, WriteOperationType.INSERT_PREPPED, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata =
      hoodieTable.insertPrepped(context, instantTime, preppedRecords);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Removes all existing records from the partitions affected and inserts the given HoodieRecords, into the table.
 * @param records HoodieRecords to insert
 * @param instantTime Instant time of the commit
 * @return JavaRDD[WriteStatus] - RDD of WriteStatus to inspect errors and counts
 */
public HoodieWriteResult insertOverwrite(JavaRDD<HoodieRecord<T>> records, final String instantTime) {
// NOTE(review): raw HoodieTable/HoodieWriteMetadata types are used here, unlike
// the other write paths — presumably to sidestep generics friction with
// HoodieWriteResult; confirm before parameterizing.
HoodieTable table = getTableAndInitCtx(WriteOperationType.INSERT_OVERWRITE, instantTime);
table.validateInsertSchema();
preWrite(instantTime, WriteOperationType.INSERT_OVERWRITE, table.getMetaClient());
HoodieWriteMetadata result = table.insertOverwrite(context, instantTime, records);
// Return the replaced file ids alongside the write statuses so callers can see
// which files this overwrite superseded.
return new HoodieWriteResult(postWrite(result, instantTime, table), result.getPartitionToReplaceFileIds());
}
/**
 * Removes all existing records of the Hoodie table and inserts the given HoodieRecords, into the table.
 * @param records HoodieRecords to insert
 * @param instantTime Instant time of the commit
 * @return JavaRDD[WriteStatus] - RDD of WriteStatus to inspect errors and counts
 */
public HoodieWriteResult insertOverwriteTable(JavaRDD<HoodieRecord<T>> records, final String instantTime) {
// NOTE(review): raw types mirror insertOverwrite above; see note there.
HoodieTable table = getTableAndInitCtx(WriteOperationType.INSERT_OVERWRITE_TABLE, instantTime);
table.validateInsertSchema();
preWrite(instantTime, WriteOperationType.INSERT_OVERWRITE_TABLE, table.getMetaClient());
HoodieWriteMetadata result = table.insertOverwriteTable(context, instantTime, records);
return new HoodieWriteResult(postWrite(result, instantTime, table), result.getPartitionToReplaceFileIds());
}
@Override
public JavaRDD<WriteStatus> bulkInsert(JavaRDD<HoodieRecord<T>> records, String instantTime) {
// Convenience overload: bulk insert with no user-defined partitioner.
return bulkInsert(records, instantTime, Option.empty());
}
/**
 * Bulk-inserts the given records at {@code instantTime}, optionally using a
 * caller-supplied partitioner to lay out the input before writing.
 */
@Override
public JavaRDD<WriteStatus> bulkInsert(JavaRDD<HoodieRecord<T>> records, String instantTime, Option<BulkInsertPartitioner<JavaRDD<HoodieRecord<T>>>> userDefinedBulkInsertPartitioner) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.BULK_INSERT, instantTime);
  hoodieTable.validateInsertSchema();
  preWrite(instantTime, WriteOperationType.BULK_INSERT, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata =
      hoodieTable.bulkInsert(context, instantTime, records, userDefinedBulkInsertPartitioner);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Bulk-inserts records that have already been prepared, optionally using a
 * caller-supplied partitioner to lay out the input before writing.
 */
@Override
public JavaRDD<WriteStatus> bulkInsertPreppedRecords(JavaRDD<HoodieRecord<T>> preppedRecords, String instantTime, Option<BulkInsertPartitioner<JavaRDD<HoodieRecord<T>>>> bulkInsertPartitioner) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.BULK_INSERT_PREPPED, instantTime);
  hoodieTable.validateInsertSchema();
  preWrite(instantTime, WriteOperationType.BULK_INSERT_PREPPED, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata =
      hoodieTable.bulkInsertPrepped(context, instantTime, preppedRecords, bulkInsertPartitioner);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Deletes the records identified by the given keys at {@code instantTime}.
 * Deletes are keyed, so no write-schema validation is required.
 */
@Override
public JavaRDD<WriteStatus> delete(JavaRDD<HoodieKey> keys, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.DELETE, instantTime);
  preWrite(instantTime, WriteOperationType.DELETE, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata = hoodieTable.delete(context, instantTime, keys);
  return postWrite(writeMetadata, instantTime, hoodieTable);
}
/**
 * Drops all records in the given partitions as a replace-style operation.
 *
 * @param partitions partition paths to delete
 * @param instantTime instant time of the commit
 * @return write result carrying the statuses plus the replaced file ids per partition
 */
public HoodieWriteResult deletePartitions(List<String> partitions, String instantTime) {
  final HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable =
      getTableAndInitCtx(WriteOperationType.DELETE_PARTITION, instantTime);
  preWrite(instantTime, WriteOperationType.DELETE_PARTITION, hoodieTable.getMetaClient());
  final HoodieWriteMetadata<JavaRDD<WriteStatus>> writeMetadata =
      hoodieTable.deletePartitions(context, instantTime, partitions);
  return new HoodieWriteResult(postWrite(writeMetadata, instantTime, hoodieTable),
      writeMetadata.getPartitionToReplaceFileIds());
}
/**
 * Common post-write handling: emits index/finalize metrics and, when the result
 * is already committed, runs the post-commit actions and commit metrics.
 *
 * @param result write metadata produced by the table action
 * @param instantTime instant the write ran at
 * @param hoodieTable table the write ran against
 * @return the RDD of per-file write statuses from the action
 */
@Override
protected JavaRDD<WriteStatus> postWrite(HoodieWriteMetadata<JavaRDD<WriteStatus>> result,
                                         String instantTime,
                                         HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> hoodieTable) {
  // BUGFIX: the guard previously checked getIndexLookupDuration() while reading
  // getIndexUpdateDuration().get(), which can fail (or mis-report) whenever only
  // the lookup duration is present. Guard and read the same value.
  if (result.getIndexUpdateDuration().isPresent()) {
    metrics.updateIndexMetrics(getOperationType().name(), result.getIndexUpdateDuration().get().toMillis());
  }
  if (result.isCommitted()) {
    // Perform post commit operations.
    if (result.getFinalizeDuration().isPresent()) {
      metrics.updateFinalizeWriteMetrics(result.getFinalizeDuration().get().toMillis(),
          result.getWriteStats().get().size());
    }
    postCommit(hoodieTable, result.getCommitMetadata().get(), instantTime, Option.empty());
    emitCommitMetrics(instantTime, result.getCommitMetadata().get(), hoodieTable.getMetaClient().getCommitActionType());
  }
  return result.getWriteStatuses();
}
/**
 * Builds commit metadata for a finished compaction from its write statuses and the
 * configured schema, merges in any caller-supplied extra metadata, then completes
 * the compaction on the timeline.
 */
@Override
public void commitCompaction(String compactionInstantTime, JavaRDD<WriteStatus> writeStatuses, Option<Map<String, String>> extraMetadata) throws IOException {
HoodieSparkTable<T> table = HoodieSparkTable.create(config, context);
HoodieCommitMetadata metadata = SparkCompactHelpers.newInstance().createCompactionMetadata(
table, compactionInstantTime, writeStatuses, config.getSchema());
// Fold caller-provided key/values into the commit metadata before completing.
extraMetadata.ifPresent(m -> m.forEach(metadata::addMetadata));
completeCompaction(metadata, writeStatuses, table, compactionInstantTime);
}
/**
 * Finishes an inflight compaction: collects write stats, finalizes the written
 * files, transitions the compaction instant to complete and reports commit metrics.
 */
@Override
protected void completeCompaction(HoodieCommitMetadata metadata, JavaRDD<WriteStatus> writeStatuses,
HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> table,
String compactionCommitTime) {
this.context.setJobStatus(this.getClass().getSimpleName(), "Collect compaction write status and commit compaction");
// Spark job: pull per-file stats back to the driver.
List<HoodieWriteStat> writeStats = writeStatuses.map(WriteStatus::getStat).collect();
finalizeWrite(table, compactionCommitTime, writeStats);
LOG.info("Committing Compaction " + compactionCommitTime + ". Finished with result " + metadata);
SparkCompactHelpers.newInstance().completeInflightCompaction(table, compactionCommitTime, metadata);
// compactionTimer is only set when compaction was started through this client.
if (compactionTimer != null) {
long durationInMs = metrics.getDurationInMs(compactionTimer.stop());
try {
metrics.updateCommitMetrics(HoodieActiveTimeline.COMMIT_FORMATTER.parse(compactionCommitTime).getTime(),
durationInMs, metadata, HoodieActiveTimeline.COMPACTION_ACTION);
} catch (ParseException e) {
throw new HoodieCommitException("Commit time is not of valid format. Failed to commit compaction "
+ config.getBasePath() + " at time " + compactionCommitTime, e);
}
}
LOG.info("Compacted successfully on commit " + compactionCommitTime);
}
/**
 * Runs the compaction scheduled at {@code compactionInstantTime}. Any inflight
 * attempt for the same instant is rolled back first, so the compaction restarts
 * from a clean state. When {@code shouldComplete} is set and the action produced
 * commit metadata, the compaction is committed inline.
 */
@Override
protected JavaRDD<WriteStatus> compact(String compactionInstantTime, boolean shouldComplete) {
HoodieSparkTable<T> table = HoodieSparkTable.create(config, context);
preWrite(compactionInstantTime, WriteOperationType.COMPACT, table.getMetaClient());
HoodieTimeline pendingCompactionTimeline = table.getActiveTimeline().filterPendingCompactionTimeline();
HoodieInstant inflightInstant = HoodieTimeline.getCompactionInflightInstant(compactionInstantTime);
if (pendingCompactionTimeline.containsInstant(inflightInstant)) {
// A previous attempt left this instant inflight — roll it back and reload
// the timeline so the retry sees a consistent view.
rollbackInflightCompaction(inflightInstant, table);
table.getMetaClient().reloadActiveTimeline();
}
compactionTimer = metrics.getCompactionCtx();
HoodieWriteMetadata<JavaRDD<WriteStatus>> compactionMetadata = table.compact(context, compactionInstantTime);
JavaRDD<WriteStatus> statuses = compactionMetadata.getWriteStatuses();
if (shouldComplete && compactionMetadata.getCommitMetadata().isPresent()) {
completeTableService(TableServiceType.COMPACT, compactionMetadata.getCommitMetadata().get(), statuses, table, compactionInstantTime);
}
return statuses;
}
/**
 * Runs the clustering scheduled at {@code clusteringInstant} (a replace-commit).
 * Any inflight attempt for the same instant is rolled back first. When
 * {@code shouldComplete} is set and commit metadata is present, the clustering
 * is committed inline.
 */
@Override
public HoodieWriteMetadata<JavaRDD<WriteStatus>> cluster(String clusteringInstant, boolean shouldComplete) {
HoodieSparkTable<T> table = HoodieSparkTable.create(config, context);
preWrite(clusteringInstant, WriteOperationType.CLUSTER, table.getMetaClient());
HoodieTimeline pendingClusteringTimeline = table.getActiveTimeline().filterPendingReplaceTimeline();
HoodieInstant inflightInstant = HoodieTimeline.getReplaceCommitInflightInstant(clusteringInstant);
if (pendingClusteringTimeline.containsInstant(inflightInstant)) {
// A previous attempt left this instant inflight — roll it back and reload
// the timeline so the retry sees a consistent view.
rollbackInflightClustering(inflightInstant, table);
table.getMetaClient().reloadActiveTimeline();
}
clusteringTimer = metrics.getClusteringCtx();
LOG.info("Starting clustering at " + clusteringInstant);
HoodieWriteMetadata<JavaRDD<WriteStatus>> clusteringMetadata = table.cluster(context, clusteringInstant);
JavaRDD<WriteStatus> statuses = clusteringMetadata.getWriteStatuses();
// TODO : Where is shouldComplete used ?
if (shouldComplete && clusteringMetadata.getCommitMetadata().isPresent()) {
completeTableService(TableServiceType.CLUSTER, clusteringMetadata.getCommitMetadata().get(), statuses, table, clusteringInstant);
}
return clusteringMetadata;
}
/**
 * Finishes an inflight clustering (replace-commit): verifies no file failed to
 * write, finalizes the written files, transitions the instant to complete,
 * cleans up marker files and reports commit metrics.
 */
private void completeClustering(HoodieReplaceCommitMetadata metadata, JavaRDD<WriteStatus> writeStatuses,
                                HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> table,
                                String clusteringCommitTime) {
  List<HoodieWriteStat> writeStats = writeStatuses.map(WriteStatus::getStat).collect();
  // Collect the failed file ids once, instead of running the error filter as two
  // separate Spark jobs (isEmpty + collect) like before.
  List<String> failedFileIds = writeStatuses.filter(WriteStatus::hasErrors).map(WriteStatus::getFileId).collect();
  if (!failedFileIds.isEmpty()) {
    throw new HoodieClusteringException("Clustering failed to write to files:" + failedFileIds);
  }
  finalizeWrite(table, clusteringCommitTime, writeStats);
  try {
    LOG.info("Committing Clustering " + clusteringCommitTime + ". Finished with result " + metadata);
    table.getActiveTimeline().transitionReplaceInflightToComplete(
        HoodieTimeline.getReplaceCommitInflightInstant(clusteringCommitTime),
        Option.of(metadata.toJsonString().getBytes(StandardCharsets.UTF_8)));
  } catch (IOException e) {
    throw new HoodieClusteringException("unable to transition clustering inflight to complete: " + clusteringCommitTime, e);
  }
  // Marker cleanup is best-effort; failures are swallowed ("quiet").
  new MarkerFiles(table, clusteringCommitTime).quietDeleteMarkerDir(context, config.getMarkersDeleteParallelism());
  // clusteringTimer is only set when clustering was started through this client.
  if (clusteringTimer != null) {
    long durationInMs = metrics.getDurationInMs(clusteringTimer.stop());
    try {
      metrics.updateCommitMetrics(HoodieActiveTimeline.COMMIT_FORMATTER.parse(clusteringCommitTime).getTime(),
          durationInMs, metadata, HoodieActiveTimeline.REPLACE_COMMIT_ACTION);
    } catch (ParseException e) {
      // BUGFIX: message previously said "compaction" — copy-paste from the
      // compaction path; this is the clustering commit.
      throw new HoodieCommitException("Commit time is not of valid format. Failed to commit clustering "
          + config.getBasePath() + " at time " + clusteringCommitTime, e);
    }
  }
  LOG.info("Clustering successfully on commit " + clusteringCommitTime);
}
/**
 * Resolves the table for a write: reloads the meta client, performs any required
 * table version upgrade/downgrade (inside a transaction when optimistic
 * concurrency control is enabled), validates table properties, and initializes
 * the write context/timers.
 */
@Override
protected HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> getTableAndInitCtx(WriteOperationType operationType, String instantTime) {
HoodieTableMetaClient metaClient = createMetaClient(true);
AbstractUpgradeDowngrade upgradeDowngrade = new SparkUpgradeDowngrade(metaClient, config, context);
if (upgradeDowngrade.needsUpgradeOrDowngrade(HoodieTableVersion.current())) {
if (config.getWriteConcurrencyMode().supportsOptimisticConcurrencyControl()) {
// Guard the upgrade with a transaction so concurrent writers cannot race it.
this.txnManager.beginTransaction();
try {
// Ensure no inflight commits by setting EAGER policy and explicitly cleaning all failed commits
this.rollbackFailedWrites(getInstantsToRollback(metaClient, HoodieFailedWritesCleaningPolicy.EAGER));
new SparkUpgradeDowngrade(metaClient, config, context)
.run(metaClient, HoodieTableVersion.current(), config, context, instantTime);
} finally {
this.txnManager.endTransaction();
}
} else {
upgradeDowngrade.run(metaClient, HoodieTableVersion.current(), config, context, instantTime);
}
}
metaClient.validateTableProperties(config.getProps(), operationType);
return getTableAndInitCtx(metaClient, operationType, instantTime);
}
// TODO : To enforce priority between table service and ingestion writer, use transactions here and invoke strategy
/**
 * Commits the results of an async table service run.
 *
 * @param tableServiceType the service being completed; only CLUSTER and
 *                         COMPACT are supported here
 * @param metadata         commit metadata produced by the service (must be a
 *                         HoodieReplaceCommitMetadata when CLUSTER)
 * @param writeStatuses    per-file write results of the service
 * @param table            the table the service ran against
 * @param commitInstant    the instant time being completed
 * @throws IllegalArgumentException for any other table service type
 */
private void completeTableService(TableServiceType tableServiceType, HoodieCommitMetadata metadata, JavaRDD<WriteStatus> writeStatuses,
HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> table,
String commitInstant) {
switch (tableServiceType) {
case CLUSTER:
// Cast is safe by contract: clustering always produces replace-commit metadata.
completeClustering((HoodieReplaceCommitMetadata) metadata, writeStatuses, table, commitInstant);
break;
case COMPACT:
completeCompaction(metadata, writeStatuses, table, commitInstant);
break;
default:
throw new IllegalArgumentException("This table service is not valid " + tableServiceType);
}
}
/**
 * Builds the HoodieSparkTable for this write and starts the timer that
 * matches the table's commit action type (commit vs. delta-commit).
 *
 * @param metaClient    meta client for the target table
 * @param operationType the write operation about to run; DELETE triggers a
 *                      write-schema adjustment first
 * @param instantTime   the instant time of the write (unused here, kept for
 *                      signature symmetry with the protected overload)
 * @return the created table
 */
private HoodieTable<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> getTableAndInitCtx(
    HoodieTableMetaClient metaClient, WriteOperationType operationType, String instantTime) {
  if (operationType == WriteOperationType.DELETE) {
    setWriteSchemaForDeletes(metaClient);
  }
  // Create a Hoodie table which encapsulates the commits and files visible.
  HoodieSparkTable<T> sparkTable = HoodieSparkTable.create(config, (HoodieSparkEngineContext) context, metaClient);
  boolean isCommitAction = sparkTable.getMetaClient().getCommitActionType().equals(HoodieTimeline.COMMIT_ACTION);
  writeTimer = isCommitAction ? metrics.getCommitCtx() : metrics.getDeltaCommitCtx();
  return sparkTable;
}
/**
 * Syncs the dataset to the metadata table.
 *
 * NOTE(review): the actual sync appears to happen as a side effect of
 * SparkHoodieBackedTableMetadataWriter.create(...) (and its close()); the
 * writer handle is only held so it is reliably closed — confirm against the
 * writer implementation before relying on this.
 */
@Override
public void syncTableMetadata() {
// Open up the metadata table again, for syncing
try (HoodieTableMetadataWriter writer = SparkHoodieBackedTableMetadataWriter.create(hadoopConf, config, context)) {
LOG.info("Successfully synced to metadata table");
} catch (Exception e) {
// Wrap everything (including IOException from close) in a domain exception.
throw new HoodieMetadataException("Error syncing to metadata table.", e);
}
}
/**
 * Pre-commit hook: checks the pending commit for write conflicts against
 * concurrent transactions before it is finalized.
 *
 * @param instantTime the instant being committed (unused directly; conflict
 *                    resolution works off the transaction owners)
 * @param metadata    the commit metadata of the pending commit
 */
@Override
protected void preCommit(String instantTime, HoodieCommitMetadata metadata) {
// Create a Hoodie table after startTxn which encapsulated the commits and files visible.
// Important to create this after the lock to ensure latest commits show up in the timeline without need for reload
HoodieTable table = createTable(config, hadoopConf);
TransactionUtils.resolveWriteConflictIfAny(table, this.txnManager.getCurrentTransactionOwner(),
Option.of(metadata), config, txnManager.getLastCompletedTransactionOwner());
}
/**
 * Registers the metrics registries for HoodieWrapperFileSystem — one for the
 * data path and one for the meta-folder path — when metrics are enabled.
 * Distributed (Spark-backed) registries are used when executor-side metrics
 * are also enabled, so that executors can report into them.
 */
@Override
protected void initWrapperFSMetrics() {
  if (!config.isMetricsOn()) {
    return;
  }
  JavaSparkContext jsc = ((HoodieSparkEngineContext) context).getJavaSparkContext();
  String fsRegistryName = HoodieWrapperFileSystem.class.getSimpleName();
  String metaFolderRegistryName = fsRegistryName + "MetaFolder";
  Registry fsRegistry;
  Registry metaFolderRegistry;
  if (config.isExecutorMetricsEnabled()) {
    // Distributed registries aggregate metrics reported from the executors.
    fsRegistry = Registry.getRegistry(fsRegistryName, DistributedRegistry.class.getName());
    ((DistributedRegistry) fsRegistry).register(jsc);
    metaFolderRegistry = Registry.getRegistry(metaFolderRegistryName, DistributedRegistry.class.getName());
    ((DistributedRegistry) metaFolderRegistry).register(jsc);
  } else {
    fsRegistry = Registry.getRegistry(fsRegistryName);
    metaFolderRegistry = Registry.getRegistry(metaFolderRegistryName);
  }
  HoodieWrapperFileSystem.setMetricsRegistry(fsRegistry, metaFolderRegistry);
}
/**
 * Unpersists every RDD currently cached by the underlying Spark context.
 */
@Override
protected void releaseResources() {
  JavaSparkContext jsc = ((HoodieSparkEngineContext) context).getJavaSparkContext();
  jsc.getPersistentRDDs().values().forEach(JavaRDD::unpersist);
}
}
|
package com.keybo.mina.server.protocol.core;
import org.apache.mina.core.session.IoSession;
/**
* @author: xiaojian
* @date: 16-7-5
* @time: 下午2:18
* @version: V1.0
*/
/**
 * Base class for protocol business handlers: stores the session/command
 * context, then runs the {@link #before()} hook, the concrete {@link #exec()}
 * logic, and the {@link #after()} hook.
 */
public abstract class AbstractBiz implements Biz {

    /** Session the current command arrived on; set on each doBiz call. */
    protected IoSession session;
    /** Command currently being processed; set on each doBiz call. */
    protected BaseCommand command;

    /** Concrete business logic, run with {@link #session} and {@link #command} populated. */
    protected abstract void exec() throws Exception;

    @Override
    public void doBiz(IoSession session, BaseCommand command) throws Exception {
        // Store the context FIRST so that before()/exec()/after() all see it.
        // (Previously before() ran before these assignments, so subclasses
        // overriding before() observed a stale or null session/command.)
        this.session = session;
        this.command = command;
        before(); // pre-processing hook
        try {
            exec();
        } finally {
            after(); // post-processing hook, runs even when exec() throws
        }
    }

    /** Hook invoked before {@link #exec()}; default is a no-op. */
    public void before() {
    }

    /** Hook invoked after {@link #exec()} (also on failure); default is a no-op. */
    public void after() {
    }
}
|
/*******************************************************************************
* Copyright 2013-2017 alladin-IT GmbH
* Copyright 2014-2016 SPECURE GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package at.alladin.rmbt.android.main;
import android.content.res.Resources;
import android.content.res.TypedArray;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import at.alladin.openrmbt.android.R;
public class MainMenuUtil {

    /**
     * Index of the statistics entry within the resource arrays; removed from
     * every menu list when open-data support is disabled.
     */
    public static final int MENU_STATISTICS_INDEX = 3;

    /**
     * Returns the localized main-menu titles, minus the statistics entry when
     * open data is disabled.
     *
     * @param res resources to read the string array from
     * @return mutable list of menu titles
     */
    public static List<String> getMenuTitles(final Resources res) {
        final List<String> menuList = new ArrayList<String>();
        Collections.addAll(menuList, res.getStringArray(R.array.navigation_main_titles));
        if (!FeatureConfig.USE_OPENDATA) {
            // int argument -> remove by index, not by value.
            menuList.remove(MENU_STATISTICS_INDEX);
        }
        return menuList;
    }

    /**
     * Returns the menu icon resource ids, minus the statistics entry when
     * open data is disabled.
     *
     * @param res resources to read the typed array from
     * @return mutable list of icon resource ids
     */
    public static List<Integer> getMenuIds(final Resources res) {
        final List<Integer> menuIds = new ArrayList<Integer>();
        final TypedArray iconIds = res.obtainTypedArray(R.array.navigation_main_icon_ids);
        try {
            for (int x = 0; x < iconIds.length(); x++) {
                menuIds.add(iconIds.getResourceId(x, 0));
            }
        } finally {
            // TypedArrays are pooled; failing to recycle leaks the entry.
            iconIds.recycle();
        }
        if (!FeatureConfig.USE_OPENDATA) {
            menuIds.remove(MENU_STATISTICS_INDEX);
        }
        return menuIds;
    }

    /**
     * Returns the menu action (menu item) resource ids, minus the statistics
     * entry when open data is disabled.
     *
     * @param res resources to read the typed array from
     * @return mutable list of menu action resource ids
     */
    public static List<Integer> getMenuActionIds(final Resources res) {
        final List<Integer> menuIds = new ArrayList<Integer>();
        final TypedArray iconIds = res.obtainTypedArray(R.array.navigation_main_menu_ids);
        try {
            for (int x = 0; x < iconIds.length(); x++) {
                menuIds.add(iconIds.getResourceId(x, 0));
            }
        } finally {
            // TypedArrays are pooled; failing to recycle leaks the entry.
            iconIds.recycle();
        }
        if (!FeatureConfig.USE_OPENDATA) {
            menuIds.remove(MENU_STATISTICS_INDEX);
        }
        return menuIds;
    }
}
|
/**
* Copyright (c) 2011, SOCIETIES Consortium (WATERFORD INSTITUTE OF TECHNOLOGY (TSSG), HERIOT-WATT UNIVERSITY (HWU), SOLUTA.NET
* (SN), GERMAN AEROSPACE CENTRE (Deutsches Zentrum fuer Luft- und Raumfahrt e.V.) (DLR), Zavod za varnostne tehnologije
* informacijske družbe in elektronsko poslovanje (SETCCE), INSTITUTE OF COMMUNICATION AND COMPUTER SYSTEMS (ICCS), LAKE
* COMMUNICATIONS (LAKE), INTEL PERFORMANCE LEARNING SOLUTIONS LTD (INTEL), PORTUGAL TELECOM INOVAÇÃO, SA (PTIN), IBM Corp.,
* INSTITUT TELECOM (ITSUD), AMITEC DIACHYTI EFYIA PLIROFORIKI KAI EPIKINONIES ETERIA PERIORISMENIS EFTHINIS (AMITEC), TELECOM
* ITALIA S.p.a.(TI), TRIALOG (TRIALOG), Stiftelsen SINTEF (SINTEF), NEC EUROPE LTD (NEC))
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.societies.api.identity.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.societies.api.context.model.CtxIdentifier;
import org.societies.api.context.model.MalformedCtxIdentifierException;
import org.societies.api.privacytrust.privacy.util.privacypolicy.ResourceUtils;
import org.societies.api.schema.identity.DataIdentifier;
import org.societies.api.schema.identity.DataIdentifierScheme;
import org.societies.api.schema.privacytrust.privacy.model.privacypolicy.Resource;
/**
* Utility method that helps manipulating DataIdentifier objects
*
* @author Olivier Maridat (Trialog)
*
*/
public class DataIdentifierUtils {
/**
 * Generate a list of URIs of the form scheme://ownerId/type/ from these data ids.
 * @param dataIds List of data identifiers
 * @return List of URI strings representing these data identifiers, or
 * {@code null} when the input is {@code null} or empty (callers must handle
 * the null return)
 */
public static List<String> toUriString(List<DataIdentifier> dataIds) {
if (null == dataIds || dataIds.size() <= 0) {
return null;
}
List<String> dataIdsString = new ArrayList<String>();
for(DataIdentifier dataId : dataIds) {
dataIdsString.add(toUriString(dataId));
}
return dataIdsString;
}
/**
 * Generate a URI of the form scheme://ownerId/type/ for one data id.
 * @param dataId the data identifier to render
 * @return its URI string; context identifiers delegate to their own rendering
 */
public static String toUriString(DataIdentifier dataId)
{
// CtxIdentifier already knows how to render itself as a URI.
if (dataId instanceof CtxIdentifier) {
return ((CtxIdentifier) dataId).toUriString();
}
return toUriString(dataId.getScheme(), dataId.getOwnerId(), dataId.getType());
}
/**
 * Generate a URI of the form scheme://ownerId/type/.
 * Null parts are rendered as empty segments, and the result always carries a
 * trailing '/'.
 * @param scheme the identifier scheme (may be null)
 * @param ownerId the owner id (may be null)
 * @param dataType the data type (may be null)
 * @return the assembled URI string
 */
public static String toUriString(DataIdentifierScheme scheme, String ownerId, String dataType) {
StringBuilder str = new StringBuilder("");
str.append((scheme != null ? scheme.value()+"://" : "://"));
str.append((ownerId != null ? ownerId+"/" : "/"));
str.append((dataType != null ? dataType+"/" : "/"));
return str.toString();
}
/**
 * Generate a URI of the form scheme:///type/ (empty owner segment).
 * @param scheme the identifier scheme
 * @param dataType the data type
 * @return the assembled URI string
 */
public static String toUriString(DataIdentifierScheme scheme, String dataType) {
return toUriString(scheme, "", dataType);
}
/**
 * Equality of two data identifiers, compared via their URI renderings.
 * @return true when both are non-null, of the exact same class, and render
 * to the same URI string
 */
public static boolean equal(DataIdentifier o1, Object o2) {
// -- Verify reference equality
if (o1 == o2) { return true; }
if (o2 == null) { return false; }
if (o1 == null) { return false; }
if (o1.getClass() != o2.getClass()) { return false; }
// -- Verify obj type
DataIdentifier ro2 = (DataIdentifier) o2;
String uri1 = DataIdentifierUtils.toUriString(o1);
String uri2 = DataIdentifierUtils.toUriString(ro2);
return null != uri1 && uri1.equals(uri2);
}
/**
 * Equality between a data identifier and a privacy-policy Resource, compared
 * via their URI renderings.
 */
public static boolean equal(DataIdentifier o1, Resource o2) {
// -- Verify reference equality
if (o2 == null) { return false; }
if (o1 == null) { return false; }
// -- Verify obj type
String uri1 = DataIdentifierUtils.toUriString(o1);
String uri2 = ResourceUtils.getDataIdUri(o2);
return null != uri1 && uri1.equals(uri2);
}
/**
 * scheme + type are equals?
 * NOTE(review): assumes DataTypeFactory.getType(id1) is non-null for non-null
 * ids — confirm, otherwise this can throw NullPointerException.
 */
public static boolean hasSameType(DataIdentifier id1, DataIdentifier id2) {
if (null == id1 || null == id2) {
return false;
}
// Scheme equal?
if (!DataIdentifierSchemeUtils.equal(DataTypeFactory.getScheme(id1), DataTypeFactory.getScheme(id2))) {
return false;
}
// Type equal?
String type1 = DataTypeFactory.getType(id1);
String type2 = DataTypeFactory.getType(id2);
return type1.equals(type2);
}
/**
 * scheme + type are equals, or id1 type is a parent type of id2 type?
 */
public static boolean isParentOrSameType(DataIdentifier id1, DataIdentifier id2) {
if (null == id1 || null == id2) {
return false;
}
// Scheme equal?
if (!DataIdentifierSchemeUtils.equal(DataTypeFactory.getScheme(id1), DataTypeFactory.getScheme(id2))) {
return false;
}
// Type equal?
String type1 = DataTypeFactory.getType(id1);
String type2 = DataTypeFactory.getType(id2);
// Lookable types of type1 include type1 itself and its sub-types.
Set<String> subTypes1 = (new DataTypeUtils()).getLookableDataTypes(type1);
return subTypes1.contains(type2);
}
/**
 * To sort a list of data ids by their parent type
 * E.g. Ids of types NAME_FIRST (leaf), NAME_LAST (leaf), ACTION (root and leaf) will be sorted as: NAME -> NAME_FIRST, NAME_LAST ; ACTION -> ACTION
 * E.g. Ids of types NAME (root not leaf), NAME_FIRST (leaf), NAME_LAST (leaf), ACTION (root and leaf) will be sorted as: NAME -> NAME_FIRST, NAME_LAST ; ACTION -> ACTION
 * E.g. Ids of types NAME (root not leaf), ACTION (root and leaf) will be sorted as: NAME -> null ; ACTION -> ACTION
 * @param dataIds List of data ids
 * @return A map of parent types and their related data id (or this parent type if it is also a leaf),
 * or {@code null} when the input is {@code null} or empty
 */
public static Map<String, Set<DataIdentifier>> sortByParent(Set<DataIdentifier> dataIds) {
if (null == dataIds || dataIds.size() <= 0) {
return null;
}
// -- Create the map
Map<String, Set<DataIdentifier>> sorted = new HashMap<String, Set<DataIdentifier>>();
DataTypeUtils dataTypeUtils = new DataTypeUtils();
for(DataIdentifier dataId : dataIds) {
// Retrieve parent type
String dataTypeParent = dataTypeUtils.getParent(dataId.getType());
Set<DataIdentifier> dataTypeGroup = null;
// Parent type
if (null == dataTypeParent) {
dataTypeParent = dataId.getType();
// Parent & leaf
if (dataTypeUtils.isLeaf(dataId.getType())) {
dataTypeGroup = new HashSet<DataIdentifier>();
dataTypeGroup.add(dataId);
}
// Parent with children
else {
// May legitimately map the parent to null when no leaf ids were seen
// for it yet — see the "NAME -> null" example in the javadoc.
dataTypeGroup = sorted.get(dataTypeParent);
}
}
// Child
else {
dataTypeGroup = sorted.get(dataTypeParent);
if (null == dataTypeGroup) {
dataTypeGroup = new HashSet<DataIdentifier>();
}
dataTypeGroup.add(dataId);
}
sorted.put(dataTypeParent, dataTypeGroup);
}
return sorted;
}
/**
 * @throws MalformedCtxIdentifierException
 * @see DataIdentifierFactory#fromUri(String)
 * @deprecated use {@link DataIdentifierFactory#fromUri(String)} directly
 */
@Deprecated
public static DataIdentifier fromUri(String dataIdUri) throws MalformedCtxIdentifierException
{
return DataIdentifierFactory.fromUri(dataIdUri);
}
}
|
/*
* Copyright (C) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package interactivespaces.domain.support;
/**
* A validator for activity values.
*
* @author Keith M. Hughes
*/
public interface Validator {
/**
 * Validate the given candidate value.
 *
 * @param candidate
 * the candidate value to check
 *
 * @return the result of the validation
 */
DomainValidationResult validate(String candidate);
}
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* VpnStaticRoutesSetType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
* VpnStaticRoutesSetType bean class
*/
// NOTE: Axis2 ADB auto-generated class (see file header). Prefer regenerating
// from the WSDL over hand-editing this code.
public class VpnStaticRoutesSetType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = VpnStaticRoutesSetType
Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
Namespace Prefix = ns1
*/
// Returns the canonical "ns1" prefix for the EC2 namespace, otherwise a
// fresh unique prefix.
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
 * field for Item
 * This was an Array!
 */
protected com.amazon.ec2.VpnStaticRouteType[] localItem ;
/* This tracker boolean wil be used to detect whether the user called the set method
 * for this attribute. It will be used to determine whether to include this field
 * in the serialized XML
 */
protected boolean localItemTracker = false ;
/**
 * Auto generated getter method
 * @return com.amazon.ec2.VpnStaticRouteType[]
 */
public com.amazon.ec2.VpnStaticRouteType[] getItem(){
return localItem;
}
/**
 * validate the array for Item
 * (intentionally empty hook for generated validation logic)
 */
protected void validateItem(com.amazon.ec2.VpnStaticRouteType[] param){
}
/**
 * Auto generated setter method
 * @param param Item
 */
public void setItem(com.amazon.ec2.VpnStaticRouteType[] param){
validateItem(param);
if (param != null){
//update the setting tracker
localItemTracker = true;
} else {
localItemTracker = false;
}
this.localItem=param;
}
/**
 * Auto generated add method for the array for convenience
 * @param param com.amazon.ec2.VpnStaticRouteType
 */
public void addItem(com.amazon.ec2.VpnStaticRouteType param){
if (localItem == null){
localItem = new com.amazon.ec2.VpnStaticRouteType[]{};
}
//update the setting tracker
localItemTracker = true;
java.util.List list =
org.apache.axis2.databinding.utils.ConverterUtil.toList(localItem);
list.add(param);
this.localItem =
(com.amazon.ec2.VpnStaticRouteType[])list.toArray(
new com.amazon.ec2.VpnStaticRouteType[list.size()]);
}
/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
// Property unsupported by this reader implementation -> not MTOM aware.
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
 * Wraps this bean in a lazily-serialized OMElement.
 * @param parentQName
 * @param factory
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
VpnStaticRoutesSetType.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
if ((namespace != null) && (namespace.trim().length() > 0)) {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
if (serializeType){
// Emit an explicit xsi:type attribute when requested by the caller.
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":VpnStaticRoutesSetType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"VpnStaticRoutesSetType",
xmlWriter);
}
}
if (localItemTracker){
if (localItem!=null){
for (int i = 0;i < localItem.length;i++){
if (localItem[i] != null){
localItem[i].serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item"),
factory,xmlWriter);
} else {
// we don't have to do any thing since minOccures is zero
}
}
} else {
throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
}
}
xmlWriter.writeEndElement();
}
/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
 * Util method to write a QName-valued attribute without the ns prefix
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
 * method to handle Qnames
 */
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the charactor data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
 * Register a namespace prefix
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
// Keep generating until the prefix is not bound in the current context.
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
 * databinding method to get an XML representation of this object
 *
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
if (localItemTracker){
if (localItem!=null) {
for (int i = 0;i < localItem.length;i++){
if (localItem[i] != null){
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
"item"));
elementList.add(localItem[i]);
} else {
// nothing to do
}
}
} else {
throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
}
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method
 */
public static class Factory{
/**
 * static method to create the object
 * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
 * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
 * Postcondition: If this object is an element, the reader is positioned at its end element
 * If this object is a complex type, the reader is positioned at the end element of its outer element
 */
public static VpnStaticRoutesSetType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
VpnStaticRoutesSetType object =
new VpnStaticRoutesSetType();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// Honor an xsi:type attribute: if it names a subtype, delegate parsing.
if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
"type");
if (fullTypeName!=null){
java.lang.String nsPrefix = null;
if (fullTypeName.indexOf(":") > -1){
nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
}
nsPrefix = nsPrefix==null?"":nsPrefix;
java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
if (!"VpnStaticRoutesSetType".equals(type)){
//find namespace for the prefix
java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
return (VpnStaticRoutesSetType)com.amazon.ec2.ExtensionMapper.getTypeObject(
nsUri,type,reader);
}
}
}
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
reader.next();
java.util.ArrayList list1 = new java.util.ArrayList();
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item").equals(reader.getName())){
// Process the array and step past its final element's end.
list1.add(com.amazon.ec2.VpnStaticRouteType.Factory.parse(reader));
//loop until we find a start element that is not part of this array
boolean loopDone1 = false;
while(!loopDone1){
// We should be at the end element, but make sure
while (!reader.isEndElement())
reader.next();
// Step out of this element
reader.next();
// Step to next element event.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isEndElement()){
//two continuous end elements means we are exiting the xml structure
loopDone1 = true;
} else {
if (new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item").equals(reader.getName())){
list1.add(com.amazon.ec2.VpnStaticRouteType.Factory.parse(reader));
}else{
loopDone1 = true;
}
}
}
// call the converter utility to convert and set the array
object.setItem((com.amazon.ec2.VpnStaticRouteType[])
org.apache.axis2.databinding.utils.ConverterUtil.convertToArray(
com.amazon.ec2.VpnStaticRouteType.class,
list1));
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isStartElement())
// A start element we are not expecting indicates a trailing invalid property
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
|
package com.kk.taurus.ijkplayer;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.net.Uri;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import tv.danmaku.ijk.media.player.misc.IMediaDataSource;
/**
 * IMediaDataSource backed by an AssetFileDescriptor whose contents are read
 * fully into memory on the first getSize() call.
 */
public class RawDataSourceProvider implements IMediaDataSource {

    private AssetFileDescriptor mDescriptor;
    // Full media payload; lazily loaded in getSize().
    private byte[] mMediaBytes;

    private RawDataSourceProvider(AssetFileDescriptor descriptor) {
        this.mDescriptor = descriptor;
    }

    /**
     * Copies up to {@code size} bytes starting at {@code position} into
     * {@code buffer} at {@code offset}.
     *
     * Fixes over the previous version: the EOF check was
     * {@code position + 1 >= length}, which made the final byte unreadable,
     * and a trailing {@code length--} dropped one byte from every final
     * partial read; the clamp also ignored {@code offset}.
     *
     * @return number of bytes copied, or -1 at end of stream / before the
     *         data has been loaded
     */
    @Override
    public int readAt(long position, byte[] buffer, int offset, int size) {
        // NOTE(review): the player is expected to call getSize() (which loads
        // mMediaBytes) before readAt(); guard so we signal EOF instead of NPE.
        if (mMediaBytes == null || position >= mMediaBytes.length) {
            return -1;
        }
        // Clamp to the remaining data and to the room available in buffer.
        int length = (int) Math.min(size, mMediaBytes.length - position);
        length = Math.min(length, buffer.length - offset);
        if (length <= 0) {
            return -1;
        }
        System.arraycopy(mMediaBytes, (int) position, buffer, offset, length);
        return length;
    }

    /**
     * Returns the descriptor's declared length and, as a side effect, loads
     * the whole payload into memory on the first call.
     */
    @Override
    public long getSize() throws IOException {
        long length = mDescriptor.getLength();
        if (mMediaBytes == null) {
            InputStream inputStream = mDescriptor.createInputStream();
            mMediaBytes = readBytes(inputStream);
        }
        return length;
    }

    /** Releases the descriptor and the in-memory payload. */
    @Override
    public void close() throws IOException {
        if (mDescriptor != null) {
            mDescriptor.close();
        }
        mDescriptor = null;
        mMediaBytes = null;
    }

    /** Drains the stream fully into a byte array. */
    private byte[] readBytes(InputStream inputStream) throws IOException {
        try {
            ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            int len;
            while ((len = inputStream.read(buffer)) != -1) {
                byteBuffer.write(buffer, 0, len);
            }
            return byteBuffer.toByteArray();
        } finally {
            // Previously the stream was never closed; release it here.
            inputStream.close();
        }
    }

    /**
     * Opens {@code uri} read-only via the content resolver.
     *
     * @return a provider, or {@code null} when the uri cannot be opened
     */
    public static RawDataSourceProvider create(Context context, Uri uri) {
        try {
            AssetFileDescriptor fileDescriptor = context.getContentResolver().openAssetFileDescriptor(uri, "r");
            return new RawDataSourceProvider(fileDescriptor);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
package de.tum.in.www1.artemis.service;
import static de.tum.in.www1.artemis.domain.Authority.ADMIN_AUTHORITY;
import static de.tum.in.www1.artemis.security.AuthoritiesConstants.*;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.stream.Collectors;
import javax.validation.constraints.NotNull;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.cache.CacheManager;
import org.springframework.context.event.EventListener;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.lang.Nullable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import de.tum.in.www1.artemis.config.Constants;
import de.tum.in.www1.artemis.domain.Authority;
import de.tum.in.www1.artemis.domain.Course;
import de.tum.in.www1.artemis.domain.GuidedTourSetting;
import de.tum.in.www1.artemis.domain.User;
import de.tum.in.www1.artemis.domain.enumeration.SortingOrder;
import de.tum.in.www1.artemis.exception.UsernameAlreadyUsedException;
import de.tum.in.www1.artemis.repository.AuthorityRepository;
import de.tum.in.www1.artemis.repository.CourseRepository;
import de.tum.in.www1.artemis.repository.GuidedTourSettingsRepository;
import de.tum.in.www1.artemis.repository.UserRepository;
import de.tum.in.www1.artemis.security.ArtemisAuthenticationProvider;
import de.tum.in.www1.artemis.security.PBEPasswordEncoder;
import de.tum.in.www1.artemis.security.SecurityUtils;
import de.tum.in.www1.artemis.service.connectors.VcsUserManagementService;
import de.tum.in.www1.artemis.service.connectors.jira.JiraAuthenticationProvider;
import de.tum.in.www1.artemis.service.dto.UserDTO;
import de.tum.in.www1.artemis.service.ldap.LdapUserDto;
import de.tum.in.www1.artemis.service.ldap.LdapUserService;
import de.tum.in.www1.artemis.web.rest.dto.PageableSearchDTO;
import de.tum.in.www1.artemis.web.rest.errors.EmailAlreadyUsedException;
import de.tum.in.www1.artemis.web.rest.errors.EntityNotFoundException;
import de.tum.in.www1.artemis.web.rest.errors.InvalidPasswordException;
import de.tum.in.www1.artemis.web.rest.vm.ManagedUserVM;
import io.github.jhipster.security.RandomUtil;
/**
* Service class for managing users.
*/
@Service
@Transactional
public class UserService {
private final Logger log = LoggerFactory.getLogger(UserService.class);

// Name of the admin group in the external user management; absent if not configured.
@Value("${artemis.user-management.external.admin-group-name:#{null}}")
private Optional<String> adminGroupName;

// Whether users/groups are managed by an external system (e.g. JIRA) instead of Artemis.
@Value("${artemis.user-management.use-external}")
private Boolean useExternalUserManagement;

// Password used to derive the PBE encryptor for stored user passwords.
@Value("${artemis.encryption-password}")
private String encryptionPassword;

// Credentials of the optional internal admin account created/updated on startup; absent if not configured.
@Value("${artemis.user-management.internal-admin.username:#{null}}")
private Optional<String> artemisInternalAdminUsername;

@Value("${artemis.user-management.internal-admin.password:#{null}}")
private Optional<String> artemisInternalAdminPassword;

private final UserRepository userRepository;

private final CourseRepository courseRepository;

private final AuthorityRepository authorityRepository;

private final GuidedTourSettingsRepository guidedTourSettingsRepository;

private final CacheManager cacheManager;

// LDAP service is optional; only present when an LDAP profile/configuration is active.
private final Optional<LdapUserService> ldapUserService;

// Injected via setters (not the constructor) to break dependency cycles.
private Optional<VcsUserManagementService> optionalVcsUserManagementService;

private ArtemisAuthenticationProvider artemisAuthenticationProvider;
/**
 * Creates the service with its required repositories and caches.
 * Note: VcsUserManagementService and ArtemisAuthenticationProvider are injected via
 * setters instead (see below) to break dependency cycles.
 */
public UserService(UserRepository userRepository, AuthorityRepository authorityRepository, CacheManager cacheManager, Optional<LdapUserService> ldapUserService,
        GuidedTourSettingsRepository guidedTourSettingsRepository, CourseRepository courseRepository) {
    this.userRepository = userRepository;
    this.authorityRepository = authorityRepository;
    this.cacheManager = cacheManager;
    this.ldapUserService = ldapUserService;
    this.guidedTourSettingsRepository = guidedTourSettingsRepository;
    this.courseRepository = courseRepository;
}
/**
 * Setter injection for the optional VCS user management service.
 */
@Autowired
// break the dependency cycle
public void setOptionalVcsUserManagementService(Optional<VcsUserManagementService> optionalVcsUserManagementService) {
    this.optionalVcsUserManagementService = optionalVcsUserManagementService;
}
/**
 * Setter injection for the authentication provider.
 */
@Autowired
// break the dependency cycle
public void setArtemisAuthenticationProvider(ArtemisAuthenticationProvider artemisAuthenticationProvider) {
    this.artemisAuthenticationProvider = artemisAuthenticationProvider;
}
/**
 * On application startup: creates or updates the internal admin user, if an internal admin
 * username and password are configured.
 * (The previous javadoc described an LDAP registration-number lookup, which this method does not do.)
 * Any exception is caught and logged so that a failure here does not prevent startup.
 */
@EventListener(ApplicationReadyEvent.class)
public void applicationReady() {
    try {
        if (artemisInternalAdminUsername.isPresent() && artemisInternalAdminPassword.isPresent()) {
            Optional<User> existingInternalAdmin = userRepository.findOneWithGroupsAndAuthoritiesByLogin(artemisInternalAdminUsername.get());
            if (existingInternalAdmin.isPresent()) {
                log.info("Update internal admin user " + artemisInternalAdminUsername.get());
                // re-encode the configured password on every startup so config changes take effect
                existingInternalAdmin.get().setPassword(passwordEncoder().encode(artemisInternalAdminPassword.get()));
                // needs to be mutable --> new HashSet<>(Set.of(...))
                existingInternalAdmin.get().setAuthorities(new HashSet<>(Set.of(ADMIN_AUTHORITY, new Authority(USER))));
                userRepository.save(existingInternalAdmin.get());
                // also push the update to the connected VCS and the authentication provider
                updateUserInConnectorsAndAuthProvider(existingInternalAdmin.get(), existingInternalAdmin.get().getGroups(), true);
            }
            else {
                log.info("Create internal admin user " + artemisInternalAdminUsername.get());
                ManagedUserVM userDto = new ManagedUserVM();
                userDto.setLogin(artemisInternalAdminUsername.get());
                userDto.setPassword(artemisInternalAdminPassword.get());
                userDto.setActivated(true);
                userDto.setFirstName("Administrator");
                userDto.setLastName("Administrator");
                userDto.setEmail("admin@localhost");
                userDto.setLangKey("en");
                userDto.setCreatedBy("system");
                userDto.setLastModifiedBy("system");
                // needs to be mutable --> new HashSet<>(Set.of(...))
                userDto.setAuthorities(new HashSet<>(Set.of(ADMIN, USER)));
                userDto.setGroups(new HashSet<>());
                createUser(userDto);
            }
        }
    }
    catch (Exception ex) {
        log.error("An error occurred after application startup when creating or updating the admin user or in the LDAP search: " + ex.getMessage(), ex);
    }
}
/**
 * Load additional user details from the LDAP if it is available: correct first name, correct last name
 * and registration number (= matriculation number).
 *
 * @param login the login of the user for which the details should be retrieved
 * @return the found LDAP user details, or null if the user cannot be found or LDAP is not configured
 */
@Nullable
public LdapUserDto loadUserDetailsFromLdap(@NotNull String login) {
    // Guard explicitly instead of relying on Optional.get() throwing into the catch block below.
    if (!ldapUserService.isPresent()) {
        return null;
    }
    try {
        Optional<LdapUserDto> ldapUserOptional = ldapUserService.get().findByUsername(login);
        if (ldapUserOptional.isPresent()) {
            LdapUserDto ldapUser = ldapUserOptional.get();
            // parameterized logging instead of string concatenation
            log.info("Ldap User {} has registration number: {}", ldapUser.getUsername(), ldapUser.getRegistrationNumber());
            return ldapUser;
        }
        log.warn("Ldap User {} not found", login);
    }
    catch (Exception ex) {
        // pass the exception so the stack trace is logged, not just the message
        log.error("Error in LDAP Search " + ex.getMessage(), ex);
    }
    return null;
}
// Lazily initialized via passwordEncoder(); do not read this field directly.
private PBEPasswordEncoder passwordEncoder;

// Lazily initialized via encryptor(); do not read this field directly.
private StandardPBEStringEncryptor encryptor;
/**
 * Returns the password encoder used for user passwords, creating it lazily on first use.
 *
 * @return the shared {@link PBEPasswordEncoder} instance
 */
public PBEPasswordEncoder passwordEncoder() {
    if (passwordEncoder == null) {
        passwordEncoder = new PBEPasswordEncoder(encryptor());
    }
    return passwordEncoder;
}
/**
 * Returns the string encryptor (PBE with MD5 and DES), creating it lazily on first use.
 *
 * @return the shared {@link StandardPBEStringEncryptor} instance
 */
public StandardPBEStringEncryptor encryptor() {
    if (encryptor == null) {
        StandardPBEStringEncryptor newEncryptor = new StandardPBEStringEncryptor();
        newEncryptor.setAlgorithm("PBEWithMD5AndDES");
        newEncryptor.setPassword(encryptionPassword);
        encryptor = newEncryptor;
    }
    return encryptor;
}
/**
 * Activates the user registration belonging to the given activation key.
 *
 * @param key activation key for user registration
 * @return the activated user, or an empty Optional if no user owns the key
 */
public Optional<User> activateRegistration(String key) {
    log.debug("Activating user for activation key {}", key);
    Optional<User> userOptional = userRepository.findOneByActivationKey(key);
    userOptional.ifPresent(this::activateUser);
    return userOptional;
}
/**
 * Activate user
 * NOTE(review): does not call userRepository.save() — presumably relies on @Transactional
 * dirty checking to persist the change; confirm all callers run inside a transaction.
 * @param user the user that should be activated
 */
public void activateUser(User user) {
    user.setActivated(true);
    // the activation key is single-use: clear it once the account is active
    user.setActivationKey(null);
    this.clearUserCaches(user);
    log.info("Activated user: {}", user);
}
/**
 * Resets the password of the user owning the given reset key.
 *
 * @param newPassword new password string
 * @param key reset key
 * @return the user whose password was reset, or an empty Optional if the key is unknown or expired
 */
public Optional<User> completePasswordReset(String newPassword, String key) {
    log.debug("Reset user password for reset key {}", key);
    Optional<User> candidate = userRepository.findOneByResetKey(key);
    // a reset key is only valid for 24 hours (86400 seconds)
    if (!candidate.isPresent() || !candidate.get().getResetDate().isAfter(Instant.now().minusSeconds(86400))) {
        return Optional.empty();
    }
    User user = candidate.get();
    user.setPassword(passwordEncoder().encode(newPassword));
    user.setResetKey(null);
    user.setResetDate(null);
    userRepository.save(user);
    this.clearUserCaches(user);
    // synchronize the new password into the connected VCS, if one is configured
    optionalVcsUserManagementService.ifPresent(vcsUserManagementService -> vcsUserManagementService.updateUser(user, null, null, true));
    return Optional.of(user);
}
/**
 * Prepares a password reset for the activated user with the given email.
 *
 * @param mail email used to find the user
 * @return the user with a fresh reset key/date, or an empty Optional if no activated user matches
 */
public Optional<User> requestPasswordReset(String mail) {
    Optional<User> activatedUser = userRepository.findOneByEmailIgnoreCase(mail).filter(User::getActivated);
    activatedUser.ifPresent(user -> {
        user.setResetKey(RandomUtil.generateResetKey());
        user.setResetDate(Instant.now());
        this.clearUserCaches(user);
    });
    return activatedUser;
}
/**
 * Register user and create it only in the internal Artemis database. This is a pure service method
 * without any logic with respect to external systems.
 *
 * @param userDTO user data transfer object
 * @param password string
 * @return newly registered user or throw registration exception
 */
public User registerUser(UserDTO userDTO, String password) {
    // Reject the registration if the login or email belongs to an activated account;
    // stale non-activated accounts are removed so login/email can be reused.
    userRepository.findOneByLogin(userDTO.getLogin().toLowerCase()).ifPresent(existingUser -> {
        if (!removeNonActivatedUser(existingUser)) {
            throw new UsernameAlreadyUsedException();
        }
    });
    userRepository.findOneByEmailIgnoreCase(userDTO.getEmail()).ifPresent(existingUser -> {
        if (!removeNonActivatedUser(existingUser)) {
            throw new EmailAlreadyUsedException();
        }
    });
    User newUser = new User();
    newUser.setLogin(userDTO.getLogin().toLowerCase());
    // new user gets initially a generated password
    newUser.setPassword(passwordEncoder().encode(password));
    newUser.setFirstName(userDTO.getFirstName());
    newUser.setLastName(userDTO.getLastName());
    newUser.setEmail(userDTO.getEmail().toLowerCase());
    newUser.setImageUrl(userDTO.getImageUrl());
    newUser.setLangKey(userDTO.getLangKey());
    // new user is not active until the activation key is used
    newUser.setActivated(false);
    newUser.setActivationKey(RandomUtil.generateActivationKey());
    Set<Authority> authorities = new HashSet<>();
    authorityRepository.findById(USER).ifPresent(authorities::add);
    newUser.setAuthorities(authorities);
    newUser = userRepository.save(newUser);
    // we need to save first so that the user can be found in the database in the subsequent method
    createUserInExternalSystems(newUser);
    log.debug("Created Information for User: {}", newUser);
    return newUser;
}
/**
 * Removes the given user if (and only if) the account was never activated.
 *
 * @param existingUser user object of an existing user
 * @return true if the user was removed, false if the user is activated and was kept
 */
private boolean removeNonActivatedUser(User existingUser) {
    if (!existingUser.getActivated()) {
        userRepository.delete(existingUser);
        // flush so the login/email is immediately free for re-registration
        userRepository.flush();
        optionalVcsUserManagementService.ifPresent(vcsUserManagementService -> vcsUserManagementService.deleteUser(existingUser.getLogin()));
        this.clearUserCaches(existingUser);
        return true;
    }
    return false;
}
/**
 * Searches the (optional) LDAP service for a user with the given registration number (= Matrikelnummer)
 * and returns a new Artemis user. Also creates the user in the external user management (e.g. JIRA),
 * in case this is activated.
 * Note: this method should only be used if the user does not yet exist in the database.
 *
 * @param registrationNumber the matriculation number of the student
 * @return a new user, or an empty Optional if LDAP is unavailable or the user was not found
 */
public Optional<User> createUserFromLdap(String registrationNumber) {
    // guard clauses instead of nested ifs
    if (!ldapUserService.isPresent()) {
        return Optional.empty();
    }
    Optional<LdapUserDto> ldapUserOptional = ldapUserService.get().findByRegistrationNumber(registrationNumber);
    if (!ldapUserOptional.isPresent()) {
        log.warn("Ldap User with registration number {} not found", registrationNumber);
        return Optional.empty();
    }
    LdapUserDto ldapUser = ldapUserOptional.get();
    // parameterized logging instead of string concatenation
    log.info("Ldap User {} has registration number: {}", ldapUser.getUsername(), ldapUser.getRegistrationNumber());
    // Use empty password, so that we don't store the credentials of Jira users in the Artemis DB
    User user = createUser(ldapUser.getUsername(), "", ldapUser.getFirstName(), ldapUser.getLastName(), ldapUser.getEmail(), registrationNumber, null, "en");
    if (useExternalUserManagement) {
        artemisAuthenticationProvider.createUserInExternalUserManagement(user);
    }
    return Optional.of(user);
}
/**
 * Finds a user (with groups and authorities eagerly loaded) by registration number.
 *
 * @param registrationNumber the matriculation number of the student
 * @return the matching user, or an empty Optional if none exists
 */
public Optional<User> findUserWithGroupsAndAuthoritiesByRegistrationNumber(String registrationNumber) {
    return userRepository.findOneWithGroupsAndAuthoritiesByRegistrationNumber(registrationNumber);
}
/**
 * Create user only in the internal Artemis database. This is a pure service method without any logic with respect to external systems.
 *
 * @param login user login string
 * @param password user password (a random password is generated downstream if null)
 * @param firstName first name of user
 * @param lastName last name of the user
 * @param email email of the user
 * @param registrationNumber the matriculation number of the student
 * @param imageUrl user image url
 * @param langKey user language
 * @return newly created user
 */
public User createUser(String login, @Nullable String password, String firstName, String lastName, String email, String registrationNumber, String imageUrl, String langKey) {
    // delegates with an empty group set
    return createUser(login, password, new HashSet<>(), firstName, lastName, email, registrationNumber, imageUrl, langKey);
}
/**
 * Create user only in the internal Artemis database. This is a pure service method without any logic with respect to external systems.
 *
 * @param login user login string
 * @param groups The groups the user should belong to
 * @param firstName first name of user
 * @param lastName last name of the user
 * @param email email of the user
 * @param registrationNumber the matriculation number of the student
 * @param imageUrl user image url
 * @param langKey user language
 * @return newly created user
 */
public User createUser(String login, Set<String> groups, String firstName, String lastName, String email, String registrationNumber, String imageUrl, String langKey) {
    // delegates with a null password --> a random password is generated downstream
    return createUser(login, null, groups, firstName, lastName, email, registrationNumber, imageUrl, langKey);
}
/**
 * Create user only in the internal Artemis database. This is a pure service method without any logic with respect to external systems.
 *
 * @param login user login string
 * @param password user password, a random one is generated if null
 * @param groups The groups the user should belong to
 * @param firstName first name of user
 * @param lastName last name of the user
 * @param email email of the user
 * @param registrationNumber the matriculation number of the student
 * @param imageUrl user image url
 * @param langKey user language
 * @return newly created user
 */
public User createUser(String login, @Nullable String password, Set<String> groups, String firstName, String lastName, String email, String registrationNumber, String imageUrl,
        String langKey) {
    User newUser = new User();
    // Set random password for null passwords
    if (password == null) {
        password = RandomUtil.generatePassword();
    }
    String encryptedPassword = passwordEncoder().encode(password);
    // new user gets initially a generated password
    newUser.setPassword(encryptedPassword);
    newUser.setLogin(login);
    newUser.setFirstName(firstName);
    newUser.setLastName(lastName);
    newUser.setGroups(groups);
    newUser.setEmail(email);
    newUser.setRegistrationNumber(registrationNumber);
    newUser.setImageUrl(imageUrl);
    newUser.setLangKey(langKey);
    // new user is not active
    newUser.setActivated(false);
    // new user gets registration key
    newUser.setActivationKey(RandomUtil.generateActivationKey());
    // fail fast with a clear message instead of a bare NoSuchElementException from Optional.get()
    final var authority = authorityRepository.findById(USER)
            .orElseThrow(() -> new IllegalStateException("Authority '" + USER + "' does not exist in the database"));
    // needs to be mutable --> new HashSet<>(Set.of(...))
    final var authorities = new HashSet<>(Set.of(authority));
    newUser.setAuthorities(authorities);
    userRepository.save(newUser);
    clearUserCaches(newUser);
    log.debug("Created user: {}", newUser);
    return newUser;
}
/**
 * Create user based on UserDTO. If the user management is done internally by Artemis, also create the user in the (optional) version control system.
 * In case user management is done externally, the users groups are configured in the external user management as well.
 *
 * TODO: how should we handle the case, that a new user is created that does not exist in the external user management?
 *
 * @param userDTO user data transfer object
 * @return newly created user
 */
public User createUser(ManagedUserVM userDTO) {
    User user = new User();
    user.setLogin(userDTO.getLogin());
    user.setFirstName(userDTO.getFirstName());
    user.setLastName(userDTO.getLastName());
    user.setEmail(userDTO.getEmail());
    user.setImageUrl(userDTO.getImageUrl());
    // fall back to the default language if none was provided
    String langKey = userDTO.getLangKey();
    user.setLangKey(langKey == null ? Constants.DEFAULT_LANGUAGE : langKey);
    if (userDTO.getAuthorities() != null) {
        // resolve only authorities that actually exist in the database
        Set<Authority> authorities = new HashSet<>();
        for (String authorityName : userDTO.getAuthorities()) {
            authorityRepository.findById(authorityName).ifPresent(authorities::add);
        }
        user.setAuthorities(authorities);
    }
    // generate a random password if none was provided
    String rawPassword = userDTO.getPassword() == null ? RandomUtil.generatePassword() : userDTO.getPassword();
    user.setPassword(passwordEncoder().encode(rawPassword));
    user.setResetKey(RandomUtil.generateResetKey());
    user.setResetDate(Instant.now());
    user.setGroups(userDTO.getGroups());
    user.setActivated(true);
    userRepository.save(user);
    createUserInExternalSystems(user);
    artemisAuthenticationProvider.addUserToGroups(user, userDTO.getGroups());
    log.debug("Created Information for User: {}", user);
    return user;
}
/**
 * tries to create the user in the external system, in case this is available
 * @param user the user, that should be created in the external system
 */
private void createUserInExternalSystems(User user) {
    // If user management is done by Artemis, we also have to create the user in the version control system
    optionalVcsUserManagementService.ifPresent(vcsUserManagementService -> vcsUserManagementService.createUser(user));
}
/**
 * Update basic information (first name, last name, email, language) for the current user.
 *
 * @param firstName first name of user
 * @param lastName last name of user
 * @param email email id of user
 * @param langKey language key
 * @param imageUrl image URL of user
 */
public void updateUser(String firstName, String lastName, String email, String langKey, String imageUrl) {
    Optional<User> currentUser = SecurityUtils.getCurrentUserLogin().flatMap(userRepository::findOneByLogin);
    currentUser.ifPresent(user -> {
        user.setFirstName(firstName);
        user.setLastName(lastName);
        user.setEmail(email.toLowerCase());
        user.setLangKey(langKey);
        user.setImageUrl(imageUrl);
        this.clearUserCaches(user);
        log.info("Changed Information for User: {}", user);
        // propagate the profile change to the version control system, if one is configured
        optionalVcsUserManagementService.ifPresent(vcs -> vcs.updateUser(user, null, null, true));
    });
}
/**
 * Update all information for a specific user (incl. its password), and return the modified user.
 *
 * @param user The user that should get updated
 * @param updatedUserDTO The DTO containing the to be updated values
 * @return updated user
 */
public User updateUser(User user, ManagedUserVM updatedUserDTO) {
    // remember the old groups so that group changes can be propagated to external systems below
    final var oldGroups = user.getGroups();
    this.clearUserCaches(user);
    user.setLogin(updatedUserDTO.getLogin().toLowerCase());
    user.setFirstName(updatedUserDTO.getFirstName());
    user.setLastName(updatedUserDTO.getLastName());
    user.setEmail(updatedUserDTO.getEmail().toLowerCase());
    user.setImageUrl(updatedUserDTO.getImageUrl());
    user.setActivated(updatedUserDTO.isActivated());
    user.setLangKey(updatedUserDTO.getLangKey());
    user.setGroups(updatedUserDTO.getGroups());
    // only overwrite the password if a new one was provided
    if (updatedUserDTO.getPassword() != null) {
        user.setPassword(passwordEncoder().encode(updatedUserDTO.getPassword()));
    }
    // NOTE(review): the authority collection is mutated in place (clear + add) rather than
    // replaced — presumably so the persistence provider tracks the change; confirm before restructuring.
    Set<Authority> managedAuthorities = user.getAuthorities();
    managedAuthorities.clear();
    updatedUserDTO.getAuthorities().stream().map(authorityRepository::findById).filter(Optional::isPresent).map(Optional::get).forEach(managedAuthorities::add);
    user = userRepository.save(user);
    this.clearUserCaches(user);
    // synchronize the change (incl. the password) with the VCS and the authentication provider
    updateUserInConnectorsAndAuthProvider(user, oldGroups, true);
    log.debug("Changed Information for User: {}", user);
    return user;
}
/**
 * Updates the user (optionally also synchronizes its password) and its groups in the connected version control system (e.g. GitLab if available).
 * Also updates the user groups in the used authentication provider (like {@link JiraAuthenticationProvider}.
 *
 * @param user The updated user in Artemis
 * @param oldGroups The old groups of the user before the update
 * @param shouldSynchronizePassword whether the password in the optional vcs user management service should be synchronized (only relevant if internal user management is used)
 */
private void updateUserInConnectorsAndAuthProvider(User user, Set<String> oldGroups, boolean shouldSynchronizePassword) {
    final Set<String> updatedGroups = user.getGroups();
    // groups present before but not after the update
    final Set<String> removedGroups = new HashSet<>(oldGroups);
    removedGroups.removeAll(updatedGroups);
    // groups present after but not before the update
    final Set<String> addedGroups = new HashSet<>(updatedGroups);
    addedGroups.removeAll(oldGroups);
    optionalVcsUserManagementService.ifPresent(vcsUserManagementService -> vcsUserManagementService.updateUser(user, removedGroups, addedGroups, shouldSynchronizePassword));
    for (String group : removedGroups) {
        artemisAuthenticationProvider.removeUserFromGroup(user, group);
    }
    for (String group : addedGroups) {
        artemisAuthenticationProvider.addUserToGroup(user, group);
    }
}
/**
 * Deletes the user with the given login from the VCS (if configured) and from the Artemis database.
 *
 * @param login user login string
 */
public void deleteUser(String login) {
    // Delete the user in the connected VCS if necessary (e.g. for GitLab)
    optionalVcsUserManagementService.ifPresent(userManagementService -> userManagementService.deleteUser(login));
    // Delete the user in the local Artemis database
    Optional<User> userOptional = userRepository.findOneByLogin(login);
    userOptional.ifPresent(user -> {
        userRepository.delete(user);
        this.clearUserCaches(user);
        log.debug("Deleted User: {}", user);
    });
}
/**
 * Changes the password of the currently logged in user.
 *
 * @param currentClearTextPassword cleartext password, verified against the stored one
 * @param newPassword new password string
 */
public void changePassword(String currentClearTextPassword, String newPassword) {
    SecurityUtils.getCurrentUserLogin().flatMap(userRepository::findOneByLogin).ifPresent(user -> {
        // verify the old password before accepting the new one
        if (!passwordEncoder().matches(currentClearTextPassword, user.getPassword())) {
            throw new InvalidPasswordException();
        }
        user.setPassword(passwordEncoder().encode(newPassword));
        userRepository.save(user);
        // keep the password in the external VCS in sync with the internal one
        optionalVcsUserManagementService.ifPresent(vcs -> vcs.updateUser(user, null, null, true));
        this.clearUserCaches(user);
        log.debug("Changed password for User: {}", user);
    });
}
/**
 * Get decrypted password for the current user
 * @return decrypted password or empty string on decryption failure
 */
public String decryptPasswordOfCurrentUser() {
    // NOTE(review): both Optional.get() calls are unchecked — if nobody is logged in or the login
    // is unknown, this throws NoSuchElementException (NOT caught below); confirm callers expect this.
    User user = userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin().get()).get();
    try {
        return encryptor().decrypt(user.getPassword());
    }
    catch (Exception e) {
        // decryption failures are deliberately swallowed and mapped to an empty string
        return "";
    }
}
/**
 * Get decrypted password for given user
 * Note: unlike decryptPasswordOfCurrentUser, decryption failures are NOT caught here —
 * the encryptor's exception propagates to the caller (the previous doc claimed "empty string").
 * @param user the user
 * @return the decrypted password
 */
public String decryptPassword(User user) {
    return encryptor().decrypt(user.getPassword());
}
/**
 * Get decrypted password for given user login
 * @param login of a user
 * @return Optional with the decrypted password, or an empty Optional if the user was not found
 */
public Optional<String> decryptPasswordByLogin(String login) {
    return userRepository.findOneByLogin(login).map(user -> encryptor().decrypt(user.getPassword()));
}
/**
 * Finds one page of users matching the given search, sorted as requested.
 *
 * @param userSearch search term, paging and sorting information
 * @return the matching page of users as DTOs
 */
public Page<UserDTO> getAllManagedUsers(PageableSearchDTO<String> userSearch) {
    final Sort sorting;
    if (userSearch.getSortingOrder() == SortingOrder.ASCENDING) {
        sorting = Sort.by(userSearch.getSortedColumn()).ascending();
    }
    else {
        sorting = Sort.by(userSearch.getSortedColumn()).descending();
    }
    final PageRequest pageRequest = PageRequest.of(userSearch.getPage(), userSearch.getPageSize(), sorting);
    return userRepository.searchByLoginOrNameWithGroups(userSearch.getSearchTerm(), pageRequest).map(UserDTO::new);
}
/**
 * Search for all users by login or name
 * @param pageable Pageable configuring paginated access (e.g. to limit the number of records returned)
 * @param loginOrName Search query that will be searched for in login and name field
 * @return all users matching search criteria
 */
public Page<UserDTO> searchAllUsersByLoginOrName(Pageable pageable, String loginOrName) {
    Page<User> users = userRepository.searchAllByLoginOrName(pageable, loginOrName);
    // copy the registration number into the visible field so it is exposed in the DTO
    for (User user : users) {
        user.setVisibleRegistrationNumber(user.getRegistrationNumber());
    }
    return users.map(UserDTO::new);
}
/**
 * Get user with groups by given login string
 * @param login user login string
 * @return existing user with given login string, or an empty Optional (not null)
 */
public Optional<User> getUserWithGroupsByLogin(String login) {
    return userRepository.findOneWithGroupsByLogin(login);
}
/**
 * Get user with groups and authorities by given login string
 * @param login user login string
 * @return existing user with given login string, or an empty Optional (not null)
 */
public Optional<User> getUserWithGroupsAndAuthoritiesByLogin(String login) {
    return userRepository.findOneWithGroupsAndAuthoritiesByLogin(login);
}
/**
 * Get user with authorities by given login string
 * Note: despite the name this also eagerly fetches groups — it delegates to the same
 * repository method as getUserWithGroupsAndAuthoritiesByLogin.
 * @param login user login string
 * @return existing user with given login string, or an empty Optional (not null)
 */
public Optional<User> getUserWithAuthoritiesByLogin(String login) {
    return userRepository.findOneWithGroupsAndAuthoritiesByLogin(login);
}
/**
 * Get current user for login string
 * @param login user login string
 * @return existing user for the given login string, or an empty Optional (not null)
 */
public Optional<User> getUserByLogin(String login) {
    return userRepository.findOneByLogin(login);
}
/**
 * Returns the currently logged in user.
 *
 * @return the user entity of the current login (throws EntityNotFoundException if absent)
 */
@NotNull
public User getUser() {
    String currentUserLogin = getCurrentUserLogin();
    return unwrapOptionalUser(userRepository.findOneByLogin(currentUserLogin), currentUserLogin);
}
/**
 * Get user with user groups and authorities of currently logged in user
 * @return currently logged in user (throws EntityNotFoundException if absent)
 */
@NotNull
public User getUserWithGroupsAndAuthorities() {
    String currentUserLogin = getCurrentUserLogin();
    return unwrapOptionalUser(userRepository.findOneWithGroupsAndAuthoritiesByLogin(currentUserLogin), currentUserLogin);
}
/**
 * Get user with user groups, authorities and guided tour settings of currently logged in user
 * Note: this method should only be invoked if the guided tour settings are really needed
 * @return currently logged in user
 */
@NotNull
public User getUserWithGroupsAuthoritiesAndGuidedTourSettings() {
    String currentUserLogin = getCurrentUserLogin();
    Optional<User> user = userRepository.findOneWithGroupsAuthoritiesAndGuidedTourSettingsByLogin(currentUserLogin);
    return unwrapOptionalUser(user, currentUserLogin);
}
/**
 * Unwraps the optional user, failing with a descriptive EntityNotFoundException if absent.
 */
@NotNull
private User unwrapOptionalUser(Optional<User> optionalUser, String currentUserLogin) {
    return optionalUser.orElseThrow(() -> new EntityNotFoundException("No user found with login: " + currentUserLogin));
}
/**
 * Returns the login of the currently authenticated user,
 * failing with an EntityNotFoundException if nobody is logged in.
 */
private String getCurrentUserLogin() {
    return SecurityUtils.getCurrentUserLogin().orElseThrow(() -> new EntityNotFoundException("ERROR: No current user login found!"));
}
/**
 * Get user with user groups and authorities with the username (i.e. user.getLogin() or principal.getName())
 * @param username the username of the user who should be retrieved from the database
 * @return the user that belongs to the given principal with eagerly loaded groups and authorities
 */
public User getUserWithGroupsAndAuthorities(@NotNull String username) {
    return unwrapOptionalUser(userRepository.findOneWithGroupsAndAuthoritiesByLogin(username), username);
}
/**
 * @return a list of the names of all authorities stored in the database
 */
public List<String> getAuthorities() {
    return authorityRepository.findAll().stream().map(Authority::getName).collect(Collectors.toList());
}
/**
 * Evicts the cached entry for the given user from the user cache.
 *
 * @param user the user whose cache entry should be invalidated
 */
private void clearUserCaches(User user) {
    // CacheManager.getCache may return null if no cache with this name exists — guard against an NPE
    var userCache = cacheManager.getCache(User.class.getName());
    if (userCache != null) {
        userCache.evict(user.getLogin());
    }
}
/**
 * Update user notification read date for current user
 * @return currently logged in user
 */
public User updateUserNotificationReadDate() {
    User currentUser = getUserWithGroupsAndAuthorities();
    ZonedDateTime now = ZonedDateTime.now();
    userRepository.updateUserNotificationReadDate(currentUser.getId(), now);
    return currentUser;
}
/**
 * Get students by given course (all users in the course's student group)
 * @param course object
 * @return list of students for given course
 */
public List<User> getStudents(Course course) {
    return findAllUsersInGroup(course.getStudentGroupName());
}
/**
 * Get tutors by given course (all users in the course's teaching assistant group)
 * @param course object
 * @return list of tutors for given course
 */
public List<User> getTutors(Course course) {
    return findAllUsersInGroup(course.getTeachingAssistantGroupName());
}
/**
 * Get all instructors for a given course (all users in the course's instructor group)
 *
 * @param course The course for which to fetch all instructors
 * @return A list of all users that have the role of instructor in the course
 */
public List<User> getInstructors(Course course) {
    return findAllUsersInGroup(course.getInstructorGroupName());
}
/**
 * Get all users in a given group
 *
 * @param groupName The group name for which to return all members
 * @return A list of all users that belong to the group
 */
public List<User> findAllUsersInGroup(String groupName) {
    return userRepository.findAllInGroup(groupName);
}
/**
 * Get all users in a given team
 *
 * @param course The course to which the team belongs (acts as a scope for the team short name)
 * @param teamShortName The short name of the team for which to get all students
 * @return A set of all users that belong to the team
 */
public Set<User> findAllUsersInTeam(Course course, String teamShortName) {
    return userRepository.findAllInTeam(course.getId(), teamShortName);
}
/**
 * Replaces the guided tour settings of the currently logged in user with the given set.
 *
 * @param guidedTourSettings the updated set of guided tour settings
 * @return the updated user object with the changed guided tour settings
 */
public User updateGuidedTourSettings(Set<GuidedTourSetting> guidedTourSettings) {
    User loggedInUser = getUserWithGroupsAuthoritiesAndGuidedTourSettings();
    // drop the old settings, then attach and persist the new ones
    loggedInUser.getGuidedTourSettings().clear();
    guidedTourSettings.forEach(setting -> {
        loggedInUser.addGuidedTourSetting(setting);
        guidedTourSettingsRepository.save(setting);
    });
    return userRepository.save(loggedInUser);
}
/**
 * Delete a given guided tour setting of the currently logged in user (e.g. when the user restarts a guided tutorial).
 *
 * @param guidedTourSettingsKey the key of the guided tour setting that should be deleted
 * @return the updated user object without the deleted guided tour setting
 */
public User deleteGuidedTourSetting(String guidedTourSettingsKey) {
    User loggedInUser = getUserWithGroupsAuthoritiesAndGuidedTourSettings();
    // Find the matching setting first and remove it afterwards: the original loop removed
    // from the set while iterating it, which is only safe because of the immediate break.
    loggedInUser.getGuidedTourSettings().stream()
        .filter(setting -> setting.getGuidedTourKey().equals(guidedTourSettingsKey))
        .findFirst()
        .ifPresent(loggedInUser::removeGuidedTourSetting);
    return userRepository.save(loggedInUser);
}
/**
 * Finds all users that are part of the specified group, but are not contained in the collection of excluded users.
 *
 * @param groupName The group by which all users should get filtered
 * @param excludedUsers The users that should get ignored/excluded
 * @return A list of filtered users
 */
public List<User> findAllUserInGroupAndNotIn(String groupName, Collection<User> excludedUsers) {
    // Hibernate builds an invalid query for an empty IN clause, so an empty exclusion
    // list falls back to the unfiltered group query.
    if (excludedUsers.isEmpty()) {
        return userRepository.findAllInGroup(groupName);
    }
    return userRepository.findAllInGroupContainingAndNotIn(groupName, new HashSet<>(excludedUsers));
}
/**
 * Removes the passed group from all users in the Artemis database, e.g. when the group was deleted.
 *
 * @param groupName the group that should be removed from all existing users
 */
public void removeGroupFromUsers(String groupName) {
    // Parameterized logging avoids eager string concatenation (SLF4J idiom)
    log.info("Remove group {} from users", groupName);
    List<User> users = userRepository.findAllInGroup(groupName);
    log.info("Found {} users with group {}", users.size(), groupName);
    for (User user : users) {
        user.getGroups().remove(groupName);
    }
    userRepository.saveAll(users);
}
/**
 * Counts the users that are members of the given group.
 *
 * @param groupName the group whose members are counted
 * @return the number of users that belong to the group
 */
public Long countUserInGroup(String groupName) {
    return userRepository.countByGroupsIsContaining(groupName);
}
/**
 * Adds the user to the specified group and updates the user in VCS (like GitLab) if a
 * VCS user management service is configured.
 *
 * @param user the user
 * @param group the group
 */
public void addUserToGroup(User user, String group) {
    if (!optionalVcsUserManagementService.isPresent()) {
        // No VCS integration configured: only update the auth provider
        artemisAuthenticationProvider.addUserToGroup(user, group);
        return;
    }
    // Snapshot the groups before the change so the connectors can diff them
    final var previousGroups = new TreeSet<>(user.getGroups());
    artemisAuthenticationProvider.addUserToGroup(user, group);
    updateUserInConnectorsAndAuthProvider(user, previousGroups, true);
}
/**
 * Removes the user from the specified group and updates the user in VCS (like GitLab) if a
 * VCS user management service is configured.
 *
 * @param user the user
 * @param group the group
 */
public void removeUserFromGroup(User user, String group) {
    if (!optionalVcsUserManagementService.isPresent()) {
        // No VCS integration configured: only update the auth provider
        artemisAuthenticationProvider.removeUserFromGroup(user, group);
        return;
    }
    // Snapshot the groups before the change so the connectors can diff them
    final var previousGroups = new TreeSet<>(user.getGroups());
    artemisAuthenticationProvider.removeUserFromGroup(user, group);
    updateUserInConnectorsAndAuthProvider(user, previousGroups, true);
}
/**
 * Builds the authorities list from the groups:
 *
 * 1) Admin group if the globally defined ADMIN_GROUP_NAME is available and is contained in the users groups, or if the user was an admin before
 * 2) group contains configured instructor group name -> instructor role
 * 3) group contains configured tutor group name -> tutor role
 * 4) the user role is always given
 *
 * @param user a user with groups
 * @return a set of authorities based on the course configuration and the given groups
 */
public Set<Authority> buildAuthorities(User user) {
    final Set<Authority> authorities = new HashSet<>();
    // Guard against users without any groups (prevents null pointer exceptions below)
    final Set<String> groups = user.getGroups() == null ? new HashSet<>() : user.getGroups();
    // Admin when the configured admin group matches, or when the user already holds admin rights
    final boolean inAdminGroup = adminGroupName.isPresent() && groups.contains(adminGroupName.get());
    final boolean wasAdminBefore = user.getAuthorities() != null && user.getAuthorities().contains(ADMIN_AUTHORITY);
    if (inAdminGroup || wasAdminBefore) {
        authorities.add(ADMIN_AUTHORITY);
    }
    final Set<String> instructorGroups = courseRepository.findAllInstructorGroupNames();
    final Set<String> tutorGroups = courseRepository.findAllTeachingAssistantGroupNames();
    // Instructor role if any of the user's groups is an instructor group of some course
    if (groups.stream().anyMatch(instructorGroups::contains)) {
        authorities.add(new Authority(INSTRUCTOR));
    }
    // Tutor role if any of the user's groups is a teaching assistant group of some course
    if (groups.stream().anyMatch(tutorGroups::contains)) {
        authorities.add(new Authority(TEACHING_ASSISTANT));
    }
    // Every user gets the basic user role
    authorities.add(new Authority(USER));
    return authorities;
}
}
|
package module0648.a;
import java.util.zip.*;
import javax.annotation.processing.*;
import javax.lang.model.*;
/**
* Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut
* labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
* Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
*
* @see java.awt.datatransfer.DataFlavor
* @see java.beans.beancontext.BeanContext
* @see java.io.File
*/
@SuppressWarnings("all")
public interface IFoo1<R> extends module0648.a.IFoo0<R> {
    // NOTE(review): interface fields are implicitly public static final; these null-valued
    // constants look like generated placeholders — confirm they are intentional.
    java.rmi.Remote f0 = null;
    java.nio.file.FileStore f1 = null;
    java.sql.Array f2 = null;
    // Bean-style accessor pair for a name property
    String getName();
    void setName(String s);
    // Generic value accessor pair for the element of type R
    R get();
    void set(R e);
}
|
// This is a generated file. Not intended for manual editing.
package ru.adelf.idea.dotenv.psi;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
/**
 * Marker PSI element representing the key part of an entry in a .env file.
 * No members of its own; structural behavior comes from {@link PsiElement}.
 */
public interface DotEnvKey extends PsiElement {
}
|
/* file: ResultId.java */
/*******************************************************************************
* Copyright 2014-2017 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/**
* @ingroup iterative_solver
* @{
*/
package com.intel.daal.algorithms.optimization_solver.iterative_solver;
/**
* <a name="DAAL-CLASS-ALGORITHMS__OPTIMIZATION_SOLVER__ITERATIVE_SOLVER__RESULTID"></a>
* @brief Available result identifiers for the iterative solver algorithm
*/
public final class ResultId {
    // Identifier value; final because a ResultId is an immutable identifier object
    private final int _value;
    /**
     * Constructs the result object identifier using the provided value
     * @param value Value corresponding to the result object identifier
     */
    public ResultId(int value) {
        _value = value;
    }
    /**
     * Returns the value corresponding to the result object identifier
     * @return Value corresponding to the result object identifier
     */
    public int getValue() {
        return _value;
    }
    private static final int minimumId = 0;
    private static final int nIterationsId = 1;
    public static final ResultId minimum = new ResultId(minimumId); /*!< Table to store the obtained minimum value */
    public static final ResultId nIterations = new ResultId(nIterationsId); /*!< Table containing the number of executed iterations */
}
/** @} */
|
package com.epam.brest.model;
/**
 * Plain data holder (JavaBean) for an employee record.
 * NOTE(review): the class name "Epmloyee" looks like a typo of "Employee"; kept as-is
 * because renaming would break existing callers — confirm and rename in a follow-up.
 */
public class Epmloyee {

    private Integer employeeId;

    private String firstName;

    private String lastName;

    private String email;

    private Double salary;

    private Integer departmentId;

    public Integer getEmployeeId() {
        return employeeId;
    }

    public void setEmployeeId(Integer employeeId) {
        this.employeeId = employeeId;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Double getSalary() {
        return salary;
    }

    public void setSalary(Double salary) {
        this.salary = salary;
    }

    public Integer getDepartmentId() {
        return departmentId;
    }

    public void setDepartmentId(Integer departmentId) {
        this.departmentId = departmentId;
    }
}
|
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.tracing;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
/**
 * This is a helper class that handles {@link android.os.Trace} functionality in API >= 18.
 * <p>
 * This class is being defined separately to avoid class verification failures.
 * For more information read https://chromium.googlesource
 * .com/chromium/src/build/+/refs/heads/master/android/docs/class_verification_failures
 * .md#understanding-the-reason-for-the-failure
 */
@RequiresApi(18)
final class TraceApi18Impl {
    private TraceApi18Impl() {
        // Does nothing; utility class, never instantiated
    }
    /**
     * Writes a trace message to indicate that a given section of code has begun. This call must
     * be followed by a corresponding call to {@link #endSection()} on the same thread.
     *
     * <p class="note"> At this time the vertical bar character '|', newline character '\n', and
     * null character '\0' are used internally by the tracing mechanism. If {@code label} contains
     * these characters they will be replaced with a space character in the trace.
     *
     * @param label The name of the code section to appear in the trace. This may be at
     *              most 127 Unicode code units long.
     */
    public static void beginSection(@NonNull String label) {
        android.os.Trace.beginSection(label);
    }
    /**
     * Writes a trace message to indicate that a given section of code has ended. This call must
     * be preceded by a corresponding call to {@link #beginSection(String)}. Calling this method
     * will mark the end of the most recently begun section of code, so care must be taken to
     * ensure that beginSection / endSection pairs are properly nested and called from the same
     * thread.
     */
    public static void endSection() {
        android.os.Trace.endSection();
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.rewrite;
import com.facebook.presto.Session;
import com.facebook.presto.execution.QueryPreparer;
import com.facebook.presto.execution.QueryPreparer.PreparedQuery;
import com.facebook.presto.execution.warnings.WarningCollector;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.security.AccessControl;
import com.facebook.presto.sql.analyzer.QueryExplainer;
import com.facebook.presto.sql.analyzer.SemanticException;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.AstVisitor;
import com.facebook.presto.sql.tree.Explain;
import com.facebook.presto.sql.tree.ExplainFormat;
import com.facebook.presto.sql.tree.ExplainOption;
import com.facebook.presto.sql.tree.ExplainType;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.Statement;
import java.util.List;
import java.util.Optional;
import static com.facebook.presto.sql.QueryUtil.singleValueQuery;
import static com.facebook.presto.sql.tree.ExplainFormat.Type.JSON;
import static com.facebook.presto.sql.tree.ExplainFormat.Type.TEXT;
import static com.facebook.presto.sql.tree.ExplainType.Type.IO;
import static com.facebook.presto.sql.tree.ExplainType.Type.LOGICAL;
import static com.facebook.presto.sql.tree.ExplainType.Type.VALIDATE;
import static java.util.Objects.requireNonNull;
/**
 * Rewrites EXPLAIN statements into either a plan query or, for EXPLAIN ANALYZE,
 * a recursively-rewritten Explain node.
 */
final class ExplainRewrite
        implements StatementRewrite.Rewrite
{
    @Override
    public Statement rewrite(
            Session session,
            Metadata metadata,
            SqlParser parser,
            Optional<QueryExplainer> queryExplainer,
            Statement node,
            List<Expression> parameters,
            AccessControl accessControl,
            WarningCollector warningCollector)
    {
        return (Statement) new Visitor(session, parser, queryExplainer, warningCollector).process(node, null);
    }

    private static final class Visitor
            extends AstVisitor<Node, Void>
    {
        private final Session session;
        private final QueryPreparer queryPreparer;
        private final Optional<QueryExplainer> queryExplainer;
        private final WarningCollector warningCollector;

        public Visitor(
                Session session,
                SqlParser parser,
                Optional<QueryExplainer> queryExplainer,
                WarningCollector warningCollector)
        {
            this.session = requireNonNull(session, "session is null");
            // BUGFIX: the null-check message previously said "queryPreparer is null"
            // even though the argument being validated is the parser.
            this.queryPreparer = new QueryPreparer(requireNonNull(parser, "parser is null"));
            this.queryExplainer = requireNonNull(queryExplainer, "queryExplainer is null");
            this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
        }

        @Override
        protected Node visitExplain(Explain node, Void context)
                throws SemanticException
        {
            if (node.isAnalyze()) {
                // EXPLAIN ANALYZE: rewrite the inner statement, keep the Explain wrapper
                Statement statement = (Statement) process(node.getStatement(), context);
                return new Explain(statement, node.isAnalyze(), node.isVerbose(), node.getOptions());
            }
            ExplainType.Type planType = LOGICAL;
            ExplainFormat.Type planFormat = TEXT;
            List<ExplainOption> options = node.getOptions();
            // First ExplainType option wins (note the break)
            for (ExplainOption option : options) {
                if (option instanceof ExplainType) {
                    planType = ((ExplainType) option).getType();
                    // Use JSON as the default format for EXPLAIN (TYPE IO).
                    if (planType == IO) {
                        planFormat = JSON;
                    }
                    break;
                }
            }
            // First ExplainFormat option wins and overrides the IO default above
            for (ExplainOption option : options) {
                if (option instanceof ExplainFormat) {
                    planFormat = ((ExplainFormat) option).getType();
                    break;
                }
            }
            return getQueryPlan(node, planType, planFormat);
        }

        /**
         * Produces a single-value query containing the plan text for the inner statement.
         *
         * @throws IllegalArgumentException when the plan format is unsupported
         */
        private Node getQueryPlan(Explain node, ExplainType.Type planType, ExplainFormat.Type planFormat)
                throws IllegalArgumentException
        {
            PreparedQuery preparedQuery = queryPreparer.prepareQuery(session, node.getStatement(), warningCollector);
            if (planType == VALIDATE) {
                // Analysis either throws or the statement is valid
                queryExplainer.get().analyze(session, preparedQuery.getStatement(), preparedQuery.getParameters(), warningCollector);
                return singleValueQuery("Valid", true);
            }
            String plan;
            switch (planFormat) {
                case GRAPHVIZ:
                    plan = queryExplainer.get().getGraphvizPlan(session, preparedQuery.getStatement(), planType, preparedQuery.getParameters(), warningCollector);
                    break;
                case JSON:
                    plan = queryExplainer.get().getJsonPlan(session, preparedQuery.getStatement(), planType, preparedQuery.getParameters(), warningCollector);
                    break;
                case TEXT:
                    plan = queryExplainer.get().getPlan(session, preparedQuery.getStatement(), planType, preparedQuery.getParameters(), node.isVerbose(), warningCollector);
                    break;
                default:
                    throw new IllegalArgumentException("Invalid Explain Format: " + planFormat.toString());
            }
            return singleValueQuery("Query Plan", plan);
        }

        @Override
        protected Node visitNode(Node node, Void context)
        {
            // Non-Explain statements pass through untouched
            return node;
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.integration.tests;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.commons.io.FileUtils;
import org.apache.http.Header;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicNameValuePair;
import org.apache.pinot.broker.broker.helix.HelixBrokerStarter;
import org.apache.pinot.common.exception.HttpErrorStatusException;
import org.apache.pinot.common.utils.FileUploadDownloadClient;
import org.apache.pinot.common.utils.http.HttpClient;
import org.apache.pinot.controller.helix.ControllerTest;
import org.apache.pinot.minion.MinionStarter;
import org.apache.pinot.plugin.inputformat.avro.AvroRecordExtractor;
import org.apache.pinot.plugin.inputformat.avro.AvroUtils;
import org.apache.pinot.server.starter.helix.DefaultHelixStarterServerConfig;
import org.apache.pinot.server.starter.helix.HelixServerStarter;
import org.apache.pinot.spi.config.table.TableType;
import org.apache.pinot.spi.data.readers.GenericRow;
import org.apache.pinot.spi.data.readers.RecordExtractor;
import org.apache.pinot.spi.env.PinotConfiguration;
import org.apache.pinot.spi.stream.StreamMessageDecoder;
import org.apache.pinot.spi.utils.CommonConstants.Broker;
import org.apache.pinot.spi.utils.CommonConstants.Helix;
import org.apache.pinot.spi.utils.CommonConstants.Minion;
import org.apache.pinot.spi.utils.CommonConstants.Server;
import org.apache.pinot.spi.utils.JsonUtils;
import org.apache.pinot.spi.utils.NetUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
/**
 * Base class for integration tests that involve a complete Pinot cluster.
 */
public abstract class ClusterTest extends ControllerTest {
    private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTest.class);
    protected static final int DEFAULT_BROKER_PORT = 18099;
    protected static final Random RANDOM = new Random(System.currentTimeMillis());
    // Base REST URL of the first broker, e.g. "http://localhost:<port>"
    protected String _brokerBaseApiUrl;
    // Handles of started components; null when the corresponding components are not running
    private List<HelixBrokerStarter> _brokerStarters;
    private List<HelixServerStarter> _serverStarters;
    private MinionStarter _minionStarter;
    private List<Integer> _brokerPorts;
/**
 * Returns the base broker configuration; subclasses may override to supply defaults.
 */
protected PinotConfiguration getDefaultBrokerConfiguration() {
    return new PinotConfiguration();
}
/** Starts a single broker with the default port and ZK url. */
protected void startBroker()
    throws Exception {
    startBrokers(1);
}
/** Starts a single broker on the given base port against the given ZK address. */
protected void startBroker(int port, String zkStr)
    throws Exception {
    startBrokers(1, port, zkStr, Collections.emptyMap());
}
/** Starts the given number of brokers with the default port and ZK url. */
protected void startBrokers(int numBrokers)
    throws Exception {
    startBrokers(numBrokers, DEFAULT_BROKER_PORT, getZkUrl(), Collections.emptyMap());
}
/** Starts the given number of brokers with no extra configuration overrides. */
protected void startBrokers(int numBrokers, int basePort, String zkStr)
    throws Exception {
    startBrokers(numBrokers, basePort, zkStr, Collections.emptyMap());
}
/**
 * Starts {@code numBrokers} broker instances against the given ZooKeeper address.
 *
 * @param numBrokers number of brokers to start
 * @param basePort first port to probe; broker i probes upwards starting at basePort + i
 * @param zkStr ZooKeeper connection string
 * @param extraProperties additional broker config overrides (applied last, so they win)
 */
protected void startBrokers(int numBrokers, int basePort, String zkStr, Map<String, Object> extraProperties)
    throws Exception {
    _brokerStarters = new ArrayList<>(numBrokers);
    _brokerPorts = new ArrayList<>();
    for (int i = 0; i < numBrokers; i++) {
        Map<String, Object> properties = getDefaultBrokerConfiguration().toMap();
        properties.put(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
        properties.put(Helix.CONFIG_OF_ZOOKEEPR_SERVER, zkStr);
        properties.put(Broker.CONFIG_OF_BROKER_TIMEOUT_MS, 60 * 1000L);
        // Probe for a free port so concurrent test runs do not collide
        int port = NetUtils.findOpenPort(basePort + i);
        _brokerPorts.add(port);
        properties.put(Helix.KEY_OF_BROKER_QUERY_PORT, port);
        properties.put(Broker.CONFIG_OF_DELAY_SHUTDOWN_TIME_MS, 0);
        properties.putAll(extraProperties);
        PinotConfiguration configuration = new PinotConfiguration(properties);
        // Allow subclasses to tweak the config before the broker starts
        overrideBrokerConf(configuration);
        HelixBrokerStarter brokerStarter = new HelixBrokerStarter();
        brokerStarter.init(configuration);
        brokerStarter.start();
        _brokerStarters.add(brokerStarter);
    }
    // All broker-facing helpers talk to the first broker
    _brokerBaseApiUrl = "http://localhost:" + _brokerPorts.get(0);
}
/**
 * Starts a single broker configured for HTTPS. Unlike {@link #startBrokers}, the port is
 * fixed at {@code DEFAULT_BROKER_PORT} because the TLS configuration requires hard-coded ports.
 */
protected void startBrokerHttps()
    throws Exception {
    _brokerStarters = new ArrayList<>();
    _brokerPorts = new ArrayList<>();
    Map<String, Object> properties = getDefaultBrokerConfiguration().toMap();
    properties.put(Broker.CONFIG_OF_BROKER_TIMEOUT_MS, 60 * 1000L);
    properties.put(Broker.CONFIG_OF_DELAY_SHUTDOWN_TIME_MS, 0);
    properties.put(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
    properties.put(Helix.CONFIG_OF_ZOOKEEPR_SERVER, getZkUrl());
    properties.put(Broker.CONFIG_OF_BROKER_HOSTNAME, LOCAL_HOST);
    PinotConfiguration configuration = new PinotConfiguration(properties);
    overrideBrokerConf(configuration);
    HelixBrokerStarter brokerStarter = new HelixBrokerStarter();
    brokerStarter.init(configuration);
    brokerStarter.start();
    _brokerStarters.add(brokerStarter);
    // TLS configs require hard-coding
    _brokerPorts.add(DEFAULT_BROKER_PORT);
    _brokerBaseApiUrl = "https://localhost:" + _brokerPorts.get(0);
}
/** Returns the query port of a randomly chosen running broker. */
protected int getRandomBrokerPort() {
    return _brokerPorts.get(RANDOM.nextInt(_brokerPorts.size()));
}
/** Returns the query port of the broker at the given start index. */
protected int getBrokerPort(int index) {
    return _brokerPorts.get(index);
}
/** Returns an immutable snapshot of all running broker query ports. */
protected List<Integer> getBrokerPorts() {
    return ImmutableList.copyOf(_brokerPorts);
}
/**
 * Returns the base server configuration: local netty host, v3 segment format, and the
 * shutdown query check disabled for faster test teardown.
 */
protected PinotConfiguration getDefaultServerConfiguration() {
    PinotConfiguration configuration = DefaultHelixStarterServerConfig.loadDefaultServerConf();
    configuration.setProperty(Helix.KEY_OF_SERVER_NETTY_HOST, LOCAL_HOST);
    configuration.setProperty(Server.CONFIG_OF_SEGMENT_FORMAT_VERSION, "v3");
    configuration.setProperty(Server.CONFIG_OF_SHUTDOWN_ENABLE_QUERY_CHECK, false);
    return configuration;
}
/** Starts a single server with the default configuration. */
protected void startServer() {
    startServers(1);
}
/** Starts a single server with the given configuration. */
protected void startServer(PinotConfiguration configuration) {
    startServers(1, configuration);
}
/** Starts the given number of servers with the default configuration. */
protected void startServers(int numServers) {
    startServers(numServers, getDefaultServerConfiguration());
}
/** Starts the given number of servers with default ports and ZK url. */
protected void startServers(int numServers, PinotConfiguration configuration) {
    startServers(numServers, configuration, Server.DEFAULT_ADMIN_API_PORT, Helix.DEFAULT_SERVER_NETTY_PORT, getZkUrl());
}
/** Starts the given number of servers with the default configuration on custom ports. */
protected void startServers(int numServers, int baseAdminApiPort, int baseNettyPort, String zkStr) {
    startServers(numServers, getDefaultServerConfiguration(), baseAdminApiPort, baseNettyPort, zkStr);
}
/** Starts the given number of servers with the default gRPC base port. */
protected void startServers(int numServers, PinotConfiguration configuration, int baseAdminApiPort, int baseNettyPort,
    String zkStr) {
    startServers(numServers, configuration, baseAdminApiPort, baseNettyPort, Server.DEFAULT_GRPC_PORT, zkStr);
}
/**
 * Starts {@code numServers} server instances sharing the given configuration object.
 * Admin ports count down from the base while netty/gRPC ports count up, keeping the
 * per-instance port ranges from overlapping.
 */
protected void startServers(int numServers, PinotConfiguration configuration, int baseAdminApiPort, int baseNettyPort,
    int baseGrpcPort, String zkStr) {
    FileUtils.deleteQuietly(new File(Server.DEFAULT_INSTANCE_BASE_DIR));
    _serverStarters = new ArrayList<>(numServers);
    overrideServerConf(configuration);
    try {
        for (int i = 0; i < numServers; i++) {
            configuration.setProperty(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
            configuration.setProperty(Helix.CONFIG_OF_ZOOKEEPR_SERVER, zkStr);
            configuration.setProperty(Server.CONFIG_OF_INSTANCE_DATA_DIR, Server.DEFAULT_INSTANCE_DATA_DIR + "-" + i);
            configuration.setProperty(Server.CONFIG_OF_INSTANCE_SEGMENT_TAR_DIR,
                Server.DEFAULT_INSTANCE_SEGMENT_TAR_DIR + "-" + i);
            configuration.setProperty(Server.CONFIG_OF_ADMIN_API_PORT, baseAdminApiPort - i);
            // FIX: the netty port was previously set twice; the redundant second call was removed
            configuration.setProperty(Server.CONFIG_OF_NETTY_PORT, baseNettyPort + i);
            if (configuration.getProperty(Server.CONFIG_OF_ENABLE_GRPC_SERVER, false)) {
                configuration.setProperty(Server.CONFIG_OF_GRPC_PORT, baseGrpcPort + i);
            }
            // Thread time measurement is disabled by default, enable it in integration tests.
            // TODO: this can be removed when we eventually enable thread time measurement by default.
            configuration.setProperty(Server.CONFIG_OF_ENABLE_THREAD_CPU_TIME_MEASUREMENT, true);
            HelixServerStarter helixServerStarter = new HelixServerStarter();
            helixServerStarter.init(configuration);
            helixServerStarter.start();
            _serverStarters.add(helixServerStarter);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Starts a single server configured for HTTPS, wiping any previous instance directory first.
 */
protected void startServerHttps() {
    FileUtils.deleteQuietly(new File(Server.DEFAULT_INSTANCE_BASE_DIR));
    _serverStarters = new ArrayList<>();
    Map<String, Object> properties = getDefaultServerConfiguration().toMap();
    properties.put(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
    properties.put(Helix.CONFIG_OF_ZOOKEEPR_SERVER, getZkUrl());
    PinotConfiguration configuration = new PinotConfiguration(properties);
    // Allow subclasses to tweak the config before the server starts
    overrideServerConf(configuration);
    try {
        HelixServerStarter helixServerStarter = new HelixServerStarter();
        helixServerStarter.init(configuration);
        _serverStarters.add(helixServerStarter);
        helixServerStarter.start();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Returns the base minion configuration; subclasses may override to supply defaults.
 */
protected PinotConfiguration getDefaultMinionConfiguration() {
    return new PinotConfiguration();
}
// NOTE: We don't allow multiple Minion instances in the same JVM because Minion uses singleton class MinionContext
// to manage the instance level configs
protected void startMinion() {
    // Wipe any leftover state from a previous minion run
    FileUtils.deleteQuietly(new File(Minion.DEFAULT_INSTANCE_BASE_DIR));
    try {
        PinotConfiguration minionConf = getDefaultMinionConfiguration();
        minionConf.setProperty(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
        minionConf.setProperty(Helix.CONFIG_OF_ZOOKEEPR_SERVER, getZkUrl());
        _minionStarter = new MinionStarter();
        _minionStarter.init(minionConf);
        _minionStarter.start();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/** Hook for subclasses to adjust the server configuration before startup. */
protected void overrideServerConf(PinotConfiguration configuration) {
    // Do nothing, to be overridden by tests if they need something specific
}
/** Hook for subclasses to adjust the broker configuration before startup. */
protected void overrideBrokerConf(PinotConfiguration configuration) {
    // Do nothing, to be overridden by tests if they need something specific
}
/**
 * Stops all running brokers; continues past individual failures so every broker
 * gets a stop attempt.
 */
protected void stopBroker() {
    assertNotNull(_brokerStarters, "Brokers are not started");
    for (HelixBrokerStarter brokerStarter : _brokerStarters) {
        try {
            brokerStarter.stop();
        } catch (Exception e) {
            // Pass the throwable itself so the full stack trace is logged, not just the message
            LOGGER.error("Encountered exception while stopping broker", e);
        }
    }
    _brokerStarters = null;
}
/**
 * Stops all running servers and wipes their instance directory; continues past
 * individual failures so every server gets a stop attempt.
 */
protected void stopServer() {
    assertNotNull(_serverStarters, "Servers are not started");
    for (HelixServerStarter helixServerStarter : _serverStarters) {
        try {
            helixServerStarter.stop();
        } catch (Exception e) {
            // Pass the throwable itself so the full stack trace is logged, not just the message
            LOGGER.error("Encountered exception while stopping server", e);
        }
    }
    FileUtils.deleteQuietly(new File(Server.DEFAULT_INSTANCE_BASE_DIR));
    _serverStarters = null;
}
/**
 * Stops all running servers and starts {@code numServers} fresh ones with the default
 * ports and configuration. Unlike {@link #stopServer()}, the instance directory is kept
 * so restarted servers reload their previous state.
 */
protected void restartServers(int numServers) {
    assertNotNull(_serverStarters, "Servers are not started");
    for (HelixServerStarter helixServerStarter : _serverStarters) {
        try {
            helixServerStarter.stop();
        } catch (Exception e) {
            // Pass the throwable itself so the full stack trace is logged, not just the message
            LOGGER.error("Encountered exception while stopping server", e);
        }
    }
    _serverStarters = new ArrayList<>(numServers);
    String zkStr = getZkUrl();
    int baseAdminApiPort = Server.DEFAULT_ADMIN_API_PORT;
    int baseNettyPort = Helix.DEFAULT_SERVER_NETTY_PORT;
    int baseGrpcPort = Server.DEFAULT_GRPC_PORT;
    PinotConfiguration configuration = getDefaultServerConfiguration();
    overrideServerConf(configuration);
    try {
        for (int i = 0; i < numServers; i++) {
            configuration.setProperty(Helix.CONFIG_OF_CLUSTER_NAME, getHelixClusterName());
            configuration.setProperty(Helix.CONFIG_OF_ZOOKEEPR_SERVER, zkStr);
            configuration.setProperty(Server.CONFIG_OF_INSTANCE_DATA_DIR, Server.DEFAULT_INSTANCE_DATA_DIR + "-" + i);
            configuration.setProperty(Server.CONFIG_OF_INSTANCE_SEGMENT_TAR_DIR,
                Server.DEFAULT_INSTANCE_SEGMENT_TAR_DIR + "-" + i);
            configuration.setProperty(Server.CONFIG_OF_ADMIN_API_PORT, baseAdminApiPort - i);
            // FIX: the netty port was previously set twice; the redundant second call was removed
            configuration.setProperty(Server.CONFIG_OF_NETTY_PORT, baseNettyPort + i);
            if (configuration.getProperty(Server.CONFIG_OF_ENABLE_GRPC_SERVER, false)) {
                configuration.setProperty(Server.CONFIG_OF_GRPC_PORT, baseGrpcPort + i);
            }
            // Thread time measurement is disabled by default, enable it in integration tests.
            // TODO: this can be removed when we eventually enable thread time measurement by default.
            configuration.setProperty(Server.CONFIG_OF_ENABLE_THREAD_CPU_TIME_MEASUREMENT, true);
            HelixServerStarter helixServerStarter = new HelixServerStarter();
            helixServerStarter.init(configuration);
            helixServerStarter.start();
            _serverStarters.add(helixServerStarter);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Stops the running minion and wipes its instance directory.
 */
protected void stopMinion() {
    assertNotNull(_minionStarter, "Minion is not started");
    try {
        _minionStarter.stop();
    } catch (Exception e) {
        // Pass the throwable itself so the full stack trace is logged, not just the message
        LOGGER.error("Encountered exception while stopping minion", e);
    }
    FileUtils.deleteQuietly(new File(Minion.DEFAULT_INSTANCE_BASE_DIR));
    _minionStarter = null;
}
/**
 * Upload all segments inside the given directory to the cluster.
 * A single segment is uploaded synchronously; multiple segments are uploaded in parallel,
 * one thread per segment. For each segment, either a full-tar upload or a metadata-only
 * upload is exercised, chosen by the parity of the current time (effectively randomizing
 * which upload code path each run covers).
 *
 * @param tarDir Segment directory
 */
protected void uploadSegments(String tableName, File tarDir)
    throws Exception {
    File[] segmentTarFiles = tarDir.listFiles();
    assertNotNull(segmentTarFiles);
    int numSegments = segmentTarFiles.length;
    assertTrue(numSegments > 0);
    URI uploadSegmentHttpURI = FileUploadDownloadClient.getUploadSegmentHttpURI(LOCAL_HOST, _controllerPort);
    try (FileUploadDownloadClient fileUploadDownloadClient = new FileUploadDownloadClient()) {
        if (numSegments == 1) {
            File segmentTarFile = segmentTarFiles[0];
            // Time parity picks between the two upload paths
            if (System.currentTimeMillis() % 2 == 0) {
                assertEquals(
                    fileUploadDownloadClient.uploadSegment(uploadSegmentHttpURI, segmentTarFile.getName(), segmentTarFile,
                        tableName).getStatusCode(), HttpStatus.SC_OK);
            } else {
                assertEquals(
                    uploadSegmentWithOnlyMetadata(tableName, uploadSegmentHttpURI, fileUploadDownloadClient, segmentTarFile),
                    HttpStatus.SC_OK);
            }
        } else {
            // Upload all segments in parallel
            ExecutorService executorService = Executors.newFixedThreadPool(numSegments);
            List<Future<Integer>> futures = new ArrayList<>(numSegments);
            for (File segmentTarFile : segmentTarFiles) {
                futures.add(executorService.submit(() -> {
                    if (System.currentTimeMillis() % 2 == 0) {
                        return fileUploadDownloadClient.uploadSegment(uploadSegmentHttpURI, segmentTarFile.getName(),
                            segmentTarFile, tableName).getStatusCode();
                    } else {
                        return uploadSegmentWithOnlyMetadata(tableName, uploadSegmentHttpURI, fileUploadDownloadClient,
                            segmentTarFile);
                    }
                }));
            }
            executorService.shutdown();
            // Waiting on each future also propagates any upload failure as an exception
            for (Future<Integer> future : futures) {
                assertEquals((int) future.get(), HttpStatus.SC_OK);
            }
        }
    }
}
/**
 * Uploads all segments found inside the given list of directories to the cluster.
 *
 * @param tableName Name of the table to upload the segments to
 * @param tarDirPaths List of directories containing segment tar files
 * @param tableType Type of the table the segments belong to
 * @param enableParallelPushProtection Whether to enable parallel push protection for the upload
 */
protected void uploadSegments(String tableName, List<File> tarDirPaths, TableType tableType,
    boolean enableParallelPushProtection)
    throws Exception {
  List<File> segmentTarFiles = new ArrayList<>();
  for (File tarDir : tarDirPaths) {
    File[] tarFiles = tarDir.listFiles();
    // listFiles() returns null for a non-existent or non-directory path; check before
    // dereferencing instead of asserting on the (never-null) accumulator afterwards.
    assertNotNull(tarFiles, "Invalid segment directory: " + tarDir);
    Collections.addAll(segmentTarFiles, tarFiles);
  }
  int numSegments = segmentTarFiles.size();
  assertTrue(numSegments > 0);
  URI uploadSegmentHttpURI = FileUploadDownloadClient.getUploadSegmentHttpURI(LOCAL_HOST, _controllerPort);
  try (FileUploadDownloadClient fileUploadDownloadClient = new FileUploadDownloadClient()) {
    if (numSegments == 1) {
      File segmentTarFile = segmentTarFiles.get(0);
      // Pass the caller-provided tableType. The previous code used 'tableType.OFFLINE',
      // which resolves to the static enum constant OFFLINE regardless of the argument,
      // silently ignoring the tableType parameter.
      assertEquals(
          fileUploadDownloadClient.uploadSegment(uploadSegmentHttpURI, segmentTarFile.getName(), segmentTarFile,
              tableName, tableType, enableParallelPushProtection, true).getStatusCode(), HttpStatus.SC_OK);
    } else {
      // Upload all segments in parallel
      ExecutorService executorService = Executors.newFixedThreadPool(numSegments);
      List<Future<Integer>> futures = new ArrayList<>(numSegments);
      for (File segmentTarFile : segmentTarFiles) {
        futures.add(executorService.submit(
            () -> fileUploadDownloadClient.uploadSegment(uploadSegmentHttpURI, segmentTarFile.getName(),
                segmentTarFile, tableName, tableType, enableParallelPushProtection, true).getStatusCode()));
      }
      executorService.shutdown();
      // Future.get() blocks until each upload completes; assert every upload succeeded.
      for (Future<Integer> future : futures) {
        assertEquals((int) future.get(), HttpStatus.SC_OK);
      }
    }
  }
}
/**
 * Uploads only the metadata of the given segment tar file, with a DOWNLOAD_URI header
 * pointing the controller at the local file instead of pushing the segment contents.
 *
 * @return HTTP status code of the metadata upload response
 */
private int uploadSegmentWithOnlyMetadata(String tableName, URI uploadSegmentHttpURI,
    FileUploadDownloadClient fileUploadDownloadClient, File segmentTarFile)
    throws IOException, HttpErrorStatusException {
  // Tell the controller where to fetch the segment from (local file URI).
  String downloadUri = "file://" + segmentTarFile.getParentFile().getAbsolutePath() + "/" + URLEncoder.encode(
      segmentTarFile.getName(), StandardCharsets.UTF_8.toString());
  List<Header> headers = ImmutableList.of(
      new BasicHeader(FileUploadDownloadClient.CustomHeaders.DOWNLOAD_URI, downloadUri),
      new BasicHeader(FileUploadDownloadClient.CustomHeaders.UPLOAD_TYPE,
          FileUploadDownloadClient.FileUploadType.METADATA.toString()));
  // Add table name as a request parameter
  List<NameValuePair> parameters = Collections.singletonList(
      new BasicNameValuePair(FileUploadDownloadClient.QueryParameters.TABLE_NAME, tableName));
  return fileUploadDownloadClient.uploadSegmentMetadata(uploadSegmentHttpURI, segmentTarFile.getName(),
      segmentTarFile, headers, parameters, HttpClient.DEFAULT_SOCKET_TIMEOUT_MS).getStatusCode();
}
public static class AvroFileSchemaKafkaAvroMessageDecoder implements StreamMessageDecoder<byte[]> {
  private static final Logger LOGGER = LoggerFactory.getLogger(AvroFileSchemaKafkaAvroMessageDecoder.class);

  // Test hook: the Avro file whose embedded schema is used to decode incoming messages.
  public static File _avroFile;

  private org.apache.avro.Schema _avroSchema;
  private RecordExtractor _recordExtractor;
  private DecoderFactory _decoderFactory = new DecoderFactory();
  private DatumReader<GenericData.Record> _reader;

  /**
   * Reads the Avro schema from {@code _avroFile}, then sets up the record extractor
   * for the requested fields and a datum reader for that schema.
   */
  @Override
  public void init(Map<String, String> props, Set<String> fieldsToRead, String topicName)
      throws Exception {
    try (DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(_avroFile)) {
      _avroSchema = avroReader.getSchema();
    }
    _recordExtractor = new AvroRecordExtractor();
    _recordExtractor.init(fieldsToRead, null);
    _reader = new GenericDatumReader<>(_avroSchema);
  }

  @Override
  public GenericRow decode(byte[] payload, GenericRow destination) {
    // Delegate to the ranged overload covering the whole payload.
    return decode(payload, 0, payload.length, destination);
  }

  /**
   * Decodes the given byte range as a binary-encoded Avro record and extracts it into
   * the destination row. Any decoding failure is logged and rethrown as a RuntimeException.
   */
  @Override
  public GenericRow decode(byte[] payload, int offset, int length, GenericRow destination) {
    try {
      GenericData.Record record =
          _reader.read(null, _decoderFactory.binaryDecoder(payload, offset, length, null));
      return _recordExtractor.extract(record, destination);
    } catch (Exception e) {
      LOGGER.error("Caught exception", e);
      throw new RuntimeException(e);
    }
  }
}
/**
 * Fetches the JSON debug info exposed by the broker under the given relative URI.
 */
protected JsonNode getDebugInfo(final String uri)
    throws Exception {
  String response = sendGetRequest(_brokerBaseApiUrl + "/" + uri);
  return JsonUtils.stringToJsonNode(response);
}
/**
 * Posts the given SQL query to this cluster's broker query endpoint (/query/sql).
 */
protected JsonNode postQuery(String query)
    throws Exception {
  return postQuery(query, _brokerBaseApiUrl);
}
/**
 * Queries the given broker's SQL query endpoint (/query/sql) without extra headers.
 */
public static JsonNode postQuery(String query, String brokerBaseApiUrl)
    throws Exception {
  return postQuery(query, brokerBaseApiUrl, null);
}
/**
 * Queries the given broker's SQL query endpoint (/query/sql), forcing SQL-style
 * group-by semantics and response format via query options.
 *
 * @param headers optional extra HTTP headers, may be null
 */
public static JsonNode postQuery(String query, String brokerBaseApiUrl, Map<String, String> headers)
    throws Exception {
  ObjectNode payload = JsonUtils.newObjectNode();
  payload.put("sql", query);
  payload.put("queryOptions", "groupByMode=sql;responseFormat=sql");
  String response = sendPostRequest(brokerBaseApiUrl + "/query/sql", payload.toString(), headers);
  return JsonUtils.stringToJsonNode(response);
}
}
|
package com.gzsll.hupu.bean;
import com.gzsll.hupu.db.Forum;
import java.io.Serializable;
import java.util.ArrayList;
/**
* Created by sll on 2016/3/8.
*/
public class Forums implements Serializable {
  // Forum entries contained in this group.
  public ArrayList<Forum> data;
  // Sort weight of the group — presumably used for ordering; TODO confirm direction against caller.
  public int weight;
  // Display name of the forum group.
  public String name;
}
|
package one.oktw;
import com.google.common.net.InetAddresses;
import com.mojang.authlib.GameProfile;
import com.mojang.authlib.properties.Property;
import net.minecraft.network.PacketByteBuf;
import net.minecraft.util.Identifier;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
public class VelocityLib {
    public static final Identifier PLAYER_INFO_CHANNEL = new Identifier("velocity", "player_info");
    private static final int SUPPORTED_FORWARDING_VERSION = 1;

    /**
     * Verifies the HMAC-SHA256 signature prepended to a Velocity player-info packet
     * and validates the forwarding protocol version.
     *
     * @param buf packet buffer positioned at the start of the 32-byte signature
     * @return true if the signature matches the configured secret, false otherwise
     * @throws IllegalStateException if the forwarding version is unsupported
     */
    public static boolean checkIntegrity(final PacketByteBuf buf) {
        final byte[] signature = new byte[32];
        buf.readBytes(signature);
        // Sign everything after the signature, without consuming it from the buffer.
        final byte[] data = new byte[buf.readableBytes()];
        buf.getBytes(buf.readerIndex(), data);
        try {
            final Mac mac = Mac.getInstance("HmacSHA256");
            // Use an explicit UTF-8 charset instead of the platform default so the key bytes
            // match the proxy's encoding of the shared secret — NOTE(review): confirm the
            // proxy side also derives the key from UTF-8 bytes.
            mac.init(new SecretKeySpec(FabricProxy.config.getSecret().getBytes(StandardCharsets.UTF_8), "HmacSHA256"));
            final byte[] mySignature = mac.doFinal(data);
            // Constant-time comparison avoids leaking signature bytes via timing.
            if (!MessageDigest.isEqual(signature, mySignature)) {
                return false;
            }
        } catch (final InvalidKeyException | NoSuchAlgorithmException e) {
            // HmacSHA256 is mandated by the JCA spec; a failure here indicates a broken JVM.
            throw new AssertionError(e);
        }
        final int version = buf.readVarInt();
        if (version != SUPPORTED_FORWARDING_VERSION) {
            throw new IllegalStateException("Unsupported forwarding version " + version + ", wanted " + SUPPORTED_FORWARDING_VERSION);
        }
        return true;
    }

    /** Reads a client IP address transmitted as a string. */
    public static InetAddress readAddress(final PacketByteBuf buf) {
        return InetAddresses.forString(buf.readString(Short.MAX_VALUE));
    }

    /** Reads the forwarded player profile: UUID, name (max 16 chars), and properties. */
    public static GameProfile createProfile(final PacketByteBuf buf) {
        final GameProfile profile = new GameProfile(buf.readUuid(), buf.readString(16));
        readProperties(buf, profile);
        return profile;
    }

    /** Reads the count-prefixed list of profile properties (name, value, optional signature). */
    private static void readProperties(final PacketByteBuf buf, final GameProfile profile) {
        final int properties = buf.readVarInt();
        for (int i = 0; i < properties; i++) {
            final String name = buf.readString(Short.MAX_VALUE);
            final String value = buf.readString(Short.MAX_VALUE);
            final String signature = buf.readBoolean() ? buf.readString(Short.MAX_VALUE) : null;
            profile.getProperties().put(name, new Property(name, value, signature));
        }
    }
}
|
/*
* Copyright (c) 2012 - 2020 Arvato Systems GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arvatosystems.t9t.base.be.eventhandler;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.arvatosystems.t9t.base.services.ICacheInvalidationRegistry;
import de.jpaw.bonaparte.core.BonaPortable;
import de.jpaw.dp.Singleton;
/**
* This class implements a central repository for cache invalidation.
* For a given key (DTO or JPA entity simple name or PQON), a lambda can be stored which clears the local cache.
* Usually for configuration data, the whole cache is cleared, ignoring any specific entry.
* Bigger caches could choose to clear selected entries only.
*
* This strategy allows to register a single event listener for all kinds of caches
* (the map.get here is faster than launching a separate EventListener per cache).
*
*/
@Singleton
public class CacheInvalidationRegistry implements ICacheInvalidationRegistry {
    private static final Logger LOGGER = LoggerFactory.getLogger(CacheInvalidationRegistry.class);
    private static final ConcurrentHashMap<String, Consumer<BonaPortable>> INVALIDATORS = new ConcurrentHashMap<>(16);

    /**
     * Registers the cache invalidation callback for the given DTO / entity key,
     * or removes the existing registration when {@code invalidator} is null.
     */
    @Override
    public void registerInvalidator(final String dto, final Consumer<BonaPortable> invalidator) {
        LOGGER.info("{} cache invalidator for {}", invalidator == null ? "Deleting" : "Registering", dto);
        if (invalidator == null) {
            INVALIDATORS.remove(dto);
        } else {
            INVALIDATORS.put(dto, invalidator);
        }
    }

    /** Returns the invalidator registered for the given key, or null when none is registered. */
    @Override
    public Consumer<BonaPortable> getInvalidator(final String dto) {
        return INVALIDATORS.get(dto);
    }
}
|
package com.example.leiyun.coolweather.db;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
* Created by LeiYun on 2016/8/1 0001.
*/
public class CoolWeatherOpenHelper extends SQLiteOpenHelper {
    /**
     * DDL for the Province table.
     */
    public static final String CREATE_PROVINCE = "create table Province ("
            + "id integer primary key autoincrement, "
            + "province_name text, "
            + "province_code text)";
    /**
     * DDL for the City table (references Province via province_id).
     */
    public static final String CREATE_CITY = "create table City ("
            + "id integer primary key autoincrement, "
            + "city_name text, "
            + "city_code text, "
            + "province_id integer)";
    /**
     * DDL for the County table (references City via city_id).
     */
    public static final String CREATE_COUNTY = "create table County ("
            + "id integer primary key autoincrement, "
            + "county_name text, "
            + "county_code text, "
            + "city_id integer)";

    public CoolWeatherOpenHelper(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) {
        super(context, name, factory, version);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(CREATE_PROVINCE); // create the Province table
        db.execSQL(CREATE_CITY); // create the City table
        db.execSQL(CREATE_COUNTY); // create the County table
    }

    // No-op: the schema has never changed. NOTE(review): tables are only created in
    // onCreate, so any future schema-version bump must add migration logic here.
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    }
}
|
package com.codeborne.selenide.webdriver;
import com.codeborne.selenide.Browser;
import com.codeborne.selenide.Config;
import org.apache.commons.lang3.math.NumberUtils;
import org.openqa.selenium.Proxy;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
class ChromeDriverFactory extends AbstractDriverFactory {
  private static final Logger log = LoggerFactory.getLogger(ChromeDriverFactory.class);

  @Override
  WebDriver create(Config config, Proxy proxy) {
    ChromeOptions options = createChromeOptions(config, proxy);
    return new ChromeDriver(options);
  }

  @Override
  boolean supports(Config config, Browser browser) {
    return browser.isChrome();
  }

  /**
   * Builds ChromeOptions from the given config and proxy, then overlays any
   * "chromeoptions.args" / "chromeoptions.prefs" system properties.
   */
  ChromeOptions createChromeOptions(Config config, Proxy proxy) {
    ChromeOptions options = new ChromeOptions();
    options.setHeadless(config.headless());
    if (!config.browserBinary().isEmpty()) {
      log.info("Using browser binary: {}", config.browserBinary());
      options.setBinary(config.browserBinary());
    }
    // Chrome bypasses the proxy for loopback addresses by default; this flag disables that.
    options.addArguments("--proxy-bypass-list=<-loopback>");
    options.merge(createCommonCapabilities(config, proxy));
    options = transferChromeOptionsFromSystemProperties(options);
    log.debug("Chrome options: {}", options);
    return options;
  }

  /**
   * This method only handles so-called "arguments" and "preferences"
   * for ChromeOptions (there is also "Extensions" etc.)
   *
   * @param currentChromeOptions options to update
   * @return options updated with args & prefs parameters
   */
  private ChromeOptions transferChromeOptionsFromSystemProperties(ChromeOptions currentChromeOptions) {
    if (System.getProperty("chromeoptions.args") != null) {
      Stream<String> params = Arrays.stream(parseCSVhandlingQuotes(System.getProperty("chromeoptions.args")));
      List<String> args = params
        .map(s -> s.replace("\"", ""))
        .collect(Collectors.toList());
      currentChromeOptions.addArguments(args);
    }
    if (System.getProperty("chromeoptions.prefs") != null) {
      Map<String, Object> prefs = parsePreferencesFromString(System.getProperty("chromeoptions.prefs"));
      currentChromeOptions.setExperimentalOption("prefs", prefs);
    }
    return currentChromeOptions;
  }

  /**
   * Parses a comma-separated list of {@code key=value} preference pairs.
   * Malformed pairs (no '=' or more than one '=') are logged and skipped.
   */
  private Map<String, Object> parsePreferencesFromString(String preferencesString) {
    Map<String, Object> prefs = new HashMap<>();
    String[] allPrefs = parseCSVhandlingQuotes(preferencesString);
    for (String pref : allPrefs) {
      String[] keyValue = pref
        .replace("\"", "")
        .split("=");
      if (keyValue.length == 1) {
        log.warn("Missing '=' sign while parsing <key=value> pairs from {}. Key '{}' is ignored.",
          preferencesString, keyValue[0]);
        continue;
      } else if (keyValue.length > 2) {
        log.warn("More than one '=' sign while parsing <key=value> pairs from {}. Key '{}' is ignored.",
          preferencesString, keyValue[0]);
        continue;
      }
      Object prefValue = convertStringToNearestObjectType(keyValue[1]);
      prefs.put(keyValue[0], prefValue);
    }
    return prefs;
  }

  /**
   * parse parameters which can come from command-line interface
   * @param csvString comma-separated values, quotes can be used to mask spaces and commas
   *                  Example: 123,"foo bar","bar,foo"
   * @return values as array, quotes are preserved
   */
  private String[] parseCSVhandlingQuotes(String csvString) {
    // Regexp from https://stackoverflow.com/a/15739087/1110503 to handle commas in values
    return csvString.split(",(?=([^\"]*\"[^\"]*\")*[^\"]*$)");
  }

  /**
   * Converts String to Boolean\Integer or returns original String.
   * @param value string to convert
   * @return string's object representation
   */
  private Object convertStringToNearestObjectType(String value) {
    switch (value) {
      case "true":
        return true;
      case "false":
        return false;
      default: {
        if (NumberUtils.isParsable(value)) {
          // isParsable() also accepts decimals ("1.5") and values outside the int range,
          // which previously made Integer.parseInt throw NumberFormatException.
          // Fall back to the original string for anything that is not a valid int.
          try {
            return Integer.parseInt(value);
          } catch (NumberFormatException ignored) {
            return value;
          }
        }
        return value;
      }
    }
  }
}
|
/*
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.temporal;
import java.time.DateTimeException;
/**
* Framework-level interface defining read-write access to a temporal object,
* such as a date, time, offset or some combination of these.
* <p>
* This is the base interface type for date, time and offset objects that
* are complete enough to be manipulated using plus and minus.
* It is implemented by those classes that can provide and manipulate information
* as {@linkplain TemporalField fields} or {@linkplain TemporalQuery queries}.
* See {@link TemporalAccessor} for the read-only version of this interface.
* <p>
* Most date and time information can be represented as a number.
* These are modeled using {@code TemporalField} with the number held using
* a {@code long} to handle large values. Year, month and day-of-month are
* simple examples of fields, but they also include instant and offsets.
* See {@link ChronoField} for the standard set of fields.
* <p>
* Two pieces of date/time information cannot be represented by numbers,
* the {@linkplain java.time.chrono.Chronology chronology} and the
* {@linkplain java.time.ZoneId time-zone}.
* These can be accessed via {@link #query(TemporalQuery) queries} using
* the static methods defined on {@link TemporalQuery}.
* <p>
* This interface is a framework-level interface that should not be widely
* used in application code. Instead, applications should create and pass
* around instances of concrete types, such as {@code LocalDate}.
* There are many reasons for this, part of which is that implementations
* of this interface may be in calendar systems other than ISO.
* See {@link java.time.chrono.ChronoLocalDate} for a fuller discussion of the issues.
*
* <h3>When to implement</h3>
* <p>
* A class should implement this interface if it meets three criteria:
* <ul>
* <li>it provides access to date/time/offset information, as per {@code TemporalAccessor}
* <li>the set of fields are contiguous from the largest to the smallest
* <li>the set of fields are complete, such that no other field is needed to define the
* valid range of values for the fields that are represented
* </ul>
* <p>
* Four examples make this clear:
* <ul>
* <li>{@code LocalDate} implements this interface as it represents a set of fields
* that are contiguous from days to forever and require no external information to determine
* the validity of each date. It is therefore able to implement plus/minus correctly.
* <li>{@code LocalTime} implements this interface as it represents a set of fields
* that are contiguous from nanos to within days and require no external information to determine
* validity. It is able to implement plus/minus correctly, by wrapping around the day.
* <li>{@code MonthDay}, the combination of month-of-year and day-of-month, does not implement
* this interface. While the combination is contiguous, from days to months within years,
* the combination does not have sufficient information to define the valid range of values
* for day-of-month. As such, it is unable to implement plus/minus correctly.
* <li>The combination day-of-week and day-of-month ("Friday the 13th") should not implement
* this interface. It does not represent a contiguous set of fields, as days to weeks overlaps
* days to months.
* </ul>
*
* @implSpec
* This interface places no restrictions on the mutability of implementations,
* however immutability is strongly recommended.
* All implementations must be {@link Comparable}.
*
* @since 1.8
*/
public interface Temporal extends TemporalAccessor {
/**
* Checks if the specified unit is supported.
* <p>
* This checks if the specified unit can be added to, or subtracted from, this date-time.
* If false, then calling the {@link #plus(long, TemporalUnit)} and
* {@link #minus(long, TemporalUnit) minus} methods will throw an exception.
*
* @implSpec
* Implementations must check and handle all units defined in {@link ChronoUnit}.
* If the unit is supported, then true must be returned, otherwise false must be returned.
* <p>
* If the field is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.isSupportedBy(Temporal)}
* passing {@code this} as the argument.
* <p>
* Implementations must ensure that no observable state is altered when this
* read-only method is invoked.
*
* @param unit the unit to check, null returns false
* @return true if the unit can be added/subtracted, false if not
*/
boolean isSupported(TemporalUnit unit);
/**
* Returns an adjusted object of the same type as this object with the adjustment made.
* <p>
* This adjusts this date-time according to the rules of the specified adjuster.
* A simple adjuster might simply set the one of the fields, such as the year field.
* A more complex adjuster might set the date to the last day of the month.
* A selection of common adjustments is provided in
* {@link java.time.temporal.TemporalAdjusters TemporalAdjusters}.
* These include finding the "last day of the month" and "next Wednesday".
* The adjuster is responsible for handling special cases, such as the varying
* lengths of month and leap years.
* <p>
* Some example code indicating how and why this method is used:
* <pre>
* date = date.with(Month.JULY); // most key classes implement TemporalAdjuster
* date = date.with(lastDayOfMonth()); // static import from Adjusters
* date = date.with(next(WEDNESDAY)); // static import from Adjusters and DayOfWeek
* </pre>
*
* @implSpec
* <p>
* Implementations must not alter either this object or the specified temporal object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
* <p>
* The default implementation must behave equivalent to this code:
* <pre>
* return adjuster.adjustInto(this);
* </pre>
*
* @param adjuster the adjuster to use, not null
* @return an object of the same type with the specified adjustment made, not null
* @throws DateTimeException if unable to make the adjustment
* @throws ArithmeticException if numeric overflow occurs
*/
default Temporal with(TemporalAdjuster adjuster) {
return adjuster.adjustInto(this);
}
/**
* Returns an object of the same type as this object with the specified field altered.
* <p>
* This returns a new object based on this one with the value for the specified field changed.
* For example, on a {@code LocalDate}, this could be used to set the year, month or day-of-month.
* The returned object will have the same observable type as this object.
* <p>
* In some cases, changing a field is not fully defined. For example, if the target object is
* a date representing the 31st January, then changing the month to February would be unclear.
* In cases like this, the field is responsible for resolving the result. Typically it will choose
* the previous valid date, which would be the last valid day of February in this example.
*
* @implSpec
* Implementations must check and handle all fields defined in {@link ChronoField}.
* If the field is supported, then the adjustment must be performed.
* If unsupported, then an {@code UnsupportedTemporalTypeException} must be thrown.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.adjustInto(Temporal, long)}
* passing {@code this} as the first argument.
* <p>
* Implementations must not alter this object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
*
* @param field the field to set in the result, not null
* @param newValue the new value of the field in the result
* @return an object of the same type with the specified field set, not null
* @throws DateTimeException if the field cannot be set
* @throws UnsupportedTemporalTypeException if the field is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
Temporal with(TemporalField field, long newValue);
//-----------------------------------------------------------------------
/**
* Returns an object of the same type as this object with an amount added.
* <p>
* This adjusts this temporal, adding according to the rules of the specified amount.
* The amount is typically a {@link java.time.Period} but may be any other type implementing
* the {@link TemporalAmount} interface, such as {@link java.time.Duration}.
* <p>
* Some example code indicating how and why this method is used:
* <pre>
* date = date.plus(period); // add a Period instance
* date = date.plus(duration); // add a Duration instance
* date = date.plus(workingDays(6)); // example user-written workingDays method
* </pre>
* <p>
* Note that calling {@code plus} followed by {@code minus} is not guaranteed to
* return the same date-time.
*
* @implSpec
* <p>
* Implementations must not alter either this object or the specified temporal object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
* <p>
* The default implementation must behave equivalent to this code:
* <pre>
* return amount.addTo(this);
* </pre>
*
* @param amount the amount to add, not null
* @return an object of the same type with the specified adjustment made, not null
* @throws DateTimeException if the addition cannot be made
* @throws ArithmeticException if numeric overflow occurs
*/
default Temporal plus(TemporalAmount amount) {
return amount.addTo(this);
}
/**
* Returns an object of the same type as this object with the specified period added.
* <p>
* This method returns a new object based on this one with the specified period added.
* For example, on a {@code LocalDate}, this could be used to add a number of years, months or days.
* The returned object will have the same observable type as this object.
* <p>
* In some cases, changing a field is not fully defined. For example, if the target object is
* a date representing the 31st January, then adding one month would be unclear.
* In cases like this, the field is responsible for resolving the result. Typically it will choose
* the previous valid date, which would be the last valid day of February in this example.
*
* @implSpec
* Implementations must check and handle all units defined in {@link ChronoUnit}.
* If the unit is supported, then the addition must be performed.
* If unsupported, then an {@code UnsupportedTemporalTypeException} must be thrown.
* <p>
* If the unit is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.addTo(Temporal, long)}
* passing {@code this} as the first argument.
* <p>
* Implementations must not alter this object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
*
* @param amountToAdd the amount of the specified unit to add, may be negative
* @param unit the unit of the amount to add, not null
* @return an object of the same type with the specified period added, not null
* @throws DateTimeException if the unit cannot be added
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
Temporal plus(long amountToAdd, TemporalUnit unit);
//-----------------------------------------------------------------------
/**
* Returns an object of the same type as this object with an amount subtracted.
* <p>
* This adjusts this temporal, subtracting according to the rules of the specified amount.
* The amount is typically a {@link java.time.Period} but may be any other type implementing
* the {@link TemporalAmount} interface, such as {@link java.time.Duration}.
* <p>
* Some example code indicating how and why this method is used:
* <pre>
* date = date.minus(period); // subtract a Period instance
* date = date.minus(duration); // subtract a Duration instance
* date = date.minus(workingDays(6)); // example user-written workingDays method
* </pre>
* <p>
* Note that calling {@code plus} followed by {@code minus} is not guaranteed to
* return the same date-time.
*
* @implSpec
* <p>
* Implementations must not alter either this object or the specified temporal object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
* <p>
* The default implementation must behave equivalent to this code:
* <pre>
* return amount.subtractFrom(this);
* </pre>
*
* @param amount the amount to subtract, not null
* @return an object of the same type with the specified adjustment made, not null
* @throws DateTimeException if the subtraction cannot be made
* @throws ArithmeticException if numeric overflow occurs
*/
default Temporal minus(TemporalAmount amount) {
return amount.subtractFrom(this);
}
/**
* Returns an object of the same type as this object with the specified period subtracted.
* <p>
* This method returns a new object based on this one with the specified period subtracted.
* For example, on a {@code LocalDate}, this could be used to subtract a number of years, months or days.
* The returned object will have the same observable type as this object.
* <p>
* In some cases, changing a field is not fully defined. For example, if the target object is
* a date representing the 31st March, then subtracting one month would be unclear.
* In cases like this, the field is responsible for resolving the result. Typically it will choose
* the previous valid date, which would be the last valid day of February in this example.
*
* @implSpec
* Implementations must behave in a manner equivalent to the default method behavior.
* <p>
* Implementations must not alter this object.
* Instead, an adjusted copy of the original must be returned.
* This provides equivalent, safe behavior for immutable and mutable implementations.
* <p>
* The default implementation must behave equivalent to this code:
* <pre>
* return (amountToSubtract == Long.MIN_VALUE ?
* plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit));
* </pre>
*
* @param amountToSubtract the amount of the specified unit to subtract, may be negative
* @param unit the unit of the amount to subtract, not null
* @return an object of the same type with the specified period subtracted, not null
* @throws DateTimeException if the unit cannot be subtracted
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
default Temporal minus(long amountToSubtract, TemporalUnit unit) {
return (amountToSubtract == Long.MIN_VALUE ? plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit));
}
//-----------------------------------------------------------------------
/**
* Calculates the amount of time until another temporal in terms of the specified unit.
* <p>
* This calculates the amount of time between two temporal objects
* in terms of a single {@code TemporalUnit}.
* The start and end points are {@code this} and the specified temporal.
* The end point is converted to be of the same type as the start point if different.
* The result will be negative if the end is before the start.
* For example, the amount in hours between two temporal objects can be
* calculated using {@code startTime.until(endTime, HOURS)}.
* <p>
* The calculation returns a whole number, representing the number of
* complete units between the two temporals.
* For example, the amount in hours between the times 11:30 and 13:29
* will only be one hour as it is one minute short of two hours.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method directly.
* The second is to use {@link TemporalUnit#between(Temporal, Temporal)}:
* <pre>
* // these two lines are equivalent
* temporal = start.until(end, unit);
* temporal = unit.between(start, end);
* </pre>
* The choice should be made based on which makes the code more readable.
* <p>
* For example, this method allows the number of days between two dates to
* be calculated:
* <pre>
* long daysBetween = start.until(end, DAYS);
* // or alternatively
* long daysBetween = DAYS.between(start, end);
* </pre>
*
* @implSpec
* Implementations must begin by checking to ensure that the input temporal
* object is of the same observable type as the implementation.
* They must then perform the calculation for all instances of {@link ChronoUnit}.
* An {@code UnsupportedTemporalTypeException} must be thrown for {@code ChronoUnit}
* instances that are unsupported.
* <p>
* If the unit is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.between(Temporal, Temporal)}
* passing {@code this} as the first argument and the converted input temporal as
* the second argument.
* <p>
* In summary, implementations must behave in a manner equivalent to this pseudo-code:
* <pre>
* // convert the end temporal to the same type as this class
* if (unit instanceof ChronoUnit) {
* // if unit is supported, then calculate and return result
* // else throw UnsupportedTemporalTypeException for unsupported units
* }
* return unit.between(this, convertedEndTemporal);
* </pre>
* <p>
* Note that the unit's {@code between} method must only be invoked if the
* two temporal objects have exactly the same type evaluated by {@code getClass()}.
* <p>
* Implementations must ensure that no observable state is altered when this
* read-only method is invoked.
*
* @param endExclusive the end temporal, exclusive, converted to be of the
* same type as this object, not null
* @param unit the unit to measure the amount in, not null
* @return the amount of time between this temporal object and the specified one
* in terms of the unit; positive if the specified object is later than this one,
* negative if it is earlier than this one
* @throws DateTimeException if the amount cannot be calculated, or the end
* temporal cannot be converted to the same type as this temporal
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
long until(Temporal endExclusive, TemporalUnit unit);
}
|
package think.rpgitems;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.server.ServerLoadEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.plugin.InvalidDescriptionException;
import org.bukkit.plugin.InvalidPluginException;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.java.JavaPlugin;
import org.librazy.nclangchecker.LangKey;
import think.rpgitems.data.Font;
import think.rpgitems.item.ItemManager;
import think.rpgitems.power.*;
import think.rpgitems.power.impl.BasePower;
import think.rpgitems.support.WGSupport;
import java.io.File;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Main plugin class for RPGItems. Drives the Bukkit plugin lifecycle:
 * parses version information, registers powers and power adapters,
 * loads extension jars from the "ext" data sub-directory, and wires up
 * commands and event listeners.
 */
public class RPGItems extends JavaPlugin {
    // Parsed from the plugin version string "<major>.<minor>.<serial>-mc<mcver>":
    // version = major * 100 + minor.
    private static int version;
    private static int serial;
    // Minecraft version this build of the plugin targets (from the version string).
    private static String pluginMCVersion;
    // Minecraft version reported by the running server.
    private static String serverMCVersion;
    public static Logger logger;
    public static RPGItems plugin;
    // Extension plugins loaded from the "ext" directory; enabled in onEnable
    // and disabled in onDisable together with this plugin.
    List<Plugin> managedPlugins = new ArrayList<>();
    public I18n i18n;
    public Configuration cfg;
    @Override
    public void onLoad() {
        plugin = this;
        logger = this.getLogger();
        // Extract serial/version info from the plugin version, e.g. "3.7.2-mc1.13".
        String versionDesc = getDescription().getVersion();
        Pattern serialPattern = Pattern.compile("(\\d+)\\.(\\d+)\\.(\\d+)-mc([\\d.]+)");
        Matcher serialMatcher = serialPattern.matcher(versionDesc);
        if (serialMatcher.matches()) {
            version = Integer.parseInt(serialMatcher.group(1)) * 100 + Integer.parseInt(serialMatcher.group(2));
            serial = Integer.parseInt(serialMatcher.group(3));
            pluginMCVersion = serialMatcher.group(4);
        }
        // Extract the server's Minecraft version from Bukkit's version string,
        // which contains a "(MC: x.y.z)" suffix.
        String serverVersion = Bukkit.getVersion();
        Pattern mcVersionPattern = Pattern.compile("\\(MC:\\s+([\\d.]+)\\)");
        Matcher mcVersionMatcher = mcVersionPattern.matcher(serverVersion);
        if (mcVersionMatcher.find()) {
            serverMCVersion = mcVersionMatcher.group(1);
        }
        logger.log(Level.INFO, "Plugin serial: '" + serial + "', native version: '" + pluginMCVersion + "', server version: '" + serverMCVersion + "'.");
        cfg = new Configuration(this);
        cfg.load();
        i18n = new I18n(this, cfg.language);
        // Adapters let a PowerPlain implementation satisfy several trigger
        // interfaces by delegating the trigger method to fire(Player, ItemStack)
        // via the reflective proxy built in getWrapper below.
        PowerManager.registerAdapter(PowerPlain.class, PowerOffhandClick.class, p -> getWrapper(p, PowerOffhandClick.class, "offhandClick"));
        PowerManager.registerAdapter(PowerPlain.class, PowerSprint.class, p -> getWrapper(p, PowerSprint.class, "sprint"));
        PowerManager.registerAdapter(PowerPlain.class, PowerSneak.class, p -> getWrapper(p, PowerSneak.class, "sneak"));
        PowerManager.registerAdapter(PowerPlain.class, PowerAttachment.class, p -> getWrapper(p, PowerAttachment.class, "attachment"));
        // Resolve power/property descriptions from i18n keys, falling back from
        // power-specific keys to shared "base" property keys.
        PowerManager.addDescriptionResolver(RPGItems.plugin, (power, property) -> {
            if (property == null) {
                @LangKey(skipCheck = true) String powerKey = "power.properties." + power.getKey() + ".main_description";
                return I18n.format(powerKey);
            }
            @LangKey(skipCheck = true) String key = "power.properties." + power.getKey() + "." + property;
            if (I18n.getInstance().hasKey(key)) {
                return I18n.format(key);
            }
            @LangKey(skipCheck = true) String baseKey = "power.properties.base." + property;
            if (I18n.getInstance().hasKey(baseKey)) {
                return I18n.format(baseKey);
            }
            return null;
        });
        // Scan and register every built-in power under the BasePower package.
        PowerManager.registerPowers(RPGItems.plugin, BasePower.class.getPackage().getName());
        saveDefaultConfig();
        Font.load();
        WGSupport.load();
        loadExtensions();
    }
    /**
     * Loads extension jars from the "ext" sub-directory of the data folder.
     * Each jar is loaded as a Bukkit plugin, its onLoad is invoked immediately,
     * and it is remembered in {@link #managedPlugins} so onEnable/onDisable can
     * manage its lifecycle later.
     */
    public void loadExtensions() {
        File extDir = new File(plugin.getDataFolder(), "ext");
        if (extDir.isDirectory() || extDir.mkdirs()) {
            File[] files = extDir.listFiles((d, n) -> n.endsWith(".jar"));
            if (files == null) return;
            for (File file : files) {
                try {
                    Plugin plugin = Bukkit.getPluginManager().loadPlugin(file);
                    String message = String.format("Loading %s", plugin.getDescription().getFullName());
                    plugin.getLogger().info(message);
                    plugin.onLoad();
                    managedPlugins.add(plugin);
                    logger.info("Loaded extension: " + plugin.getName());
                } catch (InvalidPluginException | InvalidDescriptionException e) {
                    logger.log(Level.SEVERE, "Error loading extension: " + file.getName(), e);
                }
            }
        } else {
            logger.severe("Error creating extension directory ./ext");
        }
    }
    @Override
    public void onEnable() {
        Trigger.stopAcceptingRegistrations();
        plugin = this;
        // Refuse to start on pre-0.5 config formats; 0.5 gets a compatibility flag.
        // NOTE(review): parseDouble assumes cfg.version is a parseable double such
        // as "0.4"; a value like "0.4.1" would throw NumberFormatException -- confirm.
        if (plugin.cfg.version.startsWith("0.") && Double.parseDouble(plugin.cfg.version) < 0.5) {
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "======================================");
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "You current version of RPGItems config is not supported.");
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "Please run your server with latest version of RPGItems 3.5 before update.");
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "======================================");
            throw new IllegalStateException();
        } else if (plugin.cfg.version.equals("0.5")) {
            cfg.pidCompat = true;
        }
        // Warn when running on plain CraftBukkit (implementation version
        // "git-Bukkit-..."): the plugin needs the Spigot API.
        if (Bukkit.class.getPackage().getImplementationVersion().startsWith("git-Bukkit-")) {
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "======================================");
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "RPGItems plugin requires Spigot API, Please make sure you are using Spigot.");
            Bukkit.getConsoleSender().sendMessage(ChatColor.RED + "======================================");
        }
        // Second Spigot check: if Bukkit.spigot() is missing, replace the main
        // command with one that only prints the requirement message.
        try {
            Bukkit.spigot();
        } catch (NoSuchMethodError e) {
            getCommand("rpgitem").setExecutor((sender, command, label, args) -> {
                sender.sendMessage(ChatColor.RED + "======================================");
                sender.sendMessage(ChatColor.RED + "RPGItems plugin requires Spigot API, Please make sure you are using Spigot.");
                sender.sendMessage(ChatColor.RED + "======================================");
                return true;
            });
        }
        // "/rpgitem" is the admin command, "/rpgitems" the user command.
        AdminHandler adminCommandHandler = new AdminHandler(this, i18n);
        UserHandler userCommandHandler = new UserHandler(this, i18n);
        getCommand("rpgitem").setExecutor(adminCommandHandler);
        getCommand("rpgitem").setTabCompleter(adminCommandHandler);
        getCommand("rpgitems").setExecutor(userCommandHandler);
        getCommand("rpgitems").setTabCompleter(userCommandHandler);
        // Item loading is deferred until the server has fully started.
        getServer().getPluginManager().registerEvents(new ServerLoadListener(), this);
        managedPlugins.forEach(Bukkit.getPluginManager()::enablePlugin);
    }
    public static int getVersion() {
        return version;
    }
    public static int getSerial() {
        return serial;
    }
    public static String getPluginMCVersion() {
        return pluginMCVersion;
    }
    public static String getServerMCVersion() {
        return serverMCVersion;
    }
    /**
     * One-shot listener: once the server finishes loading, registers the main
     * event handlers, initializes WorldGuard support, loads items, starts the
     * ticker, then unregisters itself.
     */
    private class ServerLoadListener implements Listener {
        @EventHandler
        public void onServerLoad(ServerLoadEvent event) {
            HandlerList.unregisterAll(this);
            getServer().getPluginManager().registerEvents(new Events(), RPGItems.this);
            WGSupport.init(RPGItems.this);
            logger.info("Loading RPGItems...");
            ItemManager.load(RPGItems.this);
            logger.info("Done");
            new Ticker().runTaskTimer(RPGItems.this, 0, 0);
        }
    }
    @Override
    public void onDisable() {
        WGSupport.unload();
        HandlerList.unregisterAll(plugin);
        getCommand("rpgitem").setExecutor(null);
        getCommand("rpgitem").setTabCompleter(null);
        this.getServer().getScheduler().cancelTasks(plugin);
        ItemManager.unload();
        managedPlugins.forEach(Bukkit.getPluginManager()::disablePlugin);
    }
    /**
     * Builds a dynamic proxy implementing {@code implInterface}, backed by the
     * given {@link PowerPlain}: a call to {@code delegateMethod} is forwarded to
     * the power's {@code fire(Player, ItemStack)} method; every other method is
     * forwarded to the power object itself.
     */
    @SuppressWarnings({"unchecked", "JavaReflectionInvocation"})
    private static <T> T getWrapper(final PowerPlain obj, final Class<T> implInterface, final String delegateMethod) {
        InvocationHandler invocationHandler = (proxy, method, args) -> {
            if (!method.getName().equals(delegateMethod)) {
                return obj.getClass().getMethod(method.getName(), method.getParameterTypes()).invoke(obj, args);
            } else {
                // assumes the trigger method's first two args are (Player, ItemStack) -- TODO confirm
                return obj.getClass().getDeclaredMethod("fire", Player.class, ItemStack.class).invoke(obj, args[0], args[1]);
            }
        };
        return (T) Proxy.newProxyInstance(obj.getClass().getClassLoader(), new Class[]{implInterface}, invocationHandler);
    }
}
|
package cn.hutool.core.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import cn.hutool.core.util.StrUtil;
/**
* 文件类型判断工具类<br>
* 此工具根据文件的前几位bytes猜测文件类型,对于文本、zip判断不准确,对于视频、图片类型判断准确
*
* @author Looly
*
*/
public final class FileTypeUtil {
	// Utility class: not instantiable.
	private FileTypeUtil() {
	};
	// Maps a hex string of a file's leading bytes to a file extension.
	private static final Map<String, String> fileTypeMap;
	static {
		fileTypeMap = new ConcurrentHashMap<>();
		// NOTE(review): several entries below reuse the same key; in a Map the
		// LAST put wins, so e.g. "d0cf11e0a1b11ae10000" finally maps to "wps"
		// (not "doc"/"vsd") and "504B0304140006000800" to "xlsx" (not "docx").
		fileTypeMap.put("ffd8ffe000104a464946", "jpg"); // JPEG (jpg)
		fileTypeMap.put("89504e470d0a1a0a0000", "png"); // PNG (png)
		fileTypeMap.put("47494638396126026f01", "gif"); // GIF (gif)
		fileTypeMap.put("49492a00227105008037", "tif"); // TIFF (tif)
		fileTypeMap.put("424d228c010000000000", "bmp"); // 16-color bitmap (bmp)
		fileTypeMap.put("424d8240090000000000", "bmp"); // 24-bit bitmap (bmp)
		fileTypeMap.put("424d8e1b030000000000", "bmp"); // 256-color bitmap (bmp)
		fileTypeMap.put("41433130313500000000", "dwg"); // CAD (dwg)
		fileTypeMap.put("3c21444f435459504520", "html"); // HTML (html)
		fileTypeMap.put("3c21646f637479706520", "htm"); // HTM (htm)
		fileTypeMap.put("48544d4c207b0d0a0942", "css"); // css
		fileTypeMap.put("696b2e71623d696b2e71", "js"); // js
		fileTypeMap.put("7b5c727466315c616e73", "rtf"); // Rich Text Format (rtf)
		fileTypeMap.put("38425053000100000000", "psd"); // Photoshop (psd)
		fileTypeMap.put("46726f6d3a203d3f6762", "eml"); // Email [Outlook Express 6] (eml)
		fileTypeMap.put("d0cf11e0a1b11ae10000", "doc"); // MS Word; note word, msi and excel share this OLE2 header
		fileTypeMap.put("d0cf11e0a1b11ae10000", "vsd"); // Visio drawing (same key: overwrites "doc")
		fileTypeMap.put("5374616E64617264204A", "mdb"); // MS Access (mdb)
		fileTypeMap.put("252150532D41646F6265", "ps");
		fileTypeMap.put("255044462d312e330d", "pdf"); // Adobe Acrobat (pdf)
		fileTypeMap.put("2e524d46000000120001", "rmvb"); // rmvb/rm share this header
		fileTypeMap.put("464c5601050000000900", "flv"); // flv and f4v share this header
		fileTypeMap.put("00000020667479706d70", "mp4");
		fileTypeMap.put("49443303000000002176", "mp3");
		fileTypeMap.put("000001ba210001000180", "mpg"); //
		fileTypeMap.put("3026b2758e66cf11a6d9", "wmv"); // wmv and asf share this header
		fileTypeMap.put("52494646e27807005741", "wav"); // Wave (wav)
		fileTypeMap.put("52494646d07d60074156", "avi");
		fileTypeMap.put("4d546864000000060001", "mid"); // MIDI (mid)
		fileTypeMap.put("526172211a0700cf9073", "rar");// WinRAR
		fileTypeMap.put("235468697320636f6e66", "ini");
		fileTypeMap.put("504B03040a0000000000", "jar");
		fileTypeMap.put("504B0304140008000800", "jar");
		fileTypeMap.put("504B0304140006000800", "docx");// docx
		fileTypeMap.put("504B0304140006000800", "xlsx");// xlsx (same key: overwrites "docx")
		fileTypeMap.put("D0CF11E0A1B11AE10", "xls");// xls -- NOTE(review): odd-length hex head, looks truncated; verify
		fileTypeMap.put("504B0304", "zip");
		fileTypeMap.put("4d5a9000030000000400", "exe");// executable
		fileTypeMap.put("3c25402070616765206c", "jsp");// jsp
		fileTypeMap.put("4d616e69666573742d56", "mf");// MF (manifest)
		fileTypeMap.put("3c3f786d6c2076657273", "xml");// xml
		fileTypeMap.put("494e5345525420494e54", "sql");// sql
		fileTypeMap.put("7061636b616765207765", "java");// java source
		fileTypeMap.put("406563686f206f66660d", "bat");// bat
		fileTypeMap.put("1f8b0800000000000000", "gz");// gz
		fileTypeMap.put("6c6f67346a2e726f6f74", "properties");// log4j properties
		fileTypeMap.put("cafebabe0000002e0041", "class");// Java class file
		fileTypeMap.put("49545346030000006000", "chm");// chm
		fileTypeMap.put("04000000010000001300", "mxp");// mxp
		fileTypeMap.put("d0cf11e0a1b11ae10000", "wps");// WPS Office: text (wps), sheet (et) and slides (dps) share this header (same key: overwrites "doc"/"vsd")
		fileTypeMap.put("6431303a637265617465", "torrent");
		fileTypeMap.put("6D6F6F76", "mov"); // Quicktime (mov)
		fileTypeMap.put("FF575043", "wpd"); // WordPerfect (wpd)
		fileTypeMap.put("CFAD12FEC5FD746F", "dbx"); // Outlook Express (dbx)
		fileTypeMap.put("2142444E", "pst"); // Outlook (pst)
		fileTypeMap.put("AC9EBD8F", "qdf"); // Quicken (qdf)
		fileTypeMap.put("E3828596", "pwl"); // Windows Password (pwl)
		fileTypeMap.put("2E7261FD", "ram"); // Real Audio (ram)
	}
	/**
	 * Adds a file type mapping.<br>
	 * If the hex head already exists, the previous mapping is overwritten.
	 *
	 * @param fileStreamHexHead hex string of the leading bytes of the file stream
	 * @param extName file extension to map to
	 * @return the extension previously mapped to this head, or {@code null}
	 */
	public static String putFileType(String fileStreamHexHead, String extName) {
		return fileTypeMap.put(fileStreamHexHead.toLowerCase(), extName);
	}
	/**
	 * Removes a file type mapping.
	 *
	 * @param fileStreamHexHead hex string of the leading bytes of the file stream
	 * @return the extension that was removed, or {@code null}
	 */
	public static String removeFileType(String fileStreamHexHead) {
		return fileTypeMap.remove(fileStreamHexHead.toLowerCase());
	}
	/**
	 * Determines the file type from the hex string of the stream's leading bytes.
	 *
	 * @param fileStreamHexHead hex string of the file stream's head
	 * @return file extension, or <code>null</code> if not recognized
	 */
	public static String getType(String fileStreamHexHead) {
		// First entry whose key is a case-insensitive prefix of the head wins;
		// iteration order of the backing map is unspecified.
		for (Entry<String, String> fileTypeEntry : fileTypeMap.entrySet()) {
			if(StrUtil.startWithIgnoreCase(fileStreamHexHead, fileTypeEntry.getKey())) {
				return fileTypeEntry.getValue();
			}
		}
		return null;
	}
	/**
	 * Determines the file type from the stream's leading bytes.
	 *
	 * @param in {@link InputStream}
	 * @return file extension, or <code>null</code> if not recognized
	 * @throws IORuntimeException if reading the stream fails
	 */
	public static String getType(InputStream in) throws IORuntimeException {
		return getType(IoUtil.readHex28Upper(in));
	}
	/**
	 * Determines the file type from the file's leading bytes.
	 *
	 * @param file the {@link File} to inspect
	 * @return file extension, or <code>null</code> if not recognized
	 * @throws IORuntimeException if reading the file fails
	 */
	public static String getType(File file) throws IORuntimeException {
		FileInputStream in = null;
		try {
			in = IoUtil.toStream(file);
			return getType(in);
		} finally {
			// Always close the stream, even when getType throws.
			IoUtil.close(in);
		}
	}
	/**
	 * Determines the file type for a path.
	 *
	 * @param path absolute path or path relative to the ClassPath
	 * @return file extension, or <code>null</code> if not recognized
	 * @throws IORuntimeException if reading the file fails
	 */
	public static String getTypeByPath(String path) throws IORuntimeException {
		return getType(FileUtil.file(path));
	}
}
|
package com.ruoyi.framework.web.controller;
import java.beans.PropertyEditorSupport;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ruoyi.common.constant.HttpStatus;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.sql.SqlUtil;
import com.ruoyi.framework.web.domain.AjaxResult;
import com.ruoyi.framework.web.page.PageDomain;
import com.ruoyi.framework.web.page.TableDataInfo;
import com.ruoyi.framework.web.page.TableSupport;
/**
* web层通用数据处理
*
* @author ruoyi
*/
public class BaseController
{
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    /**
     * Automatically converts date-formatted strings sent by the front end
     * into {@link Date} values during data binding.
     */
    @InitBinder
    public void initBinder(WebDataBinder binder)
    {
        // String -> Date conversion
        binder.registerCustomEditor(Date.class, new PropertyEditorSupport()
        {
            @Override
            public void setAsText(String text)
            {
                setValue(DateUtils.parseDate(text));
            }
        });
    }
    /**
     * Sets up pagination for the current request, if page parameters are present.
     */
    protected void startPage()
    {
        PageDomain pageDomain = TableSupport.buildPageRequest();
        Integer pageNum = pageDomain.getPageNum();
        Integer pageSize = pageDomain.getPageSize();
        if (StringUtils.isNotNull(pageNum) && StringUtils.isNotNull(pageSize))
        {
            // Escape the order-by clause to guard against SQL injection.
            String orderBy = SqlUtil.escapeOrderBySql(pageDomain.getOrderBy());
            PageHelper.startPage(pageNum, pageSize, orderBy);
        }
    }
    /**
     * Sets up sorting for the current request, if an order-by parameter is present.
     */
    protected void startOrderBy()
    {
        PageDomain pageDomain = TableSupport.buildPageRequest();
        if (StringUtils.isNotEmpty(pageDomain.getOrderBy()))
        {
            // Escape the order-by clause to guard against SQL injection.
            String orderBy = SqlUtil.escapeOrderBySql(pageDomain.getOrderBy());
            PageHelper.orderBy(orderBy);
        }
    }
    /**
     * Builds the paginated response for a query result list.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected TableDataInfo getDataTable(List<?> list)
    {
        TableDataInfo rspData = new TableDataInfo();
        rspData.setCode(HttpStatus.SUCCESS);
        rspData.setMsg("查询成功");
        rspData.setRows(list);
        // PageInfo recovers the total row count recorded by PageHelper.
        rspData.setTotal(new PageInfo(list).getTotal());
        return rspData;
    }
    /**
     * Converts a boolean outcome into an Ajax response.
     *
     * @param result operation outcome
     * @return success or error result
     */
    protected AjaxResult toAjax(boolean result)
    {
        return result ? success() : error();
    }
    /**
     * Returns a success response.
     */
    public AjaxResult success()
    {
        return AjaxResult.success();
    }
    /**
     * Returns an error response.
     */
    public AjaxResult error()
    {
        return AjaxResult.error();
    }
    /**
     * Returns a success response with a message.
     */
    public AjaxResult success(String message)
    {
        return AjaxResult.success(message);
    }
    /**
     * Returns an error response with a message.
     */
    public AjaxResult error(String message)
    {
        return AjaxResult.error(message);
    }
    /**
     * Converts an affected-row count into an Ajax response.
     *
     * @param rows number of affected rows
     * @return success when at least one row was affected, error otherwise
     */
    protected AjaxResult toAjax(int rows)
    {
        return rows > 0 ? AjaxResult.success() : AjaxResult.error();
    }
}
|
package zrock.application.scriptme.create;
import zrock.application.scriptme.R;
import android.os.Bundle;
import android.preference.CheckBoxPreference;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.PreferenceActivity;
import zrock.application.engine.Engine;
import zrock.application.scriptme.BootReceiver;
/**
 * Preference screen for the app's settings. Currently wires the
 * "start on boot" checkbox to enabling/disabling the {@link BootReceiver}
 * component so the receiver only runs when the user opts in.
 */
public class SettingsActivity extends PreferenceActivity {
	// Checkbox controlling whether BootReceiver is enabled at boot.
	private CheckBoxPreference mStartOnBootPreference;
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		addPreferencesFromResource(R.xml.pref_root_adb);
		mStartOnBootPreference = (CheckBoxPreference) getPreferenceScreen()
				.findPreference("start_onboot");
		// toggle BootReceiver on/off
		mStartOnBootPreference
				.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
					@Override
					public boolean onPreferenceChange(Preference preference,
							Object newValue) {
						// Returning true accepts the new value; false rejects it
						// (only non-Boolean values are rejected here).
						if (newValue instanceof Boolean) {
							if ((Boolean) newValue) {
								Engine.enableReceiver(getApplicationContext(),
										BootReceiver.class);
							} else {
								Engine.disableReceiver(getApplicationContext(),
										BootReceiver.class);
							}
							return true;
						}
						return false;
					}
				});
	}
}
|
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package androidx.lifecycle.livedata.core;
public final class R {
    // Intentionally empty: aapt emits this placeholder because the library
    // declares no resources of its own.
}
|
package com.yxy.algorithms.binary.tree.flatten;
import com.yxy.algorithms.binary.tree.TreeNode;
import com.yxy.algorithms.binary.tree.util.BinaryTreeUtil;
/**
* 114. Flatten Binary Tree to Linked List My Submissions QuestionEditorial Solution
Total Accepted: 83420 Total Submissions: 266906 Difficulty: Medium
Given a binary tree, flatten it to a linked list in-place.
For example,
Given
1
/ \
2 5
/ \ \
3 4 6
The flattened tree should look like:
1
\
2
\
3
\
4
\
5
\
6
click to show hints.
Hints:
If you notice carefully in the flattened tree, each node's right child points to the next node of a pre-order traversal.
* @author xianyiye
* @date 05/27/2016
* Reference:
* https://leetcode.com/problems/flatten-binary-tree-to-linked-list/
*/
/**
 * Flattens a binary tree into a right-skewed "linked list" in pre-order,
 * in place, by visiting nodes in REVERSE pre-order (right, left, root) and
 * prepending each node to the list built so far.
 */
public class Solution3 {
    // Head of the already-flattened suffix of the list; becomes the right
    // child of the next node processed.
    TreeNode prev = null ;
    /**
     * Flattens the subtree rooted at {@code root} in place. After the call,
     * following right pointers from root yields the pre-order sequence and
     * every left pointer is null.
     */
    public void flatten(TreeNode root) {
        if (root == null)
            return;
        // Process right subtree first, then left, so that when this node is
        // prepended, prev already holds its pre-order successor.
        flatten(root.right);
        flatten(root.left);
        root.right = prev;
        root.left = null;
        prev = root;
    }
    // Builds the sample tree from the problem statement, flattens it, and
    // prints the resulting list by following right pointers.
    public static void main(String[] args) {
        TreeNode root = BinaryTreeUtil.create(new Integer[]{1,2,5,3,4,null,6}, TreeNode.class) ;
        new Solution3().flatten(root) ;
        while(root!=null){
            System.out.print(root.val+" ") ;
            root = root.right ;
        }
    }
}
|
package com.e16din.sc.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Method-level marker annotation. Presumably marks a callback to be invoked
 * when the associated screen/view is hidden -- inferred from the name; TODO
 * confirm against the annotation processor that consumes it. Retained only in
 * source ({@link RetentionPolicy#SOURCE}), so it is not visible via reflection.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.SOURCE)
public @interface OnHide {
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.resourcegroup;
import static org.apache.pulsar.client.api.CompressionType.SNAPPY;
import com.google.common.collect.Sets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.pulsar.broker.PulsarServerException;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.broker.service.resource.usage.ResourceUsageInfo;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.Reader;
import org.apache.pulsar.client.api.ReaderListener;
import org.apache.pulsar.common.allocator.PulsarByteBufAllocator;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.TenantInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Resource Usage Transport Manager
*
* <P>Module to exchange usage information with other brokers. Implements a task to periodically.
* <P>publish the usage as well as handlers to process the usage info from other brokers.
*
* @see <a href="https://github.com/apache/pulsar/wiki/PIP-82%3A-Tenant-and-namespace-level-rate-limiting">Global-quotas</a>
*
*/
public class ResourceUsageTransportManager implements AutoCloseable {
    /**
     * Periodic task that serializes the usage reported by all registered
     * {@link ResourceUsagePublisher}s into a single {@link ResourceUsageInfo}
     * message and publishes it on the resource-usage topic.
     */
    private class ResourceUsageWriterTask implements Runnable, AutoCloseable {
        private final Producer<byte[]> producer;
        private final ScheduledFuture<?> resourceUsagePublishTask;
        /**
         * Creates the producer used to publish usage messages.
         *
         * @throws PulsarClientException if the producer cannot be created
         */
        private Producer<byte[]> createProducer() throws PulsarClientException {
            final int publishDelayMilliSecs = 10;
            final int sendTimeoutSecs = 10;
            return pulsarClient.newProducer()
                    .topic(pulsarService.getConfig().getResourceUsageTransportPublishTopicName())
                    .batchingMaxPublishDelay(publishDelayMilliSecs, TimeUnit.MILLISECONDS)
                    .sendTimeout(sendTimeoutSecs, TimeUnit.SECONDS)
                    .blockIfQueueFull(false)
                    .compressionType(SNAPPY)
                    .create();
        }
        public ResourceUsageWriterTask() throws PulsarClientException {
            producer = createProducer();
            // Both the initial delay and the period use the configured publish interval.
            resourceUsagePublishTask = pulsarService.getExecutor().scheduleAtFixedRate(
                    this,
                    pulsarService.getConfig().getResourceUsageTransportPublishIntervalInSecs(),
                    pulsarService.getConfig().getResourceUsageTransportPublishIntervalInSecs(),
                    TimeUnit.SECONDS);
        }
        @Override
        public void run() {
            // Nothing to publish when no local resource owners are registered.
            if (!publisherMap.isEmpty()) {
                ResourceUsageInfo rUsageInfo = new ResourceUsageInfo();
                rUsageInfo.setBroker(pulsarService.getBrokerServiceUrl());
                publisherMap.forEach((key, item) -> item.fillResourceUsage(rUsageInfo.addUsageMap()));
                ByteBuf buf = PulsarByteBufAllocator.DEFAULT.heapBuffer(rUsageInfo.getSerializedSize());
                rUsageInfo.writeTo(buf);
                // NOTE(review): buf.array() exposes the entire backing array; this is
                // only exact while the heap buffer's capacity equals the requested
                // serialized size -- confirm the allocator does not over-allocate.
                byte[] bytes = buf.array();
                producer.sendAsync(bytes).whenComplete((id, ex) -> {
                    if (null != ex) {
                        // Pass the throwable as the trailing argument (no placeholder
                        // for it) so SLF4J records the full stack trace.
                        LOG.error("Resource usage publisher: error sending message ID {}", id, ex);
                    }
                    buf.release();
                });
            }
        }
        @Override
        public void close() throws Exception {
            resourceUsagePublishTask.cancel(true);
            producer.close();
        }
    }
    /**
     * Listener on the resource-usage topic; dispatches each received usage
     * entry to the {@link ResourceUsageConsumer} registered for its owner.
     */
    private class ResourceUsageReader implements ReaderListener<byte[]>, AutoCloseable {
        // Reused across callbacks; assumes the reader listener is invoked
        // serially for a given reader -- TODO confirm.
        private final ResourceUsageInfo recdUsageInfo = new ResourceUsageInfo();
        private final Reader<byte[]> consumer;
        public ResourceUsageReader() throws PulsarClientException {
            consumer = pulsarClient.newReader()
                    .topic(pulsarService.getConfig().getResourceUsageTransportPublishTopicName())
                    .startMessageId(MessageId.latest)
                    .readerListener(this)
                    .create();
        }
        @Override
        public void close() throws Exception {
            consumer.close();
        }
        @Override
        public void received(Reader<byte[]> reader, Message<byte[]> msg) {
            try {
                recdUsageInfo.parseFrom(Unpooled.wrappedBuffer(msg.getData()), msg.getData().length);
                recdUsageInfo.getUsageMapsList().forEach(ru -> {
                    // Only dispatch entries whose owner has a local consumer registered.
                    ResourceUsageConsumer owner = consumerMap.get(ru.getOwner());
                    if (owner != null) {
                        owner.acceptResourceUsage(recdUsageInfo.getBroker(), ru);
                    }
                });
            } catch (IllegalStateException exception) {
                // Pass the exception as the last argument without a placeholder so
                // SLF4J logs the stack trace instead of only its toString().
                LOG.error("Resource usage reader: Error parsing incoming message", exception);
            } catch (Exception exception) {
                LOG.error("Resource usage reader: Unknown exception while parsing message", exception);
            }
        }
    }
    private static final Logger LOG = LoggerFactory.getLogger(ResourceUsageTransportManager.class);
    private final PulsarService pulsarService;
    private final PulsarClient pulsarClient;
    private final ResourceUsageWriterTask pTask;
    private final ResourceUsageReader consumer;
    // Local resource owners that publish usage, keyed by owner ID.
    private final Map<String, ResourceUsagePublisher>
            publisherMap = new ConcurrentHashMap<>();
    // Local resource owners that consume usage from other brokers, keyed by owner ID.
    private final Map<String, ResourceUsageConsumer>
            consumerMap = new ConcurrentHashMap<>();
    /**
     * Ensures the tenant and namespace of the configured resource-usage topic
     * exist, creating them if necessary.
     *
     * @throws PulsarServerException if the admin client cannot be obtained
     * @throws PulsarAdminException if tenant/namespace lookup or creation fails
     */
    private void createTenantAndNamespace() throws PulsarServerException, PulsarAdminException {
        // Create a public tenant and default namespace
        TopicName topicName = TopicName.get(pulsarService.getConfig().getResourceUsageTransportPublishTopicName());
        PulsarAdmin admin = pulsarService.getAdminClient();
        ServiceConfiguration config = pulsarService.getConfig();
        String cluster = config.getClusterName();
        final String tenant = topicName.getTenant();
        final String namespace = topicName.getNamespace();
        List<String> tenantList = admin.tenants().getTenants();
        if (!tenantList.contains(tenant)) {
            admin.tenants().createTenant(tenant,
                    new TenantInfo(Sets.newHashSet(config.getSuperUserRoles()), Sets.newHashSet(cluster)));
        }
        List<String> nsList = admin.namespaces().getNamespaces(tenant);
        if (!nsList.contains(namespace)) {
            admin.namespaces().createNamespace(namespace);
        }
    }
    public ResourceUsageTransportManager(PulsarService pulsarService) throws Exception {
        this.pulsarService = pulsarService;
        this.pulsarClient = pulsarService.getClient();
        try {
            createTenantAndNamespace();
            consumer = new ResourceUsageReader();
            pTask = new ResourceUsageWriterTask();
        } catch (Exception ex) {
            LOG.error("Error initializing resource usage transport manager", ex);
            throw ex;
        }
    }
    /*
     * Register a resource owner (resource-group, tenant, namespace, topic etc.)
     * that publishes its usage.
     *
     * @param r resource usage publisher
     */
    public void registerResourceUsagePublisher(ResourceUsagePublisher r) {
        publisherMap.put(r.getID(), r);
    }
    /*
     * Unregister a resource owner (resource-group, tenant, namespace, topic etc.)
     * that publishes its usage.
     *
     * @param r resource usage publisher
     */
    public void unregisterResourceUsageProducer(ResourceUsagePublisher r) {
        publisherMap.remove(r.getID());
    }
    /*
     * Register a resource owner (resource-group, tenant, namespace, topic etc.)
     * that consumes usage published by other brokers.
     *
     * @param r resource usage consumer
     */
    public void registerResourceUsageConsumer(ResourceUsageConsumer r) {
        consumerMap.put(r.getID(), r);
    }
    /*
     * Unregister a resource owner (resource-group, tenant, namespace, topic etc.)
     * that consumes usage published by other brokers.
     *
     * @param r resource usage consumer
     */
    public void unregisterResourceUsageConsumer(ResourceUsageConsumer r) {
        consumerMap.remove(r.getID());
    }
    @Override
    public void close() throws Exception {
        try {
            pTask.close();
            consumer.close();
        } catch (Exception ex1) {
            LOG.error("Error closing producer/consumer for resource-usage topic", ex1);
        }
    }
}
|
/**
* Copyright (C) 2016 Hurence (support@hurence.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hurence.logisland.processor.datastore;
import com.hurence.logisland.classloading.PluginProxy;
import com.hurence.logisland.component.InitializationException;
import com.hurence.logisland.component.PropertyDescriptor;
import com.hurence.logisland.processor.AbstractProcessor;
import com.hurence.logisland.processor.ProcessContext;
import com.hurence.logisland.service.datastore.DatastoreClientService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for processors that talk to a datastore through a
 * {@link DatastoreClientService} controller service resolved from the
 * processor configuration.
 */
public abstract class AbstractDatastoreProcessor extends AbstractProcessor {
    private static final Logger logger = LoggerFactory.getLogger(AbstractDatastoreProcessor.class);
    // Required property identifying the datastore controller service to use.
    public static final PropertyDescriptor DATASTORE_CLIENT_SERVICE = new PropertyDescriptor.Builder()
            .name("datastore.client.service")
            .description("The instance of the Controller Service to use for accessing datastore.")
            .required(true)
            .identifiesControllerService(DatastoreClientService.class)
            .build();
    // Resolved in init(); may remain null if resolution fails (see below).
    protected DatastoreClientService datastoreClientService;
    @Override
    public boolean hasControllerService() {
        return true;
    }
    @Override
    public void init(final ProcessContext context) throws InitializationException {
        super.init(context);
        // Unwrap the proxied controller service from the configured property.
        datastoreClientService = PluginProxy.rewrap(context.getPropertyValue(DATASTORE_CLIENT_SERVICE).asControllerService());
        if (datastoreClientService == null) {
            // NOTE(review): only logs and continues; subclasses using the service
            // would then hit a null reference -- confirm this is intentional.
            logger.error("Datastore client service is not initialized!");
        }
    }
}
|
package edu.columbia.psl.cc.util;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import com.google.gson.reflect.TypeToken;
import edu.columbia.psl.cc.pojo.GraphTemplate;
import edu.columbia.psl.cc.pojo.InstNode;
/**
 * Loads a serialized {@link GraphTemplate} from a JSON file, reconstructs the
 * instruction graph, and reports possible "sink" instructions: nodes with no
 * child dependencies that are not array stores.
 */
public class Partitioner {

    /** Gson type token used to deserialize the graph JSON. */
    private static TypeToken<GraphTemplate> graphToken = new TypeToken<GraphTemplate>(){};

    /** JSON file containing the serialized graph. */
    private File graphFile;

    /** Reconstructed graph; populated by {@link #loadAndConstructGraph()}. */
    private GraphTemplate graph;

    public Partitioner(File graphFile) {
        this.graphFile = graphFile;
    }

    /**
     * Reads the graph JSON, reconstructs it and strips object-initialization
     * noise. Must be called before {@link #partition()}.
     */
    public void loadAndConstructGraph() {
        GraphTemplate rawGraph = GsonManager.readJsonGeneric(this.graphFile, graphToken);
        GraphConstructor gc = new GraphConstructor();
        gc.reconstructGraph(rawGraph, false);
        gc.cleanObjInit(rawGraph);
        this.graph = rawGraph;
    }

    /**
     * Prints every instruction that looks like a sink: no recorded children
     * and not an array store.
     */
    public void partition() {
        for (InstNode inst: this.graph.getInstPool()) {
            int opCat = SearchUtil.getInstructionOp(inst);
            // Categories 18-20 are treated as array stores — TODO confirm
            // against SearchUtil's opcode-category table.
            boolean isArrayStore = (opCat == 18 || opCat == 19 || opCat == 20);
            if (inst.getChildFreqMap().size() == 0 && !isArrayStore) {
                System.out.println("Possible sink: " + inst);
            }
        }
    }

    /**
     * Interactive driver: prompts for repository, library and target graph id,
     * locates the graph file and runs the partition analysis.
     */
    public static void main(String[] args) {
        // try-with-resources: the original never closed the Scanner (resource leak).
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.println("Base repo: ");
            String baseRepo = scanner.nextLine();
            System.out.println("Lib:");
            String lib1 = scanner.nextLine();
            System.out.println("Target id:");
            String targetId = scanner.nextLine();
            List<String> possibleDir = new ArrayList<String>();
            possibleDir.add(baseRepo + lib1);
            File targetFile = TraceAnalyzer.searchFile(possibleDir, targetId);
            Partitioner partitioner = new Partitioner(targetFile);
            partitioner.loadAndConstructGraph();
            partitioner.partition();
        }
    }
}
|
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/msgcntr/trunk/messageforums-component-impl/src/java/org/sakaiproject/component/app/messageforums/ui/DiscussionForumManagerImpl.java $
* $Id: DiscussionForumManagerImpl.java 9227 2006-05-15 15:02:42Z cwen@iupui.edu $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.app.messageforums.ui;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.Vector;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.sakaiproject.tool.api.Tool;
import org.springframework.orm.hibernate4.support.HibernateDaoSupport;
import org.sakaiproject.api.app.messageforums.ActorPermissions;
import org.sakaiproject.api.app.messageforums.Area;
import org.sakaiproject.api.app.messageforums.AreaControlPermission;
import org.sakaiproject.api.app.messageforums.AreaManager;
import org.sakaiproject.api.app.messageforums.Attachment;
import org.sakaiproject.api.app.messageforums.DBMembershipItem;
import org.sakaiproject.api.app.messageforums.DiscussionForum;
import org.sakaiproject.api.app.messageforums.DiscussionForumService;
import org.sakaiproject.api.app.messageforums.DiscussionTopic;
import org.sakaiproject.api.app.messageforums.DummyDataHelperApi;
import org.sakaiproject.api.app.messageforums.events.ForumsMessageEventParams;
import org.sakaiproject.api.app.messageforums.ForumControlPermission;
import org.sakaiproject.api.app.messageforums.MembershipManager;
import org.sakaiproject.api.app.messageforums.Message;
import org.sakaiproject.api.app.messageforums.MessageForumsForumManager;
import org.sakaiproject.api.app.messageforums.MessageForumsMessageManager;
import org.sakaiproject.api.app.messageforums.MessageForumsTypeManager;
import org.sakaiproject.api.app.messageforums.MessageForumsUser;
import org.sakaiproject.api.app.messageforums.MessagePermissions;
import org.sakaiproject.api.app.messageforums.PermissionLevel;
import org.sakaiproject.api.app.messageforums.PermissionLevelManager;
import org.sakaiproject.api.app.messageforums.PermissionManager;
import org.sakaiproject.api.app.messageforums.Topic;
import org.sakaiproject.api.app.messageforums.TopicControlPermission;
import org.sakaiproject.api.app.messageforums.events.ForumsTopicEventParams;
import org.sakaiproject.api.app.messageforums.events.ForumsTopicEventParams.TopicEvent;
import org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.Member;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.app.messageforums.MembershipItem;
import org.sakaiproject.component.app.messageforums.dao.hibernate.ActorPermissionsImpl;
import org.sakaiproject.component.app.messageforums.dao.hibernate.DBMembershipItemImpl;
import org.sakaiproject.component.app.messageforums.dao.hibernate.MessageForumsUserImpl;
import org.sakaiproject.component.app.messageforums.ui.delegates.LRSDelegate;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.event.api.EventTrackingService;
import org.sakaiproject.event.api.LearningResourceStoreService;
import org.sakaiproject.event.api.LearningResourceStoreService.LRS_Statement;
import org.sakaiproject.event.api.LearningResourceStoreService.LRS_Verb.SAKAI_VERB;
import org.sakaiproject.event.api.NotificationService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.memory.api.Cache;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.thread_local.api.ThreadLocalManager;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
/**
* @author <a href="mailto:rshastri@iupui.edu">Rashmi Shastri</a>
*/
@Slf4j
public class DiscussionForumManagerImpl extends HibernateDaoSupport implements
DiscussionForumManager {
// Prefix constant "mc.default." — presumably for message-center default
// settings keys; confirm at usage sites outside this view.
private static final String MC_DEFAULT = "mc.default.";
// Spring-injected collaborators; wired via the setters below.
private AreaManager areaManager;
private MessageForumsForumManager forumManager;
private MessageForumsMessageManager messageManager;
private DummyDataHelperApi helper;
private PermissionManager permissionManager;
private MessageForumsTypeManager typeManager;
private SiteService siteService;
private UserDirectoryService userDirectoryService;
private MembershipManager membershipManager;
private SecurityService securityService;
private SessionManager sessionManager;
private PermissionLevelManager permissionLevelManager;
private AuthzGroupService authzGroupService;
// Cached course membership — populated elsewhere; TODO confirm usage.
private Map courseMemberMap = null;
private boolean usingHelper = false; // just a flag until moved to database from helper
private ContentHostingService contentHostingService;
private MemoryService memoryService;
// Cache of allowed functions, created in init().
private Cache<String, Set<?>> allowedFunctionsCache;
private EventTrackingService eventTrackingService;
private ThreadLocalManager threadLocalManager;
private ToolManager toolManager;
private LearningResourceStoreService learningResourceStoreService;
// Upper bound on the number of items placed into a single SQL IN (...) list.
public static final int MAX_NUMBER_OF_SQL_PARAMETERS_IN_LIST = 1000;
/**
 * Post-construction initialization: creates the cache that backs
 * allowed-function lookups.
 */
public void init()
{
log.info("init()");
allowedFunctionsCache = memoryService.getCache("org.sakaiproject.component.app.messageforums.ui.DiscussionForumManagerImpl.allowedFunctionsCache");
}
/** Dependency injection. @param contentHostingService the content hosting service */
public void setContentHostingService(ContentHostingService contentHostingService) {
this.contentHostingService = contentHostingService;
}
/** Dependency injection. @param authzGroupService the authz group service */
public void setAuthzGroupService(AuthzGroupService authzGroupService) {
this.authzGroupService = authzGroupService;
}
/** Dependency injection. @param eventTrackingService the event tracking service */
public void setEventTrackingService(EventTrackingService eventTrackingService) {
this.eventTrackingService = eventTrackingService;
}
/** Dependency injection. @param service the learning resource store (LRS) service */
public void setLearningResourceStoreService(LearningResourceStoreService service) {
learningResourceStoreService = service;
}
/** Dependency injection. @param threadLocalManager the thread-local manager */
public void setThreadLocalManager(ThreadLocalManager threadLocalManager) {
this.threadLocalManager = threadLocalManager;
}
/** Dependency injection. @param toolManager the tool manager */
public void setToolManager(ToolManager toolManager) {
this.toolManager = toolManager;
}
/**
 * Searches the messages of a topic for the given text; delegates to the
 * forum manager.
 *
 * @param topicId id of the topic to search
 * @param searchText text to search for
 * @return list of matching messages
 */
public List searchTopicMessages(Long topicId, String searchText)
{
return forumManager.searchTopicMessages(topicId, searchText);
}
/**
 * Loads a topic with its attachments eagerly fetched; delegates to the
 * forum manager.
 */
public Topic getTopicByIdWithAttachments(Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicByIdWithAttachments(Long " + topicId + ")");
}
return forumManager.getTopicByIdWithAttachments(topicId);
}
/** Loads the topics of a forum with their messages; delegates to the forum manager. */
public List getTopicsByIdWithMessages(final Long forumId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicsByIdWithMessages(final Long" + forumId + ")");
}
return forumManager.getTopicsByIdWithMessages(forumId);
}
/** Loads the topics of a forum with messages and attachments; delegates to the forum manager. */
public List getTopicsByIdWithMessagesAndAttachments(final Long forumId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicsByIdWithMessagesAndAttachments(final Long" + forumId
+ ")");
}
return forumManager.getTopicsByIdWithMessagesAndAttachments(forumId);
}
/** Loads the topics of a forum with messages, membership and attachments; delegates to the forum manager. */
public List getTopicsByIdWithMessagesMembershipAndAttachments(final Long forumId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicsByIdWithMessagesMembershipAndAttachments(final Long" + forumId
+ ")");
}
return forumManager.getTopicsByIdWithMessagesMembershipAndAttachments(forumId);
}
/**
 * Returns the forums shown on the tool's main page; delegates to the forum
 * manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getForumsForMainPage()
 */
public List<DiscussionForum> getForumsForMainPage() {
if (log.isDebugEnabled()) {
log.debug("getForumsForMainPage()");
}
return forumManager.getForumsForMainPage();
}
/**
 * Returns per-topic message counts for the main page; delegates to the
 * message manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getMessageCountsForMainPage(java.util.List)
 */
public List<Object[]> getMessageCountsForMainPage(Collection<Long> topicIds) {
if (log.isDebugEnabled()) {
log.debug("getMessageCountsForMainPage(" + topicIds + ")");
}
return messageManager.findMessageCountsForMainPage(topicIds);
}
/**
 * Returns per-topic read-message counts for the main page; delegates to the
 * message manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getReadMessageCountsForMainPage(java.util.Collection)
 */
public List<Object[]> getReadMessageCountsForMainPage(Collection<Long> topicIds) {
if (log.isDebugEnabled()) {
log.debug("getReadMessageCountsForMainPage(" + topicIds + ")");
}
return messageManager.findReadMessageCountsForMainPage(topicIds);
}
/** Loads a topic with its messages; delegates to the forum manager. */
public Topic getTopicByIdWithMessages(final Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicByIdWithMessages(final Long" + topicId + ")");
}
return forumManager.getTopicByIdWithMessages(topicId);
}
/** Loads a topic with its attachments; delegates to the forum manager. */
public Topic getTopicWithAttachmentsById(final Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicWithAttachmentsById(final Long" + topicId + ")");
}
return forumManager.getTopicWithAttachmentsById(topicId);
}
/** Loads a topic with its messages and attachments; delegates to the forum manager. */
public Topic getTopicByIdWithMessagesAndAttachments(final Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicByIdWithMessagesAndAttachments(final Long" + topicId
+ ")");
}
return forumManager.getTopicByIdWithMessagesAndAttachments(topicId);
}
/** Returns the moderated topics for the current tool placement's site. */
public List getModeratedTopicsInSite()
{
if (log.isDebugEnabled())
{
log.debug("getModeratedTopicsInSite()");
}
return forumManager.getModeratedTopicsInSite(toolManager.getCurrentPlacement().getContext());
}
// start injection
/**
 * @param helper the dummy-data helper used when {@code usingHelper} is set
 */
public void setHelper(DummyDataHelperApi helper)
{
if (log.isDebugEnabled())
{
log.debug("setHelper(DummyDataHelperApi " + helper + ")");
}
this.helper = helper;
}
/**
 * @param areaManager the area manager to set
 */
public void setAreaManager(AreaManager areaManager)
{
if (log.isDebugEnabled())
{
log.debug("setAreaManager(AreaManager" + areaManager + ")");
}
this.areaManager = areaManager;
}
/**
 * @param permissionManager
 *          The permissionManager to set.
 */
public void setPermissionManager(PermissionManager permissionManager)
{
if (log.isDebugEnabled())
{
log.debug("setPermissionManager(PermissionManager" + permissionManager
+ ")");
}
this.permissionManager = permissionManager;
}
/**
 * @param permissionLevelManager
 *          The permissionLevelManager to set.
 */
public void setPermissionLevelManager(
PermissionLevelManager permissionLevelManager)
{
this.permissionLevelManager = permissionLevelManager;
}
/**
 * @param typeManager
 *          The typeManager to set.
 */
public void setTypeManager(MessageForumsTypeManager typeManager)
{
if (log.isDebugEnabled())
{
log.debug("setTypeManager(MessageForumsTypeManager" + typeManager + ")");
}
this.typeManager = typeManager;
}
/**
 * @param siteService
 *          The siteService to set.
 */
public void setSiteService(SiteService siteService)
{
this.siteService = siteService;
}
/**
 * @param sessionManager
 *          The sessionManager to set.
 */
public void setSessionManager(SessionManager sessionManager)
{
this.sessionManager = sessionManager;
}
/**
 * @param securityService
 *          The securityService to set.
 */
public void setSecurityService(SecurityService securityService)
{
this.securityService = securityService;
}
/**
 * @param userDirectoryService
 *          The userDirectoryService to set.
 */
public void setUserDirectoryService(UserDirectoryService userDirectoryService)
{
this.userDirectoryService = userDirectoryService;
}
/**
 * @param membershipManager
 *          The membershipManager to set.
 */
public void setMembershipManager(MembershipManager membershipManager)
{
this.membershipManager = membershipManager;
}
/**
 * @return the injected message manager
 */
public MessageForumsMessageManager getMessageManager()
{
log.debug("getMessageManager()");
return messageManager;
}
/**
 * @param messageManager the message manager to set
 */
public void setMessageManager(MessageForumsMessageManager messageManager)
{
if (log.isDebugEnabled())
{
log.debug("setMessageManager(MessageForumsMessageManager"
+ messageManager + ")");
}
this.messageManager = messageManager;
}
// end injection
/**
 * Returns the discussion forum area for the current tool placement's site.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getDiscussionForumArea()
 */
public Area getDiscussionForumArea()
{
return getDiscussionForumArea(toolManager.getCurrentPlacement().getContext());
}
/**
 * Returns the discussion forum area for the given site, or the helper's
 * area when {@code usingHelper} is set.
 *
 * @param siteId site to look up
 */
public Area getDiscussionForumArea(String siteId)
{
log.debug("getDiscussionForumArea");
if (usingHelper)
{
return helper.getDiscussionForumArea();
}
return areaManager.getDiscussionArea(siteId);
}
/**
 * Loads a message by id, from the helper when {@code usingHelper} is set,
 * otherwise from the message manager.
 *
 * @param id message id
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getMessageById(java.lang.Long)
 */
public Message getMessageById(Long id)
{
if (log.isDebugEnabled())
{
log.debug("getMessageById( Long" + id + ")");
}
if (usingHelper)
{
return helper.getMessageById(id);
}
return messageManager.getMessageById(id);
}
/**
 * Saves a message without posting an event.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveMessage(org.sakaiproject.api.app.messageforums.Message)
 */
@Override
public Message saveMessage(Message message) {
return saveMessage(message, null, false);
}
/** Saves a message and, if {@code params} is non-null, posts the described event. */
@Override
public Message saveMessage(Message message, ForumsMessageEventParams params) {
return saveMessage(message, params, false);
}
/**
 * Saves (or updates) a message and optionally posts a tracking event.
 *
 * @param message message to persist; its topic is reloaded if its base forum is unset
 * @param params event descriptor to post after the save, or null for no event
 * @param ignoreLockedTopicForum whether to bypass the locked topic/forum check
 * @return the persisted message
 */
@Override
public Message saveMessage(Message message, ForumsMessageEventParams params, boolean ignoreLockedTopicForum) {
if (log.isDebugEnabled())
{
log.debug("saveMessage(Message " + message + ")");
}
// Reload the topic when the base forum reference is missing so the
// message is attached to a fully-populated topic.
if (message.getTopic().getBaseForum() == null)
{
message.setTopic(getTopicById(message.getTopic().getId()));
}
// Attribute authorship to ".anon" when the anonymous role applies and no
// author was recorded — presumably for anonymous-posting sites; confirm
// against getAnonRole() (defined outside this view).
if(this.getAnonRole() && message.getCreatedBy() == null)
{
message.setCreatedBy(".anon");
}
if(this.getAnonRole() && message.getModifiedBy() == null)
{
message.setModifiedBy(".anon");
}
// save the message first to ensure we have a valid message id
final Message persistedMessage = messageManager.saveOrUpdateMessage(message, false, ignoreLockedTopicForum);
if (params != null) {
Event event = eventTrackingService.newEvent(params.event.type, getEventMessage(persistedMessage), null, params.event.modification,
NotificationService.NOTI_OPTIONAL, params.lrsStatement);
eventTrackingService.post(event);
}
return persistedMessage;
}
/**
 * Deletes a message; delegates to the message manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#deleteMessage(org.sakaiproject.api.app.messageforums.Message)
 */
public void deleteMessage(Message message)
{
if (log.isDebugEnabled())
{
log.debug("deleteMessage(Message" + message + ")");
}
messageManager.deleteMessage(message);
}
/**
 * Returns the total number of messages in a topic; a fixed dummy value (20)
 * when {@code usingHelper} is set.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTotalNoMessages(org.sakaiproject.api.app.messageforums.Topic)
 */
public int getTotalNoMessages(Topic topic)
{
if (log.isDebugEnabled())
{
log.debug("getTotalNoMessages(Topic" + topic + ")");
}
if (usingHelper)
{
return 20;
}
return messageManager.findMessageCountByTopicId(topic.getId());
}
/**
 * Returns the number of messages in a topic viewable by the current user when
 * moderation applies; a fixed dummy value (20) when {@code usingHelper} is set.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTotalViewableMessagesWhenMod(org.sakaiproject.api.app.messageforums.Topic)
 */
public int getTotalViewableMessagesWhenMod(Topic topic)
{
if (log.isDebugEnabled())
{
log.debug("getTotalViewableMessagesWhenMod(Topic" + topic + ")");
}
if (usingHelper)
{
return 20;
}
return messageManager.findViewableMessageCountByTopicId(topic.getId());
}
/**
 * Returns the number of unread messages in a topic; a fixed dummy value (10)
 * when {@code usingHelper} is set.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getUnreadNoMessages(org.sakaiproject.api.app.messageforums.Topic)
 */
public int getUnreadNoMessages(Topic topic)
{
if (log.isDebugEnabled())
{
log.debug("getUnreadNoMessages(Topic" + topic + ")");
}
if (usingHelper)
{
return 10;
}
return messageManager.findUnreadMessageCountByTopicId(topic.getId());
}
/**
 * Returns the number of unread viewable messages in a moderated topic; a
 * fixed dummy value (10) when {@code usingHelper} is set.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getUnreadApprovedNoMessages(org.sakaiproject.api.app.messageforums.Topic)
 */
public int getNumUnreadViewableMessagesWhenMod(Topic topic)
{
if (log.isDebugEnabled())
{
log.debug("getNumUnreadViewableMessagesWhenMod(Topic" + topic + ")");
}
if (usingHelper)
{
return 10;
}
return messageManager.findUnreadViewableMessageCountByTopicId(topic.getId());
}
/**
 * Marks every message of the given topic whose approval flag is still unset
 * as approved.
 * NOTE(review): the messages are only mutated in memory here — persistence
 * presumably relies on Hibernate dirty checking at session flush; confirm.
 *
 * @param topicId id of the topic whose pending messages are approved
 * @throws IllegalArgumentException if {@code topicId} is null
 */
public void approveAllPendingMessages(Long topicId)
{
    if (topicId == null)
    {
        log.error("approveAllPendingMessages failed with topicId: Null" );
        throw new IllegalArgumentException("Null Argument");
    }
    List messages = this.getMessagesByTopicId(topicId);
    if (messages == null || messages.isEmpty())
    {
        return;
    }
    for (Object current : messages)
    {
        Message msg = (Message) current;
        // Only flip messages that were never moderated; explicit approvals
        // or denials are left untouched.
        if (msg.getApproved() == null)
        {
            msg.setApproved(Boolean.TRUE);
        }
    }
}
/**
 * Returns the pending (unapproved) messages in the site visible to the given
 * membership list; delegates to the message manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTotalNoPendingMessages()
 */
public List getPendingMsgsInSiteByMembership(List membershipList)
{
return messageManager.getPendingMsgsInSiteByMembership(membershipList);
}
/**
 * Returns the discussion forums for the current context (or the helper's
 * forums when {@code usingHelper} is set).
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getDiscussionForums()
 */
public List getDiscussionForums()
{
log.debug("getDiscussionForums()");
if (usingHelper)
{
return helper.getDiscussionForumArea().getDiscussionForums();
}
return forumManager.getForumByTypeAndContext(typeManager
.getDiscussionForumType());
// return getDiscussionForumArea().getDiscussionForums();
}
/**
 * Returns the discussion forums for the given site (or the helper's forums
 * when {@code usingHelper} is set).
 *
 * @param siteId site to look up
 */
public List getDiscussionForums(String siteId)
{
log.debug("getDiscussionForums(siteId)");
if (usingHelper)
{
return helper.getDiscussionForumArea().getDiscussionForums();
}
return forumManager.getForumByTypeAndContext(typeManager
.getDiscussionForumType(), siteId);
// return getDiscussionForumArea().getDiscussionForums();
}
/**
 * Returns the discussion forums for the given context id; delegates to the
 * forum manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getDiscussionForumsByContextId()
 */
public List getDiscussionForumsByContextId(String contextId)
{
log.debug("getDiscussionForumsByContextId(String contextId)");
return forumManager.getForumByTypeAndContext(typeManager
.getDiscussionForumType(), contextId);
}
/**
 * Loads a forum by id, from the helper when {@code usingHelper} is set,
 * otherwise from the forum manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getForumById(java.lang.Long)
 */
public DiscussionForum getForumById(Long forumId)
{
if (log.isDebugEnabled())
{
log.debug("getForumById(Long" + forumId + ")");
}
if (usingHelper)
{
return helper.getForumById(forumId);
}
return (DiscussionForum) forumManager.getForumById(true, forumId);
}
/** Loads a forum by its UUID; delegates to the forum manager. */
public DiscussionForum getForumByUuid(String forumId)
{
if (log.isDebugEnabled())
{
log.debug("getForumByUuid(String" + forumId + ")");
}
return (DiscussionForum) forumManager.getForumByUuid(forumId);
}
/**
 * Returns the messages of a topic; delegates to the message manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getMessagesByTopicId(java.lang.Long)
 */
public List getMessagesByTopicId(Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getMessagesByTopicId(Long" + topicId + ")");
}
return messageManager.findMessagesByTopicId(topicId);
}
/**
 * Loads a topic by id; delegates to the forum manager.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTopicById(java.lang.Long)
 */
public DiscussionTopic getTopicById(Long topicId)
{
if (log.isDebugEnabled())
{
log.debug("getTopicById(Long" + topicId + ")");
}
return (DiscussionTopic) forumManager.getTopicById(true, topicId);
}
/** Loads a forum together with its topics; delegates to the forum manager. */
public DiscussionForum getForumByIdWithTopics(Long forumId)
{
if (log.isDebugEnabled())
{
log.debug("getForumByIdWithTopics(Long" + forumId + ")");
}
return (DiscussionForum) forumManager.getForumByIdWithTopics(forumId);
}
/** Loads a forum with topics, attachments and messages; delegates to the forum manager. */
public DiscussionForum getForumByIdWithTopicsAttachmentsAndMessages(Long forumId) {
if (log.isDebugEnabled()) { log.debug("getForumByIdWithTopicsAttachmentsAndMessages(Long " + forumId + ")"); }
return (DiscussionForum) forumManager.getForumByIdWithTopicsAttachmentsAndMessages(forumId);
}
/** Loads a topic by its UUID; delegates to the forum manager. */
public DiscussionTopic getTopicByUuid(String topicId)
{
if (log.isDebugEnabled())
{
log.debug(" getTopicByUuid(String" + topicId + ")");
}
return (DiscussionTopic) forumManager.getTopicByUuid(topicId);
}
/**
 * Determines whether a topic accessible to the current user follows the
 * given topic within its forum.
 *
 * @param topic the topic whose successor is sought
 * @return true if an accessible topic exists after {@code topic}
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#hasNextTopic(org.sakaiproject.api.app.messageforums.DiscussionTopic)
 */
public boolean hasNextTopic(DiscussionTopic topic)
{
    if (log.isDebugEnabled())
    {
        log.debug("hasNextTopic(DiscussionTopic" + topic + ")");
    }
    if (usingHelper)
    {
        return helper.hasNextTopic(topic);
    }
    // TODO: Needs optimized
    boolean next = false;
    DiscussionForum forum = getForumById(topic.getBaseForum().getId());
    if (forum != null && forum.getTopics() != null)
    {
        for (Iterator iter = forum.getTopics().iterator(); iter.hasNext();)
        {
            try
            {
                DiscussionTopic t = (DiscussionTopic) iter.next();
                // Once the given topic has been seen, the first accessible
                // topic after it proves a successor exists.
                if (next && getTopicAccess(t))
                {
                    return true;
                }
                if (t != null && getTopicAccess(t) && t.getId().equals(topic.getId()))
                {
                    next = true;
                }
            }
            catch (Exception e)
            {
                // Keep scanning the remaining topics, but log the full stack
                // trace (the original logged only e.getMessage(), losing it).
                log.error(e.getMessage(), e);
            }
        }
    }
    // if we get here, there is no next topic
    return false;
}
/**
 * Determines whether a topic accessible to the current user precedes the
 * given topic within its forum.
 *
 * @param topic the topic whose predecessor is sought
 * @return true if an accessible topic exists before {@code topic}
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#hasPreviousTopic(org.sakaiproject.api.app.messageforums.DiscussionTopic)
 */
public boolean hasPreviousTopic(DiscussionTopic topic)
{
    if (log.isDebugEnabled())
    {
        log.debug("hasPreviousTopic(DiscussionTopic" + topic + ")");
    }
    if (usingHelper)
    {
        return helper.hasPreviousTopic(topic);
    }
    // TODO: Needs optimized
    DiscussionForum forum = getForumById(topic.getBaseForum().getId());
    if (forum == null || forum.getTopics() == null)
    {
        return false;
    }
    DiscussionTopic previous = null;
    for (Object candidate : forum.getTopics())
    {
        DiscussionTopic t = (DiscussionTopic) candidate;
        // Skip null entries and topics the current user may not access.
        if (t == null || !getTopicAccess(t))
        {
            continue;
        }
        if (t.getId().equals(topic.getId()))
        {
            // A null predecessor means the given topic is the first
            // accessible topic in the forum.
            return previous != null;
        }
        previous = t;
    }
    // if we get here, there is no previous topic
    return false;
}
/**
 * Returns the first topic after {@code topic} in its forum that the current
 * user may access, or null if there is none.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getNextTopic(org.sakaiproject.api.app.messageforums.DiscussionTopic)
 */
public DiscussionTopic getNextTopic(DiscussionTopic topic)
{
if (log.isDebugEnabled())
{
log.debug("getNextTopic(DiscussionTopic" + topic + ")");
}
if (usingHelper)
{
if (hasNextTopic(topic))
{
return helper.getNextTopic(topic);
}
else
{
return null;
}
}
// TODO: Needs optimized and re-written to take advantage of the db... this is really horrible.
boolean next = false;
DiscussionForum forum = getForumById(topic.getBaseForum().getId());
if (forum != null && forum.getTopics() != null)
{
for (Iterator iter = forum.getTopics().iterator(); iter.hasNext();)
{
DiscussionTopic t = (DiscussionTopic) iter.next();
if (next && getTopicAccess(t))
{
// NOTE(review): this branch is only reachable if getTopicAccess(null)
// returned true, and the do/while below can throw NoSuchElementException
// when the list ends on nulls — verify whether it is reachable at all.
if (t == null)
{
do
{
t = (DiscussionTopic) iter.next();
} while (t == null);
}
return (DiscussionTopic) t;
}
if (t != null && getTopicAccess(t))
{
if (t.getId().equals(topic.getId()))
{
next = true;
}
}
}
}
// if we get here, there is no next topic
return null;
}
/**
 * Returns the last topic before {@code topic} in its forum that the current
 * user may access, or null if there is none.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getPreviousTopic(org.sakaiproject.api.app.messageforums.DiscussionTopic)
 */
public DiscussionTopic getPreviousTopic(DiscussionTopic topic)
{
if (log.isDebugEnabled())
{
log.debug("getPreviousTopic(DiscussionTopic" + topic + ")");
}
if (usingHelper)
{
if (hasPreviousTopic(topic))
{
return helper.getPreviousTopic(topic);
}
else
{
return null;
}
}
// TODO: Needs optimized
DiscussionTopic prev = null;
DiscussionForum forum = getForumById(topic.getBaseForum().getId());
if (forum != null && forum.getTopics() != null)
{
for (Iterator iter = forum.getTopics().iterator(); iter.hasNext();)
{
DiscussionTopic t = (DiscussionTopic) iter.next();
if (t != null && getTopicAccess(t))
{
if (t.getId().equals(topic.getId()))
{
return prev;
}
// NOTE(review): this inner condition repeats the enclosing one and is
// always true here; it calls getTopicAccess a second time — left as-is
// in case that call has side effects.
if (t != null && getTopicAccess(t))
{
prev = (DiscussionTopic) t;
}
}
}
}
// if we get here, there is no previous topic
return null;
}
/**
 * Checks whether the current user has site update ("site.upd") permission in
 * the current site.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#isInstructor()
 */
public boolean isInstructor()
{
    log.debug("isInstructor()");
    return isInstructor(userDirectoryService.getCurrentUser());
}
/**
 * Checks whether the given user has site update permission in the current
 * site; returns false if the user id is unknown.
 *
 * @param userId id of the user to check
 */
public boolean isInstructor(String userId)
{
    log.debug("isInstructor()");
    try {
        return isInstructor(userDirectoryService.getUser(userId));
    } catch (UserNotDefinedException e) {
        // Fixed: message previously named the two-arg overload; also pass the
        // exception so the stack trace is logged, not just the message.
        log.error("DiscussionForumManagerImpl: isInstructor(String userId): " + e.getMessage(), e);
        return false;
    }
}
/**
 * Checks whether the given user has site update permission in the given
 * site; returns false if the user id is unknown.
 *
 * @param userId id of the user to check
 * @param siteId site reference to check in
 */
public boolean isInstructor(String userId, String siteId) {
    log.debug("isInstructor(String " + userId + ", " + siteId + ")");
    try {
        return isInstructor(userDirectoryService.getUser(userId), siteId);
    } catch (UserNotDefinedException e) {
        // Pass the exception so the stack trace is available at debug level.
        log.debug("DiscussionForumManagerImpl: isInstructor(String userId, String siteId): " + e.getMessage(), e);
        return false;
    }
}
/**
 * Check if the given user has site.upd access in the current site.
 *
 * @param user user to check; null yields false
 * @return true if the user has site update permission
 */
public boolean isInstructor(User user)
{
if (log.isDebugEnabled())
{
log.debug("isInstructor(User " + user + ")");
}
if (user != null)
return isInstructor(user, getContextSiteId());
else
return false;
}
/**
 * Checks whether the current user has the section TA role permission.
 *
 * @return true if the current user has "section.role.ta" access
 */
public boolean isSectionTA()
{
log.debug("isSectionTA()");
return isSectionTA(userDirectoryService.getCurrentUser());
}
/**
 * Check if the given user has site.upd access in the given site.
 *
 * @param user user to check; null yields false
 * @param siteId site reference to check in
 * @return true if the user has site update permission
 */
public boolean isInstructor(User user, String siteId)
{
if (log.isDebugEnabled())
{
log.debug("isInstructor(User " + user + ", " + siteId + ")");
}
if (user != null)
return securityService.unlock(user, "site.upd", siteId);
else
return false;
}
/**
 * Check if the given user has section.role.ta access in the current site.
 *
 * @param user user to check; null yields false
 * @return true if the user has the section TA permission
 */
private boolean isSectionTA(User user)
{
if (log.isDebugEnabled())
{
log.debug("isSectionTA(User " + user + ")");
}
if (user != null)
return securityService.unlock(user, "section.role.ta", getContextSiteId());
else
return false;
}
/**
 * @return the current site reference, i.e. "/site/" + the current context id
 */
private String getContextSiteId()
{
log.debug("getContextSiteId()");
return "/site/" + getCurrentContext();
}
/**
 *
 * @return the current context without the "/site/" prefix
 */
private String getCurrentContext() {
return toolManager.getCurrentPlacement().getContext();
}
/** @return the id of the user bound to the current session */
private String getCurrentUser() {
return sessionManager.getCurrentSessionUserId();
}
/**
 * @param forumManager the forum manager to inject
 */
public void setForumManager(MessageForumsForumManager forumManager)
{
if (log.isDebugEnabled())
{
log.debug("setForumManager(MessageForumsForumManager" + forumManager
+ ")");
}
this.forumManager = forumManager;
}
/**
 * Creates a new (unsaved) discussion forum instance.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#createForum()
 */
public DiscussionForum createForum()
{
    log.debug("createForum()");
    return forumManager.createDiscussionForum();
}
/**
 * Deletes the given discussion forum; delegates to the forum manager.
 *
 * @param forum forum to delete
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#deleteForum(org.sakaiproject.api.app.messageforums.DiscussionForum)
 */
public void deleteForum(DiscussionForum forum)
{
    if (log.isDebugEnabled())
    {
        // Fixed: the debug message previously said "setForumManager(...)" —
        // a copy-paste error naming the wrong method.
        log.debug("deleteForum(DiscussionForum" + forum + ")");
    }
    forumManager.deleteDiscussionForum(forum);
}
/**
 * Creates a new (unsaved) topic attached to the given forum.
 *
 * @param forum parent forum; if null, nothing is created
 * @return the new topic, or null when {@code forum} is null
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#createTopic(org.sakaiproject.api.app.messageforums.DiscussionForum)
 */
public DiscussionTopic createTopic(DiscussionForum forum)
{
    if (log.isDebugEnabled())
    {
        log.debug("createTopic(DiscussionForum" + forum + ")");
    }
    if (forum == null)
    {
        // Fixed log-message typo ("with out" -> "without").
        log.debug("Attempt to create topic without forum");
        return null;
    }
    return forumManager.createDiscussionForumTopic(forum);
}
/**
 * Saves the given forum (not as a draft) in the current context as the
 * current user, logging the event.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveForum(org.sakaiproject.api.app.messageforums.DiscussionForum)
 */
public DiscussionForum saveForum(DiscussionForum forum)
{
if (log.isDebugEnabled())
{
log.debug("saveForum(DiscussionForum" + forum + ")");
}
return saveForum(forum, false, getCurrentContext(), true, getCurrentUser());
}
/**
 * Saves the given forum in an explicit context, preserving the forum's own
 * draft flag.
 *
 * @param contextId context (site) the forum belongs to; must not be null
 * @param forum forum to save; must not be null
 * @throws IllegalArgumentException if either argument is null
 */
public DiscussionForum saveForum(String contextId, DiscussionForum forum) {
if (log.isDebugEnabled()) log.debug("saveForum(String contextId, DiscussionForum forum)");
if (contextId == null || forum == null) {
throw new IllegalArgumentException("Null contextId or forum passed to saveForum. contextId:" + contextId);
}
return saveForum(forum, forum.getDraft(), contextId, true, getCurrentUser());
}
/**
 * Saves the given forum as a draft in the current context as the current
 * user, logging the event.
 *
 * @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveForumAsDraft(org.sakaiproject.api.app.messageforums.DiscussionForum)
 */
public DiscussionForum saveForumAsDraft(DiscussionForum forum)
{
if (log.isDebugEnabled())
{
log.debug("saveForumAsDraft(DiscussionForum" + forum + ")");
}
return saveForum(forum, true, getCurrentContext(), true, getCurrentUser());
}
   /**
    * Central save routine for discussion forums.
    *
    * @param forum the forum to persist
    * @param draft whether to persist the forum as a draft
    * @param contextId site context used to locate the discussion area for a
    *        first-time save
    * @param logEvent whether the underlying manager should log a save event
    * @param currentUser user id recorded as the saver
    * @return the persisted forum
    */
   public DiscussionForum saveForum(DiscussionForum forum, boolean draft, String contextId, boolean logEvent, String currentUser)
   {
      if (log.isDebugEnabled())
      {
         log.debug("saveForum(DiscussionForum" + forum + "boolean " + draft + ")");
      }
      // A null id means the forum has never been persisted; remember that so the
      // containing Area can be updated after the save.
      boolean saveArea = forum.getId() == null;
      forum.setDraft(draft);
      final DiscussionForum forumReturn = forumManager.saveDiscussionForum(forum, draft, logEvent, currentUser);
      //set flag to false since permissions could have changed. This will force a clearing and resetting
      //of the permissions cache.
      threadLocalManager.set("message_center_permission_set", Boolean.FALSE);
      if (saveArea)
      {
         // First-time save: attach the new forum to the discussion area for this context.
         String dfType = typeManager.getDiscussionForumType();
         Area area = areaManager.getAreaByContextIdAndTypeId(contextId, dfType);
         forumReturn.setArea(area);
         forumReturn.setSortIndex(0);
         area.addDiscussionForum(forumReturn);
         areaManager.saveArea(area, currentUser);
      }
      return forumReturn;
   }
@Override
public void saveTopicAsDraft(DiscussionTopic topic)
{
saveTopic(topic, true);
}
@Override
public void saveTopic(DiscussionTopic topic)
{
saveTopic(topic, false);
}
@Override
public void saveTopic(DiscussionTopic topic, boolean draft)
{
TopicEvent event = topic.getId() == null ? TopicEvent.ADD : TopicEvent.REVISE;
LRS_Statement statement = getStatementForUserPosted(topic.getTitle(), SAKAI_VERB.interacted).orElse(null);
saveTopic(topic, draft, new ForumsTopicEventParams(event, statement));
}
@Override
public void saveTopic(DiscussionTopic topic, boolean draft, ForumsTopicEventParams params)
{
saveTopic(topic, draft, params, getCurrentUser());
}
   /**
    * Central save routine for discussion topics.
    *
    * @param topic the topic to persist
    * @param draft whether to persist the topic as a draft
    * @param params event/LRS parameters; when null no event is posted
    * @param currentUser user id recorded as the saver
    */
   @Override
   public void saveTopic(DiscussionTopic topic, boolean draft, ForumsTopicEventParams params, String currentUser)
   {
      log.debug("Save topic {}, as a draft ({})", topic, draft);
      // A null id means the topic has never been persisted; the parent forum
      // must then be re-saved to pick up the new child.
      boolean saveForum = topic.getId() == null;
      topic.setDraft(draft);
      DiscussionForum forum = (DiscussionForum) topic.getBaseForum();
      forumManager.saveDiscussionForumTopic(topic, forum.getDraft(), currentUser, params != null);
      // refresh the forum for Hibernate
      forum = (DiscussionForum) topic.getBaseForum();
      if (saveForum)
      {
         forum.addTopic(topic);
         forumManager.saveDiscussionForum(forum, forum.getDraft(), false, currentUser); // event already logged by saveDiscussionForumTopic()
         //sak-5146 forumManager.saveDiscussionForum(forum);
      }
      if (params != null)
      {
         // Post the topic event (with optional LRS statement) only when the
         // caller supplied event parameters.
         Event event = eventTrackingService.newEvent(params.event.type, getEventMessage(topic), null, params.event.modification,
               NotificationService.NOTI_OPTIONAL, params.lrsStatement);
         eventTrackingService.post(event);
      }
   }
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#deleteTopic(org.sakaiproject.api.app.messageforums.DiscussionTopic)
*/
public void deleteTopic(DiscussionTopic topic)
{
if (log.isDebugEnabled())
{
log.debug("deleteTopic(DiscussionTopic " + topic + ")");
}
forumManager.deleteDiscussionForumTopic(topic);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getDefaultControlPermissions()
*/
public List getDefaultControlPermissions()
{
log.debug("getDefaultControlPermissions()");
List defaultControlPermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
AreaControlPermission controlPermission = permissionManager
.getDefaultAreaControlPermissionForRole(roleId, typeManager
.getDiscussionForumType());
defaultControlPermissions.add(controlPermission);
}
return defaultControlPermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getAreaControlPermissions()
*/
public List getAreaControlPermissions()
{
log.debug("getAreaControlPermissions()");
List areaControlPermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
AreaControlPermission controlPermission = permissionManager
.getAreaControlPermissionForRole(roleId, typeManager
.getDiscussionForumType());
if (controlPermission == null)
{
controlPermission = permissionManager
.createAreaControlPermissionForRole(roleId, typeManager
.getDiscussionForumType());
}
areaControlPermissions.add(controlPermission);
}
return areaControlPermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getAreaMessagePermissions()
*/
public List getAreaMessagePermissions()
{
log.debug("getAreaMessagePermissions()");
List areaMessagePermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
MessagePermissions messagePermission = permissionManager
.getAreaMessagePermissionForRole(roleId, typeManager
.getDiscussionForumType());
if (messagePermission == null)
{
messagePermission = permissionManager
.createAreaMessagePermissionForRole(roleId, typeManager
.getDiscussionForumType());
}
areaMessagePermissions.add(messagePermission);
}
return areaMessagePermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getDefaultMessagePermissions()
*/
public List getDefaultMessagePermissions()
{
log.debug("getDefaultMessagePermissions()");
List defaultMessagePermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
MessagePermissions messagePermission = permissionManager
.getDefaultAreaMessagePermissionForRole(roleId, typeManager
.getDiscussionForumType());
defaultMessagePermissions.add(messagePermission);
}
return defaultMessagePermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveDefaultMessagePermissions(java.util.List)
*/
public void saveAreaMessagePermissions(List messagePermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveDefaultMessagePermissions(List " + messagePermissions
+ ")");
}
if (messagePermissions != null && messagePermissions.size() > 0)
{
Iterator iterator = messagePermissions.iterator();
while (iterator.hasNext())
{
MessagePermissions msgPermission = (MessagePermissions) iterator.next();
permissionManager.saveAreaMessagePermissionForRole(
getDiscussionForumArea(), msgPermission, typeManager
.getDiscussionForumType());
}
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getForumControlPermissions(org.sakaiproject.api.app.messageforums.DiscussionForum)
*/
public List getForumControlPermissions(DiscussionForum forum)
{
if (log.isDebugEnabled())
{
log.debug("getForumControlPermissions(DiscussionForum " + forum + ")");
}
List forumControlPermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
ForumControlPermission controlPermission = permissionManager
.getForumControlPermissionForRole(forum, roleId, typeManager
.getDiscussionForumType());
if (controlPermission == null)
{
controlPermission = permissionManager
.createForumControlPermissionForRole(roleId, typeManager
.getDiscussionForumType());
}
forumControlPermissions.add(controlPermission);
}
return forumControlPermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getForumMessagePermissions(org.sakaiproject.api.app.messageforums.DiscussionForum)
*/
public List getForumMessagePermissions(DiscussionForum forum)
{
if (log.isDebugEnabled())
{
log.debug("getForumMessagePermissions(DiscussionForum " + forum + ")");
}
List forumMessagePermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
MessagePermissions messagePermission = permissionManager
.getForumMessagePermissionForRole(forum, roleId, typeManager
.getDiscussionForumType());
if (messagePermission == null)
{
messagePermission = permissionManager
.createForumMessagePermissionForRole(roleId, typeManager
.getDiscussionForumType());
}
forumMessagePermissions.add(messagePermission);
}
return forumMessagePermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTopicControlPermissions(org.sakaiproject.api.app.messageforums.DiscussionTopic)
*/
public List getTopicControlPermissions(DiscussionTopic topic)
{
log.debug("getTopicControlPermissions(DiscussionTopic " + topic + ")");
List topicControlPermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
TopicControlPermission controlPermission = permissionManager
.getTopicControlPermissionForRole(topic, roleId, typeManager
.getDiscussionForumType());
if (controlPermission == null)
{
controlPermission = permissionManager
.createTopicControlPermissionForRole(topic.getBaseForum(), roleId,
typeManager.getDiscussionForumType());
}
topicControlPermissions.add(controlPermission);
}
return topicControlPermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getTopicMessagePermissions(org.sakaiproject.api.app.messageforums.DiscussionTopic)
*/
public List getTopicMessagePermissions(DiscussionTopic topic)
{
log.debug("getTopicMessagePermissions(DiscussionTopic " + topic + ")");
List topicMessagePermissions = new ArrayList();
Iterator roles = getRoles();
while (roles.hasNext())
{
String roleId = (String) roles.next();
MessagePermissions messagePermission = permissionManager
.getTopicMessagePermissionForRole(topic, roleId, typeManager
.getDiscussionForumType());
if (messagePermission == null)
{
messagePermission = permissionManager
.createTopicMessagePermissionForRole(topic.getBaseForum(), roleId,
typeManager.getDiscussionForumType());
}
topicMessagePermissions.add(messagePermission);
}
return topicMessagePermissions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveDefaultControlPermissions(java.util.List)
*/
public void saveAreaControlPermissions(List controlpermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveAreaControlPermissions(List" + controlpermissions + ")");
}
if (controlpermissions != null && controlpermissions.size() > 0)
{
Iterator iterator = controlpermissions.iterator();
while (iterator.hasNext())
{
AreaControlPermission controlPermission = (AreaControlPermission) iterator
.next();
permissionManager.saveAreaControlPermissionForRole(
getDiscussionForumArea(), controlPermission, typeManager
.getDiscussionForumType());
}
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveForumControlPermissions(org.sakaiproject.api.app.messageforums.DiscussionForum,
* java.util.List)
*/
public void saveForumControlPermissions(DiscussionForum forum,
List controlPermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveForumControlPermissions(List " + controlPermissions + ")");
}
if (forum != null && controlPermissions != null
&& controlPermissions.size() > 0)
{
Iterator iterator = controlPermissions.iterator();
while (iterator.hasNext())
{
ForumControlPermission controlPermission = (ForumControlPermission) iterator
.next();
permissionManager.saveForumControlPermissionForRole(forum,
controlPermission);
}
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveForumMessagePermissions(org.sakaiproject.api.app.messageforums.DiscussionForum,
* java.util.List)
*/
public void saveForumMessagePermissions(DiscussionForum forum,
List messagePermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveForumMessagePermissions(List " + messagePermissions + ")");
}
if (forum != null && messagePermissions != null
&& messagePermissions.size() > 0)
{
Iterator iterator = messagePermissions.iterator();
while (iterator.hasNext())
{
MessagePermissions messagePermission = (MessagePermissions) iterator
.next();
permissionManager.saveForumMessagePermissionForRole(forum,
messagePermission);
}
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveTopicControlPermissions(org.sakaiproject.api.app.messageforums.DiscussionForum,
* java.util.List)
*/
public void saveTopicControlPermissions(DiscussionTopic topic,
List controlPermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveTopicControlPermissions(List " + controlPermissions + ")");
}
if (topic != null && controlPermissions != null
&& controlPermissions.size() > 0)
{
Iterator iterator = controlPermissions.iterator();
while (iterator.hasNext())
{
TopicControlPermission controlPermission = (TopicControlPermission) iterator
.next();
permissionManager.saveTopicControlPermissionForRole(topic,
controlPermission);
}
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#saveTopicMessagePermissions(org.sakaiproject.api.app.messageforums.DiscussionTopic,
* java.util.List)
*/
public void saveTopicMessagePermissions(DiscussionTopic topic,
List messagePermissions)
{
if (log.isDebugEnabled())
{
log.debug("saveTopicMessagePermissions(List " + messagePermissions + ")");
}
if (topic != null && messagePermissions != null
&& messagePermissions.size() > 0)
{
Iterator iterator = messagePermissions.iterator();
while (iterator.hasNext())
{
MessagePermissions messagePermission = (MessagePermissions) iterator
.next();
permissionManager.saveTopicMessagePermissionForRole(topic,
messagePermission);
}
}
}
/**
* @return Roles for the current site
*/
private Iterator getRoles()
{
log.debug("getRoles()");
List roleList = new ArrayList();
AuthzGroup realm = null;
try
{
realm = authzGroupService.getAuthzGroup(getContextSiteId());
Set roles = realm.getRoles();
if (roles != null && roles.size() > 0)
{
Iterator roleIter = roles.iterator();
while (roleIter.hasNext())
{
Role role = (Role) roleIter.next();
if (role != null) roleList.add(role.getId());
}
}
}
catch (GroupNotDefinedException e) {
log.error(e.getMessage(), e);
}
Collections.sort(roleList);
return roleList.iterator();
}
public boolean getAnonRole()
{
return getAnonRole(getContextSiteId());
}
public boolean getAnonRole(String contextSiteId)
{
log.debug("getAnonRoles()");
AuthzGroup realm = null;
try
{
realm = authzGroupService.getAuthzGroup(contextSiteId);
Role anon = realm.getRole(".anon");
if (sessionManager.getCurrentSessionUserId()==null && anon != null && anon.getAllowedFunctions().contains("site.visit"))
{
return true;
}
}
catch (GroupNotDefinedException e) {
log.error(e.getMessage(), e);
return false;
}
return false;
}
public void markMessageAs(Message message, boolean readStatus)
{
if (log.isDebugEnabled())
{
log.debug("markMessageAsRead(Message" + message + ")");
}
try
{
messageManager.markMessageReadForUser(message.getTopic().getId(), message
.getId(), readStatus);
}
catch (Exception e)
{
log.error(e.getMessage(), e);
}
}
public void markMessageReadStatusForUser(Message message, boolean readStatus, String userId)
{
if (log.isDebugEnabled())
{
log.debug("markMessageReadStatusForUser(Message" + message + " readStatus:" + readStatus + " userId: " + userId + ")");
}
try
{
messageManager.markMessageReadForUser(message.getTopic().getId(), message
.getId(), readStatus, userId);
}
catch (Exception e)
{
log.error(e.getMessage(), e);
}
}
/**
* @param forum
* @return
*/
public boolean isForumOwner(DiscussionForum forum){
return isForumOwner(forum, userDirectoryService.getCurrentUser().getId());
}
public boolean isForumOwner(DiscussionForum forum, String userId)
{
return isForumOwner(forum, userId, getContextSiteId());
}
public boolean isForumOwner(DiscussionForum forum, String userId, String siteId)
{
return isForumOwner(forum.getId(), forum.getCreatedBy(), userId, siteId);
}
public boolean isForumOwner(Long forumId, String forumCreatedBy, String userId, String siteId)
{
if (log.isDebugEnabled())
{
log.debug("isForumOwner(DiscussionForum " + forumId + ")");
}
return forumCreatedBy.equals(userId) && !isRoleSwapView(siteId);
}
private boolean isRoleSwapView(String siteId)
{
return (securityService.getUserEffectiveRole(siteId) != null);
}
/**
* @param topic
* @return
*/
public boolean isTopicOwner(DiscussionTopic topic){
return isTopicOwner(topic, userDirectoryService.getCurrentUser().getId());
}
public boolean isTopicOwner(DiscussionTopic topic, String userId)
{
return isTopicOwner(topic, userId, getContextSiteId());
}
public boolean isTopicOwner(DiscussionTopic topic, String userId, String siteId)
{
return isTopicOwner(topic.getId(), topic.getCreatedBy(), userId, siteId);
}
public boolean isTopicOwner(Long topicId, String topicCreatedBy, String userId, String siteId)
{
if (log.isDebugEnabled())
{
log.debug("isTopicOwner(DiscussionTopic " + topicId + ")");
}
return topicCreatedBy.equals(userId) && !isRoleSwapView(siteId);
}
   /**
    * Decides whether the current user may access the given topic.
    * Instructors, super users and the topic owner always have access; draft or
    * unavailable topics are hidden from everyone else; otherwise access is
    * denied only when the user's role is mapped to the "None" permission level.
    *
    * @param t the topic to check
    * @return true when the current user may access the topic
    */
   private boolean getTopicAccess(DiscussionTopic t)
   {
      if (log.isDebugEnabled())
      {
         log.debug("getTopicAccess(DiscussionTopic" + t + ")");
      }
      // SAK-27570: Return early instead of looping through lots of database records
      if (isInstructor() || securityService.isSuperUser() || isTopicOwner(t)) {
         return true;
      }
      // Draft or unavailable topics are visible only to the privileged users above.
      else if (t.getDraft().equals(Boolean.TRUE) || t.getAvailability() == null || !t.getAvailability()) {
         return false;
      }
      //SAK-12685 If topic's permission level name is "None", then can't access
      User user=userDirectoryService.getCurrentUser();
      String role=authzGroupService.getUserRole(user.getId(), getContextSiteId());
      return !forumManager.doesRoleHavePermissionInTopic(t.getId(), role, PermissionLevelManager.PERMISSION_LEVEL_NAME_NONE);
   }
   /**
    * Converts a list of selected membership-item uuids into
    * {@code MessageForumsUser} objects, resolving each uuid against the cached
    * course member map and dispatching on the membership item's type.
    * When a "not specified" item is selected it wins outright: the result is a
    * single-element list containing only that entry.
    *
    * @param selectedList list of membership-item uuid strings
    * @return list of decoded {@code MessageForumsUser} objects
    */
   private List decodeActorPermissionTypeList(List selectedList)
   {
      if (log.isDebugEnabled())
      {
         log.debug("decodeActorPermissionTypeList(List" + selectedList + ")");
      }
      List newSelectedMemberList = new ArrayList();
      for (Iterator i = selectedList.iterator(); i.hasNext();)
      {
         String selectedItem = (String) i.next();
         MessageForumsUser user = new MessageForumsUserImpl();
         /** lookup item in map */
         MembershipItem item = (MembershipItem) getAllCourseMembers().get(
             selectedItem);
         if (item == null)
         {
            // Unknown uuid: skip it (no user is added for this entry).
            log.warn("decodeActorPermissionTypeList() could not resolve uuid: "
                + selectedItem);
         }
         else
         {
            if (MembershipItem.TYPE_ALL_PARTICIPANTS.equals(item.getType()))
            {
               user.setTypeUuid(typeManager.getAllParticipantType());
               user.setUserId(typeManager.getAllParticipantType());
               newSelectedMemberList.add(user);
            }
            else
               if (MembershipItem.TYPE_NOT_SPECIFIED.equals(item.getType()))
               {
                  user.setTypeUuid(typeManager.getNotSpecifiedType());
                  user.setUserId(typeManager.getNotSpecifiedType());
                  // if not specified is seleted then only this value remains.
                  newSelectedMemberList = null;
                  newSelectedMemberList = new ArrayList();
                  newSelectedMemberList.add(user);
                  break;
               }
               else
                  if (MembershipItem.TYPE_ROLE.equals(item.getType()))
                  {
                     user.setTypeUuid(typeManager.getRoleType());
                     user.setUserId(item.getRole().getId());
                     newSelectedMemberList.add(user);
                  }
                  else
                     if (MembershipItem.TYPE_GROUP.equals(item.getType()))
                     {
                        user.setTypeUuid(typeManager.getGroupType());
                        user.setUserId(item.getGroup().getId());
                        newSelectedMemberList.add(user);
                     }
                     else
                        if (MembershipItem.TYPE_USER.equals(item.getType()))
                        {
                           user.setTypeUuid(typeManager.getUserType());
                           user.setUserId(item.getUser().getId());
                           newSelectedMemberList.add(user);
                        }
                        else
                        {
                           log.warn("getRecipients() could not resolve membership type: "
                               + item.getType());
                        }
         }
      }
      return newSelectedMemberList;
   }
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#decodeAccessorsList(java.util.List)
*/
public List decodeAccessorsList(ArrayList accessorList)
{
if (log.isDebugEnabled())
{
log.debug("decodeAccessorsList(List" + accessorList + ")");
}
if (accessorList == null || accessorList.size() < 1)
{
return forumManager.createDefaultActorPermissions().getAccessors();
}
return decodeActorPermissionTypeList(accessorList);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#decodeContributorsList(java.util.List)
*/
public List decodeContributorsList(ArrayList contributorList)
{
if (log.isDebugEnabled())
{
log.debug("decodeContributorsList(List" + contributorList + ")");
}
if (contributorList == null || contributorList.size() < 1)
{
return forumManager.createDefaultActorPermissions().getContributors();
}
return decodeActorPermissionTypeList(contributorList);
}
   /**
    * Returns the forum's contributors as a list of resolved membership-item
    * ids, initializing default actor permissions on the forum when absent.
    *
    * @param forum the forum; null yields null
    * @return list of membership-item ids for the forum's contributors
    */
   public List getContributorsList(DiscussionForum forum)
   {
      if (log.isDebugEnabled())
      {
         log.debug(" getContributorsList(DiscussionForum" + forum + ")");
      }
      List contributorList = null;
      if (forum == null)
      {
         return null;
      }
      if (forum.getActorPermissions() == null
          || forum.getActorPermissions().getContributors() == null)
      {
         forum.setActorPermissions(forumManager.createDefaultActorPermissions());
         // NOTE(review): createDefaultActorPermissions() is called a second time
         // here, so the returned contributors come from a different instance than
         // the one just set on the forum — presumably the defaults are identical,
         // but confirm this is intended.
         contributorList = forumManager.createDefaultActorPermissions()
             .getContributors();
      }
      else
      {
         contributorList = forum.getActorPermissions().getContributors();
      }
      Iterator iterator = contributorList.iterator();
      return getContributorAccessorList(iterator);
   }
   /**
    * Returns the forum's accessors as a list of resolved membership-item ids,
    * initializing default actor permissions on the forum when absent.
    *
    * @param forum the forum; null yields null
    * @return list of membership-item ids for the forum's accessors
    */
   public List getAccessorsList(DiscussionForum forum)
   {
      if (log.isDebugEnabled())
      {
         log.debug("getAccessorsList(DiscussionForum" + forum + ")");
      }
      List accessorsList = null;
      if (forum == null)
      {
         return null;
      }
      if (forum.getActorPermissions() == null
          || forum.getActorPermissions().getAccessors() == null)
      {
         forum.setActorPermissions(forumManager.createDefaultActorPermissions());
         // NOTE(review): createDefaultActorPermissions() is called a second time
         // here, so the returned accessors come from a different instance than
         // the one just set on the forum — presumably the defaults are identical,
         // but confirm this is intended.
         accessorsList = forumManager.createDefaultActorPermissions()
             .getAccessors();
      }
      else
      {
         accessorsList = forum.getActorPermissions().getAccessors();
      }
      Iterator iterator = accessorsList.iterator();
      return getContributorAccessorList(iterator);
   }
   /**
    * Maps each MessageForumsUser in the iterator to the membership-item id of
    * the matching course member (matched by type, and by user/role/group id
    * where applicable). A null entry is added when no match is found.
    *
    * NOTE(review): the member list is rebuilt from courseMemberMap on every
    * outer iteration (O(n*m)), and a single {@code iter} is shared by all five
    * type branches — this only works because exactly one branch's condition can
    * match per user, so only one branch consumes the iterator. Confirm before
    * restructuring.
    *
    * @param iterator iterator over MessageForumsUser entries
    * @return list of matched membership-item ids (may contain nulls)
    */
   private List getContributorAccessorList(Iterator iterator)
   {
      if (log.isDebugEnabled())
      {
         log.debug("getContributorAccessorList(Iterator" + iterator + ")");
      }
      List modifiedContributorList = new ArrayList();
      while (iterator.hasNext())
      {
         String selectedId = null;
         MessageForumsUser user = (MessageForumsUser) iterator.next();
         List totalmembers = membershipManager
             .convertMemberMapToList(courseMemberMap);
         Iterator iter = totalmembers.iterator();
         if (user.getTypeUuid().equals(typeManager.getAllParticipantType()))
         {
            while (iter.hasNext())
            {
               MembershipItem member = (MembershipItem) iter.next();
               if (member.getType().equals(MembershipItem.TYPE_ALL_PARTICIPANTS))
               {
                  selectedId = member.getId();
               }
            }
         }
         if (user.getTypeUuid().equals(typeManager.getNotSpecifiedType()))
         {
            while (iter.hasNext())
            {
               MembershipItem member = (MembershipItem) iter.next();
               if (member.getType().equals(MembershipItem.TYPE_NOT_SPECIFIED))
               {
                  selectedId = member.getId();
               }
            }
         }
         if (user.getTypeUuid().equals(typeManager.getGroupType()))
         {
            while (iter.hasNext())
            {
               MembershipItem member = (MembershipItem) iter.next();
               if (member.getType().equals(MembershipItem.TYPE_GROUP)
                   && user.getUserId().equals(member.getGroup().getId()))
               {
                  selectedId = member.getId();
               }
            }
         }
         if (user.getTypeUuid().equals(typeManager.getRoleType()))
         {
            while (iter.hasNext())
            {
               MembershipItem member = (MembershipItem) iter.next();
               if (member.getType().equals(MembershipItem.TYPE_ROLE)
                   && user.getUserId().equals(member.getRole().getId()))
               {
                  selectedId = member.getId();
               }
            }
         }
         if (user.getTypeUuid().equals(typeManager.getUserType()))
         {
            while (iter.hasNext())
            {
               MembershipItem member = (MembershipItem) iter.next();
               if (member.getType().equals(MembershipItem.TYPE_USER)
                   && user.getUserId().equals(member.getUser().getId()))
               {
                  selectedId = member.getId();
               }
            }
         }
         modifiedContributorList.add(selectedId);
      }
      return modifiedContributorList;
   }
/*
* (non-Javadoc)
*
* @see org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager#getAllCourseMembers()
*/
public Map getAllCourseMembers()
{
if (log.isDebugEnabled())
{
log.debug("getAllCourseMembers()");
}
if (courseMemberMap == null)
{
courseMemberMap = membershipManager.getAllCourseMembers(true, false, true, null);
}
return courseMemberMap;
}
   /**
    * Clears the cached course member map so the next call to
    * {@link #getAllCourseMembers()} reloads it.
    */
   public void setCourseMemberMapToNull()
   {
      this.courseMemberMap = null;
   }
   /**
    * Returns the topic's contributors as a list of resolved membership-item
    * ids. When the topic has no actor permissions yet, a deep copy of the
    * parent forum's permissions is installed first.
    *
    * @param topic the topic; null yields null
    * @param forum the topic's parent forum, used as the source for defaults
    * @return list of membership-item ids for the topic's contributors
    */
   public List getContributorsList(DiscussionTopic topic, DiscussionForum forum)
   {
      if (log.isDebugEnabled())
      {
         log.debug("getContributorsList(DiscussionTopic " + topic
             + ", DiscussionForum " + forum + ")");
      }
      List contributorList = null;
      if (topic == null)
      {
         return null;
      }
      if (topic.getActorPermissions() == null
          || topic.getActorPermissions().getContributors() == null)
      {
         // hibernate does not permit this b/c saving forum and topics will
         // throw uniqueobjectexception
         topic.setActorPermissions(getDeepCopyOfParentActorPermissions(forum
             .getActorPermissions()));
         contributorList = topic.getActorPermissions().getContributors();
      }
      else
      {
         contributorList = topic.getActorPermissions().getContributors();
      }
      Iterator iterator = contributorList.iterator();
      return getContributorAccessorList(iterator);
   }
private ActorPermissions getDeepCopyOfParentActorPermissions(
ActorPermissions actorPermissions)
{
ActorPermissions newAP = new ActorPermissionsImpl();
List parentAccessors = actorPermissions.getAccessors();
List parentContributors = actorPermissions.getContributors();
List newAccessors = new ArrayList();
List newContributor = new ArrayList();
Iterator iter = parentAccessors.iterator();
while (iter.hasNext())
{
MessageForumsUser accessParent = (MessageForumsUser) iter.next();
MessageForumsUser newaccessor = new MessageForumsUserImpl();
newaccessor.setTypeUuid(accessParent.getTypeUuid());
newaccessor.setUserId(accessParent.getUserId());
newaccessor.setUuid(accessParent.getUuid());
newAccessors.add(newaccessor);
}
Iterator iter1 = parentContributors.iterator();
while (iter1.hasNext())
{
MessageForumsUser contribParent = (MessageForumsUser) iter1.next();
MessageForumsUser newcontributor = new MessageForumsUserImpl();
newcontributor.setTypeUuid(contribParent.getTypeUuid());
newcontributor.setUserId(contribParent.getUserId());
newcontributor.setUuid(contribParent.getUuid());
newContributor.add(newcontributor);
}
newAP.setAccessors(newAccessors);
newAP.setContributors(newContributor);
return newAP;
}
   /**
    * Returns the topic's accessors as a list of resolved membership-item ids.
    * When the topic has no actor permissions yet, a deep copy of the parent
    * forum's permissions is installed first.
    *
    * @param topic the topic; null yields null
    * @param forum the topic's parent forum, used as the source for defaults
    * @return list of membership-item ids for the topic's accessors
    */
   public List getAccessorsList(DiscussionTopic topic, DiscussionForum forum)
   {
      if (log.isDebugEnabled())
      {
         log.debug("getAccessorsList(DiscussionTopic " + topic
             + ", DiscussionForum " + forum + ")");
      }
      List accessorsList = null;
      if (topic == null)
      {
         return null;
      }
      if (topic.getActorPermissions() == null
          || topic.getActorPermissions().getAccessors() == null)
      {
         // hibernate does not permit this b/c saving forum and topics will
         // throw uniqueobjectexception
         topic.setActorPermissions(getDeepCopyOfParentActorPermissions(forum
             .getActorPermissions()));
         accessorsList = topic.getActorPermissions().getAccessors();
      }
      else
      {
         accessorsList = topic.getActorPermissions().getAccessors();
      }
      Iterator iterator = accessorsList.iterator();
      return getContributorAccessorList(iterator);
   }
public DBMembershipItem getAreaDBMember(Set originalSet, String name,
Integer type)
{
DBMembershipItem newItem = getDBMember(originalSet, name, type);
return newItem;
}
public DBMembershipItem getDBMember(Set originalSet, String name,
Integer type) {
return getDBMember(originalSet, name, type, getContextSiteId());
}
   /**
    * Finds the stored membership item matching the given name/type in the set,
    * or builds one with a resolved permission level when none is stored (or
    * the stored one has no level). Level resolution order for role/group
    * items: stored level name, configured default, then a realm-function
    * lookup (site updaters get Owner, others Contributor); everything else
    * falls back to the None level.
    *
    * @param originalSet stored DBMembershipItems to search; may be null
    * @param name role/group name of the item
    * @param type membership type (e.g. DBMembershipItem.TYPE_ROLE)
    * @param contextSiteId site realm id used for permission lookups
    * @return a membership item with a non-null permission level
    */
   public DBMembershipItem getDBMember(Set originalSet, String name,
       Integer type, String contextSiteId)
   {
      DBMembershipItem membershipItem = null;
      DBMembershipItem membershipItemIter;
      if (originalSet != null){
         Iterator iter = originalSet.iterator();
         while (iter.hasNext())
         {
            membershipItemIter = (DBMembershipItem) iter.next();
            if (membershipItemIter.getType().equals(type)
                && membershipItemIter.getName().equals(name))
            {
               membershipItem = membershipItemIter;
               break;
            }
         }
      }
      // No stored item, or stored item lacks a resolved level: build one.
      if (membershipItem == null || membershipItem.getPermissionLevel() == null){
         PermissionLevel level = null;
         //for groups awareness
         if (type.equals(DBMembershipItem.TYPE_ROLE) || type.equals(DBMembershipItem.TYPE_GROUP))
         {
            String levelName = null;
            if (membershipItem != null){
               /** use level from stored item */
               levelName = membershipItem.getPermissionLevelName();
            }
            else{
               /** get level from config file */
               levelName = ServerConfigurationService.getString(MC_DEFAULT
                   + name);
            }
            if (levelName != null && levelName.trim().length() > 0)
            {
               level = permissionLevelManager.getPermissionLevelByName(levelName);
            } else if (name == null || ".anon".equals(name)) {
               // Anonymous visitors never get more than the None level.
               level = permissionLevelManager.getDefaultNonePermissionLevel();
            } else{
               Collection siteIds = new Vector();
               siteIds.add(contextSiteId);
               if(type.equals(DBMembershipItem.TYPE_GROUP))
               {
                  level = permissionLevelManager.getDefaultNonePermissionLevel();
               }else{
                  //check cache first:
                  Set allowedFunctions = null;
                  String cacheId = contextSiteId + "/" + name;
                  Object el = allowedFunctionsCache.get(cacheId);
                  if(el == null){
                     allowedFunctions = authzGroupService.getAllowedFunctions(name, siteIds);
                     allowedFunctionsCache.put(cacheId, allowedFunctions);
                  }else{
                     allowedFunctions = (Set) el;
                  }
                  // Roles that can update the site get Owner; everyone else Contributor.
                  if (allowedFunctions.contains(SiteService.SECURE_UPDATE_SITE)){
                     level = permissionLevelManager.getDefaultOwnerPermissionLevel();
                  }else{
                     level = permissionLevelManager.getDefaultContributorPermissionLevel();
                  }
               }
            }
         }
         PermissionLevel noneLevel = permissionLevelManager.getDefaultNonePermissionLevel();
         membershipItem = new DBMembershipItemImpl();
         membershipItem.setName(name);
         membershipItem.setPermissionLevelName((level == null) ? noneLevel.getName() : level.getName() );
         membershipItem.setType(type);
         membershipItem.setPermissionLevel((level == null) ? noneLevel : level);
      }
      return membershipItem;
   }
//Attachment
public Attachment createDFAttachment(String attachId, String name)
{
try
{
Attachment attach = messageManager.createAttachment();
attach.setAttachmentId(attachId);
attach.setAttachmentName(name);
ContentResource cr = contentHostingService.getResource(attachId);
attach.setAttachmentSize((Long.valueOf(cr.getContentLength())).toString());
attach.setCreatedBy(cr.getProperties().getProperty(
cr.getProperties().getNamePropCreator()));
attach.setModifiedBy(cr.getProperties().getProperty(
cr.getProperties().getNamePropModifiedBy()));
attach.setAttachmentType(cr.getContentType());
attach.setAttachmentUrl("/url");
return attach;
}
catch (Exception e)
{
log.error(e.getMessage(), e);
return null;
}
}
public List getDiscussionForumsWithTopics()
{
log.debug("getDiscussionForumsWithTopics()");
return forumManager.getForumByTypeAndContextWithTopicsAllAttachments(typeManager
.getDiscussionForumType());
}
public List getDiscussionForumsWithTopics(String contextId) {
if (log.isDebugEnabled()) log.debug("getDiscussionForumsWithTopics(String contextId)");
if (contextId == null) {
throw new IllegalArgumentException("Null contextId passed to getDiscussionForumsWithTopics");
}
String dfType = typeManager.getDiscussionForumType();
return forumManager.getForumByTypeAndContextWithTopicsAllAttachments(dfType, contextId);
}
/**
 * Returns, for each message id, whether the given user has read it.
 * Queries are batched in chunks of MAX_NUMBER_OF_SQL_PARAMETERS_IN_LIST so
 * the generated SQL IN-list never exceeds the database's parameter limit.
 *
 * @param msgIds message ids to look up; an empty/null list yields an empty map
 * @param userId user to check; when null, every message is reported as read
 * @return map from message id to read status
 */
public Map<Long, Boolean> getReadStatusForMessagesWithId(List<Long> msgIds, String userId)
{
    // Fixed: previous debug message was copy-pasted from another method.
    log.debug("getReadStatusForMessagesWithId()");
    Map<Long, Boolean> msgIdStatusMap = new HashMap<>();
    if (CollectionUtils.isEmpty(msgIds)) {
        log.debug("empty map returns b/c no msgIds passed to getReadStatusForMessagesWithId");
        return msgIdStatusMap;
    }
    if (userId == null) {
        // No user to check against: preserve the existing convention of
        // treating every message as read.
        log.debug("empty user assume that all messages are read");
        for (Long msgId : msgIds) {
            msgIdStatusMap.put(msgId, Boolean.TRUE);
        }
        return msgIdStatusMap;
    }
    if (msgIds.size() < MAX_NUMBER_OF_SQL_PARAMETERS_IN_LIST) {
        return messageManager.getReadStatusForMessagesWithId(msgIds, userId);
    }
    // More ids than fit in one IN-list: issue one query per chunk.
    for (int begIndex = 0; begIndex < msgIds.size(); begIndex += MAX_NUMBER_OF_SQL_PARAMETERS_IN_LIST) {
        int endIndex = Math.min(begIndex + MAX_NUMBER_OF_SQL_PARAMETERS_IN_LIST, msgIds.size());
        // Copy the subList view so the manager gets an independent list.
        List<Long> batch = new ArrayList<>(msgIds.subList(begIndex, endIndex));
        msgIdStatusMap.putAll(messageManager.getReadStatusForMessagesWithId(batch, userId));
    }
    return msgIdStatusMap;
}
/**
 * Returns discussion forums with topics and membership data, but without
 * loading attachments, for the given context.
 */
public List getDiscussionForumsWithTopicsMembershipNoAttachments(String contextId)
{
    log.debug("getDiscussionForumsWithTopicsMembershipNoAttachments()");
    String discussionType = typeManager.getDiscussionForumType();
    return forumManager.getForumByTypeAndContextWithTopicsMembership(discussionType, contextId);
}
/**
 * Returns the messages in the given topic that are pending moderator
 * approval. Thin delegate to the message manager.
 */
public List getPendingMsgsInTopic(Long topicId)
{
    return messageManager.getPendingMsgsInTopic(topicId);
}
/**
 * Returns how many moderated topics the current user may moderate, given the
 * supplied membership items (with permission levels). Delegate to forumManager.
 */
public int getNumModTopicsWithModPermissionByPermissionLevel(List membershipList)
{
    return forumManager.getNumModTopicCurrentUserHasModPermForWithPermissionLevel(membershipList);
}
/**
 * Same as above, but the membership items carry a permission-level name
 * rather than a full permission-level object. Delegate to forumManager.
 */
public int getNumModTopicsWithModPermissionByPermissionLevelName(List membershipList)
{
    return forumManager.getNumModTopicCurrentUserHasModPermForWithPermissionLevelName(membershipList);
}
/**
 * Builds the event-tracking reference string for the given object:
 * toolPrefix + siteId + "/" + object + "/" + currentUserId. The prefix
 * depends on which tool placement is current (forums, messages, or the
 * combined Messages &amp; Forums tool).
 */
private String getEventMessage(Object object) {
    // Default to the stand-alone forums tool when no placement is resolvable.
    String prefix = "/forums";
    Tool tool = toolManager.getCurrentTool();
    if (tool != null) {
        String toolId = tool.getId();
        if (toolId.equals(DiscussionForumService.MESSAGE_CENTER_ID)) {
            prefix = "/messagesAndForums";
        } else if (toolId.equals(DiscussionForumService.MESSAGES_TOOL_ID)) {
            prefix = "/messages";
        }
    }
    return prefix + getContextSiteId() + "/" + object.toString() + "/" + sessionManager.getCurrentSessionUserId();
}
/**
 * Returns the site context id that owns the given topic.
 * NOTE(review): navigation chain will NPE if the topic (or its forum/area)
 * cannot be resolved — callers appear to rely on valid ids; confirm.
 */
public String getContextForTopicById(Long topicId) {
    return getTopicById(topicId).getOpenForum().getArea().getContextId();
}
/** Returns the site context id that owns the given forum. */
public String getContextForForumById(Long forumId) {
    return getForumById(forumId).getArea().getContextId();
}
/** Returns the site context id that owns the given message. */
public String getContextForMessageById(Long messageId) {
    return getMessageById(messageId).getTopic().getOpenForum().getArea().getContextId();
}
/**
 * Returns the id (as a String) of the forum containing the given message.
 * NOTE(review): name breaks the getXxx convention ("ForumIdForMessage");
 * left unchanged because renaming would break external callers.
 */
public String ForumIdForMessage(Long messageId) {
    return getMessageById(messageId).getTopic().getOpenForum().getId().toString();
}
/**
 * Returns the ids of users allowed to act on the given topic, filtered by
 * the requested permission flags:
 * read only  -> users whose membership item grants read;
 * moderate only -> users whose item grants moderatePostings;
 * both flags -> users whose item grants both.
 * Role-based items expand to all users in that site role; group-based items
 * expand to the group's members.
 *
 * @param topicId topic to inspect; must not be null
 * @param checkReadPermission require the read permission
 * @param checkModeratePermission require the moderate-postings permission
 * @return user ids allowed under the requested permissions; empty set when
 *         the current site cannot be resolved
 * @throws IllegalArgumentException when topicId is null
 */
public Set<String> getUsersAllowedForTopic(Long topicId, boolean checkReadPermission, boolean checkModeratePermission) {
    log.debug("getUsersAllowedForTopic(" + topicId + ", " + checkReadPermission + ", " + checkModeratePermission + ")");
    if (topicId == null) {
        throw new IllegalArgumentException("Null topicId passed to getUsersAllowedToReadTopic");
    }
    Set<String> usersAllowed = new HashSet<String>();
    // we need to get all of the membership items associated with this topic
    // first, check to see if it is in the thread
    Set<DBMembershipItem> topicItems = new HashSet<DBMembershipItem>();
    DiscussionTopic topicWithMemberships = (DiscussionTopic)forumManager.getTopicByIdWithMemberships(topicId);
    if (topicWithMemberships != null && topicWithMemberships.getMembershipItemSet() != null) {
        topicItems = topicWithMemberships.getMembershipItemSet();
    }
    Set<Role> rolesInSite = null;
    Set<Group> groupsInSite = new HashSet<Group>();
    Site currentSite;
    // Site is resolved from the current tool placement, so this method only
    // works inside a tool request cycle.
    String siteId = toolManager.getCurrentPlacement().getContext();
    try {
        currentSite = siteService.getSite(siteId);
        // get all of the roles in this site
        rolesInSite = currentSite.getRoles();
        Collection<Group> groups = currentSite.getGroups();
        if (groups != null) {
            groupsInSite = new HashSet<Group>(groups);
        }
    } catch (IdUnusedException iue) {
        log.warn("No site found with id: " + siteId + ". No users returned by getUsersAllowedToReadTopic");
        return new HashSet<String>();
    }
    List<DBMembershipItem> revisedMembershipItemSet = new ArrayList<DBMembershipItem>();
    // we need to get the membership items for the roles separately b/c of default permissions
    if (rolesInSite != null) {
        for (Role role : rolesInSite) {
            DBMembershipItem roleItem = getDBMember(topicItems, role.getId(), DBMembershipItem.TYPE_ROLE);
            if (roleItem != null) {
                revisedMembershipItemSet.add(roleItem);
            }
        }
    }
    // now add in the group perms
    for (Group group : groupsInSite) {
        DBMembershipItem groupItem = getDBMember(topicItems, group.getTitle(), DBMembershipItem.TYPE_GROUP);
        if (groupItem != null) {
            revisedMembershipItemSet.add(groupItem);
        }
    }
    // now we have the membership items. let's see which ones can read
    for (DBMembershipItem membershipItem : revisedMembershipItemSet) {
        // Three disjoint cases keyed off the caller's flags: read-only,
        // moderate-only, or read AND moderate both required.
        if ((checkReadPermission && membershipItem.getPermissionLevel().getRead() && !checkModeratePermission) ||
                (!checkReadPermission && checkModeratePermission && membershipItem.getPermissionLevel().getModeratePostings()) ||
                (checkReadPermission && membershipItem.getPermissionLevel().getRead() && checkModeratePermission && membershipItem.getPermissionLevel().getModeratePostings())) {
            if (membershipItem.getType().equals(DBMembershipItem.TYPE_ROLE)) {
                // add the users who are a member of this role
                log.debug("Adding users in role: " + membershipItem.getName() + " with read: " + membershipItem.getPermissionLevel().getRead());
                Set<String> usersInRole = currentSite.getUsersHasRole(membershipItem.getName());
                usersAllowed.addAll(usersInRole);
            } else if (membershipItem.getType().equals(DBMembershipItem.TYPE_GROUP)) {
                // Group items store the group TITLE, so match by title, not id.
                String groupName = membershipItem.getName();
                for (Group group : groupsInSite) {
                    if (group.getTitle().equals(groupName)) {
                        Set<Member> groupMembers = group.getMembers();
                        if (groupMembers != null) {
                            for (Member member : groupMembers) {
                                usersAllowed.add(member.getUserId());
                            }
                        }
                    }
                }
            }
        }
    }
    return usersAllowed;
}
/**
 * Returns the most recent discussion threads across the given topics,
 * limited to {@code numberOfMessages}. Delegates to the message manager.
 */
public List getRecentDiscussionForumThreadsByTopicIds(List<Long> topicIds, int numberOfMessages)
{
    if (log.isDebugEnabled())
    {
        log.debug("getRecentDiscussionForumMessagesByContext( Size of list is " + topicIds.size() + ")");
    }
    return messageManager.getRecentDiscussionForumThreadsByTopicIds(topicIds, numberOfMessages);
}
/** Returns the attachments of the given topic. Delegate to forumManager. */
public List<Attachment> getTopicAttachments(Long topicId) {
    return forumManager.getTopicAttachments(topicId);
}
/** Returns all topics in the given site context. Delegate to forumManager. */
public List<Topic> getTopicsInSite(final String contextId)
{
    return forumManager.getTopicsInSite(contextId);
}
/** Returns the anonymous-posting topics in the given site context. */
public List<Topic> getAnonymousTopicsInSite(final String contextId)
{
    return forumManager.getAnonymousTopicsInSite(contextId);
}
/** Returns true when the site contains at least one anonymous topic. */
public boolean isSiteHasAnonymousTopics(final String contextId)
{
    return forumManager.isSiteHasAnonymousTopics(contextId);
}
// Plain accessor pair for the injected MemoryService.
public MemoryService getMemoryService() {
    return memoryService;
}
public void setMemoryService(MemoryService memoryService) {
    this.memoryService = memoryService;
}
/**
 * Returns the group allowed the named permission on a group-restricted forum.
 * Delegate to forumManager.
 */
public String getAllowedGroupForRestrictedForum(final Long forumId, final String permissionName) {
    return forumManager.getAllowedGroupForRestrictedForum(forumId, permissionName);
}
/**
 * Returns the group allowed the named permission on a group-restricted topic.
 * Delegate to forumManager.
 */
public String getAllowedGroupForRestrictedTopic(final Long topicId, final String permissionName) {
    return forumManager.getAllowedGroupForRestrictedTopic(topicId, permissionName);
}
/**
 * Builds the LRS (xAPI) statement for a post action by the current user.
 * Delegates to LRSDelegate with the current session user id.
 */
@Override
public Optional<LRS_Statement> getStatementForUserPosted(String subject, SAKAI_VERB sakaiVerb) {
    return LRSDelegate.getStatementForUserPosted(learningResourceStoreService, sessionManager.getCurrentSessionUserId(), subject, sakaiVerb);
}
/**
 * Builds the LRS statement for a read/view action by the current user.
 */
@Override
public Optional<LRS_Statement> getStatementForUserReadViewed(String subject, String target) {
    return LRSDelegate.getStatementForUserReadViewed(learningResourceStoreService, sessionManager.getCurrentSessionUserId(), subject, target);
}
/**
 * Builds the LRS statement for a grading event on the given student.
 */
@Override
public Optional<LRS_Statement> getStatementForGrade(String studentUid, String forumTitle, double score) {
    return LRSDelegate.getStatementForGrade(learningResourceStoreService, userDirectoryService, studentUid, forumTitle, score);
}
}
|
/*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.example.roomwordssample;
import androidx.sqlite.db.SupportSQLiteDatabase;
import androidx.room.Database;
import androidx.room.Room;
import androidx.room.RoomDatabase;
import android.content.Context;
import android.os.AsyncTask;
import androidx.annotation.NonNull;
/**
 * WordRoomDatabase. Includes code to create the database.
 * After the app creates the database, all further interactions
 * with it happen through the WordViewModel.
 */
@Database(entities = {Word.class}, version = 2, exportSchema = false)
public abstract class WordRoomDatabase extends RoomDatabase {

    public abstract WordDao wordDao();

    // volatile is required for correct double-checked locking: without it a
    // second thread may observe a non-null but not-yet-fully-constructed
    // INSTANCE reference.
    private static volatile WordRoomDatabase INSTANCE;

    /**
     * Returns the singleton database, lazily creating it on first use.
     * Thread-safe via double-checked locking on the class object.
     */
    public static WordRoomDatabase getDatabase(final Context context) {
        if (INSTANCE == null) {
            synchronized (WordRoomDatabase.class) {
                if (INSTANCE == null) {
                    // Create database here.
                    INSTANCE = Room.databaseBuilder(context.getApplicationContext(),
                            WordRoomDatabase.class, "word_database")
                            // Wipes and rebuilds instead of migrating if no Migration object.
                            // Migration is not part of this practical.
                            .fallbackToDestructiveMigration()
                            .addCallback(sRoomDatabaseCallback)
                            .build();
                }
            }
        }
        return INSTANCE;
    }

    // This callback is called when the database has opened.
    // In this case, use PopulateDbAsync to populate the database
    // with the initial data set if the database has no entries.
    private static final RoomDatabase.Callback sRoomDatabaseCallback =
            new RoomDatabase.Callback() {
                @Override
                public void onOpen(@NonNull SupportSQLiteDatabase db) {
                    super.onOpen(db);
                    new PopulateDbAsync(INSTANCE).execute();
                }
            };

    // Populate the database with the initial data set
    // only if the database has no entries.
    private static class PopulateDbAsync extends AsyncTask<Void, Void, Void> {
        private final WordDao mDao;
        // Initial data set; final so it cannot be reassigned.
        private static final String[] WORDS = {"dolphin", "crocodile", "cobra", "elephant", "goldfish",
                "tiger", "snake"};

        PopulateDbAsync(WordRoomDatabase db) {
            mDao = db.wordDao();
        }

        @Override
        protected Void doInBackground(final Void... params) {
            // If we have no words, then create the initial list of words.
            if (mDao.getAnyWord().length < 1) {
                for (String w : WORDS) {
                    mDao.insert(new Word(w));
                }
            }
            return null;
        }
    }
}
|
package com.github.tdurieux.repair.maven.plugin;
import org.apache.maven.plugin.Mojo;
import java.io.File;
/**
 * Integration test for the {@code npefix} Maven goal. Each test builds the
 * bundled example project with a real Maven invocation and cleans it up
 * afterwards.
 */
public class NPEfixMojoTest extends BetterAbstractMojoTestCase {
    private final String projectPath = "src/test/resources/projects/example2/";

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Compile and test the sample project so the npefix goal has built
        // classes and test results to work with.
        runMaven("clean", "test");
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        runMaven("clean");
    }

    /**
     * Runs {@code mvn <goals>} in the sample project directory and waits for
     * completion. Uses ProcessBuilder instead of Runtime.exec(String): the
     * command is not re-tokenized, and inheritIO() prevents the child from
     * blocking on a full stdout/stderr pipe buffer (the original code never
     * drained those streams).
     */
    private void runMaven(String... goals) throws Exception {
        String[] command = new String[goals.length + 1];
        command[0] = "mvn";
        System.arraycopy(goals, 0, command, 1, goals.length);
        Process process = new ProcessBuilder(command)
                .directory(new File(projectPath))
                .inheritIO()
                .start();
        process.waitFor();
    }

    public void testNPEFixRepair() throws Exception {
        File f = getTestFile(projectPath + "pom.xml");
        Mojo mojo = lookupConfiguredMojo(f, "npefix");
        assertNotNull(mojo);
        assertTrue("Wrong class: " + mojo, mojo instanceof NPEFixMojo);
        NPEFixMojo repair = (NPEFixMojo) mojo;
        repair.execute();
        // At least one repair candidate must be produced.
        assertTrue(repair.getResult().size() > 0);
    }
}
|
package com.github.t1.annotations.impl;
/** Thrown when reflection-style lookup of a method on a class fails. */
class MethodNotFoundException extends RuntimeException {
    MethodNotFoundException(Class<?> owner, String methodName, String[] argTypes) {
        this(owner, methodName, argTypes, null);
    }

    MethodNotFoundException(Class<?> owner, String methodName, String[] argTypes, Throwable cause) {
        super(describe(owner, methodName, argTypes), cause);
    }

    // Message format: "no method <signature> in <class>".
    private static String describe(Class<?> owner, String methodName, String[] argTypes) {
        return "no method " + Utils.signature(methodName, argTypes) + " in " + owner;
    }
}
/** Thrown when reflection-style lookup of a field on a class fails. */
class FieldNotFoundException extends RuntimeException {
    FieldNotFoundException(String field, Class<?> owner) {
        this(field, owner, null);
    }

    FieldNotFoundException(String field, Class<?> owner, Throwable cause) {
        // Message format: "no field '<name>' in <class>".
        super(String.format("no field '%s' in %s", field, owner), cause);
    }
}
|
package com.examples.littledrivers;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    // Template sanity check generated by Android Studio; verifies the JUnit
    // harness runs on the host JVM.
    @Test
    public void addition_isCorrect() {
        assertEquals(4, 2 + 2);
    }
}
|
package dataset;
import benchmark.internal.Benchmark;
import benchmark.tool.BasicClass;
// NOTE(review): generated pointer-analysis benchmark input. The global
// roots below are allocated/wired in Test40.main and then asserted with
// Benchmark.test; the exact access patterns ARE the test data, so the code
// must not be "cleaned up".
class Test40Lib{
public static Test40Class1 local1;
public static Test40Class1 local2;
public static Test40Class1 local3;
public static Test40Class1 local4;
public static Test40Class1 local5;
public static Test40Class2 local6;
public static Test40Class3 local7;
}
// Generated benchmark class: field/method shapes are part of the test input.
class Test40Class1 extends BasicClass {
public Test40Class2 field1;
public Test40Class2 field2;
// Recursion bounded by depth; base case returns the global root local1.
public Test40Class1 method1(int depth) {
if (depth == 0) return Test40Lib.local1;
this.field2.field4.method4(this.field1.field4,depth-1);
return this.field1.field4.field1;
}
}
// Generated benchmark class: field/method shapes are part of the test input.
class Test40Class2 extends Test40Class1 {
public Test40Class3 field3;
public Test40Class3 field4;
// Intentionally empty apart from the depth guard (generated no-op).
public static void method2(Test40Class2 arg0, int depth) {
if (depth == 0) return;
}
// Base case returns global root local7; otherwise chases a field chain.
public Test40Class3 method3(Test40Class1 arg0, int depth) {
if (depth == 0) return Test40Lib.local7;
this.field4.field4.field4.method1(depth-1);
return arg0.field1.field2.field3;
}
}
// Generated benchmark class: field/method shapes are part of the test input.
class Test40Class3 extends Test40Class2 {
public Test40Class2 field5;
public Test40Class1 field6;
// Base case returns global root local7 (a Test40Class3, assignable here).
public static Test40Class2 method4(Test40Class2 arg0, int depth) {
if (depth == 0) return Test40Lib.local7;
return arg0.field4.field1;
}
// Intentionally empty apart from the depth guard (generated no-op).
public void method5(Test40Class2 arg0, int depth) {
if (depth == 0) return;
}
}
// Generated pointer-analysis benchmark driver: allocates labelled objects,
// wires a fixed heap graph, mutates it through branches/loops that a static
// analysis must reason about, then reports the points-to results via
// Benchmark.test/print. Statement order and access paths are the test input.
public class Test40{
public static void main(String[] args) {
int inputValue = 0;
// Allocation sites, labelled 1..7 for the analysis.
Benchmark.alloc(1);
Test40Lib.local1 = new Test40Class1();
Benchmark.alloc(2);
Test40Lib.local2 = new Test40Class1();
Benchmark.alloc(3);
Test40Lib.local3 = new Test40Class1();
Benchmark.alloc(4);
Test40Lib.local4 = new Test40Class1();
Benchmark.alloc(5);
Test40Lib.local5 = new Test40Class1();
Benchmark.alloc(6);
Test40Lib.local6 = new Test40Class2();
Benchmark.alloc(7);
Test40Lib.local7 = new Test40Class3();
// Initial heap wiring.
Test40Lib.local1.field1 = Test40Lib.local7;
Test40Lib.local1.field2 = Test40Lib.local7;
Test40Lib.local2.field1 = Test40Lib.local7;
Test40Lib.local2.field2 = Test40Lib.local7;
Test40Lib.local3.field1 = Test40Lib.local7;
Test40Lib.local3.field2 = Test40Lib.local7;
Test40Lib.local4.field1 = Test40Lib.local6;
Test40Lib.local4.field2 = Test40Lib.local6;
Test40Lib.local5.field1 = Test40Lib.local7;
Test40Lib.local5.field2 = Test40Lib.local6;
Test40Lib.local6.field3 = Test40Lib.local7;
Test40Lib.local6.field4 = Test40Lib.local7;
Test40Lib.local6.field1 = Test40Lib.local7;
Test40Lib.local6.field2 = Test40Lib.local7;
Test40Lib.local7.field5 = Test40Lib.local7;
Test40Lib.local7.field6 = Test40Lib.local4;
Test40Lib.local7.field3 = Test40Lib.local7;
Test40Lib.local7.field4 = Test40Lib.local7;
Test40Lib.local7.field1 = Test40Lib.local7;
Test40Lib.local7.field2 = Test40Lib.local7;
// Branch/loop mutations the analysis must model (inputValue is fixed at 0,
// but a sound analysis must consider all paths).
if (inputValue==1) {
Test40Lib.local3.field1.field3.field5=Test40Lib.local7.field1.field4.field2;
}else {
for (int local8 = 0; local8<=2; local8 += 1) {
Test40Lib.local6.field4.field2=Test40Lib.local6.field3.field6.field2;
}
}
for (int local9 = 0; local9<=2; local9 += 1) {
if (inputValue<4) {
if (inputValue<=2) {
if (inputValue>2) {
if (inputValue>=3) {
}
}else {
for (int local10 = 0; local10<=2; local10 += 1) {
}
}
}else {
Test40Lib.local7.field3.field4.field2=Test40Lib.local7.field2.field4.field5;
}
}else {
for (int local11 = 0; local11<=3; local11 += 1) {
for (int local12 = 0; local12<=3; local12 += 1) {
Test40Lib.local5.field1.field2.field1=Test40Lib.local4.field2.field4.field3.method4(Test40Lib.local7.field5.field3.field5,3);
}
}
}
}
if (inputValue<3) {
for (int local13 = 0; local13<=2; local13 += 1) {
for (int local14 = 0; local14<=1; local14 += 1) {
if (inputValue<2) {
for (int local15 = 0; local15<=0; local15 += 1) {
}
}else {
Test40Lib.local7.field3.field6.field2=Test40Lib.local4.field1.field1.field3.method4(Test40Lib.local6.field3.field6.field1,3);
}
}
}
}
// Report the final points-to facts for each labelled expression.
Benchmark.test(1,Test40Lib.local1);
Benchmark.test(2,Test40Lib.local2);
Benchmark.test(3,Test40Lib.local3);
Benchmark.test(4,Test40Lib.local4);
Benchmark.test(5,Test40Lib.local5);
Benchmark.test(6,Test40Lib.local6);
Benchmark.test(7,Test40Lib.local7);
Benchmark.test(8,Test40Lib.local1.field1);
Benchmark.test(9,Test40Lib.local1.field2);
Benchmark.test(10,Test40Lib.local2.field1);
Benchmark.test(11,Test40Lib.local2.field2);
Benchmark.test(12,Test40Lib.local3.field1);
Benchmark.test(13,Test40Lib.local3.field2);
Benchmark.test(14,Test40Lib.local4.field1);
Benchmark.test(15,Test40Lib.local4.field2);
Benchmark.test(16,Test40Lib.local5.field1);
Benchmark.test(17,Test40Lib.local5.field2);
Benchmark.test(18,Test40Lib.local6.field3);
Benchmark.test(19,Test40Lib.local6.field4);
Benchmark.test(20,Test40Lib.local6.field1);
Benchmark.test(21,Test40Lib.local6.field2);
Benchmark.test(22,Test40Lib.local7.field5);
Benchmark.test(23,Test40Lib.local7.field6);
Benchmark.test(24,Test40Lib.local7.field3);
Benchmark.test(25,Test40Lib.local7.field4);
Benchmark.test(26,Test40Lib.local7.field1);
Benchmark.test(27,Test40Lib.local7.field2);
Benchmark.print();
}
}
|
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.github.git;
import static com.google.gerrit.entities.RefNames.REFS_HEADS;
import com.google.common.collect.Lists;
import com.google.gerrit.entities.Account;
import com.google.gerrit.entities.Change.Id;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.Project.NameKey;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.UnprocessableEntityException;
import com.google.gerrit.server.account.AccountImporter;
import com.google.gerrit.server.account.externalids.ExternalId;
import com.google.gerrit.server.account.externalids.ExternalIds;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectState;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.googlesource.gerrit.plugins.github.git.GitJobStatus.Code;
import com.googlesource.gerrit.plugins.github.oauth.GitHubLogin;
import com.googlesource.gerrit.plugins.github.oauth.ScopedProvider;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import org.eclipse.jgit.api.FetchCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
import org.eclipse.jgit.api.errors.TransportException;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.RefSpec;
import org.kohsuke.github.GHPullRequest;
import org.kohsuke.github.GHPullRequestCommitDetail;
import org.kohsuke.github.GHRepository;
import org.kohsuke.github.GHUser;
import org.kohsuke.github.GitUser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Background job that imports a GitHub pull request into Gerrit as one change
 * per PR commit. Implements {@link ProgressMonitor} so JGit fetch progress is
 * reflected in the job status.
 */
public class PullRequestImportJob implements GitJob, ProgressMonitor {

  public interface Factory {
    PullRequestImportJob create(
        @Assisted("index") int jobIndex,
        @Assisted("organisation") String organisation,
        @Assisted("name") String repository,
        @Assisted int pullRequestId,
        @Assisted PullRequestImportType importType);
  }

  private static final Logger LOG = LoggerFactory.getLogger(PullRequestImportJob.class);
  // Gerrit topic assigned to the imported changes, e.g. "GitHub #42".
  private static final String TOPIC_FORMAT = "GitHub #%d";

  private final GitHubRepository ghRepository;
  private final GitHubLogin ghLogin;
  private final String organisation;
  private final String repoName;
  private final int prId;
  private final GitRepositoryManager repoMgr;
  private final AuthConfig authConfig;
  private final int jobIndex;
  private final ExternalIds externalIds;
  private final PullRequestCreateChange createChange;
  private final Optional<Project> project;
  private final GitJobStatus status;
  private final AccountImporter accountImporter;
  // Written by cancel() from another thread, read between import phases;
  // volatile guarantees the cancellation is seen promptly.
  private volatile boolean cancelRequested;

  @Inject
  public PullRequestImportJob(
      GitRepositoryManager repoMgr,
      PullRequestCreateChange createChange,
      ProjectCache projectCache,
      AccountImporter accountImporter,
      GitHubRepository.Factory gitHubRepoFactory,
      ScopedProvider<GitHubLogin> ghLoginProvider,
      ExternalIds externalIds,
      AuthConfig authConfig,
      @Assisted("index") int jobIndex,
      @Assisted("organisation") String organisation,
      @Assisted("name") String repoName,
      @Assisted int pullRequestId) {
    this.authConfig = authConfig;
    this.jobIndex = jobIndex;
    this.repoMgr = repoMgr;
    this.ghLogin = ghLoginProvider.get();
    this.organisation = organisation;
    this.repoName = repoName;
    this.prId = pullRequestId;
    this.createChange = createChange;
    this.project = fetchGerritProject(projectCache, organisation, repoName);
    this.ghRepository = gitHubRepoFactory.create(organisation, repoName);
    this.status = new GitJobStatus(jobIndex);
    this.accountImporter = accountImporter;
    this.externalIds = externalIds;
  }

  /** Looks up the Gerrit project "organisation/repoName" in the project cache. */
  private Optional<Project> fetchGerritProject(
      ProjectCache projectCache, String fetchOrganisation, String fetchRepoName) {
    NameKey projectNameKey = Project.NameKey.parse(fetchOrganisation + "/" + fetchRepoName);
    return projectCache.get(projectNameKey).map(ProjectState::getProject);
  }

  /**
   * Entry point: fetches PR metadata and refs from GitHub, then creates one
   * Gerrit change per PR commit. Cancellation is honoured between phases;
   * any failure marks the job FAILED (Throwable is deliberately caught so a
   * single broken job cannot kill the executor thread).
   */
  @Override
  public void run() {
    try {
      status.update(GitJobStatus.Code.SYNC);
      exitWhenCancelled();
      GHPullRequest pr = fetchGitHubPullRequestInfo();
      exitWhenCancelled();
      try (Repository gitRepo =
          repoMgr.openRepository(Project.nameKey(organisation + "/" + repoName))) {
        exitWhenCancelled();
        fetchGitHubPullRequest(gitRepo, pr);
        exitWhenCancelled();
        List<Id> changeIds = addPullRequestToChange(pr, gitRepo);
        status.update(
            GitJobStatus.Code.COMPLETE, "Imported", "PullRequest imported as Changes " + changeIds);
      }
    } catch (JobCancelledException e) {
      status.update(GitJobStatus.Code.CANCELLED);
    } catch (Throwable e) {
      // Parameterized SLF4J call instead of string concatenation.
      LOG.error(
          "Import of pull request {} into repository {}/{} failed",
          prId,
          organisation,
          repoName,
          e);
      status.update(GitJobStatus.Code.FAILED, "Failed", e.getLocalizedMessage());
    }
  }

  /**
   * Walks the PR commits (oldest first) and inserts each as a Gerrit change
   * on the PR's base branch, attributed to the resolved (or newly imported)
   * Gerrit account of the author.
   *
   * @return ids of the changes that were created
   */
  private List<Id> addPullRequestToChange(GHPullRequest pr, Repository gitRepo) throws Exception {
    String destinationBranch = REFS_HEADS + pr.getBase().getRef();
    List<Id> prChanges = Lists.newArrayList();
    ObjectId baseObjectId = ObjectId.fromString(pr.getBase().getSha());
    ObjectId prHeadObjectId = ObjectId.fromString(pr.getHead().getSha());
    try (RevWalk walk = new RevWalk(gitRepo)) {
      // Walk only commits reachable from the PR head but not the base,
      // oldest first so changes are created in commit order.
      walk.markUninteresting(walk.lookupCommit(baseObjectId));
      walk.markStart(walk.lookupCommit(prHeadObjectId));
      walk.sort(RevSort.REVERSE);
      int patchNr = 1;
      for (GHPullRequestCommitDetail ghCommitDetail : pr.listCommits()) {
        status.update(
            Code.SYNC,
            "Patch #" + patchNr,
            "Patch#" + patchNr + ": Inserting PullRequest into Gerrit");
        RevCommit revCommit = walk.parseCommit(ObjectId.fromString(ghCommitDetail.getSha()));
        GHUser prUser = pr.getUser();
        GitUser commitAuthor = ghCommitDetail.getCommit().getAuthor();
        GitHubUser gitHubUser = GitHubUser.from(prUser, commitAuthor);
        Account.Id pullRequestOwner = getOrRegisterAccount(gitHubUser);
        if (project.isPresent()) {
          Id changeId =
              createChange.addCommitToChange(
                  project.get(),
                  gitRepo,
                  destinationBranch,
                  pullRequestOwner,
                  revCommit,
                  getChangeMessage(pr),
                  // int is auto-boxed by String.format; the deprecated
                  // new Integer(...) constructor is not needed.
                  String.format(TOPIC_FORMAT, pr.getNumber()));
          if (changeId != null) {
            prChanges.add(changeId);
          }
        }
      }
      return prChanges;
    }
  }

  /** Resolves the author's Gerrit account, importing it when unknown. */
  private com.google.gerrit.entities.Account.Id getOrRegisterAccount(GitHubUser author)
      throws BadRequestException, ResourceConflictException, UnprocessableEntityException,
          IOException, ConfigInvalidException {
    return getOrRegisterAccount(author.getLogin(), author.getName(), author.getEmail());
  }

  /**
   * Returns the account id bound to the given login via the gerrit external-id
   * scheme, or imports a fresh account when none exists.
   */
  private com.google.gerrit.entities.Account.Id getOrRegisterAccount(
      String login, String name, String email)
      throws BadRequestException, ResourceConflictException, UnprocessableEntityException,
          IOException, ConfigInvalidException {
    Optional<ExternalId> gerritId = externalIdByScheme(ExternalId.SCHEME_GERRIT, login);
    if (gerritId.isPresent()) {
      return gerritId.get().accountId();
    }
    return accountImporter.importAccount(login, name, email);
  }

  /** Looks up an external id; lookup errors are logged and treated as absent. */
  private Optional<ExternalId> externalIdByScheme(String scheme, String id) {
    try {
      return externalIds.get(
          ExternalId.Key.create(scheme, id, authConfig.isUserNameCaseInsensitive()));
    } catch (IOException e) {
      LOG.error("Unable to get external id for {}:{}", scheme, id, e);
      return Optional.empty();
    }
  }

  /** Builds the Gerrit change message: PR URL, title and body. */
  private String getChangeMessage(GHPullRequest pr) {
    return "GitHub Pull Request: "
        + pr.getHtmlUrl()
        + "\n\n"
        + pr.getTitle()
        + "\n\n"
        + pr.getBody();
  }

  /** Aborts the job via exception when cancel() has been requested. */
  private void exitWhenCancelled() throws JobCancelledException {
    if (cancelRequested) {
      throw new JobCancelledException();
    }
  }

  /** Fetches the PR head ref from GitHub into refs/remotes/origin/pr/&lt;n&gt;. */
  private void fetchGitHubPullRequest(Repository gitRepo, GHPullRequest pr)
      throws GitAPIException, InvalidRemoteException, TransportException {
    status.update(Code.SYNC, "Fetching", "Fetching PullRequests from GitHub");
    try (Git git = Git.wrap(gitRepo)) {
      FetchCommand fetch = git.fetch();
      fetch.setRemote(ghRepository.getCloneUrl());
      fetch.setRefSpecs(
          new RefSpec(
              "+refs/pull/" + pr.getNumber() + "/head:refs/remotes/origin/pr/" + pr.getNumber()));
      fetch.setProgressMonitor(this);
      fetch.setCredentialsProvider(ghRepository.getCredentialsProvider());
      fetch.call();
    }
  }

  /** Retrieves the pull request metadata from the GitHub API. */
  private GHPullRequest fetchGitHubPullRequestInfo() throws IOException {
    status.update(Code.SYNC, "Fetch GitHub", "Getting PullRequest info");
    return getGHRepository().getPullRequest(prId);
  }

  @Override
  public GitJobStatus getStatus() {
    return status;
  }

  @Override
  public int getIndex() {
    return jobIndex;
  }

  @Override
  public String getOrganisation() {
    return organisation;
  }

  /**
   * Resolves the GitHub repository: directly under the logged-in user when the
   * organisation is that user, otherwise via the organisation.
   */
  public GHRepository getGHRepository() throws IOException {
    if (ghLogin.getMyself().getLogin().equals(organisation)) {
      return ghLogin.getMyself().getRepository(repoName);
    }
    return ghLogin.getHub().getOrganization(organisation).getRepository(repoName);
  }

  @Override
  public void cancel() {
    cancelRequested = true;
  }

  @Override
  public String getRepository() {
    return repoName;
  }

  // ProgressMonitor callbacks: surface JGit fetch progress as job status.
  @Override
  public void beginTask(String taskName, int numSteps) {
    status.update(Code.SYNC, taskName, taskName + " ...");
  }

  @Override
  public void endTask() {}

  @Override
  public boolean isCancelled() {
    return cancelRequested;
  }

  @Override
  public void start(int tot) {}

  @Override
  public void update(int progress) {}
}
|
package org.ovirt.engine.ui.uicommonweb.models.gluster;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterGeoRepSessionDetails;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.Model;
import org.ovirt.engine.ui.uicompat.PropertyChangedEventArgs;
/**
 * UI model for gluster geo-replication session details: holds the session
 * summary list and raises a property-change event whenever the selected
 * summary row changes.
 */
public class VolumeGeoRepSessionDetailsModel extends Model {

    private ListModel<EntityModel<GlusterGeoRepSessionDetails>> geoRepSessionSummary;

    public VolumeGeoRepSessionDetailsModel() {
        setGeoRepSessionSummary(new ListModel<EntityModel<GlusterGeoRepSessionDetails>>());
        // Notify the view when a summary row is selected.
        getGeoRepSessionSummary().getSelectedItemChangedEvent().addListener((event, source, eventArgs) -> {
            boolean hasSelection = geoRepSessionSummary != null && geoRepSessionSummary.getSelectedItem() != null;
            if (hasSelection) {
                onPropertyChanged(new PropertyChangedEventArgs("selectedSessionSummaryRow"));//$NON-NLS-1$
            }
        });
    }

    public ListModel<EntityModel<GlusterGeoRepSessionDetails>> getGeoRepSessionSummary() {
        return geoRepSessionSummary;
    }

    public void setGeoRepSessionSummary(ListModel<EntityModel<GlusterGeoRepSessionDetails>> geoRepSessionSummary) {
        this.geoRepSessionSummary = geoRepSessionSummary;
    }
}
|
package seedu.address.logic.commands;
import static java.util.Objects.requireNonNull;
import static seedu.address.logic.parser.CliSyntax.PREFIX_SORT_ORDER;
import static seedu.address.logic.parser.CliSyntax.PREFIX_SORT_TYPE;
import java.util.Comparator;
import java.util.logging.Logger;
import seedu.address.commons.core.LogsCenter;
import seedu.address.logic.commands.exceptions.CommandException;
import seedu.address.model.Model;
import seedu.address.model.information.Person;
/**
 * Sorts the persons using the specified comparator.
 */
public class SortPersonCommand extends Command {
    public static final String COMMAND_WORD = "sort can";
    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Sorts the candidates in the address book"
            + "according to the specified order given by the user input. \n"
            + "Parameters: "
            + PREFIX_SORT_TYPE + "FIELD_TO_BE_SORTED "
            + PREFIX_SORT_ORDER + "ORDER_TO_SORT\n"
            + "Example: " + COMMAND_WORD + " "
            + PREFIX_SORT_TYPE + "exp "
            + PREFIX_SORT_ORDER + "asc ";
    public static final String MESSAGE_SUCCESS = "Successfully sorted list of candidates ";
    public static final String MESSAGE_SORT_TYPE_INVALID = "Invalid Sort Type.\n"
            + "Sort type must be one of n, exp, sal, bl or doa";

    private static final Logger logger = LogsCenter.getLogger(SortPersonCommand.class);

    private final Comparator<Person> comparator;
    private final String sortMessage;

    /**
     * Creates a SortPersonCommand, reversing the comparator when a
     * descending sort is requested and recording the user-facing message.
     * NOTE(review): the message concatenates the comparator's toString
     * directly with "in ascending/descending order." — confirm the
     * comparator's toString ends with a space, otherwise the words run
     * together.
     */
    public SortPersonCommand(Comparator<Person> comparator, Boolean isAscending) {
        assert (comparator != null);
        if (isAscending) {
            this.comparator = comparator;
            this.sortMessage = comparator.toString() + "in ascending order.";
        } else {
            this.comparator = comparator.reversed();
            this.sortMessage = comparator.toString() + "in descending order.";
        }
    }

    @Override
    public CommandResult execute(Model model) throws CommandException {
        requireNonNull(model);
        model.updateSortedPersonList(comparator);
        logger.info("Sorting People");
        return new CommandResult(MESSAGE_SUCCESS + this.sortMessage, Person.TAB_NAME);
    }

    @Override
    public boolean equals(Object other) {
        // Same reference is trivially equal; otherwise compare comparators.
        if (other == this) {
            return true;
        }
        return other instanceof SortPersonCommand
                && comparator.equals(((SortPersonCommand) other).comparator);
    }
}
|
package jds;
import java.util.Enumeration;
import java.io.Serializable;
/**
 * Base interface for the jds Container hierarchy;
 * for use with book
 * <a href="http://www.cs.orst.edu/~budd/books/jds/">Classic Data Structures
 * in Java</a>
 * by <a href="http://www.cs.orst.edu/~budd">Timothy A Budd</a>,
 * published by <a href="http://www.awl.com">Addison-Wesley</a>, 2001.
 *
 * @author Timothy A. Budd
 * @version 1.1 September 1999
 * @see java.util.Enumeration
 * @see java.io.Serializable
 */
public interface Collection extends Serializable {
    /**
     * Determines whether the collection is empty.
     *
     * @return true if the collection contains no elements
     */
    public boolean isEmpty ();

    /**
     * Determines the number of elements in the collection.
     *
     * @return number of elements in collection as integer
     */
    public int size ();

    /**
     * Yields an enumerator over the collection's elements.
     *
     * @return an <code>Enumeration</code> that will yield the elements of the collection
     * @see java.util.Enumeration
     */
    public Enumeration elements ();
}
|
package ch.johannes.cg;
import ch.johannes.descriptor.ClassDescriptor;
import ch.johannes.descriptor.FieldDescriptor;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeSpec;
import javax.lang.model.element.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * Generates Java source for a simple bean (private fields plus public getters
 * and setters) from a {@code ClassDescriptor}, using JavaPoet builders.
 */
public class BeanSourceGenerator {

    /**
     * Builds the bean source text for the given class descriptor.
     *
     * @param classDescriptor describes the target class, its package and its fields.
     * @return the generated Java source as a string.
     */
    public String generateCode(ClassDescriptor classDescriptor) {
        List<FieldSpec> fieldSpecs = new ArrayList<>();
        List<MethodSpec> accessors = new ArrayList<>();
        for (FieldDescriptor field : classDescriptor.getFields()) {
            String fieldName = field.getFieldName();
            ClassName fieldType = ClassName.get(
                    field.getFieldType().getClassPackage().getPackageName(),
                    field.getFieldType().getClassName().getClassName());
            // Backing field.
            fieldSpecs.add(FieldSpec.builder(fieldType, fieldName)
                    .addModifiers(Modifier.PRIVATE)
                    .build());
            // Getter, named via BeanUtil convention.
            accessors.add(MethodSpec.methodBuilder(BeanUtil.createGetterName(fieldName))
                    .addModifiers(Modifier.PUBLIC)
                    .returns(fieldType)
                    .addStatement("return this.$L", fieldName)
                    .build());
            // Setter, named via BeanUtil convention.
            accessors.add(MethodSpec.methodBuilder(BeanUtil.createSetterName(fieldName))
                    .addModifiers(Modifier.PUBLIC)
                    .addParameter(fieldType, fieldName)
                    .returns(void.class)
                    .addStatement("this.$L = $L", fieldName, fieldName)
                    .build());
        }
        TypeSpec beanType = TypeSpec.classBuilder(
                        classDescriptor.getTypeDescriptor().getClassName().getClassName())
                .addModifiers(Modifier.PUBLIC)
                .addFields(fieldSpecs)
                .addMethods(accessors)
                .build();
        return JavaFile.builder(
                        classDescriptor.getTypeDescriptor().getClassPackage().getPackageName(),
                        beanType)
                .build()
                .toString();
    }
}
|
/**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.customdatatype.datatype;
import org.apache.commons.lang3.StringUtils;
import org.openmrs.Provider;
import org.openmrs.api.context.Context;
import org.springframework.stereotype.Component;
/**
 * Custom datatype whose serialized form is a {@link Provider} UUID.
 *
 * @since 2.0.0
 */
@Component
public class ProviderDatatype extends BaseMetadataDatatype<Provider> {

    /**
     * Looks up the provider whose UUID matches the serialized value.
     *
     * @see org.openmrs.customdatatype.SerializingCustomDatatype#deserialize(String)
     */
    @Override
    public Provider deserialize(String uuid) {
        // A blank serialized value deserializes to "no provider".
        return StringUtils.isBlank(uuid) ? null : Context.getProviderService().getProviderByUuid(uuid);
    }
}
|
package com.ge.ceed.domeapi;
import mit.cadlab.dome3.api.DomeModel;
/**
 * Immutable summary of a DOME model: name, id, description, last-modified
 * timestamp and its location (server plus folder).
 *
 * @deprecated retained for backwards compatibility.
 */
@Deprecated
public class ModelDef implements Comparable<ModelDef> {

    private final String name;
    private final String guid;
    private final String desc;
    private final long dateModified;
    private final String folder;
    private final Server server;

    /**
     * Captures the identifying details of the given DOME model.
     *
     * @param dm the model to describe.
     * @param server the server the model lives on.
     * @param folder the folder path of the model on that server.
     */
    public ModelDef(DomeModel dm, Server server, String folder) {
        this.name = dm.getModelName();
        this.guid = dm.getModelId();
        this.desc = dm.getDescription();
        this.dateModified = dm.getLastModified().getTime();
        this.folder = folder;
        this.server = server;
    }

    public String getName() {
        return name;
    }

    public String getGuid() {
        return guid;
    }

    public String getDesc() {
        return desc;
    }

    public long getDateModified() {
        return dateModified;
    }

    public String getFolder() {
        return folder;
    }

    public Server getServer() {
        return server;
    }

    /**
     * Orders model definitions by their globally unique id.
     * NOTE(review): compareTo is not consistent with equals/hashCode, which
     * keep identity semantics — confirm this is acceptable for sorted sets.
     */
    @Override
    public int compareTo(ModelDef other) {
        return guid.compareTo(other.guid);
    }
}
|
package br.project.dao;
import java.util.List;
import br.project.transferobject.FotoTO;
/**
 * Data-access contract for photo records ({@link FotoTO}).
 */
public interface IFotoDAO {

    /** Persists a new photo record. */
    void insert(FotoTO to) throws DAOException;

    /** Updates an existing photo record. */
    void update(FotoTO to) throws DAOException;

    /** Removes the given photo record. */
    void delete(FotoTO to) throws DAOException;

    /** Loads the photo record matching the primary key carried by {@code to}. */
    FotoTO findByPrimaryKey(FotoTO to) throws DAOException;

    /**
     * Lists all photo records.
     *
     * @return every stored {@link FotoTO}. The raw {@code List} return type was
     *         generified; existing implementations returning a raw {@code List}
     *         still override this method (with an unchecked warning).
     */
    List<FotoTO> getCollection() throws DAOException;
}
|
//
// Este arquivo foi gerado pela Arquitetura JavaTM para Implementação de Referência (JAXB) de Bind XML, v2.2.8-b130911.1802
// Consulte <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Todas as modificações neste arquivo serão perdidas após a recompilação do esquema de origem.
// Gerado em: 2021.03.05 às 03:20:52 PM BRT
//
package br.jus.tst.esocial.esquemas.eventos.altcadastral;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Classe Java de TS_tpAcConv.
*
* <p>O seguinte fragmento do esquema especifica o conteúdo esperado contido dentro desta classe.
* <p>
* <pre>
* <simpleType name="TS_tpAcConv">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="A"/>
* <enumeration value="B"/>
* <enumeration value="C"/>
* <enumeration value="D"/>
* <enumeration value="E"/>
* <enumeration value="F"/>
* <enumeration value="G"/>
* <enumeration value="H"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "TS_tpAcConv")
@XmlEnum
public enum TSTpAcConv {
/**
 * Collective bargaining agreement (Acordo Coletivo de Trabalho).
 *
 */
A,
/**
 * Federal, state, municipal or district legislation.
 *
 */
B,
/**
 * Collective labor convention (Convenção Coletiva de Trabalho).
 *
 */
C,
/**
 * Normative ruling - collective labor dispute (Dissídio).
 *
 */
D,
/**
 * Conversion of sick leave into work-accident leave.
 *
 */
E,
/**
 * Other salary or non-salary amounts due after termination.
 *
 */
F,
/**
 * Advance payment of differences from an agreement, convention or collective dispute.
 *
 */
G,
/**
 * Monthly FGTS payment made before periodic events became mandatory.
 *
 */
H;
/** Returns the XML lexical value of this constant (identical to its name). */
public String value() {
return name();
}
/**
 * Resolves a constant from its XML lexical value ("A" through "H").
 * Throws IllegalArgumentException (from valueOf) for any other input.
 */
public static TSTpAcConv fromValue(String v) {
return valueOf(v);
}
}
|
package io.spoud.agoora.agents.test.mock;
import io.spoud.agoora.agents.api.client.LookerClient;
import io.spoud.sdm.looker.domain.v1alpha1.DataProfile;
import lombok.experimental.UtilityClass;
import java.util.UUID;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;
@UtilityClass
public class LookerClientMockProvider {
/**
 * Resets the given Mockito mock and stubs {@code addDataProfile(..)} so that
 * any argument is answered with a DataProfile carrying a fresh random id.
 *
 * @param mock the LookerClient mock to (re)configure.
 */
public static void defaultMock(LookerClient mock) {
reset(mock);
when(mock.addDataProfile(any())).thenReturn(DataProfile.newBuilder()
.setId(UUID.randomUUID().toString())
.build());
}
}
|
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.gradle.dsl.api;
import com.android.tools.idea.gradle.dsl.api.util.GradleDslModel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import java.util.List;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public interface GradleFileModel extends GradleDslModel {
/** The IDE project this Gradle file belongs to. */
@NotNull
Project getProject();
/** Re-parses this model from its backing file. */
void reparse();
/** Whether this model carries modifications — presumably unapplied ones; confirm against implementation. */
boolean isModified();
/** Resets the model's state — presumably discarding pending modifications; confirm against implementation. */
void resetState();
/** The file backing this model. */
@NotNull
VirtualFile getVirtualFile();
/** Writes the model's pending changes back to the underlying file. */
void applyChanges();
/** Notifications produced for this model, keyed by string — key semantics not visible here; verify at call sites. */
@NotNull
Map<String, List<BuildModelNotification>> getNotifications();
/**
 * @return the psi file representing this GradleFileModel. In order to continue using this instance of the model no modifications should
 * be made to the underlying psi tree of the file for the models lifetime. This method is exposed to allow the PsiFile to be passed into
 * Intellij IDEA APIs. This method makes no guarantees about the validity of the returned element, callers should perform the necessary
 * checks before using.
 */
@Nullable
PsiFile getPsiFile();
}
|
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.springframework.geode.boot.autoconfigure.configuration.support;
import java.util.Properties;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.server.ClientSubscriptionConfig;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.data.gemfire.server.SubscriptionEvictionPolicy;
/**
* Spring Boot {@link ConfigurationProperties} used to configure an Apache Geode / Pivotal GemFire {@link CacheServer}.
*
* The configuration {@link Properties} are based on well-known, documented Spring Data for Apache Geode/Pivotal GemFire
* (SDG) {@link Properties}.
*
* @author John Blum
* @see java.util.Properties
* @see org.apache.geode.cache.server.CacheServer
* @see org.springframework.boot.context.properties.ConfigurationProperties
* @since 1.0.0
*/
@SuppressWarnings("unused")
public class CacheServerProperties {
// Only locally-defined default; all other defaults come from the Apache Geode
// CacheServer / ClientSubscriptionConfig constants referenced below.
private static final boolean DEFAULT_AUTO_STARTUP = true;
// Whether the CacheServer starts automatically with the application.
private boolean autoStartup = DEFAULT_AUTO_STARTUP;
private boolean tcpNoDelay = CacheServer.DEFAULT_TCP_NO_DELAY;
private int maxConnections = CacheServer.DEFAULT_MAX_CONNECTIONS;
private int maxMessageCount = CacheServer.DEFAULT_MAXIMUM_MESSAGE_COUNT;
private int maxThreads = CacheServer.DEFAULT_MAX_THREADS;
private int maxTimeBetweenPings = CacheServer.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS;
private int messageTimeToLive = CacheServer.DEFAULT_MESSAGE_TIME_TO_LIVE;
private int port = CacheServer.DEFAULT_PORT;
private int socketBufferSize = CacheServer.DEFAULT_SOCKET_BUFFER_SIZE;
private int subscriptionCapacity = ClientSubscriptionConfig.DEFAULT_CAPACITY;
private long loadPollInterval = CacheServer.DEFAULT_LOAD_POLL_INTERVAL;
private String bindAddress = CacheServer.DEFAULT_BIND_ADDRESS;
private String hostnameForClients = CacheServer.DEFAULT_HOSTNAME_FOR_CLIENTS;
// No default: null means no disk store is configured for subscriptions.
private String subscriptionDiskStoreName;
private SubscriptionEvictionPolicy subscriptionEvictionPolicy = SubscriptionEvictionPolicy.NONE;
// Plain JavaBean accessors below; no logic beyond get/set.
public boolean isAutoStartup() {
return this.autoStartup;
}
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
public String getBindAddress() {
return this.bindAddress;
}
public void setBindAddress(String bindAddress) {
this.bindAddress = bindAddress;
}
public String getHostnameForClients() {
return this.hostnameForClients;
}
public void setHostnameForClients(String hostnameForClients) {
this.hostnameForClients = hostnameForClients;
}
public long getLoadPollInterval() {
return this.loadPollInterval;
}
public void setLoadPollInterval(long loadPollInterval) {
this.loadPollInterval = loadPollInterval;
}
public int getMaxConnections() {
return this.maxConnections;
}
public void setMaxConnections(int maxConnections) {
this.maxConnections = maxConnections;
}
public int getMaxMessageCount() {
return this.maxMessageCount;
}
public void setMaxMessageCount(int maxMessageCount) {
this.maxMessageCount = maxMessageCount;
}
public int getMaxThreads() {
return this.maxThreads;
}
public void setMaxThreads(int maxThreads) {
this.maxThreads = maxThreads;
}
public int getMaxTimeBetweenPings() {
return this.maxTimeBetweenPings;
}
public void setMaxTimeBetweenPings(int maxTimeBetweenPings) {
this.maxTimeBetweenPings = maxTimeBetweenPings;
}
public int getMessageTimeToLive() {
return this.messageTimeToLive;
}
public void setMessageTimeToLive(int messageTimeToLive) {
this.messageTimeToLive = messageTimeToLive;
}
public int getPort() {
return this.port;
}
public void setPort(int port) {
this.port = port;
}
public int getSocketBufferSize() {
return this.socketBufferSize;
}
public void setSocketBufferSize(int socketBufferSize) {
this.socketBufferSize = socketBufferSize;
}
public int getSubscriptionCapacity() {
return this.subscriptionCapacity;
}
public void setSubscriptionCapacity(int subscriptionCapacity) {
this.subscriptionCapacity = subscriptionCapacity;
}
public String getSubscriptionDiskStoreName() {
return this.subscriptionDiskStoreName;
}
public void setSubscriptionDiskStoreName(String subscriptionDiskStoreName) {
this.subscriptionDiskStoreName = subscriptionDiskStoreName;
}
public SubscriptionEvictionPolicy getSubscriptionEvictionPolicy() {
return this.subscriptionEvictionPolicy;
}
public void setSubscriptionEvictionPolicy(SubscriptionEvictionPolicy subscriptionEvictionPolicy) {
this.subscriptionEvictionPolicy = subscriptionEvictionPolicy;
}
public boolean isTcpNoDelay() {
return this.tcpNoDelay;
}
public void setTcpNoDelay(boolean tcpNoDelay) {
this.tcpNoDelay = tcpNoDelay;
}
}
|
package org.alljo.jandall.service;
import io.github.jhipster.security.RandomUtil;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
import org.alljo.jandall.config.Constants;
import org.alljo.jandall.domain.Authority;
import org.alljo.jandall.domain.User;
import org.alljo.jandall.repository.AuthorityRepository;
import org.alljo.jandall.repository.UserRepository;
import org.alljo.jandall.security.AuthoritiesConstants;
import org.alljo.jandall.security.SecurityUtils;
import org.alljo.jandall.service.dto.UserDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.CacheManager;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service class for managing users.
*/
@Service
@Transactional
public class UserService {
private final Logger log = LoggerFactory.getLogger(UserService.class);
private final UserRepository userRepository;
private final PasswordEncoder passwordEncoder;
private final AuthorityRepository authorityRepository;
private final CacheManager cacheManager;
public UserService(
UserRepository userRepository,
PasswordEncoder passwordEncoder,
AuthorityRepository authorityRepository,
CacheManager cacheManager
) {
this.userRepository = userRepository;
this.passwordEncoder = passwordEncoder;
this.authorityRepository = authorityRepository;
this.cacheManager = cacheManager;
}
/**
 * Activates the user matching the given activation key, clearing the key.
 *
 * @param key the registration activation key.
 * @return the activated user, or empty if the key matches no user.
 */
public Optional<User> activateRegistration(String key) {
log.debug("Activating user for activation key {}", key);
return userRepository
.findOneByActivationKey(key)
.map(
user -> {
// activate given user for the registration key.
user.setActivated(true);
user.setActivationKey(null);
this.clearUserCaches(user);
log.debug("Activated user: {}", user);
return user;
}
);
}
/**
 * Finishes a password reset: sets the new (encoded) password and clears the reset key.
 *
 * @param newPassword the new clear-text password to encode and store.
 * @param key the reset key previously issued by requestPasswordReset.
 * @return the updated user, or empty if the key matches no user or has expired.
 */
public Optional<User> completePasswordReset(String newPassword, String key) {
log.debug("Reset user password for reset key {}", key);
return userRepository
.findOneByResetKey(key)
// 86400 seconds = 24 hours: reset keys older than a day are rejected.
.filter(user -> user.getResetDate().isAfter(Instant.now().minusSeconds(86400)))
.map(
user -> {
user.setPassword(passwordEncoder.encode(newPassword));
user.setResetKey(null);
user.setResetDate(null);
this.clearUserCaches(user);
return user;
}
);
}
/**
 * Starts a password reset for an activated user with the given email,
 * issuing a fresh reset key and timestamp.
 *
 * @param mail the user's email address (case-insensitive lookup).
 * @return the user with reset key set, or empty if no activated user matches.
 */
public Optional<User> requestPasswordReset(String mail) {
return userRepository
.findOneByEmailIgnoreCase(mail)
.filter(User::getActivated)
.map(
user -> {
user.setResetKey(RandomUtil.generateResetKey())
user.setResetDate(Instant.now());
this.clearUserCaches(user);
return user;
}
);
}
/**
 * Registers a new, not-yet-activated user account.
 * Rejects the registration (UsernameAlreadyUsedException / EmailAlreadyUsedException)
 * when the login or email belongs to an already-activated account; non-activated
 * duplicates are silently removed first.
 *
 * @param userDTO the registration data.
 * @param password the clear-text password to encode and store.
 * @return the newly created, inactive user carrying an activation key.
 */
public User registerUser(UserDTO userDTO, String password) {
userRepository
.findOneByLogin(userDTO.getLogin().toLowerCase())
.ifPresent(
existingUser -> {
boolean removed = removeNonActivatedUser(existingUser);
if (!removed) {
throw new UsernameAlreadyUsedException();
}
}
);
userRepository
.findOneByEmailIgnoreCase(userDTO.getEmail())
.ifPresent(
existingUser -> {
boolean removed = removeNonActivatedUser(existingUser);
if (!removed) {
throw new EmailAlreadyUsedException();
}
}
);
User newUser = new User();
String encryptedPassword = passwordEncoder.encode(password);
newUser.setLogin(userDTO.getLogin().toLowerCase());
// new user gets initially a generated password
newUser.setPassword(encryptedPassword);
newUser.setFirstName(userDTO.getFirstName());
newUser.setLastName(userDTO.getLastName());
if (userDTO.getEmail() != null) {
newUser.setEmail(userDTO.getEmail().toLowerCase());
}
newUser.setImageUrl(userDTO.getImageUrl());
newUser.setLangKey(userDTO.getLangKey());
// new user is not active
newUser.setActivated(false);
// new user gets registration key
newUser.setActivationKey(RandomUtil.generateActivationKey());
Set<Authority> authorities = new HashSet<>();
authorityRepository.findById(AuthoritiesConstants.USER).ifPresent(authorities::add);
newUser.setAuthorities(authorities);
userRepository.save(newUser);
this.clearUserCaches(newUser);
log.debug("Created Information for User: {}", newUser);
return newUser;
}
/**
 * Deletes the given user only when it has never been activated.
 *
 * @param existingUser the possibly-duplicate account found during registration.
 * @return true if the user was removed; false if it is activated and must be kept.
 */
private boolean removeNonActivatedUser(User existingUser) {
if (existingUser.getActivated()) {
return false;
}
userRepository.delete(existingUser);
userRepository.flush();
this.clearUserCaches(existingUser);
return true;
}
/**
 * Creates an already-activated user from admin-supplied data, with a random
 * generated password and a reset key so the user can choose a password.
 *
 * @param userDTO the user data, optionally including authorities.
 * @return the persisted user.
 */
public User createUser(UserDTO userDTO) {
User user = new User();
user.setLogin(userDTO.getLogin().toLowerCase());
user.setFirstName(userDTO.getFirstName());
user.setLastName(userDTO.getLastName());
if (userDTO.getEmail() != null) {
user.setEmail(userDTO.getEmail().toLowerCase());
}
user.setImageUrl(userDTO.getImageUrl());
if (userDTO.getLangKey() == null) {
user.setLangKey(Constants.DEFAULT_LANGUAGE); // default language
} else {
user.setLangKey(userDTO.getLangKey());
}
String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword());
user.setPassword(encryptedPassword);
user.setResetKey(RandomUtil.generateResetKey());
user.setResetDate(Instant.now());
user.setActivated(true);
if (userDTO.getAuthorities() != null) {
Set<Authority> authorities = userDTO
.getAuthorities()
.stream()
.map(authorityRepository::findById)
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
user.setAuthorities(authorities);
}
userRepository.save(user);
this.clearUserCaches(user);
log.debug("Created Information for User: {}", user);
return user;
}
/**
 * Update basic information (first name, last name, email, language) for the current user.
 *
 * @param firstName first name of user.
 * @param lastName last name of user.
 * @param email email id of user.
 * @param langKey language key.
 * @param imageUrl image URL of user.
 */
public void updateUser(String firstName, String lastName, String email, String langKey, String imageUrl) {
SecurityUtils
.getCurrentUserLogin()
.flatMap(userRepository::findOneByLogin)
.ifPresent(
user -> {
user.setFirstName(firstName);
user.setLastName(lastName);
if (email != null) {
user.setEmail(email.toLowerCase());
}
user.setLangKey(langKey);
user.setImageUrl(imageUrl);
this.clearUserCaches(user);
log.debug("Changed Information for User: {}", user);
}
);
}
/**
 * Update all information for a specific user, and return the modified user.
 *
 * @param userDTO user to update.
 * @return updated user.
 */
public Optional<UserDTO> updateUser(UserDTO userDTO) {
return Optional
.of(userRepository.findById(userDTO.getId()))
.filter(Optional::isPresent)
.map(Optional::get)
.map(
user -> {
// Caches are cleared before mutation (old login/email keys) and again
// after (new keys), so both stale entries are evicted.
this.clearUserCaches(user);
user.setLogin(userDTO.getLogin().toLowerCase());
user.setFirstName(userDTO.getFirstName());
user.setLastName(userDTO.getLastName());
if (userDTO.getEmail() != null) {
user.setEmail(userDTO.getEmail().toLowerCase());
}
user.setImageUrl(userDTO.getImageUrl());
user.setActivated(userDTO.isActivated());
user.setLangKey(userDTO.getLangKey());
Set<Authority> managedAuthorities = user.getAuthorities();
managedAuthorities.clear();
userDTO
.getAuthorities()
.stream()
.map(authorityRepository::findById)
.filter(Optional::isPresent)
.map(Optional::get)
.forEach(managedAuthorities::add);
this.clearUserCaches(user);
log.debug("Changed Information for User: {}", user);
return user;
}
)
.map(UserDTO::new);
}
/**
 * Deletes the user with the given login, if present, and evicts its cache entries.
 *
 * @param login the login of the user to delete.
 */
public void deleteUser(String login) {
userRepository
.findOneByLogin(login)
.ifPresent(
user -> {
userRepository.delete(user);
this.clearUserCaches(user);
log.debug("Deleted User: {}", user);
}
);
}
/**
 * Changes the current user's password after verifying the current one.
 *
 * @param currentClearTextPassword the current password, for verification.
 * @param newPassword the new clear-text password to encode and store.
 */
public void changePassword(String currentClearTextPassword, String newPassword) {
SecurityUtils
.getCurrentUserLogin()
.flatMap(userRepository::findOneByLogin)
.ifPresent(
user -> {
String currentEncryptedPassword = user.getPassword();
if (!passwordEncoder.matches(currentClearTextPassword, currentEncryptedPassword)) {
throw new InvalidPasswordException();
}
String encryptedPassword = passwordEncoder.encode(newPassword);
user.setPassword(encryptedPassword);
this.clearUserCaches(user);
log.debug("Changed password for User: {}", user);
}
);
}
/**
 * Pages over all users except the anonymous user.
 *
 * @param pageable paging information.
 * @return a page of UserDTOs.
 */
@Transactional(readOnly = true)
public Page<UserDTO> getAllManagedUsers(Pageable pageable) {
return userRepository.findAllByLoginNot(pageable, Constants.ANONYMOUS_USER).map(UserDTO::new);
}
/** Loads a user (with authorities eagerly fetched) by login. */
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthoritiesByLogin(String login) {
return userRepository.findOneWithAuthoritiesByLogin(login);
}
/** Loads a user (with authorities eagerly fetched) by id. */
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthorities(Long id) {
return userRepository.findOneWithAuthoritiesById(id);
}
/** Loads the currently authenticated user (with authorities), if any. */
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthorities() {
return SecurityUtils.getCurrentUserLogin().flatMap(userRepository::findOneWithAuthoritiesByLogin);
}
/**
 * Not activated users should be automatically deleted after 3 days.
 * <p>
 * This is scheduled to get fired everyday, at 01:00 (am).
 */
@Scheduled(cron = "0 0 1 * * ?")
public void removeNotActivatedUsers() {
userRepository
.findAllByActivatedIsFalseAndActivationKeyIsNotNullAndCreatedDateBefore(Instant.now().minus(3, ChronoUnit.DAYS))
.forEach(
user -> {
log.debug("Deleting not activated user {}", user.getLogin());
userRepository.delete(user);
this.clearUserCaches(user);
}
);
}
/**
 * Gets a list of all the authorities.
 * @return a list of all the authorities.
 */
public List<String> getAuthorities() {
return authorityRepository.findAll().stream().map(Authority::getName).collect(Collectors.toList());
}
// Evicts this user's entries from the by-login and (if an email is set) by-email caches.
private void clearUserCaches(User user) {
Objects.requireNonNull(cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE)).evict(user.getLogin());
if (user.getEmail() != null) {
Objects.requireNonNull(cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE)).evict(user.getEmail());
}
}
}
|
/*
* Copyright ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.IOException;
import org.junit.Test;
/**
 * Acceptance tests for mining behaviour of Clique (proof-of-authority) networks:
 * transaction mining on one and several validators, stalling when too many
 * validators stop, and continued mining when enough validators remain.
 */
public class CliqueMiningAcceptanceTest extends AcceptanceTestBase {
// A single validator mines transfers and balances update accordingly.
@Test
public void shouldMineTransactionsOnSingleNode() throws IOException {
final BesuNode minerNode = besu.createCliqueNode("miner1");
cluster.start(minerNode);
final Account sender = accounts.createAccount("account1");
final Account receiver = accounts.createAccount("account2");
minerNode.execute(accountTransactions.createTransfer(sender, 50));
cluster.verify(sender.balanceEquals(50));
minerNode.execute(accountTransactions.createIncrementalTransfers(sender, receiver, 1));
cluster.verify(receiver.balanceEquals(1));
minerNode.execute(accountTransactions.createIncrementalTransfers(sender, receiver, 2));
cluster.verify(receiver.balanceEquals(3));
}
// Transactions submitted to different validators are all mined; balances converge.
@Test
public void shouldMineTransactionsOnMultipleNodes() throws IOException {
final BesuNode minerNode1 = besu.createCliqueNode("miner1");
final BesuNode minerNode2 = besu.createCliqueNode("miner2");
final BesuNode minerNode3 = besu.createCliqueNode("miner3");
cluster.start(minerNode1, minerNode2, minerNode3);
final Account sender = accounts.createAccount("account1");
final Account receiver = accounts.createAccount("account2");
minerNode1.execute(accountTransactions.createTransfer(sender, 50));
cluster.verify(sender.balanceEquals(50));
minerNode2.execute(accountTransactions.createIncrementalTransfers(sender, receiver, 1));
cluster.verify(receiver.balanceEquals(1));
minerNode3.execute(accountTransactions.createIncrementalTransfers(sender, receiver, 2));
cluster.verify(receiver.balanceEquals(3));
}
// With 2 of 3 validators stopped, the remaining validator stops producing new blocks.
@Test
public void shouldStallMiningWhenInsufficientValidators() throws IOException {
final BesuNode minerNode1 = besu.createCliqueNode("miner1");
final BesuNode minerNode2 = besu.createCliqueNode("miner2");
final BesuNode minerNode3 = besu.createCliqueNode("miner3");
cluster.start(minerNode1, minerNode2, minerNode3);
cluster.stopNode(minerNode2);
cluster.stopNode(minerNode3);
minerNode1.verify(net.awaitPeerCount(0));
minerNode1.verify(clique.blockIsCreatedByProposer(minerNode1));
minerNode1.verify(clique.noNewBlockCreated(minerNode1));
}
// With 1 of 3 validators stopped, the remaining two keep proposing blocks.
@Test
public void shouldStillMineWhenANodeFailsAndHasSufficientValidators() throws IOException {
final BesuNode minerNode1 = besu.createCliqueNode("miner1");
final BesuNode minerNode2 = besu.createCliqueNode("miner2");
final BesuNode minerNode3 = besu.createCliqueNode("miner3");
cluster.start(minerNode1, minerNode2, minerNode3);
cluster.verifyOnActiveNodes(blockchain.reachesHeight(minerNode1, 1, 85));
cluster.stopNode(minerNode3);
cluster.verifyOnActiveNodes(net.awaitPeerCount(1));
cluster.verifyOnActiveNodes(blockchain.reachesHeight(minerNode1, 2));
cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode1));
cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode2));
}
}
|
package com.leon.biuvideo.adapters.otherAdapters;
import android.content.Context;
import android.graphics.Color;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import com.leon.biuvideo.R;
import com.leon.biuvideo.adapters.baseAdapters.BaseAdapter;
import com.leon.biuvideo.adapters.baseAdapters.BaseViewHolder;
import com.leon.biuvideo.utils.PreferenceUtils;
import java.util.List;
/**
 * @Author Leon
 * @Time 2021/4/19
 * @Desc Adapter for the video quality selection list.
 */
public class VideoQualityAdapter extends BaseAdapter<String[]> {
// Each entry: [0]=quality id (numeric string), [1]=display label, [2]=optional mark text.
private final List<String[]> qualityList;
private final int currentQuality;
private final boolean loginStatus = PreferenceUtils.getLoginStatus();
private final boolean vipStatus = PreferenceUtils.getVipStatus();
private OnVideoQualityListener onVideoQualityListener;
public interface OnVideoQualityListener {
/**
 * Called when a quality is selected.
 *
 * @param qualityId the selected quality id
 */
void onQuality (int qualityId);
}
public void setOnVideoQualityListener(OnVideoQualityListener onVideoQualityListener) {
this.onVideoQualityListener = onVideoQualityListener;
}
public VideoQualityAdapter(int currentQuality, List<String[]> beans, Context context) {
super(beans, context);
this.currentQuality = currentQuality;
this.qualityList = beans;
}
@Override
public int getLayout(int viewType) {
return R.layout.video_quality_item;
}
@Override
public void onBindViewHolder(@NonNull BaseViewHolder holder, int position) {
String[] strings = qualityList.get(position);
final int qualityCode = Integer.parseInt(strings[0]);
TextView videoQualityItemContent = holder.findById(R.id.video_quality_item_content);
videoQualityItemContent.setText(strings[1]);
if (currentQuality == qualityCode) {
// Highlight the currently selected quality.
videoQualityItemContent.setTextColor(Color.parseColor("#FB7299"));
} else {
videoQualityItemContent.setTextColor(Color.WHITE);
}
if (strings[2] != null) {
holder.setText(R.id.video_quality_item_mark, strings[2]);
} else {
holder.setVisibility(R.id.video_quality_item_mark, View.GONE);
}
holder.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Re-selecting the current quality is a no-op.
if (qualityCode == currentQuality) {
return;
}
// NOTE(review): this branch blocks users who are BOTH logged in AND VIP and
// shows a *login* warning — likely an inverted condition; confirm intent.
if (loginStatus && vipStatus) {
Toast.makeText(context, context.getString(R.string.qualityLoginWarn), Toast.LENGTH_SHORT).show();
return;
}
// Qualities above 32 require login; 74 and >=112 additionally require VIP.
if (qualityCode > 32) {
if (!loginStatus) {
Toast.makeText(context, context.getString(R.string.qualityLoginWarn), Toast.LENGTH_SHORT).show();
return;
}
if (qualityCode == 74 || qualityCode >= 112) {
Toast.makeText(context, context.getString(R.string.qualityVipWarn), Toast.LENGTH_SHORT).show();
} else {
if (onVideoQualityListener != null) {
onVideoQualityListener.onQuality(qualityCode);
}
}
} else {
if (onVideoQualityListener != null) {
onVideoQualityListener.onQuality(qualityCode);
}
}
}
});
}
}
|
package com.tracker.supply.pojo;
/**
 * Transfer object describing an uploaded resume file.
 */
public class ResumeUpload {

    private String fileName;        // original name of the uploaded file
    private String fileDownloadUri; // URI from which the file can be downloaded
    private String fileType;        // content type reported for the upload
    private long size;              // file size in bytes

    /**
     * Creates a fully populated upload descriptor.
     *
     * @param fileName original file name.
     * @param fileDownloadUri download location of the stored file.
     * @param fileType content type of the file.
     * @param size file size in bytes.
     */
    public ResumeUpload(String fileName, String fileDownloadUri, String fileType, long size) {
        this.fileName = fileName;
        this.fileDownloadUri = fileDownloadUri;
        this.fileType = fileType;
        this.size = size;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFileDownloadUri() {
        return fileDownloadUri;
    }

    public void setFileDownloadUri(String fileDownloadUri) {
        this.fileDownloadUri = fileDownloadUri;
    }

    public String getFileType() {
        return fileType;
    }

    public void setFileType(String fileType) {
        this.fileType = fileType;
    }

    public long getSize() {
        return size;
    }

    public void setSize(long size) {
        this.size = size;
    }
}
|
package org.smartregister.goldsmith.fragment;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import org.smartregister.AllConstants;
import org.smartregister.chw.core.utils.Utils;
import org.smartregister.commonregistry.CommonPersonObjectClient;
import org.smartregister.family.fragment.BaseFamilyProfileMemberFragment;
import org.smartregister.family.model.BaseFamilyProfileMemberModel;
import org.smartregister.family.presenter.BaseFamilyProfileMemberPresenter;
import org.smartregister.family.util.Constants;
import org.smartregister.goldsmith.R;
import org.smartregister.goldsmith.activity.FamilyOtherMemberProfileActivity;
import java.util.HashMap;
/**
 * Fragment listing family members; clicking a member opens that member's profile.
 */
public class FamilyProfileMemberFragment extends BaseFamilyProfileMemberFragment {
// Family details extracted from the client record in initializePresenter().
private String familyBaseEntityId;
private String familyHead;
private String primaryCareGiver;
private String villageTown;
private String familyName;
/**
 * Creates a new fragment, using the given bundle (or an empty one) as its arguments.
 */
public static BaseFamilyProfileMemberFragment newInstance(Bundle bundle) {
Bundle args = bundle;
BaseFamilyProfileMemberFragment fragment = new FamilyProfileMemberFragment();
if (args == null) {
args = new Bundle();
}
fragment.setArguments(args);
return fragment;
}
@Override
protected void initializePresenter() {
// NOTE(review): assumes getArguments() is non-null and carries COMMON_PERSON_CLIENT —
// confirm all callers go through newInstance with a populated bundle.
CommonPersonObjectClient client = (CommonPersonObjectClient) getArguments().getSerializable(AllConstants.INTENT_KEY.COMMON_PERSON_CLIENT);
familyBaseEntityId = client.getCaseId();
familyName = Utils.getValue(client.getColumnmaps(), AllConstants.Client.FIRST_NAME, false);
familyHead = Utils.getValue(client.getColumnmaps(), Constants.INTENT_KEY.FAMILY_HEAD, false);
primaryCareGiver = Utils.getValue(client.getColumnmaps(), Constants.INTENT_KEY.PRIMARY_CAREGIVER, false);
villageTown = Utils.getValue(client.getColumnmaps(), Constants.INTENT_KEY.VILLAGE_TOWN, false);
presenter = new BaseFamilyProfileMemberPresenter(this, new BaseFamilyProfileMemberModel(), null, familyBaseEntityId, familyHead, primaryCareGiver);
}
@Override
protected void onViewClicked(View view) {
super.onViewClicked(view);
int id = view.getId();
// Open the member profile only for normal row clicks carrying a client tag.
if (id == R.id.patient_column && view.getTag() != null && view.getTag(R.id.VIEW_ID) == CLICK_VIEW_NORMAL) {
goToOtherMemberProfileActivity((CommonPersonObjectClient) view.getTag());
}
}
/**
 * Launches the other-member profile activity, passing along family context and the clicked member.
 */
public void goToOtherMemberProfileActivity(CommonPersonObjectClient patient) {
Intent intent = new Intent(getActivity(), FamilyOtherMemberProfileActivity.class);
intent.putExtra(Constants.INTENT_KEY.FAMILY_BASE_ENTITY_ID, familyBaseEntityId);
intent.putExtra(Constants.INTENT_KEY.VILLAGE_TOWN, villageTown);
intent.putExtra(Constants.INTENT_KEY.FAMILY_HEAD, familyHead);
intent.putExtra(Constants.INTENT_KEY.PRIMARY_CAREGIVER, primaryCareGiver);
intent.putExtra(Constants.INTENT_KEY.FAMILY_NAME, familyName);
intent.putExtra(AllConstants.INTENT_KEY.COMMON_PERSON_CLIENT, patient);
startActivity(intent);
}
@Override
public void setAdvancedSearchFormData(HashMap<String, String> advancedSearchFormData) {
//do nothing
}
}
|
package v3;
import common.Resource;
import common.Task;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
class Main {
public static void main(String[] args) throws InterruptedException {
final HashMap<Resource, Long> resources = new HashMap<Resource, Long>() {{
put(Resource.CATS, 42L);
put(Resource.DOGS, 23L);
put(Resource.OWLS, 10L);
put(Resource.MICE, 99L);
}};
final LinkedList<Task> tasks = new LinkedList<>(new ArrayList<Task>() {
{
add(new Task() {{
name = "20 cats are sleeping for 5s";
highPriority = false;
requirements = new HashMap<Resource, Long>() {{
put(Resource.CATS, 20L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(5);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "22 cats are sleeping for 3s";
highPriority = true;
requirements = new HashMap<Resource, Long>() {{
put(Resource.CATS, 22L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(3);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "42 cats are sleeping for 5s";
highPriority = true;
requirements = new HashMap<Resource, Long>() {{
put(Resource.CATS, 42L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(5);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "10 dogs are sleeping for 10s";
highPriority = false;
requirements = new HashMap<Resource, Long>() {{
put(Resource.DOGS, 10L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(10);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "13 dogs are sleeping for 5s";
highPriority = false;
requirements = new HashMap<Resource, Long>() {{
put(Resource.DOGS, 13L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(5);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "23 dogs are sleeping for 15s";
highPriority = true;
requirements = new HashMap<Resource, Long>() {{
put(Resource.DOGS, 23L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(15);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "10 owls are watching for 7s";
highPriority = false;
requirements = new HashMap<Resource, Long>() {{
put(Resource.OWLS, 10L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(7);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "99 mice are running for 7s";
highPriority = false;
requirements = new HashMap<Resource, Long>() {{
put(Resource.MICE, 99L);
}};
function = () -> {
System.out.println(name + "\n");
try {
TimeUnit.SECONDS.sleep(7);
} catch (InterruptedException ignored) {
}
};
}});
add(new Task() {{
name = "This is the last task";
highPriority = false;
requirements = new HashMap<>(resources);
function = () -> {
System.out.println(name + "\n");
};
}});
}
});
Model model = new Model(4, resources);
Thread taskThread = new Thread(() -> {
for (Task task : tasks) {
model.push(task);
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
taskThread.start();
taskThread.join();
model.stop();
}
}
|
package cn.dsc.manager.mapper;
import cn.dsc.manager.pojo.TbThanks;
import cn.dsc.manager.pojo.TbThanksExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * Data-access mapper for TbThanks records — follows the standard
 * MyBatis-Generator CRUD method set (ByExample / ByPrimaryKey / Selective);
 * presumably backed by a generated XML mapper elsewhere in the project.
 */
public interface TbThanksMapper {
/** Counts rows matching the example's criteria. */
long countByExample(TbThanksExample example);
/** Deletes all rows matching the example's criteria; returns rows affected. */
int deleteByExample(TbThanksExample example);
/** Deletes the row with the given primary key; returns rows affected. */
int deleteByPrimaryKey(Integer id);
/** Inserts the full record (all columns); returns rows affected. */
int insert(TbThanks record);
/** Inserts the record, writing only non-null fields (generator-style "selective" insert). */
int insertSelective(TbThanks record);
/** Returns all rows matching the example's criteria. */
List<TbThanks> selectByExample(TbThanksExample example);
/** Returns the row with the given primary key, or null if absent. */
TbThanks selectByPrimaryKey(Integer id);
/** Updates matching rows with the record's non-null fields only; returns rows affected. */
int updateByExampleSelective(@Param("record") TbThanks record, @Param("example") TbThanksExample example);
/** Updates matching rows with all of the record's fields; returns rows affected. */
int updateByExample(@Param("record") TbThanks record, @Param("example") TbThanksExample example);
/** Updates the row keyed by the record's id, writing only non-null fields; returns rows affected. */
int updateByPrimaryKeySelective(TbThanks record);
/** Updates the row keyed by the record's id with all fields; returns rows affected. */
int updateByPrimaryKey(TbThanks record);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.