text
stringlengths 7
1.01M
|
|---|
package org.pac4j.jwt.credentials.authenticator;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.pac4j.core.context.HttpConstants;
import org.pac4j.core.context.Pac4jConstants;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.credentials.TokenCredentials;
import org.pac4j.core.credentials.authenticator.Authenticator;
import org.pac4j.core.exception.CredentialsException;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.core.exception.TechnicalException;
import org.pac4j.core.profile.CommonProfile;
import org.pac4j.core.profile.ProfileHelper;
import org.pac4j.core.profile.creator.AuthenticatorProfileCreator;
import org.pac4j.core.profile.definition.CommonProfileDefinition;
import org.pac4j.core.profile.definition.ProfileDefinitionAware;
import org.pac4j.core.profile.jwt.JwtClaims;
import org.pac4j.jwt.config.encryption.EncryptionConfiguration;
import org.pac4j.jwt.config.signature.SignatureConfiguration;
import org.pac4j.jwt.profile.JwtGenerator;
import org.pac4j.jwt.profile.JwtProfile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.nimbusds.jose.EncryptionMethod;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.JWEAlgorithm;
import com.nimbusds.jose.JWEHeader;
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jwt.EncryptedJWT;
import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.JWTParser;
import com.nimbusds.jwt.PlainJWT;
import com.nimbusds.jwt.SignedJWT;
import static org.pac4j.core.util.CommonHelper.*;
/**
* Authenticator for JWT. It creates the user profile and stores it in the credentials
* for the {@link AuthenticatorProfileCreator}.
*
* @author Jerome Leleu
* @since 1.8.0
*/
public class JwtAuthenticator extends ProfileDefinitionAware<JwtProfile> implements Authenticator<TokenCredentials> {

    protected final Logger logger = LoggerFactory.getLogger(getClass());

    /** Encryption configurations tried, in order, when the JWT is encrypted. */
    private List<EncryptionConfiguration> encryptionConfigurations = new ArrayList<>();

    /** Signature configurations tried, in order, when the JWT is signed. */
    private List<SignatureConfiguration> signatureConfigurations = new ArrayList<>();

    /** Realm advertised in the {@code WWW-Authenticate} response header on failure. */
    private String realmName = Pac4jConstants.DEFAULT_REALM_NAME;

    /** Optional upper bound: JWTs whose 'exp' claim lies after this date are rejected. May be null (no bound). */
    private Date expirationTime;

    public JwtAuthenticator() {}

    public JwtAuthenticator(final List<SignatureConfiguration> signatureConfigurations) {
        this.signatureConfigurations = signatureConfigurations;
    }

    public JwtAuthenticator(final List<SignatureConfiguration> signatureConfigurations,
                            final List<EncryptionConfiguration> encryptionConfigurations) {
        this.signatureConfigurations = signatureConfigurations;
        this.encryptionConfigurations = encryptionConfigurations;
    }

    public JwtAuthenticator(final SignatureConfiguration signatureConfiguration) {
        setSignatureConfiguration(signatureConfiguration);
    }

    public JwtAuthenticator(final SignatureConfiguration signatureConfiguration, final EncryptionConfiguration encryptionConfiguration) {
        setSignatureConfiguration(signatureConfiguration);
        setEncryptionConfiguration(encryptionConfiguration);
    }

    @Override
    protected void internalInit() {
        assertNotBlank("realmName", this.realmName);
        defaultProfileDefinition(new CommonProfileDefinition<>(x -> new JwtProfile()));
        if (signatureConfigurations.isEmpty()) {
            // Deliberately a warning, not an error: plain (unsigned) JWTs are then accepted.
            logger.warn("No signature configurations have been defined: non-signed JWT will be accepted!");
        }
    }

    /**
     * Validates the token and returns its claims, including the subject.
     *
     * @param token the JWT
     * @return the claims of the token, keyed by claim name
     */
    public Map<String, Object> validateTokenAndGetClaims(final String token) {
        // NOTE(review): validateToken returns null on invalid credentials, which would
        // make the next line throw a NullPointerException -- confirm callers rely on that.
        final CommonProfile profile = validateToken(token);
        final Map<String, Object> claims = new HashMap<>(profile.getAttributes());
        claims.put(JwtClaims.SUBJECT, profile.getId());
        return claims;
    }

    /**
     * Validates the token and returns the corresponding user profile.
     *
     * @param token the JWT
     * @return the corresponding user profile, or {@code null} if validation failed
     */
    public CommonProfile validateToken(final String token) {
        final TokenCredentials credentials = new TokenCredentials(token);
        try {
            validate(credentials, null);
        } catch (final HttpAction e) {
            // no WebContext is available here, so an HTTP action cannot be handled
            throw new TechnicalException(e);
        } catch (final CredentialsException e) {
            logger.info("Failed to retrieve or validate credentials: {}", e.getMessage());
            logger.debug("Failed to retrieve or validate credentials", e);
            return null;
        }
        return credentials.getUserProfile();
    }

    /**
     * Validates the JWT carried by the credentials: decrypts it if encrypted, verifies its
     * signature if signed, and on success stores the resulting profile in the credentials.
     *
     * @param credentials the token credentials wrapping the raw JWT
     * @param context the web context (may be null; used to set the WWW-Authenticate header)
     * @throws CredentialsException if the JWT cannot be decrypted, verified or parsed
     */
    @Override
    public void validate(final TokenCredentials credentials, final WebContext context) {
        init();

        final String token = credentials.getToken();
        if (context != null) {
            // set the www-authenticate in case of error
            context.setResponseHeader(HttpConstants.AUTHENTICATE_HEADER, "Bearer realm=\"" + realmName + "\"");
        }

        try {
            // Parse the token
            JWT jwt = JWTParser.parse(token);

            if (jwt instanceof PlainJWT) {
                // An unsigned JWT is only acceptable when no signature configuration is defined.
                if (signatureConfigurations.isEmpty()) {
                    logger.debug("JWT is not signed and no signature configurations -> verified");
                } else {
                    throw new CredentialsException("A non-signed JWT cannot be accepted as signature configurations have been defined");
                }
            } else {

                SignedJWT signedJWT = null;
                if (jwt instanceof SignedJWT) {
                    signedJWT = (SignedJWT) jwt;
                }

                // encrypted?
                if (jwt instanceof EncryptedJWT) {
                    logger.debug("JWT is encrypted");

                    final EncryptedJWT encryptedJWT = (EncryptedJWT) jwt;
                    boolean found = false;
                    final JWEHeader header = encryptedJWT.getHeader();
                    final JWEAlgorithm algorithm = header.getAlgorithm();
                    final EncryptionMethod method = header.getEncryptionMethod();
                    // Try each encryption configuration supporting the declared algorithm/method.
                    for (final EncryptionConfiguration config : encryptionConfigurations) {
                        if (config.supports(algorithm, method)) {
                            logger.debug("Using encryption configuration: {}", config);
                            try {
                                config.decrypt(encryptedJWT);
                                // The decrypted payload may itself be a signed JWT (nested JWT).
                                signedJWT = encryptedJWT.getPayload().toSignedJWT();
                                if (signedJWT != null) {
                                    jwt = signedJWT;
                                }
                                found = true;
                                break;
                            } catch (final JOSEException e) {
                                logger.debug("Decryption fails with encryption configuration: {}, passing to the next one", config);
                            }
                        }
                    }
                    if (!found) {
                        throw new CredentialsException("No encryption algorithm found for JWT: " + token);
                    }
                }

                // signed?
                if (signedJWT != null) {
                    logger.debug("JWT is signed");

                    boolean verified = false;
                    boolean found = false;
                    final JWSAlgorithm algorithm = signedJWT.getHeader().getAlgorithm();
                    // Try each signature configuration supporting the declared algorithm.
                    for (final SignatureConfiguration config : signatureConfigurations) {
                        if (config.supports(algorithm)) {
                            logger.debug("Using signature configuration: {}", config);
                            try {
                                verified = config.verify(signedJWT);
                                found = true;
                                if (verified) {
                                    break;
                                }
                            } catch (final JOSEException e) {
                                logger.debug("Verification fails with signature configuration: {}, passing to the next one", config);
                            }
                        }
                    }
                    if (!found) {
                        throw new CredentialsException("No signature algorithm found for JWT: " + token);
                    }
                    if (!verified) {
                        throw new CredentialsException("JWT verification failed: " + token);
                    }
                }
            }

            createJwtProfile(credentials, jwt);

        } catch (final ParseException e) {
            throw new CredentialsException("Cannot decrypt / verify JWT", e);
        }
    }

    /**
     * Builds the user profile from the JWT claims and stores it in the credentials.
     * No profile is built when the JWT is expired.
     *
     * @param credentials the credentials to store the profile into
     * @param jwt the (already verified/decrypted) JWT
     * @throws ParseException if the claims set cannot be parsed
     */
    @SuppressWarnings("unchecked")
    protected void createJwtProfile(final TokenCredentials credentials, final JWT jwt) throws ParseException {
        final JWTClaimsSet claimSet = jwt.getJWTClaimsSet();
        final String subject = claimSet.getSubject();
        if (subject == null) {
            throw new TechnicalException("JWT must contain a subject ('sub' claim)");
        }

        final Date expTime = claimSet.getExpirationTime();
        if (expTime != null) {
            final Date now = new Date();
            if (expTime.before(now)) {
                logger.error("The JWT is expired: no profile is built");
                return;
            }
            // Reject tokens living past the configured maximum expiration date (if any).
            if (this.expirationTime != null && expTime.after(this.expirationTime)) {
                logger.error("The JWT is expired: no profile is built");
                return;
            }
        }

        final Map<String, Object> attributes = new HashMap<>(claimSet.getClaims());
        // The subject becomes the profile id; roles/permissions are promoted to first-class profile data.
        attributes.remove(JwtClaims.SUBJECT);
        final List<String> roles = (List<String>) attributes.get(JwtGenerator.INTERNAL_ROLES);
        attributes.remove(JwtGenerator.INTERNAL_ROLES);
        final List<String> permissions = (List<String>) attributes.get(JwtGenerator.INTERNAL_PERMISSIONS);
        attributes.remove(JwtGenerator.INTERNAL_PERMISSIONS);

        final CommonProfile profile = ProfileHelper.restoreOrBuildProfile(getProfileDefinition(), subject, attributes, null);
        if (roles != null) {
            profile.addRoles(roles);
        }
        if (permissions != null) {
            profile.addPermissions(permissions);
        }
        credentials.setUserProfile(profile);
    }

    public List<SignatureConfiguration> getSignatureConfigurations() {
        return signatureConfigurations;
    }

    public void setSignatureConfiguration(final SignatureConfiguration signatureConfiguration) {
        addSignatureConfiguration(signatureConfiguration);
    }

    public void addSignatureConfiguration(final SignatureConfiguration signatureConfiguration) {
        assertNotNull("signatureConfiguration", signatureConfiguration);
        signatureConfigurations.add(signatureConfiguration);
    }

    public void setSignatureConfigurations(final List<SignatureConfiguration> signatureConfigurations) {
        assertNotNull("signatureConfigurations", signatureConfigurations);
        this.signatureConfigurations = signatureConfigurations;
    }

    public List<EncryptionConfiguration> getEncryptionConfigurations() {
        return encryptionConfigurations;
    }

    public void setEncryptionConfiguration(final EncryptionConfiguration encryptionConfiguration) {
        addEncryptionConfiguration(encryptionConfiguration);
    }

    public void addEncryptionConfiguration(final EncryptionConfiguration encryptionConfiguration) {
        assertNotNull("encryptionConfiguration", encryptionConfiguration);
        encryptionConfigurations.add(encryptionConfiguration);
    }

    public void setEncryptionConfigurations(final List<EncryptionConfiguration> encryptionConfigurations) {
        assertNotNull("encryptionConfigurations", encryptionConfigurations);
        this.encryptionConfigurations = encryptionConfigurations;
    }

    public String getRealmName() {
        return realmName;
    }

    public void setRealmName(final String realmName) {
        this.realmName = realmName;
    }

    /**
     * Sets the maximum expiration date allowed for JWTs; {@code null} removes the bound.
     * A defensive copy is taken ({@link Date} is mutable).
     *
     * @param expirationTime the maximum expiration date, or null
     */
    public void setExpirationTime(final Date expirationTime) {
        // Fix: previously threw a NullPointerException when passed null.
        this.expirationTime = expirationTime != null ? new Date(expirationTime.getTime()) : null;
    }

    /**
     * Returns a defensive copy of the maximum expiration date, or {@code null} if none is set.
     *
     * @return the maximum expiration date, or null
     */
    public Date getExpirationTime() {
        // Fix: previously threw a NullPointerException when no expiration time was configured.
        return expirationTime != null ? new Date(expirationTime.getTime()) : null;
    }

    @Override
    public String toString() {
        return toNiceString(this.getClass(), "signatureConfigurations", signatureConfigurations,
            "encryptionConfigurations", encryptionConfigurations, "realmName", this.realmName);
    }
}
|
package com.desertskyrangers.flightdeck.core.service;
import com.desertskyrangers.flightdeck.BaseTest;
import com.desertskyrangers.flightdeck.core.model.*;
import com.desertskyrangers.flightdeck.port.MembershipServices;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
public class MembershipServiceTest extends BaseTest {

    @Autowired
    MembershipServices membershipServices;

    @Test
    void testRequestMembership() {
        // given: a club with one owner, plus a second user who can be invited
        User groupOwner = statePersisting.upsert( createTestUser( "olivia", "olivia@example.com" ) );
        User invitedUser = statePersisting.upsert( createTestUser( "nate", "nate@example.com" ) );
        statePersisting.upsert( createTestToken( invitedUser, invitedUser.username(), "" ) );
        Group club = statePersisting.upsert( createTestGroup( "Group A", GroupType.CLUB ) );
        statePersisting.upsert( createTestMember( groupOwner, club, MemberStatus.OWNER ) );

        // when: the owner invites the second user into the club
        Member invitation = membershipServices.requestMembership( groupOwner, invitedUser, club, MemberStatus.INVITED );

        // then: the invitation is the invitee's one and only membership record
        Set<Member> memberships = membershipServices.findMembershipsByUser( invitedUser );
        assertThat( memberships ).containsExactlyInAnyOrder( invitation );
    }
}
|
package org.ikasan.dashboard.ui.framework.constants;
/**
* Created by Ikasan Development Team on 02/08/2017.
*/
public class ConfigurationConstants
{
    // Keys for the Solr-related dashboard configuration entries.
    /** Key: whether the Solr integration is enabled. */
    public static final String SOLR_ENABLED = "solrEnabled";
    /** Key: the Solr server URL(s) to connect to. */
    public static final String SOLR_URLS = "solrUrls";
    /** Key: retention period (days) for Solr data. */
    public static final String SOLR_DAYS_TO_KEEP = "solrDaysToKeep";
    /** Key: the operating mode of the Solr integration. */
    public static final String SOLR_OPERATING_MODE = "solrOperatingMode";

    // Keys for general dashboard / web-service configuration entries.
    /** Key: base URL of the dashboard. */
    public static final String DASHBOARD_BASE_URL = "dashboardBaseUrl";
    /** Key: user account used for web-service calls. */
    public static final String WEBSERVICE_USERNAME = "webServiceUserAccount";
    /** Key: password of the web-service user account. */
    public static final String WEBSERVICE_PASSWORD = "webServiceUserPassword";
    /** Key: target server(s) used when replaying events. */
    public static final String REPLAY_TARGET_SERVERS = "replayTargetServers";
    /** Key: maximum number of entries returned by a search. */
    public static final String SEARCH_RESULT_SET_SIZE = "resultSetSize";
    /** Key: interval, in minutes, between notifications. */
    public static final String NOTIFICATION_INTERVAL_MINUTES = "notificationIntervalMinutes";
}
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.autonomy.aci.client.annotations.testobjects.bugs.subclass;
import com.autonomy.aci.client.annotations.IdolDocument;
import com.autonomy.aci.client.annotations.IdolField;
import com.autonomy.aci.client.annotations.IdolProcessorField;
import java.util.LinkedList;
import java.util.List;
@IdolDocument("autn:cluster")
public class NewsResultCluster {
private String title;
private String category;
private float score;
private final List<String> terms = new LinkedList<>();
private final List<NewsResultDocument> documents = new LinkedList<>();
public String getCategory() {
return category;
}
public List<NewsResultDocument> getDocuments() {
return documents;
}
public float getScore() {
return score;
}
public List<String> getTerms() {
return terms;
}
public String getTitle() {
return title;
}
@IdolField("autn:title")
public NewsResultCluster setTitle(final String value) {
this.title = value;
return this;
}
public NewsResultCluster setCategory(final String value) {
this.category = value;
return this;
}
@IdolField("autn:score")
public NewsResultCluster setScore(final float value) {
this.score = value;
return this;
}
@IdolField("autn:term")
public NewsResultCluster addTerm(final String value) {
this.terms.add(value);
return this;
}
@IdolProcessorField("autn:doc")
public NewsResultCluster addDocument(final NewsResultDocument value) {
this.documents.add(value);
return this;
}
}
|
package net.minecraft.world.biome;
import net.minecraft.entity.EntityClassification;
import net.minecraft.entity.EntityType;
import net.minecraft.world.gen.GenerationStage;
import net.minecraft.world.gen.feature.Feature;
import net.minecraft.world.gen.feature.IFeatureConfig;
import net.minecraft.world.gen.feature.structure.MineshaftConfig;
import net.minecraft.world.gen.feature.structure.MineshaftStructure;
import net.minecraft.world.gen.placement.ChanceConfig;
import net.minecraft.world.gen.placement.FrequencyConfig;
import net.minecraft.world.gen.placement.Placement;
import net.minecraft.world.gen.surfacebuilders.SurfaceBuilder;
public final class SunflowerPlainsBiome extends Biome {
    protected SunflowerPlainsBiome() {
        // Base biome properties: plains-category, rainy, flat (low depth/scale), temperate,
        // with standard plains water colors; "plains" is the parent for variant lookup.
        super((new Biome.Builder()).surfaceBuilder(SurfaceBuilder.DEFAULT, SurfaceBuilder.GRASS_DIRT_GRAVEL_CONFIG).precipitation(Biome.RainType.RAIN).category(Biome.Category.PLAINS).depth(0.125F).scale(0.05F).temperature(0.8F).downfall(0.4F).waterColor(4159204).waterFogColor(329011).parent("plains"));
        // Structures that may generate in this biome.
        this.addStructure(Feature.MINESHAFT.withConfiguration(new MineshaftConfig(0.004D, MineshaftStructure.Type.NORMAL)));
        this.addStructure(Feature.STRONGHOLD.withConfiguration(IFeatureConfig.NO_FEATURE_CONFIG));
        // Shared default feature sets; registration order matters for generation stages.
        DefaultBiomeFeatures.addCarvers(this);
        DefaultBiomeFeatures.addStructures(this);
        DefaultBiomeFeatures.addLakes(this);
        DefaultBiomeFeatures.addMonsterRooms(this);
        DefaultBiomeFeatures.addPlainsTallGrass(this);
        // Signature feature of this biome variant: sunflower patches.
        this.addFeature(GenerationStage.Decoration.VEGETAL_DECORATION, Feature.RANDOM_PATCH.withConfiguration(DefaultBiomeFeatures.SUNFLOWER_CONFIG).withPlacement(Placement.COUNT_HEIGHTMAP_32.configure(new FrequencyConfig(10))));
        DefaultBiomeFeatures.addStoneVariants(this);
        DefaultBiomeFeatures.addOres(this);
        DefaultBiomeFeatures.addSedimentDisks(this);
        DefaultBiomeFeatures.addOakTreesFlowersGrass(this);
        this.addFeature(GenerationStage.Decoration.VEGETAL_DECORATION, Feature.RANDOM_PATCH.withConfiguration(DefaultBiomeFeatures.SUGAR_CANE_CONFIG).withPlacement(Placement.COUNT_HEIGHTMAP_DOUBLE.configure(new FrequencyConfig(10))));
        DefaultBiomeFeatures.addMushrooms(this);
        // Pumpkins are rare: 1-in-32 chance placement rather than a fixed count.
        this.addFeature(GenerationStage.Decoration.VEGETAL_DECORATION, Feature.RANDOM_PATCH.withConfiguration(DefaultBiomeFeatures.PUMPKIN_PATCH_CONFIG).withPlacement(Placement.CHANCE_HEIGHTMAP_DOUBLE.configure(new ChanceConfig(32))));
        DefaultBiomeFeatures.addSprings(this);
        DefaultBiomeFeatures.addFreezeTopLayer(this);
        // Mob spawn lists: entries are (type, weight, min group size, max group size).
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.SHEEP, 12, 4, 4));
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.PIG, 10, 4, 4));
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.CHICKEN, 10, 4, 4));
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.COW, 8, 4, 4));
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.HORSE, 5, 2, 6));
        this.addSpawn(EntityClassification.CREATURE, new Biome.SpawnListEntry(EntityType.DONKEY, 1, 1, 3));
        this.addSpawn(EntityClassification.AMBIENT, new Biome.SpawnListEntry(EntityType.BAT, 10, 8, 8));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.SPIDER, 100, 4, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.ZOMBIE, 95, 4, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.ZOMBIE_VILLAGER, 5, 1, 1));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.SKELETON, 100, 4, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.CREEPER, 100, 4, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.SLIME, 100, 4, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.ENDERMAN, 10, 1, 4));
        this.addSpawn(EntityClassification.MONSTER, new Biome.SpawnListEntry(EntityType.WITCH, 5, 1, 1));
    }
}
|
/*
* <!--
* ~ Copyright 2015-2017 OpenCB
* ~
* ~ Licensed under the Apache License, Version 2.0 (the "License");
* ~ you may not use this file except in compliance with the License.
* ~ You may obtain a copy of the License at
* ~
* ~ http://www.apache.org/licenses/LICENSE-2.0
* ~
* ~ Unless required by applicable law or agreed to in writing, software
* ~ distributed under the License is distributed on an "AS IS" BASIS,
* ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* ~ See the License for the specific language governing permissions and
* ~ limitations under the License.
* -->
*
*/
package org.opencb.biodata.tools.variant.normalizer.extensions;
import htsjdk.variant.vcf.*;
import org.apache.commons.lang3.StringUtils;
import org.opencb.biodata.models.variant.StudyEntry;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.VariantFileMetadata;
import org.opencb.biodata.models.variant.avro.FileEntry;
import org.opencb.biodata.models.variant.avro.SampleEntry;
import org.opencb.biodata.models.variant.metadata.VariantFileHeaderComplexLine;
import org.opencb.biodata.tools.variant.converters.avro.VCFHeaderToVariantFileHeaderConverter;
import org.opencb.commons.utils.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Normalizer extension that loads per-variant custom annotations from a sidecar file
 * (the variant file's path suffixed with {@code .custom.annotation.txt}) and injects
 * each variant's values either as file-level (INFO) or sample-level (FORMAT) data,
 * depending on the "##" header lines found in the sidecar file.
 */
public class CustomNormalizerExtension extends VariantNormalizerExtension {

    // Raw "##..." header lines read from the sidecar file.
    private List<String> header;
    // Maps a normalized variant id (Variant#toStringSimple) to its "key=value[;key=value...]" payload.
    private Map<String, String> variantValuesMap;
    // True only when the sidecar file exists and parsed successfully.
    private boolean isVCustomFileValid;
    // Set when the header declares INFO lines: values are written as file data.
    private boolean normalizeFile;
    // Set when the header declares FORMAT lines: values are written as sample data.
    private boolean normalizeSample;

    public static final String CUSTOM_FILE_EXTENSION = ".custom.annotation.txt";

    private static Logger logger = LoggerFactory.getLogger(CustomNormalizerExtension.class);

    public CustomNormalizerExtension() {
    }

    /**
     * Locates and parses the sidecar annotation file. On any parse failure the
     * extension is flagged invalid and the IOException is rethrown unchecked.
     */
    @Override
    public void init() {
        // Custom annotation file must end with ".custom.annotation.txt"
        Path customFilePath = Paths.get(fileMetadata.getPath() + CUSTOM_FILE_EXTENSION);
        if (!Files.exists(customFilePath)) {
            // File doesn't exist. Skip using extension
            logger.info("Not using " + getClass().getSimpleName() + " as file {} does not exist.", customFilePath);
            isVCustomFileValid = false;
            return;
        }
        try {
            FileUtils.checkFile(customFilePath);
            // Store the INFO and FORMAT lines
            header = new ArrayList<>();
            variantValuesMap = new HashMap<>();
            int lines = 0;
            // Init valid variable and check everything is good
            isVCustomFileValid = true;
            try (BufferedReader bufferedReader = FileUtils.newBufferedReader(customFilePath)) {
                String line = bufferedReader.readLine();
                // NOTE(review): the loop stops at the first empty line, so any content after
                // a blank line is silently ignored -- confirm this is intentional.
                while (StringUtils.isNotEmpty(line)) {
                    lines++;
                    if (line.startsWith("##")) {
                        // Header line: kept verbatim for normalizeHeader().
                        header.add(line);
                    } else {
                        // Data line: exactly two tab-separated columns, variant id and payload.
                        String[] split = line.split("\t");
                        if (split.length == 2) {
                            variantValuesMap.put(split[0], split[1]);
                        } else {
                            throw new IOException("Malformed custom normalization file " + customFilePath + " in line: " + lines);
                        }
                    }
                    // read next line
                    line = bufferedReader.readLine();
                }
            }
            // Header is mandatory
            if (header.isEmpty()) {
                throw new IOException("Missing header in custom normalization file " + customFilePath);
            } else {
                // Decide the target (file vs sample) from the declared header line types.
                for (String line : header) {
                    normalizeFile |= line.startsWith(VCFConstants.INFO_HEADER_START);
                    normalizeSample |= line.startsWith(VCFConstants.FORMAT_HEADER_START);
                }
                // INFO and FORMAT lines are mutually exclusive within one sidecar file.
                if (normalizeFile && normalizeSample) {
                    throw new IOException("Unable to mix FORMAT and INFO in the same custom normalizer extension file.");
                }
            }
        } catch (IOException e) {
            isVCustomFileValid = false;
            throw new UncheckedIOException(e);
        }
    }

    /** The extension is usable only when the sidecar file was parsed successfully. */
    @Override
    protected boolean canUseExtension(VariantFileMetadata fileMetadata) {
        return isVCustomFileValid;
    }

    /**
     * Converts each sidecar "##INFO"/"##FORMAT" header line into a complex header line
     * and appends it to the variant file metadata; other header lines are ignored.
     */
    @Override
    protected void normalizeHeader(VariantFileMetadata fileMetadata) {
        for (String line : header) {
            VCFCompoundHeaderLine vcfCompoundHeaderLine;
            if (line.startsWith(VCFConstants.INFO_HEADER_START)) {
                // "+ 1" skips the '=' following the "##INFO" prefix.
                vcfCompoundHeaderLine =
                        new VCFInfoHeaderLine(line.substring(VCFConstants.INFO_HEADER_START.length() + 1), VCFHeaderVersion.VCF4_2);
            } else if (line.startsWith(VCFConstants.FORMAT_HEADER_START)) {
                vcfCompoundHeaderLine =
                        new VCFFormatHeaderLine(line.substring(VCFConstants.FORMAT_HEADER_START.length() + 1), VCFHeaderVersion.VCF4_2);
            } else {
                logger.info("Ignore custom header line: " + line);
                continue;
            }
            VariantFileHeaderComplexLine newSampleMetadataLine = VCFHeaderToVariantFileHeaderConverter
                    .convertComplexLine(vcfCompoundHeaderLine);
            fileMetadata.getHeader().getComplexLines().add(newSampleMetadataLine);
        }
    }

    /**
     * Adds this variant's custom "key=value" pairs as file-level data
     * (only active when the sidecar header declared INFO lines).
     */
    @Override
    protected void normalizeFile(Variant variant, StudyEntry study, FileEntry file) {
        if (normalizeFile) {
            String value = getVariantValue(variant, file);
            if (value != null) {
                String[] split = value.split(";");
                for (String s : split) {
                    // NOTE(review): keyValue[1] throws ArrayIndexOutOfBoundsException when an
                    // entry has no '=' -- confirm the sidecar format guarantees "key=value".
                    String[] keyValue = s.split("=");
                    study.addFileData(file.getFileId(), keyValue[0], keyValue[1]);
                }
            }
        }
    }

    /**
     * Adds this variant's custom "key=value" pairs as per-sample data
     * (only active when the sidecar header declared FORMAT lines).
     */
    @Override
    protected void normalizeSample(Variant variant, StudyEntry study, FileEntry file, String sampleId, SampleEntry sample) {
        if (normalizeSample) {
            String value = getVariantValue(variant, file);
            if (value != null) {
                String[] split = value.split(";");
                for (String s : split) {
                    // NOTE(review): same unchecked keyValue[1] access as in normalizeFile -- see note there.
                    String[] keyValue = s.split("=");
                    study.addSampleDataKey(keyValue[0]);
                    study.addSampleData(sampleId, keyValue[0], keyValue[1]);
                }
            }
        }
    }

    /**
     * Looks up the sidecar payload for this variant, preferring the original (pre-normalization)
     * variant id from the file entry's call when available.
     */
    private String getVariantValue(Variant variant, FileEntry file) {
        String variantId;
        if (file.getCall() == null || file.getCall().getVariantId() == null) {
            variantId = variant.toStringSimple();
        } else {
            variantId = new Variant(file.getCall().getVariantId()).toStringSimple();
        }
        return variantValuesMap.get(variantId);
    }
}
|
package io.pivotal.workshop.domain;
/**
 * Simple mutable value holder for a piece of marketing copy:
 * a short header plus a longer description.
 */
public class Marketing {

    private String header;
    private String description;

    /**
     * Creates a marketing entry.
     *
     * @param header the headline text
     * @param description the descriptive text
     */
    public Marketing(String header, String description) {
        this.header = header;
        this.description = description;
    }

    /** @return the headline text */
    public String getHeader() {
        return this.header;
    }

    /** @param header the new headline text */
    public void setHeader(String header) {
        this.header = header;
    }

    /** @return the descriptive text */
    public String getDescription() {
        return this.description;
    }

    /** @param description the new descriptive text */
    public void setDescription(String description) {
        this.description = description;
    }
}
|
package com.voipfuture.connectfour.algorithms;
import com.voipfuture.connectfour.GameState;
import com.voipfuture.connectfour.IInputProvider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Random;
/**
* A computer player that just does a random (but valid) move.
* @author tgierke
*/
public class DummyPlayer
    implements IInputProvider
{
    private final Random rng = new Random();

    /**
     * Picks a uniformly random legal move, or none when the board is full.
     *
     * @param gameState current game state
     * @return a move for the current player, or empty if no column has space
     */
    @Override
    public Optional<InputEvent> readInput(GameState gameState)
    {
        // Gather every column that can still accept a token.
        final List<Integer> openColumns = new ArrayList<>();
        final int width = gameState.board.width;
        for (int column = 0; column < width; column++)
        {
            if (gameState.board.hasSpaceInColumn(column))
            {
                openColumns.add(column);
            }
        }

        if (openColumns.isEmpty())
        {
            // Board is completely full: no legal move exists.
            return Optional.empty();
        }

        // Shuffle and take the head of the list to select a random legal column.
        Collections.shuffle(openColumns, rng);
        final int chosenColumn = openColumns.get(0);
        return Optional.of(new MoveEvent(gameState.currentPlayer(), chosenColumn));
    }
}
|
package net.datenstrudel.bulbs.core.application.services;
import net.datenstrudel.bulbs.core.domain.model.bulb.Bulb;
import net.datenstrudel.bulbs.core.domain.model.bulb.BulbBridge;
import net.datenstrudel.bulbs.core.domain.model.bulb.BulbBridgeId;
import net.datenstrudel.bulbs.core.domain.model.bulb.BulbId;
import net.datenstrudel.bulbs.core.domain.model.identity.BulbsContextUserId;
import net.datenstrudel.bulbs.core.domain.model.identity.BulbsPrincipal;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbBridgeAddress;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbState;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbsPlatform;
/**
* Describes application service capabilities regarding {@link BulbBridge}s for core internal
* use cases only (such as reacting to domain events).
* @author Thomas Wendzinski
*/
public interface IBulbBridgeAdminServiceInternal {
    //~ Member(s) //////////////////////////////////////////////////////////////
    //~ Method(s) //////////////////////////////////////////////////////////////
    /**
     * Update the internal state of a {@link Bulb}. Supposed to be invoked _after_
     * the state was applied to the actual hardware. This method makes sure that the
     * state that was applied to the hardware is synchronized with the model that represents
     * the hardware.
     * @param bulbId identifier of the bulb whose model state is updated
     * @param state the state that was already applied to the hardware
     */
    public void updateBulbStateInternal(BulbId bulbId, BulbState state);
    /**
     * Synchronize the model of the given bridge with the actual hardware state,
     * authenticating against the hardware with the given principal.
     * @param bridgeId identifier of the bridge to synchronize
     * @param principal principal used to access the bridge hardware
     */
    public void syncToHardwareStateInternal(BulbBridgeId bridgeId, BulbsPrincipal principal);
    /**
     * Make sure that all {@link net.datenstrudel.bulbs.core.domain.model.identity.BulbsPrincipal}s
     * ,belonging to the bridge with its given <code>bridgeId</code>, are deleted from the
     * actual underlying hardware.
     * @param userId id of the user owning the deleted bridge
     * @param bridgeId id of the bridge that was deleted
     * @param address network address of the bridge hardware
     * @param platform the platform (vendor/protocol) the bridge belongs to
     */
    public void removeAllBulbsPrincipalsAfterBridgeDeletion(
            final BulbsContextUserId userId, final BulbBridgeId bridgeId, BulbBridgeAddress address, BulbsPlatform platform);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.servlet.jsp;
/**
* Contains information about an error, for error pages. The information
* contained in this instance is meaningless if not used in the context of an
* error page. To indicate a JSP is an error page, the page author must set the
* isErrorPage attribute of the page directive to "true".
*
* @see PageContext#getErrorData
* @since 2.0
*/
public final class ErrorData {

    private final Throwable throwable;
    private final int statusCode;
    private final String uri;
    private final String servletName;

    /**
     * Builds an immutable snapshot of the information an error page needs.
     *
     * @param throwable cause of the error
     * @param statusCode HTTP status code of the error
     * @param uri request URI that triggered the error
     * @param servletName name of the servlet that was invoked
     */
    public ErrorData(Throwable throwable, int statusCode, String uri, String servletName) {
        this.throwable = throwable;
        this.statusCode = statusCode;
        this.uri = uri;
        this.servletName = servletName;
    }

    /** @return the {@link Throwable} that caused the error */
    public Throwable getThrowable() {
        return throwable;
    }

    /** @return the HTTP status code of the error */
    public int getStatusCode() {
        return statusCode;
    }

    /** @return the request URI */
    public String getRequestURI() {
        return uri;
    }

    /** @return the name of the servlet invoked */
    public String getServletName() {
        return servletName;
    }
}
|
package org.liveontologies.pinpointing;
/*-
* #%L
* Axiom Pinpointing Experiments
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2017 - 2018 Live Ontologies Project
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.liveontologies.puli.Inference;
import org.liveontologies.puli.InferenceJustifier;
import org.liveontologies.puli.Proof;
import org.liveontologies.puli.pinpointing.InterruptMonitor;
import org.liveontologies.puli.pinpointing.MinimalSubsetEnumerator;
import org.liveontologies.puli.pinpointing.MinimalSubsetEnumerator.Factory;
import org.liveontologies.puli.pinpointing.MinimalSubsetsFromProofs;
import org.liveontologies.puli.pinpointing.PriorityComparators;
/**
 * Collects minimal subsets (e.g. justifications) enumerated for a query, keeping only
 * those whose size does not exceed a given limit.
 *
 * @param <C> the type of conclusions
 * @param <I> the type of inferences over the conclusions
 * @param <A> the type of axioms appearing in the enumerated subsets
 */
public class MinimalSubsetCollector<C, I extends Inference<? extends C>, A> {
    // Factory producing per-query enumerators over minimal subsets.
    private final Factory<C, A> enumeratorFactory_;
    // Monitor handed to the factory so collect() can abort enumeration early.
    private final CancellableMonitor monitor_ = new CancellableMonitor();
    public MinimalSubsetCollector(
            final MinimalSubsetsFromProofs.Factory<C, I, A> factory,
            final Proof<? extends I> proof,
            final InferenceJustifier<? super I, ? extends Set<? extends A>> justifier) {
        this.enumeratorFactory_ = factory.create(proof, justifier, monitor_);
    }
    /**
     * Enumerates minimal subsets for the query and returns those of size <= sizeLimit.
     *
     * @param query the conclusion whose minimal subsets are collected
     * @param sizeLimit maximum subset size to keep; values <= 0 mean "no limit"
     * @return the collected subsets
     */
    public Collection<? extends Set<A>> collect(final C query,
            final int sizeLimit) {
        final int limit = sizeLimit <= 0 ? Integer.MAX_VALUE : sizeLimit;
        final List<Set<A>> sets = new ArrayList<>();
        final MinimalSubsetEnumerator.Listener<A> listener = new MinimalSubsetEnumerator.Listener<A>() {
            @Override
            public void newMinimalSubset(final Set<A> set) {
                if (set.size() <= limit) {
                    sets.add(set);
                } else {
                    // Enumeration is by increasing cardinality (see the comparator below),
                    // so the first over-limit subset means all later ones are too big: stop.
                    monitor_.cancel();
                }
            }
        };
        enumeratorFactory_.newEnumerator(query).enumerate(listener,
                PriorityComparators.<A> cardinality());
        return sets;
    }
    /** Collects all minimal subsets for the query without a size limit. */
    public Collection<? extends Set<A>> collect(final C query) {
        return collect(query, Integer.MAX_VALUE);
    }
    /**
     * Interrupt monitor that collect() can trip to stop the enumerator early.
     * The flag is volatile because the enumerator may poll it from another thread.
     */
    private static class CancellableMonitor implements InterruptMonitor {
        private volatile boolean cancelled_ = false;
        @Override
        public boolean isInterrupted() {
            return cancelled_;
        }
        public void cancel() {
            cancelled_ = true;
        }
    }
}
|
package com.capitalone.dashboard.service;
import com.capitalone.dashboard.model.CodeQuality;
import com.capitalone.dashboard.model.DataResponse;
import com.capitalone.dashboard.request.CodeQualityRequest;
/**
 * Service for querying {@link CodeQuality} results collected by the
 * dashboard. Implementations translate the request criteria into a
 * repository query and wrap the matches in a {@link DataResponse}.
 */
public interface CodeQualityService {

    /**
     * Finds all of the CodeQuality data matching the specified request criteria.
     *
     * @param request search criteria
     * @return quality data matching criteria
     */
    DataResponse<Iterable<CodeQuality>> search(CodeQualityRequest request);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.shared.kerberos.components;
import java.util.Arrays;
import org.apache.directory.api.util.Strings;
import org.apache.directory.shared.kerberos.codec.types.AuthorizationType;
/**
* The class storing the individual AuthorizationDatas
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class AuthorizationDataEntry
{
    /** the type of authorization data */
    private AuthorizationType adType;

    /** the authorization data (defensively copied on the way in and out) */
    private byte[] adData;


    /**
     * Creates a new instance of AD entry
     */
    public AuthorizationDataEntry()
    {
    }


    /**
     * Creates a new Instance of AD entry
     *
     * @param adType The AuthorizationData type
     * @param adData The AuthorizationData data; a copy is stored. NOTE: unlike
     *               {@link #setAdData(byte[])}, a null argument leaves the
     *               stored data null rather than empty (preserved behavior).
     */
    public AuthorizationDataEntry( AuthorizationType adType, byte[] adData )
    {
        this.adType = adType;

        if ( adData != null )
        {
            this.adData = Arrays.copyOf( adData, adData.length );
        }
    }


    /**
     * @return the adType
     */
    public AuthorizationType getAdType()
    {
        return adType;
    }


    /**
     * @param adType the adType to set
     */
    public void setAdType( AuthorizationType adType )
    {
        this.adType = adType;
    }


    /**
     * @return a copy of adData, or an empty array if no data is stored
     */
    public byte[] getAdData()
    {
        if ( Strings.isEmpty( adData ) )
        {
            return Strings.EMPTY_BYTES;
        }
        else
        {
            return Arrays.copyOf( adData, adData.length );
        }
    }


    /**
     * @return the reference on adData (no copy; caller must not mutate it)
     */
    public byte[] getAdDataRef()
    {
        return adData;
    }


    /**
     * @param adData the adData to set; a copy is stored, null/empty input is
     *               normalized to an empty array
     */
    public void setAdData( byte[] adData )
    {
        if ( Strings.isEmpty( adData ) )
        {
            this.adData = Strings.EMPTY_BYTES;
        }
        else
        {
            this.adData = Arrays.copyOf( adData, adData.length );
        }
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 17;
        result = prime * result + Arrays.hashCode( adData );
        result = prime * result + ( ( adType == null ) ? 0 : adType.hashCode() );
        return result;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals( Object obj )
    {
        if ( this == obj )
        {
            return true;
        }

        if ( !( obj instanceof AuthorizationDataEntry ) )
        {
            return false;
        }

        AuthorizationDataEntry other = ( AuthorizationDataEntry ) obj;

        if ( !Arrays.equals( adData, other.adData ) )
        {
            return false;
        }

        return adType == other.adType;
    }


    /**
     * Pretty-prints this entry with the given indentation prefix.
     *
     * @param tabs indentation prepended to every line
     * @return a multi-line representation of this entry
     */
    public String toString( String tabs )
    {
        StringBuilder sb = new StringBuilder();

        sb.append( tabs ).append( "AuthorizationDataEntry : {\n" );
        sb.append( tabs ).append( "    adType : " ).append( adType ).append( "\n" );
        sb.append( tabs ).append( "    adData : " ).append( Strings.dumpBytes( adData ) ).append( "\n" );
        sb.append( tabs ).append( "}" );

        return sb.toString();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return toString( "" );
    }
}
|
package com.direwolf20.buildinggadgets.common.tools;
import com.direwolf20.buildinggadgets.common.items.pastes.ConstructionPasteContainer;
import com.direwolf20.buildinggadgets.common.items.pastes.GenericPasteContainer;
import net.minecraft.client.renderer.ItemMeshDefinition;
import net.minecraft.client.renderer.block.model.ModelResourceLocation;
import net.minecraft.item.ItemStack;
/**
 * Chooses the container item model variant based on how full the paste
 * container is: base model, quarter, half, three-quarter, or full.
 */
public class PasteContainerMeshDefinition implements ItemMeshDefinition {
    @Override
    public ModelResourceLocation getModelLocation(ItemStack stack) {
        int pasteAmt = ConstructionPasteContainer.getPasteAmount(stack);
        int maxCapacity = ((GenericPasteContainer) stack.getItem()).getMaxCapacity();
        // Below one quarter: base (empty) model, keyed by the raw registry name.
        if (pasteAmt < maxCapacity / 4) {
            return new ModelResourceLocation(stack.getItem().getRegistryName(), "inventory");
        }
        // The original chain re-tested the lower bound of each range
        // (e.g. pasteAmt >= maxCapacity / 4), which is always true once the
        // previous branch failed; the redundant checks are removed here.
        String suffix;
        if (pasteAmt < maxCapacity / 2) {
            suffix = "-quarter";
        } else if (pasteAmt < maxCapacity * 3 / 4) {
            suffix = "-half";
        } else if (pasteAmt < maxCapacity) {
            suffix = "-3quarter";
        } else {
            suffix = "-full";
        }
        return new ModelResourceLocation(stack.getItem().getRegistryName() + suffix, "inventory");
    }
}
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.dataexchange;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.dom4j.Element;
import org.unitime.timetable.model.Advisor;
import org.unitime.timetable.model.DepartmentalInstructor;
import org.unitime.timetable.model.Roles;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.Student;
import org.unitime.timetable.model.StudentSectioningQueue;
import org.unitime.timetable.model.dao.StudentDAO;
/**
* @author Tomas Muller
*/
public class StudentAdvisorsImport extends BaseImport {

    /**
     * Loads student advisors from a {@code studentAdvisors} XML root element
     * for the session identified by the campus/year/term attributes.
     * In incremental mode, advisors missing from the XML are kept; otherwise
     * they are deleted. Students whose advisor links changed are queued for
     * re-sectioning. Runs inside a single transaction; any failure rolls back.
     */
    public void loadXml(Element root) throws Exception {
        if (!root.getName().equalsIgnoreCase("studentAdvisors")) {
            throw new Exception("Given XML file is not student groups load file.");
        }
        try {
            beginTransaction();

            String campus = root.attributeValue("campus");
            String year = root.attributeValue("year");
            String term = root.attributeValue("term");
            boolean incremental = "true".equalsIgnoreCase(root.attributeValue("incremental", "false"));
            if (incremental)
                info("Incremental mode.");

            Session session = Session.getSessionUsingInitiativeYearTerm(campus, year, term);
            if(session == null) {
                throw new Exception("No session found for the given campus, year, and term.");
            }

            // Existing advisors for the session; entries still present after the
            // loop are the ones missing from the XML (deletion candidates).
            Map<String, Advisor> id2advisor = new Hashtable<String, Advisor>();
            for (Advisor advisor: (List<Advisor>)getHibSession().createQuery(
                    "from Advisor where session.uniqueId=:sessionId").setLong("sessionId", session.getUniqueId()).list()) {
                id2advisor.put(advisor.getExternalUniqueId(), advisor);
            }

            // Role lookup by reference (e.g. "Advisor").
            Map<String, Roles> ref2role = new Hashtable<String, Roles>();
            for (Roles role: Roles.findAll(true, getHibSession())) {
                ref2role.put(role.getReference(), role);
            }

            // Students of the session, keyed by external id.
            Map<String, Student> id2student = new Hashtable<String, Student>();
            for (Student student: StudentDAO.getInstance().findBySession(getHibSession(), session.getUniqueId())) {
                if (student.getExternalUniqueId() != null)
                    id2student.put(student.getExternalUniqueId(), student);
            }

            // Unique ids of students whose advisor assignment changed.
            Set<Long> studentIds = new HashSet<Long>();

            for (Iterator i = root.elementIterator("studentAdvisor"); i.hasNext(); ) {
                Element element = (Element)i.next();

                String externalId = element.attributeValue("externalId");
                // remove(): matched advisors drop out of the deletion set.
                Advisor advisor = id2advisor.remove(externalId);
                if (advisor == null) {
                    advisor = new Advisor();
                    advisor.setSession(session);
                    advisor.setExternalUniqueId(externalId);
                }
                // NOTE(review): this replaces the advisor's student set with an
                // empty one BEFORE the updateStudents diff below reads it, so
                // the "students to remove" map built from advisor.getStudents()
                // is always empty for pre-existing advisors — confirm whether
                // the previous links should be loaded first.
                advisor.setStudents(new HashSet<Student>());
                advisor.setFirstName(element.attributeValue("firstName"));
                advisor.setMiddleName(element.attributeValue("middleName"));
                advisor.setLastName(element.attributeValue("lastName"));
                advisor.setEmail(element.attributeValue("email"));
                advisor.setAcademicTitle(element.attributeValue("acadTitle"));
                advisor.setRole(ref2role.get(element.attributeValue("role", "Advisor")));
                if (advisor.getRole() == null) {
                    warn("Advisor role " + element.attributeValue("role", "Advisor") + " does not exist."); continue;
                }

                info("Advisor " + (advisor.hasName() ? advisor.getName(DepartmentalInstructor.sNameFormatLastFirstMiddle) + " (" + externalId + ")" : externalId) + (advisor.getUniqueId() == null ? " created." : " updated."));

                // Optional <updateStudents> child: sync the advisor's student
                // links to exactly the listed students.
                Element updateStudentsEl = element.element("updateStudents");
                if (updateStudentsEl != null) {
                    Hashtable<String, Student> students = new Hashtable<String, Student>();
                    for (Student s: advisor.getStudents())
                        students.put(s.getExternalUniqueId(), s);
                    for (Iterator j = updateStudentsEl.elementIterator("student"); j.hasNext(); ) {
                        Element studentEl = (Element)j.next();
                        String extId = studentEl.attributeValue("externalId");
                        if (extId == null) {
                            warn("A student has no external id.");
                            continue;
                        }
                        // Already linked: keep, and drop from the removal map.
                        if (students.remove(extId) != null) continue;
                        Student student = id2student.get(extId);
                        if (student == null) {
                            warn("Student " + extId + " does not exist.");
                            continue;
                        }
                        // Always true here (the null case continues above);
                        // kept as in the original.
                        if (student != null) {
                            advisor.getStudents().add(student);
                            student.getAdvisors().add(advisor);
                            studentIds.add(student.getUniqueId());
                        }
                    }
                    // Anything left in the map was linked before but is not in
                    // the XML any more: unlink both sides.
                    if (!students.isEmpty()) {
                        for (Student student: students.values()) {
                            student.getAdvisors().remove(advisor);
                            studentIds.add(student.getUniqueId());
                        }
                        advisor.getStudents().removeAll(students.values());
                    }
                }

                getHibSession().saveOrUpdate(advisor);
            }

            // Full (non-incremental) load: delete advisors absent from the XML
            // and unlink their students.
            if (!incremental)
                for (Advisor advisor: id2advisor.values()) {
                    info("Advisor " + (advisor.hasName() ? advisor.getName(DepartmentalInstructor.sNameFormatLastFirstMiddle) + " (" + advisor.getExternalUniqueId() + ")" : advisor.getExternalUniqueId()) + " deleted.");
                    if (advisor.getStudents() != null)
                        for (Student student: advisor.getStudents()) {
                            studentIds.add(student.getUniqueId());
                            student.getAdvisors().remove(advisor);
                        }
                    getHibSession().delete(advisor);
                }

            // Notify the sectioning queue about all affected students.
            if (!studentIds.isEmpty()) {
                StudentSectioningQueue.studentChanged(getHibSession(), null, session.getUniqueId(), studentIds);
            }

            commitTransaction();
        } catch (Exception e) {
            fatal("Exception: " + e.getMessage(), e);
            rollbackTransaction();
            throw e;
        }
    }
}
|
package beny.spring.service;
import beny.spring.model.CurrentUser;
/**
 * Authorization checks for the currently authenticated user.
 */
public interface CurrentUserService {

    /**
     * Decides whether the given authenticated user may access the data of the
     * user with the supplied id.
     *
     * @param currentUser the authenticated user performing the access
     * @param userId id of the user whose data is being accessed
     * @return true if access is permitted
     */
    boolean canAccessUser(CurrentUser currentUser, Long userId);
}
|
package main;
import java.io.Serializable;
import java.util.ArrayList;
/**
* Created by losoliveirasilva on 7/4/16.
*/
public class GreenhouseFileInfo implements Serializable {

    private static final long serialVersionUID = 1L;

    // Stand entries in insertion order, accumulated via add().
    private ArrayList<StandFileInfo> stands;

    public GreenhouseFileInfo(){
        super();
        // Fixed: was a raw-type `new ArrayList()`; use the diamond operator.
        stands = new ArrayList<>();
    }

    /**
     * Appends a stand entry.
     *
     * @param s the stand to add
     */
    public void add(StandFileInfo s){
        stands.add(s);
    }

    /**
     * Returns a snapshot array of all stands added so far, in insertion
     * order. Replaces the original manual copy loop with
     * {@link java.util.List#toArray(Object[])}.
     *
     * @return the stands as an array (never null)
     */
    public StandFileInfo[] get(){
        return stands.toArray(new StandFileInfo[0]);
    }
}
|
package com.tszh.intercepter;
import com.tszh.entity.Role;
import com.tszh.entity.User;
import org.apache.commons.lang3.StringUtils;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Created by Administrator on 2018/4/27 0027.
*/
public class AuthInterceptor implements HandlerInterceptor {

    /**
     * Lets the request through only for whitelisted paths (login, root) or
     * when the current user carries the "user" or "admin" role.
     */
    @Override
    public boolean preHandle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o) throws Exception {
        String servletPath = httpServletRequest.getServletPath();
        // Paths that never require authentication.
        if (StringUtils.equals(servletPath, "/login") || StringUtils.equals(servletPath, "/login/") ||
                StringUtils.equals(servletPath, "/"))
            return true;
        // NOTE(review): the user is read from request attributes; confirm it is
        // not meant to come from the HTTP session instead.
        User user = (User) httpServletRequest.getAttribute("currentUser");
        if (user != null) {
            Role role = user.getRole();
            // Fixed: the original used `role != null || StringUtils.equals(...)`,
            // which (a) allowed ANY non-null role regardless of its name and
            // (b) threw a NullPointerException when role was null (the right
            // operand dereferenced role). The role must be non-null AND be
            // either "user" or "admin".
            return role != null
                    && (StringUtils.equals(role.getRoleName(), "user")
                        || StringUtils.equals(role.getRoleName(), "admin"));
        }
        return false;
    }

    @Override
    public void postHandle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o, ModelAndView modelAndView) throws Exception {
        // No post-processing required.
    }

    @Override
    public void afterCompletion(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o, Exception e) throws Exception {
        // Nothing to clean up.
    }
}
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions;
import javax.annotation.Nonnull;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrTypeElement;
import com.intellij.psi.PsiPolyVariantReference;
/**
* @author ven
*/
public interface GrSafeCastExpression extends GrExpression, PsiPolyVariantReference {

  /**
   * @return the type element of the cast, or null when it is absent
   * (e.g. incomplete code)
   */
  @javax.annotation.Nullable
  GrTypeElement getCastTypeElement();

  /** @return the expression being cast (never null) */
  @Nonnull
  GrExpression getOperand();

  /** @return all resolve candidates for this cast expression */
  @Nonnull
  @Override
  GroovyResolveResult[] multiResolve(final boolean incompleteCode);
}
|
/*
* Copyright 2016-2017 Testify Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.testifyproject.annotation;
import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
* A repeatable meta-annotation for {@link Module}.
*
* @author saden
*/
@Documented
@Retention(RUNTIME)
@Target({ANNOTATION_TYPE, TYPE})
public @interface Modules {

    /**
     * Specifies a list of {@link Module} annotations.
     * Presumably the containing annotation for a repeatable {@code @Module}
     * — confirm {@code Module} declares {@code @Repeatable(Modules.class)}.
     *
     * @return an array of Module annotations.
     */
    Module[] value();

}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.xsort.managed;
import java.io.IOException;
import java.util.List;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RecordBatch.IterOutcome;
import org.apache.drill.exec.record.RecordBatchSizer;
import org.apache.drill.exec.physical.impl.xsort.MSortTemplate;
import org.apache.drill.exec.physical.impl.xsort.managed.BatchGroup.InputBatch;
import org.apache.drill.exec.physical.impl.xsort.managed.SortMemoryManager.MergeTask;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.VectorAccessible;
import org.apache.drill.exec.record.VectorAccessibleUtilities;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorInitializer;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
import org.apache.drill.exec.vector.ValueVector;
import static org.apache.drill.exec.record.RecordBatch.IterOutcome.EMIT;
/**
* Implementation of the external sort which is wrapped into the Drill
* "next" protocol by the {@link ExternalSortBatch} class.
* <p>
* Accepts incoming batches. Sorts each and will spill to disk as needed.
* When all input is delivered, can either do an in-memory merge or a
* merge from disk. If runs spilled, may have to do one or more "consolidation"
* passes to reduce the number of runs to the level that will fit in memory.
*/
public class SortImpl {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExternalSortBatch.class);
/**
* Iterates over the final sorted results. Implemented differently
* depending on whether the results are in-memory or spilled to
* disk.
*/
public interface SortResults {
/**
* Container into which results are delivered. May the
* the original operator container, or may be a different
* one. This is the container that should be sent
* downstream. This is a fixed value for all returned
* results.
* @return
*/
VectorContainer getContainer();
boolean next();
void close();
int getBatchCount();
int getRecordCount();
SelectionVector2 getSv2();
SelectionVector4 getSv4();
void updateOutputContainer(VectorContainer container, SelectionVector4 sv4,
IterOutcome outcome, BatchSchema schema);
}
public static class EmptyResults implements SortResults {
private final VectorContainer dest;
public EmptyResults(VectorContainer dest) {
dest.setRecordCount(0);
dest.buildSchema(SelectionVectorMode.NONE);
this.dest = dest;
}
@Override
public boolean next() { return false; }
@Override
public void close() { }
@Override
public int getBatchCount() { return 0; }
@Override
public int getRecordCount() { return 0; }
@Override
public SelectionVector4 getSv4() { return null; }
@Override
public SelectionVector2 getSv2() { return null; }
@Override
public VectorContainer getContainer() { return dest; }
@Override
public void updateOutputContainer(VectorContainer container, SelectionVector4 sv4,
IterOutcome outcome, BatchSchema schema) {
// First output batch of current schema, populate container with ValueVectors
if (container.getNumberOfColumns() == 0) {
for (MaterializedField field : schema) {
final ValueVector vv = TypeHelper.getNewVector(field, container.getAllocator());
vv.clear();
final ValueVector[] hyperVector = { vv };
container.add(hyperVector, true);
}
container.buildSchema(SelectionVectorMode.FOUR_BYTE);
} // since it's an empty batch no need to do anything in else
sv4.clear();
container.zeroVectors();
container.setRecordCount(0);
}
}
/**
* Return results for a single input batch. No merge is needed;
* the original (sorted) input batch is simply passed as the result.
* Note that this version requires replacing the operator output
* container with the batch container. (Vector ownership transfer
* was already done when accepting the input batch.)
*/
public static class SingleBatchResults implements SortResults {
private boolean done;
private final VectorContainer outputContainer;
private final BatchGroup.InputBatch batch;
public SingleBatchResults(BatchGroup.InputBatch batch, VectorContainer outputContainer) {
this.batch = batch;
this.outputContainer = outputContainer;
}
@Override
public boolean next() {
if (done) {
return false;
}
// The following implementation is wrong. Must transfer buffers,
// not vectors. The output container already contains vectors
// for the output schema.
for (VectorWrapper<?> vw : batch.getContainer()) {
outputContainer.add(vw.getValueVector());
}
outputContainer.buildSchema(SelectionVectorMode.TWO_BYTE);
outputContainer.setRecordCount(batch.getRecordCount());
done = true;
return true;
}
@Override
public void close() {
try {
batch.close();
} catch (IOException e) {
// Should never occur for an input batch
throw new IllegalStateException(e);
}
}
@Override
public int getBatchCount() { return 1; }
@Override
public int getRecordCount() { return outputContainer.getRecordCount(); }
@Override
public SelectionVector4 getSv4() { return null; }
@Override
public SelectionVector2 getSv2() { return batch.getSv2(); }
@Override
public VectorContainer getContainer() { return outputContainer; }
@Override
public void updateOutputContainer(VectorContainer container, SelectionVector4 sv4,
IterOutcome outcome, BatchSchema schema) {
if (outcome == EMIT) {
throw new UnsupportedOperationException("SingleBatchResults for sort with SV2 is currently not supported with" +
" EMIT outcome");
}
// Not used in Sort so don't need to do anything for now
}
}
private final SortConfig config;
private final SortMetrics metrics;
private final SortMemoryManager memManager;
private VectorContainer outputBatch;
private OperatorContext context;
/**
* Memory allocator for this operator itself. Incoming batches are
* transferred into this allocator. Intermediate batches used during
* merge also reside here.
*/
private final BufferAllocator allocator;
private final SpilledRuns spilledRuns;
private final BufferedBatches bufferedBatches;
private RecordBatchSizer sizer;
private VectorInitializer allocHelper;
public SortImpl(OperatorContext opContext, SortConfig sortConfig,
SpilledRuns spilledRuns, VectorContainer batch) {
this.context = opContext;
outputBatch = batch;
this.spilledRuns = spilledRuns;
allocator = opContext.getAllocator();
config = sortConfig;
memManager = new SortMemoryManager(config, allocator.getLimit());
metrics = new SortMetrics(opContext.getStats());
bufferedBatches = new BufferedBatches(opContext);
// Request leniency from the allocator. Leniency
// will reduce the probability that random chance causes the allocator
// to kill the query because of a small, spurious over-allocation.
// long maxMem = memManager.getMemoryLimit();
// long newMax = (long)(maxMem * 1.10);
// allocator.setLimit(newMax);
// logger.debug("Config: Resetting allocator to 10% safety margin: {}", newMax);
boolean allowed = allocator.setLenient();
logger.debug("Config: Is allocator lenient? {}", allowed);
}
@VisibleForTesting
public OperatorContext opContext() { return context; }
public void setSchema(BatchSchema schema) {
bufferedBatches.setSchema(schema);
spilledRuns.setSchema(schema);
}
public boolean forceSpill() {
if (bufferedBatches.size() < 2) {
return false;
}
spillFromMemory();
return true;
}
/**
* Process the converted incoming batch by adding it to the in-memory store
* of data, or spilling data to disk when necessary.
* @param incoming
*/
public void addBatch(VectorAccessible incoming) {
// Skip empty batches (such as the first one.)
if (incoming.getRecordCount() == 0) {
VectorAccessibleUtilities.clear(incoming);
return;
}
// Determine actual sizes of the incoming batch before taking
// ownership. Allows us to figure out if we need to spill first,
// to avoid overflowing memory simply due to ownership transfer.
analyzeIncomingBatch(incoming);
// The heart of the external sort operator: spill to disk when
// the in-memory generation exceeds the allowed memory limit.
// Preemptively spill BEFORE accepting the new batch into our memory
// pool. Although the allocator will allow us to exceed the memory limit
// during the transfer, we immediately follow the transfer with an SV2
// allocation that will fail if we are over the allocation limit.
if (isSpillNeeded(sizer.getActualSize())) {
spillFromMemory();
}
// Sanity check. We should now be below the buffer memory maximum.
long startMem = allocator.getAllocatedMemory();
bufferedBatches.add(incoming, sizer.getNetBatchSize());
// Compute batch size, including allocation of an sv2.
long endMem = allocator.getAllocatedMemory();
long batchSize = endMem - startMem;
// Update the minimum buffer space metric.
metrics.updateInputMetrics(sizer.rowCount(), sizer.getActualSize());
metrics.updateMemory(memManager.freeMemory(endMem));
metrics.updatePeakBatches(bufferedBatches.size());
// Update the size based on the actual record count, not
// the effective count as given by the selection vector
// (which may exclude some records due to filtering.)
validateBatchSize(sizer.getActualSize(), batchSize);
if (memManager.updateEstimates((int) batchSize, sizer.getNetRowWidth(), sizer.rowCount())) {
// If estimates changed, discard the helper based on the old estimates.
allocHelper = null;
}
}
/**
* Scan the vectors in the incoming batch to determine batch size.
*
* @return an analysis of the incoming batch
*/
private void analyzeIncomingBatch(VectorAccessible incoming) {
sizer = new RecordBatchSizer(incoming);
sizer.applySv2();
if (metrics.getInputBatchCount() == 0) {
logger.debug("{}", sizer.toString());
}
}
/**
* Determine if spill is needed before receiving the new record batch.
* Spilling is driven purely by memory availability (and an optional
* batch limit for testing.)
*
* @return true if spilling is needed (and possible), false otherwise
*/
private boolean isSpillNeeded(long incomingSize) {
if (bufferedBatches.size() >= config.getBufferedBatchLimit()) {
return true;
}
// Can't spill if less than two batches else the merge
// can't make progress.
final boolean spillNeeded = memManager.isSpillNeeded(allocator.getAllocatedMemory(), incomingSize);
if (bufferedBatches.size() < 2) {
// If we can't fit the batch into memory, then place a definite error
// message into the log to simplify debugging.
if (spillNeeded) {
logger.error("Insufficient memory to merge two batches. Incoming batch size: {}, available memory: {}",
incomingSize, memManager.freeMemory(allocator.getAllocatedMemory()));
}
return false;
}
return spillNeeded;
}
private void validateBatchSize(long actualBatchSize, long memoryDelta) {
if (actualBatchSize != memoryDelta) {
ExternalSortBatch.logger.debug("Memory delta: {}, actual batch size: {}, Diff: {}",
memoryDelta, actualBatchSize, memoryDelta - actualBatchSize);
}
}
/**
* This operator has accumulated a set of sorted incoming record batches.
* We wish to spill some of them to disk. To do this, a "copier"
* merges the target batches to produce a stream of new (merged) batches
* which are then written to disk.
* <p>
* This method spills only half the accumulated batches
* minimizing unnecessary disk writes. The exact count must lie between
* the minimum and maximum spill counts.
*/
private void spillFromMemory() {
int startCount = bufferedBatches.size();
List<BatchGroup> batchesToSpill = bufferedBatches.prepareSpill(config.spillFileSize());
// Do the actual spill.
logger.trace("Spilling {} of {} batches, allocated memory = {} bytes",
batchesToSpill.size(), startCount,
allocator.getAllocatedMemory());
int spillBatchRowCount = memManager.getSpillBatchRowCount();
spilledRuns.mergeAndSpill(batchesToSpill, spillBatchRowCount, allocHelper());
metrics.incrSpillCount();
}
private VectorInitializer allocHelper() {
if (allocHelper == null) {
allocHelper = sizer.buildVectorInitializer();
}
return allocHelper;
}
public SortMetrics getMetrics() { return metrics; }
public SortResults startMerge() {
if (metrics.getInputRowCount() == 0) {
return new EmptyResults(outputBatch);
}
logger.debug("Completed load phase: read {} batches, spilled {} times, total input bytes: {}",
metrics.getInputBatchCount(), spilledRuns.size(),
metrics.getInputBytes());
// Do the merge of the loaded batches. The merge can be done entirely in
// memory if the results fit; else we have to do a disk-based merge of
// pre-sorted spilled batches. Special case the single-batch query;
// this accelerates small, quick queries.
//
// Note: disabling this optimization because it turns out to be
// quite hard to transfer a set of vectors from one place to another.
/* if (metrics.getInputBatchCount() == 1) {
return singleBatchResult();
} else */ if (canUseMemoryMerge()) {
return mergeInMemory();
} else {
return mergeSpilledRuns();
}
}
/**
* Input consists of a single batch. Just return that batch as
* the output.
* @return results iterator over the single input batch
*/
// Disabled temporarily
@SuppressWarnings("unused")
private SortResults singleBatchResult() {
List<InputBatch> batches = bufferedBatches.removeAll();
return new SingleBatchResults(batches.get(0), outputBatch);
}
/**
* All data has been read from the upstream batch. Determine if we
* can use a fast in-memory sort, or must use a merge (which typically,
* but not always, involves spilled batches.)
*
* @return whether sufficient resources exist to do an in-memory sort
* if all batches are still in memory
*/
private boolean canUseMemoryMerge() {
if (spilledRuns.hasSpilled()) {
return false; }
// Do we have enough memory for MSorter (the in-memory sorter)?
if (! memManager.hasMemoryMergeCapacity(allocator.getAllocatedMemory(), MSortTemplate.memoryNeeded(metrics.getInputRowCount()))) {
return false; }
// Make sure we don't exceed the maximum number of batches SV4 can address.
if (bufferedBatches.size() > Character.MAX_VALUE) {
return false; }
// We can do an in-memory merge.
return true;
}
/**
* Perform an in-memory sort of the buffered batches. Obviously can
* be used only for the non-spilling case.
*
* @return DONE if no rows, OK_NEW_SCHEMA if at least one row
*/
private SortResults mergeInMemory() {
logger.debug("Starting in-memory sort. Batches = {}, Records = {}, Memory = {}",
bufferedBatches.size(), metrics.getInputRowCount(),
allocator.getAllocatedMemory());
// Note the difference between how we handle batches here and in the spill/merge
// case. In the spill/merge case, this class decides on the batch size to send
// downstream. However, in the in-memory case, we must pass along all batches
// in a single SV4. Attempts to do paging will result in errors. In the memory
// merge case, the downstream Selection Vector Remover will split the one
// big SV4 into multiple smaller batches to send further downstream.
// If the sort fails or is empty, clean up here. Otherwise, cleanup is done
// by closing the resultsIterator after all results are returned downstream.
MergeSortWrapper memoryMerge = new MergeSortWrapper(context, outputBatch);
try {
memoryMerge.merge(bufferedBatches.removeAll(), config.getMSortBatchSize());
} catch (Throwable t) {
memoryMerge.close();
throw t;
}
logger.debug("Completed in-memory sort. Memory = {}",
allocator.getAllocatedMemory());
return memoryMerge;
}
/**
* Perform merging of (typically spilled) batches. First consolidates batches
* as needed, then performs a final merge that is read one batch at a time
* to deliver batches to the downstream operator.
*
* @return an iterator over the merged batches
*/
/**
 * Consolidates (typically spilled) runs until a single final merge pass is
 * possible, then performs that final merge, which is read one batch at a
 * time to deliver batches to the downstream operator.
 *
 * @return an iterator over the merged batches
 */
private SortResults mergeSpilledRuns() {
  logger.debug("Starting consolidate phase. Batches = {}, Records = {}, Memory = {}, In-memory batches {}, spilled runs {}",
      metrics.getInputBatchCount(), metrics.getInputRowCount(),
      allocator.getAllocatedMemory(),
      bufferedBatches.size(), spilledRuns.size());

  // Repeatedly spill or merge runs until the memory manager reports that
  // everything remaining fits in one last merge pass.
  boolean consolidating = true;
  while (consolidating) {
    MergeTask task = memManager.consolidateBatches(
        allocator.getAllocatedMemory(),
        bufferedBatches.size(),
        spilledRuns.size());
    switch (task.action) {
    case SPILL:
      logger.debug("Consolidate: spill");
      spillFromMemory();
      break;
    case MERGE:
      logger.debug("Consolidate: merge {} batches", task.count);
      mergeRuns(task.count);
      break;
    case NONE:
      consolidating = false;
      break;
    default:
      throw new IllegalStateException("Unexpected action: " + task.action);
    }
  }

  // Final pass: merge all remaining in-memory batches and spilled runs.
  int mergeRowCount = memManager.getMergeBatchRowCount();
  return spilledRuns.finalMerge(bufferedBatches.removeAll(), outputBatch, mergeRowCount, allocHelper);
}
/**
 * Merges {@code targetCount} spilled runs into a new run, within the
 * memory-manager-imposed merge budget, and counts the merge in the metrics.
 */
private void mergeRuns(int targetCount) {
  spilledRuns.mergeRuns(targetCount,
      memManager.getMergeMemoryLimit(),
      memManager.getSpillBatchRowCount(),
      allocHelper);
  metrics.incrMergeCount();
}
/**
 * Releases the spill files and buffered batches, recording the bytes
 * written to spill. If both close calls fail, the first failure is thrown
 * and the second is attached as a suppressed exception instead of being
 * silently dropped.
 */
public void close() {
  metrics.updateWriteBytes(spilledRuns.getWriteBytes());
  RuntimeException failure = null;
  try {
    spilledRuns.close();
  } catch (RuntimeException e) {
    failure = e;
  }
  try {
    bufferedBatches.close();
  } catch (RuntimeException e) {
    if (failure == null) {
      failure = e;
    } else {
      // Preserve the second failure rather than discarding it.
      failure.addSuppressed(e);
    }
  }

  // Note: don't close the operator context here. It must
  // remain open until all containers are cleared, which
  // is done in the ExternalSortBatch class.
  if (failure != null) {
    throw failure;
  }
}
/** Diagnostic summary of the sort configuration and output state. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("SortImpl[config=");
  sb.append(config);
  sb.append(", outputBatch=").append(outputBatch);
  sb.append(", sizer=").append(sizer);
  sb.append(']');
  return sb.toString();
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import io.netty.buffer.DrillBuf;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.parquet.bytes.BytesInput;
import org.apache.parquet.format.PageHeader;
import org.apache.parquet.format.Util;
import org.apache.parquet.hadoop.util.HadoopStreams;
/**
 * Reads page headers and page data for one Parquet column chunk from an
 * underlying Hadoop stream, restricted to the byte range given at
 * construction.
 */
public class ColumnDataReader {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ColumnDataReader.class);

  // First byte position past the end of this column chunk.
  private final long endPosition;
  public final FSDataInputStream input;

  /**
   * Creates a reader positioned at {@code start}, covering the byte range
   * {@code [start, start + length)} of {@code input}.
   *
   * @param input the underlying stream; closed by {@link #clear()}
   * @param start byte offset at which the column chunk begins
   * @param length size of the column chunk in bytes
   * @throws IOException if the initial seek fails
   */
  public ColumnDataReader(FSDataInputStream input, long start, long length) throws IOException{
    this.input = input;
    this.input.seek(start);
    this.endPosition = start + length;
  }

  /** Reads the Parquet page header at the current stream position. */
  public PageHeader readPageHeader() throws IOException{
    return Util.readPageHeader(input);
  }

  public FSDataInputStream getInputStream() {
    return input;
  }

  /**
   * Copies the next {@code pageLength} bytes into memory and wraps them as a
   * {@link BytesInput}.
   *
   * @throws IOException if the stream ends before the page is fully read
   */
  public BytesInput getPageAsBytesInput(int pageLength) throws IOException{
    byte[] b = new byte[pageLength];
    // Fix: a bare read(b) may return fewer bytes than requested, silently
    // yielding a truncated page. readFully() (inherited from
    // DataInputStream) loops until b is filled or throws EOFException.
    input.readFully(b);
    return new HadoopBytesInput(b);
  }

  /**
   * Reads {@code pageLength} bytes directly into {@code target} and sets its
   * writer index accordingly.
   */
  public void loadPage(DrillBuf target, int pageLength) throws IOException {
    target.clear();
    HadoopStreams.wrap(input).read(target.nioBuffer(0, pageLength));
    target.writerIndex(pageLength);
  }

  /** Closes the underlying stream, logging (not rethrowing) any failure. */
  public void clear(){
    try{
      input.close();
    }catch(IOException ex){
      logger.warn("Error while closing input stream.", ex);
    }
  }

  /** @return true while the stream position is still within this column chunk. */
  public boolean hasRemainder() throws IOException{
    return input.getPos() < endPosition;
  }

  /** {@link BytesInput} backed by an in-memory byte array. */
  public class HadoopBytesInput extends BytesInput{

    private final byte[] pageBytes;

    public HadoopBytesInput(byte[] pageBytes) {
      super();
      this.pageBytes = pageBytes;
    }

    @Override
    public byte[] toByteArray() throws IOException {
      return pageBytes;
    }

    @Override
    public long size() {
      return pageBytes.length;
    }

    @Override
    public void writeAllTo(OutputStream out) throws IOException {
      out.write(pageBytes);
    }
  }
}
|
package io.dropwizard.kafka;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.Resources;
import io.dropwizard.configuration.YamlConfigurationFactory;
import io.dropwizard.jackson.DiscoverableSubtypeResolver;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.jersey.validation.Validators;
import io.dropwizard.lifecycle.setup.LifecycleEnvironment;
import org.apache.kafka.clients.consumer.Consumer;
import org.junit.Test;
import java.io.File;
import javax.validation.Validator;
import static org.assertj.core.api.Assertions.assertThat;
public class BasicKafkaConsumerFactoryTest {
    private final ObjectMapper objectMapper = Jackson.newObjectMapper();
    private final Validator validator = Validators.newValidator();
    private final MetricRegistry metrics = new MetricRegistry();
    private final YamlConfigurationFactory<KafkaConsumerFactory> configFactory =
            new YamlConfigurationFactory<>(KafkaConsumerFactory.class, validator, objectMapper, "dw");

    /**
     * Loads the basic-consumer YAML fixture and verifies that a Kafka
     * consumer can be built from the resulting factory.
     */
    @Test
    public void shouldBuildABasicKafkaConsumer() throws Exception {
        final File yml = new File(Resources.getResource("yaml/basic-consumer.yaml").toURI());
        final KafkaConsumerFactory factory = configFactory.build(yml);
        assertThat(factory)
                .isInstanceOf(KafkaConsumerFactory.class);

        final LifecycleEnvironment lifecycle = new LifecycleEnvironment(metrics);
        final HealthCheckRegistry healthChecks = new HealthCheckRegistry();
        // Wildcard bounds instead of the raw Consumer type: the key/value
        // types are irrelevant to this null check.
        final Consumer<?, ?> consumer = factory.build(lifecycle, healthChecks, null, null);
        assertThat(consumer)
                .isNotNull();
    }

    /**
     * The factory must be discoverable through Dropwizard's subtype
     * resolution mechanism.
     */
    @Test
    public void isDiscoverable() {
        assertThat(new DiscoverableSubtypeResolver().getDiscoveredSubtypes())
                .contains(BasicKafkaConsumerFactory.class);
    }
}
|
/**
* Copyright (c) 2011-2020, hubin (jobob@qq.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.baomidou.mybatisplus.mapper;
import com.baomidou.mybatisplus.toolkit.StringUtils;
/**
* <p>
* 条件查询构造器
* </p>
*
* @author hubin Caratacus
* @date 2016-11-7
*/
@SuppressWarnings({"rawtypes", "serial"})
public class Condition extends Wrapper {

    /**
     * A shared empty condition wrapper, to be passed instead of {@code null}.
     */
    public static final Wrapper EMPTY = new Wrapper() {
        @Override
        public String getSqlSegment() {
            return null;
        }
    };

    /**
     * Factory method for a fresh {@code Condition} instance.
     */
    public static Condition create() {
        return new Condition();
    }

    /**
     * Builds the SQL fragment for this condition, or returns {@code null}
     * when the wrapper carries no conditions at all.
     */
    @Override
    public String getSqlSegment() {
        // Wrapper holds no conditions.
        if (SqlHelper.isEmptyOfWrapper(this)) {
            return null;
        }
        String sqlWhere = sql.toString();
        if (StringUtils.isEmpty(sqlWhere)) {
            return null;
        }
        // When the entity already contributes a WHERE clause (isWhere is
        // FALSE or unset), replace the leading WHERE with AND/OR so both
        // condition sources combine. Note: the original expression handled
        // the null and FALSE cases separately but did the same thing in
        // both, so this collapses to a single TRUE check.
        return Boolean.TRUE.equals(isWhere) ? sqlWhere : sqlWhere.replaceFirst("WHERE", AND_OR);
    }

    /**
     * Returns the shared empty wrapper, typed for the caller.
     *
     * @param <T> entity type
     * @return the shared empty {@link Wrapper}
     */
    @SuppressWarnings("unchecked")
    public static <T> Wrapper<T> empty() {
        return (Wrapper<T>) EMPTY;
    }

    /**
     * Creates a new entity wrapper, typed for the caller.
     *
     * @param <T> entity type
     * @return a fresh {@link EntityWrapper}
     */
    @SuppressWarnings("unchecked")
    public static <T> Wrapper<T> wrapper() {
        return (Wrapper<T>) new EntityWrapper<>();
    }
}
|
package models;
/**
 * A board piece with a fixed color, able to tell whether a move takes it
 * forward (toward the opponent's side).
 */
public class Piece {

    private Color color;

    Piece(Color color){
        this.color = color;
    }

    public Color getColor() {
        return color;
    }

    /** @return true when this piece's color is black. */
    public boolean isBlack() {
        return Color.BLACK == color;
    }

    /**
     * Whether moving from {@code origin} to {@code target} advances this
     * piece: white advances toward lower row numbers, black toward higher.
     */
    public boolean isAdvanced(Coordinate origin, Coordinate target) {
        int rowDelta = origin.getRow() - target.getRow();
        return (color == Color.WHITE) ? rowDelta > 0 : rowDelta < 0;
    }
}
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master;
import alluxio.AlluxioURI;
import alluxio.LocalAlluxioClusterResource;
import alluxio.PropertyKey;
import alluxio.BaseIntegrationTest;
import alluxio.client.file.FileSystem;
import alluxio.client.file.FileSystemMasterClient;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.exception.AlluxioException;
import alluxio.exception.status.NotFoundException;
import com.google.common.base.Throwables;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
/**
* Tests the internal implementation of alluxio Master via a {@link FileSystemMasterClient}.
*/
public final class FileSystemMasterClientIntegrationTest extends BaseIntegrationTest {
  @Rule
  public LocalAlluxioClusterResource mLocalAlluxioClusterResource =
      new LocalAlluxioClusterResource.Builder()
          .setProperty(PropertyKey.USER_RPC_RETRY_MAX_NUM_RETRY, 30).build();

  /**
   * Verifies the client can disconnect and reconnect while continuing to
   * serve requests against the same file.
   */
  @Test
  public void openClose() throws AlluxioException, IOException {
    FileSystemMasterClient fsMasterClient = FileSystemMasterClient.Factory
        .create(mLocalAlluxioClusterResource.get().getLocalAlluxioMaster().getAddress());
    AlluxioURI file = new AlluxioURI("/file");
    Assert.assertFalse(fsMasterClient.isConnected());
    fsMasterClient.connect();
    Assert.assertTrue(fsMasterClient.isConnected());
    fsMasterClient.createFile(file, CreateFileOptions.defaults());
    Assert.assertNotNull(fsMasterClient.getStatus(file));
    fsMasterClient.disconnect();
    Assert.assertFalse(fsMasterClient.isConnected());
    fsMasterClient.connect();
    Assert.assertTrue(fsMasterClient.isConnected());
    Assert.assertNotNull(fsMasterClient.getStatus(file));
    fsMasterClient.close();
  }

  @Test(timeout = 3000, expected = NotFoundException.class)
  public void getFileInfoReturnsOnError() throws Exception {
    // This test was created to show that an infinite loop occurs.
    // The timeout will protect against this, and the change was to throw a IOException
    // in the cases we don't want to disconnect from master
    FileSystemMasterClient fsMasterClient = FileSystemMasterClient.Factory
        .create(mLocalAlluxioClusterResource.get().getLocalAlluxioMaster().getAddress());
    fsMasterClient.getStatus(new AlluxioURI("/doesNotExist"));
    fsMasterClient.close();
  }

  /**
   * Stops the master, issues a request (which should retry), and restarts
   * the master from a background thread to verify the request completes.
   */
  @Test(timeout = 300000)
  public void masterUnavailable() throws Exception {
    FileSystem fileSystem = mLocalAlluxioClusterResource.get().getClient();
    mLocalAlluxioClusterResource.get().getLocalAlluxioMaster().stop();

    Thread thread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          Thread.sleep(3000);
          mLocalAlluxioClusterResource.get().getLocalAlluxioMaster().start();
        } catch (InterruptedException e) {
          // Restore the interrupt status before failing; the original
          // Throwables.propagate(e) (deprecated in newer Guava) swallowed it.
          Thread.currentThread().interrupt();
          throw new RuntimeException(e);
        }
      }
    });
    thread.start();
    fileSystem.listStatus(new AlluxioURI("/"));
    thread.join();
  }
}
|
package id.ac.tazkia.akademik.aplikasiakademik.dao;
import id.ac.tazkia.akademik.aplikasiakademik.entity.Program;
import id.ac.tazkia.akademik.aplikasiakademik.entity.StatusRecord;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * Spring Data repository for {@link Program} entities (String primary key),
 * with paging and sorting support. All queries below are derived from their
 * method names by Spring Data.
 */
public interface ProgramDao extends PagingAndSortingRepository<Program,String> {
    /** Pages over programs whose status is not the given one. */
    Page<Program> findByStatusNotIn(StatusRecord status, Pageable page);
    /** Lists all programs whose status is not the given one. */
    List<Program> findByStatusNotIn(StatusRecord status);
    /** Lists all programs having exactly the given status. */
    List<Program> findByStatus(StatusRecord status);
    /**
     * Pages over programs whose status is not the given one and whose name
     * contains {@code nama} (case-insensitive), ordered by program name.
     */
    Page<Program> findByStatusNotInAndNamaProgramContainingIgnoreCaseOrderByNamaProgram (StatusRecord status,String nama, Pageable page);
}
|
/**
* Copyright 2016 Novartis Institutes for BioMedical Research Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.novartis.opensource.yada.security;
import com.novartis.opensource.yada.YADARequest;
/**
*
* @author David Varon
* @since 7.0.0
*/
/**
 * Contract for components that hold a security token and can validate it,
 * approving or rejecting credentials or actions accordingly.
 */
public interface TokenValidator {
  /**
   * Approves or rejects the credentials or actions based on the current token.
   * @throws YADASecurityException when unable to validate the token
   */
  public void validateToken() throws YADASecurityException;
  /**
   * Standard accessor for the token variable.
   * @return the security token object
   */
  public Object getToken();
  /**
   * Standard mutator for the token variable.
   * @param token the security token
   */
  public void setToken(Object token);
  /**
   * No-arg mutator for the token variable; obtains the FQCN from request
   * args or properties.
   * @since 7.0.0
   */
  public void setToken();
  /**
   * Obtains a security token from the given request.
   * @param yadaReq the current {@link YADARequest}
   * @throws YADASecurityException when there is an issue obtaining the token
   * @since 8.7.6
   */
  public void obtainToken(YADARequest yadaReq) throws YADASecurityException;
}
|
package org.cloudbus.foggatewaylib.core;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;
/**
* Collection of {@code Data} sorted by their {@code id}.
* <p>
* Inside the {@link ExecutionManager}, there may be different {@code Store}s of the same
* {@code Data} since each store is identified by an {@code outputKey},
* which identifies the data flow it stores.
*
* @param <T> the {@code Data} type this {@code Store} stores.
*
* @author Riccardo Mancini
*/
public abstract class Store<T extends Data> {

    /**
     * Registered observers. Every observer is identified by a {@code key} for easier access.
     * They will be called on every call to {@link #store(Data[])}.
     *
     * @see #notifyObservers(Data[])
     * @see #addObserver(String, Observer)
     * @see #removeObserver(String)
     * @see Observer
     * @see Trigger
     */
    private final Map<String, Observer<T>> observers;

    /**
     * Type of the stored elements.
     */
    private final Class<T> dataType;

    /**
     * @param dataType the type of the elements that will be stored.
     */
    public Store(@NonNull Class<T> dataType){
        observers = new HashMap<>();
        this.dataType = dataType;
    }

    /**
     * @return type of the stored elements.
     */
    @NonNull
    public Class<T> getDataType() {
        return dataType;
    }

    /**
     * @return number of elements in this {@code Store}.
     */
    public abstract int size();

    /**
     * Get last element.
     *
     * @return last stored element or null if the {@code Store} is empty.
     * @see #retrieveLast(long)
     * @see #retrieveLastN(int)
     * @see #retrieveLastN(int, long)
     */
    @Nullable
    public abstract T retrieveLast();

    /**
     * Get last element with given request id.
     *
     * @param requestID the request id to look for.
     * @return last stored element with {@code requestID} request id or null if no element
     *         is found.
     * @see #retrieveLast()
     * @see #retrieveLastN(int)
     * @see #retrieveLastN(int, long)
     */
    @Nullable
    public abstract T retrieveLast(long requestID);

    /**
     * Get last N elements.
     *
     * @param N the maximum number of elements to retrieve.
     * @return array with the retrieved elements. The array may contain fewer elements than N if
     *         there are not enough elements in the {@code Store}. It can also be empty.
     * @see #retrieveLast()
     * @see #retrieveLast(long)
     * @see #retrieveLastN(int, long)
     */
    public abstract T[] retrieveLastN(int N);

    /**
     * Get last N elements with given request id.
     *
     * @param N the maximum number of elements to retrieve.
     * @param requestID the request id to look for.
     * @return array with the retrieved elements. The array may contain fewer elements than N if
     *         there are not enough elements in the {@code Store}. It can also be empty.
     * @see #retrieveLast()
     * @see #retrieveLast(long)
     * @see #retrieveLastN(int)
     */
    public abstract T[] retrieveLastN(int N, long requestID);

    /**
     * Retrieve all data in an interval of {@code id}s.
     *
     * @param from lower bound of the {@code id}s (included).
     * @param to upper bound of the {@code id}s (excluded).
     * @return array with the retrieved elements.
     * @see #retrieveInterval(long, long, long)
     * @see #retrieveIntervalFrom(long)
     * @see #retrieveIntervalFrom(long, long)
     */
    public abstract T[] retrieveInterval(long from, long to);

    /**
     * Retrieve all data with given request id in an interval of {@code id}s.
     *
     * @param from lower bound of the {@code id}s (included).
     * @param to upper bound of the {@code id}s (excluded).
     * @param requestID the request id to look for.
     * @return array with the retrieved elements.
     * @see #retrieveInterval(long, long, long)
     * @see #retrieveIntervalFrom(long)
     * @see #retrieveIntervalFrom(long, long)
     */
    public abstract T[] retrieveInterval(long from, long to, long requestID);

    /**
     * Retrieve all data with {@code id} greater than {@code from}.
     *
     * @param from lower bound of the {@code id}s (included).
     * @return array with the retrieved elements.
     * @see #retrieveInterval(long, long)
     * @see #retrieveInterval(long, long, long)
     * @see #retrieveIntervalFrom(long, long)
     */
    public T[] retrieveIntervalFrom(long from){
        return retrieveInterval(from, Long.MAX_VALUE);
    }

    /**
     * Retrieve all data with {@code id} greater than {@code from} and with given
     * request id.
     *
     * @param from lower bound of the {@code id}s (included).
     * @param requestID the request id to look for.
     * @return array with the retrieved elements.
     * @see #retrieveInterval(long, long)
     * @see #retrieveInterval(long, long, long)
     * @see #retrieveIntervalFrom(long)
     */
    public T[] retrieveIntervalFrom(long from, long requestID){
        return retrieveInterval(from, Long.MAX_VALUE, requestID);
    }

    /**
     * Inner implementation of the {@link #store(Data[])} method.
     *
     * @param data the data to be stored.
     * @see #store(Data[])
     */
    protected abstract void __store(T... data);

    /**
     * Calls callbacks of {@link Observer}s in {@link #observers}.
     *
     * @param data the data that has been stored.
     * @see Observer
     * @see Trigger
     * @see #store(Data[])
     * @see #observers
     * @see #addObserver(String, Observer)
     * @see #removeObserver(String)
     */
    protected void notifyObservers(T... data){
        for (Observer<T> observer:observers.values()){
            observer.onDataStored(this, data);
        }
    }

    /**
     * Stores given data. If no data is given, nothing will be done.
     * Every element must have the same {@link Data#request_id}.
     *
     * @param data data to be stored.
     * @throws IllegalArgumentException if elements have differing request ids.
     * @see #__store(Data[])
     * @see #notifyObservers(Data[])
     */
    public void store(T... data){
        if (data.length == 0)
            return;

        long requestID = data[0].getRequestID();
        for (T t:data){
            if (t.getRequestID() != requestID)
                // IllegalArgumentException (still a RuntimeException, so
                // existing catch blocks keep working) better signals a
                // violated caller contract than a bare RuntimeException.
                throw new IllegalArgumentException("Every data must have the same request_id.");
        }

        __store(data);
        notifyObservers(data);
    }

    /**
     * Adds an {@link Observer} to {@link #observers}.
     *
     * @param key the key for the new {@code observer} to use for later removal.
     * @param observer the {@link Observer} to be added to the {@link Store}.
     * @see Observer
     * @see Trigger
     * @see #observers
     * @see #removeObserver(String)
     * @see #notifyObservers(Data[])
     */
    public void addObserver(String key, Observer<T> observer){
        observers.put(key, observer);
    }

    /**
     * Removes the {@link Observer} identified by the given {@code key} from
     * {@link #observers}.
     *
     * @param key the key under which the {@code observer} was registered.
     * @return the removed {@link Observer} or null if it is not found.
     * @see Observer
     * @see Trigger
     * @see #observers
     * @see #addObserver(String, Observer)
     * @see #notifyObservers(Data[])
     */
    @Nullable
    public Observer<T> removeObserver(String key){
        return observers.remove(key);
    }

    /**
     * Simple interface for registering callbacks when a new element is added to a {@link Store}.
     *
     * @param <T> the type of {@link Data} in the {@link Store} that this {@link Observer} is
     *            observing.
     */
    public interface Observer<T extends Data> {

        /**
         * Callback called after data is being added to the {@link Store}.
         *
         * @param store reference to the {@link Store} the {@code data} is being added to.
         * @param data the data that has been added to the {@code store}.
         * @see Store#addObserver(String, Observer)
         * @see Store#removeObserver(String)
         * @see Store#notifyObservers(Data[])
         */
        void onDataStored(Store<T> store, T... data);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.logsegment;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import org.apache.distributedlog.DistributedLogConstants;
import org.apache.distributedlog.LogSegmentMetadata;
import org.apache.distributedlog.exceptions.UnexpectedException;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Managing log segments in local cache.
*
* <p>
* Caching of log segment metadata assumes that the data contained in the ZNodes for individual
* log segments is never updated after creation i.e we never call setData. A log segment
* is finalized by creating a new ZNode and deleting the in progress node. This code will have
* to change if we change the behavior
* </p>
*/
public class PerStreamLogSegmentCache {

    static final Logger LOG = LoggerFactory.getLogger(PerStreamLogSegmentCache.class);

    protected final String streamName;
    protected final boolean validateLogSegmentSequenceNumber;
    // Segment-name -> metadata; guarded by synchronized (logSegments).
    protected final Map<String, LogSegmentMetadata> logSegments =
            new HashMap<>();
    // Log-segment-id -> metadata; concurrent, so no extra locking on reads.
    protected final ConcurrentMap<Long, LogSegmentMetadata> lid2LogSegments =
            new ConcurrentHashMap<>();

    @VisibleForTesting
    PerStreamLogSegmentCache(String streamName) {
        this(streamName, true);
    }

    public PerStreamLogSegmentCache(String streamName,
                                    boolean validateLogSegmentSequenceNumber) {
        this.streamName = streamName;
        this.validateLogSegmentSequenceNumber = validateLogSegmentSequenceNumber;
    }

    /**
     * Retrieve log segments from the cache.
     *
     * - first sort the log segments in ascending order
     * - do validation and assign corresponding sequence id
     * - apply comparator after validation
     *
     * @param comparator
     *          comparator to sort the returned log segments.
     * @return list of sorted and filtered log segments.
     * @throws UnexpectedException if unexpected condition detected (e.g. ledger sequence number gap)
     */
    public List<LogSegmentMetadata> getLogSegments(Comparator<LogSegmentMetadata> comparator)
        throws UnexpectedException {
        List<LogSegmentMetadata> segmentsToReturn;
        synchronized (logSegments) {
            segmentsToReturn = new ArrayList<>(logSegments.size());
            segmentsToReturn.addAll(logSegments.values());
        }
        // Always validate/assign in canonical order, regardless of the
        // caller-requested comparator (applied at the end).
        Collections.sort(segmentsToReturn, LogSegmentMetadata.COMPARATOR);

        LogSegmentMetadata prevSegment = null;
        if (validateLogSegmentSequenceNumber) {
            // validation ledger sequence number to ensure the log segments are unique.
            for (int i = 0; i < segmentsToReturn.size(); i++) {
                LogSegmentMetadata segment = segmentsToReturn.get(i);
                if (null != prevSegment
                        && prevSegment.getVersion() >= LogSegmentMetadata.LogSegmentMetadataVersion.VERSION_V2_LEDGER_SEQNO.value
                        && segment.getVersion() >= LogSegmentMetadata.LogSegmentMetadataVersion.VERSION_V2_LEDGER_SEQNO.value
                        && prevSegment.getLogSegmentSequenceNumber() + 1 != segment.getLogSegmentSequenceNumber()) {
                    // SLF4J supports varargs directly; no Object[] wrapper needed.
                    LOG.error("{} found ledger sequence number gap between log segment {} and {}",
                            streamName, prevSegment, segment);
                    throw new UnexpectedException(streamName + " found ledger sequence number gap between log segment "
                            + prevSegment.getLogSegmentSequenceNumber() + " and " + segment.getLogSegmentSequenceNumber());
                }
                prevSegment = segment;
            }
        }

        prevSegment = null;
        long startSequenceId = DistributedLogConstants.UNASSIGNED_SEQUENCE_ID;
        for (int i = 0; i < segmentsToReturn.size(); i++) {
            LogSegmentMetadata segment = segmentsToReturn.get(i);
            // assign sequence id
            if (!segment.isInProgress()) {
                if (segment.supportsSequenceId()) {
                    startSequenceId = segment.getStartSequenceId() + segment.getRecordCount();
                    if (null != prevSegment && prevSegment.supportsSequenceId()
                            && prevSegment.getStartSequenceId() > segment.getStartSequenceId()) {
                        LOG.warn("{} found decreasing start sequence id in log segment {}, previous is {}",
                                streamName, segment, prevSegment);
                    }
                } else {
                    startSequenceId = DistributedLogConstants.UNASSIGNED_SEQUENCE_ID;
                }
            } else {
                // The first in-progress segment inherits the running start
                // sequence id; segments after it are left untouched.
                if (segment.supportsSequenceId()) {
                    LogSegmentMetadata newSegment = segment.mutator()
                            .setStartSequenceId(startSequenceId == DistributedLogConstants.UNASSIGNED_SEQUENCE_ID ? 0L : startSequenceId)
                            .build();
                    segmentsToReturn.set(i, newSegment);
                }
                break;
            }
            prevSegment = segment;
        }
        if (comparator != LogSegmentMetadata.COMPARATOR) {
            Collections.sort(segmentsToReturn, comparator);
        }
        return segmentsToReturn;
    }

    /**
     * Add the segment <i>metadata</i> for <i>name</i> in the cache.
     *
     * @param name
     *          segment name.
     * @param metadata
     *          segment metadata.
     */
    public void add(String name, LogSegmentMetadata metadata) {
        synchronized (logSegments) {
            if (!logSegments.containsKey(name)) {
                logSegments.put(name, metadata);
                LOG.info("{} added log segment ({} : {}) to cache.",
                        streamName, name, metadata);
            }
            LogSegmentMetadata oldMetadata = lid2LogSegments.remove(metadata.getLogSegmentId());
            if (null == oldMetadata) {
                lid2LogSegments.put(metadata.getLogSegmentId(), metadata);
            } else {
                // Prefer the completed segment: only replace a cached
                // in-progress entry with a completed one; otherwise restore
                // the previous entry.
                if (oldMetadata.isInProgress() && !metadata.isInProgress()) {
                    lid2LogSegments.put(metadata.getLogSegmentId(), metadata);
                } else {
                    lid2LogSegments.put(oldMetadata.getLogSegmentId(), oldMetadata);
                }
            }
        }
    }

    /**
     * Retrieve log segment <code>name</code> from the cache.
     *
     * @param name
     *          name of the log segment.
     * @return log segment metadata
     */
    public LogSegmentMetadata get(String name) {
        synchronized (logSegments) {
            return logSegments.get(name);
        }
    }

    /**
     * Update the log segment cache with removed/added segments.
     *
     * @param segmentsRemoved
     *          segments that removed
     * @param segmentsAdded
     *          segments that added
     */
    public void update(Set<String> segmentsRemoved,
                       Map<String, LogSegmentMetadata> segmentsAdded) {
        synchronized (logSegments) {
            for (Map.Entry<String, LogSegmentMetadata> entry : segmentsAdded.entrySet()) {
                add(entry.getKey(), entry.getValue());
            }
            for (String segment : segmentsRemoved) {
                remove(segment);
            }
        }
    }

    /**
     * Diff with new received segment list <code>segmentReceived</code>.
     *
     * @param segmentsReceived
     *          new received segment list
     * @return segments added (left) and removed (right).
     */
    public Pair<Set<String>, Set<String>> diff(Set<String> segmentsReceived) {
        Set<String> segmentsAdded;
        Set<String> segmentsRemoved;
        synchronized (logSegments) {
            Set<String> segmentsCached = logSegments.keySet();
            segmentsAdded = Sets.difference(segmentsReceived, segmentsCached).immutableCopy();
            segmentsRemoved = Sets.difference(segmentsCached, segmentsReceived).immutableCopy();
        }
        return Pair.of(segmentsAdded, segmentsRemoved);
    }

    /**
     * Remove log segment <code>name</code> from the cache.
     *
     * @param name
     *          name of the log segment.
     * @return log segment metadata.
     */
    public LogSegmentMetadata remove(String name) {
        synchronized (logSegments) {
            LogSegmentMetadata metadata = logSegments.remove(name);
            if (null != metadata) {
                // Two-arg remove: only removes if still mapped to this metadata.
                lid2LogSegments.remove(metadata.getLogSegmentId(), metadata);
                LOG.debug("Removed log segment ({} : {}) from cache.", name, metadata);
            }
            return metadata;
        }
    }
}
|
package erg6_ask5;
import java.util.Scanner;
import java.util.function.Function;
/**
 * Helpers that read a single typed value from System.in, returning a
 * fallback value (-1, "" or ' ') when the input is missing or malformed.
 */
public class scannerUserInput {

    /**
     * Reads one value from System.in using the supplied reader function.
     * A fresh Scanner is created per call, matching the original behavior
     * of each individual method.
     *
     * @param reader extracts the value from the Scanner
     * @param fallback value returned when input is missing or malformed
     */
    private static <T> T read(Function<Scanner, T> reader, T fallback) {
        Scanner ob = new Scanner(System.in);
        try {
            return reader.apply(ob);
        } catch (Exception e) {
            return fallback;
        }
    }

    /** @return the next int, or -1 on invalid/missing input */
    static int getInteger() {
        return read(Scanner::nextInt, -1);
    }

    /** @return the next byte, or -1 on invalid/missing input */
    static byte getByte() {
        return read(Scanner::nextByte, (byte) -1);
    }

    /** @return the next short, or -1 on invalid/missing input */
    static short getShort() {
        return read(Scanner::nextShort, (short) -1);
    }

    /** @return the next long, or -1 on invalid/missing input */
    static long getLongInteger() {
        return read(Scanner::nextLong, -1L);
    }

    /** @return the next float, or -1 on invalid/missing input */
    static float getFloat() {
        return read(Scanner::nextFloat, -1f);
    }

    /** @return the next double, or -1 on invalid/missing input */
    static double getDouble() {
        return read(Scanner::nextDouble, -1d);
    }

    /** @return the next full line, or "" on missing input */
    static String getString() {
        return read(Scanner::nextLine, "");
    }

    /** @return the first char of the next token, or ' ' on missing input */
    static char getChar() {
        return read(sc -> sc.next().charAt(0), ' ');
    }
}
|
package io.chestnut.server.commonAPI.property;
import io.chestnut.core.InternalMessage;
import io.chestnut.core.MsgAnnotate;
import io.chestnut.server.commonAPI.MessageDefine;
// Marker request message: carries no payload. The message id registered via
// @MsgAnnotate alone identifies a player-experience request.
@MsgAnnotate(id =MessageDefine.MsgPlayerExpReq)
public class MsgPlayerExpReq extends InternalMessage{
}
|
package ee.fj.http.tinyweb.server.request;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Logger;
// TODO: implement this
// NOTE(review): this parser is explicitly unfinished (see the TODO above the
// class): parse() prints debug output to stdout and always returns null.
// Comments below describe what the visible code does; semantics of the
// half-implemented parts are marked as assumptions to confirm.
public class MultipartContent {
    private static final Logger logger = Logger.getLogger(MultipartContent.class.getName());

    // Header prefixes recognized in a multipart section (RFC 2046-style).
    private static final String CONTENT_DISPOSITION = "Content-Disposition: ";
    private static final String NAME = "name=";
    private static final String FILENAME = "filename=";
    private static final String CONTENT_TYPE = "Content-Type: ";

    // Parsed attributes of one multipart section.
    private final String name;
    private final String contentDisposition;
    private final String contentType;
    private final String fileName;
    // Raw body bytes of the section.
    private final byte[] data;

    MultipartContent(String name, String contentDisposition, String contentType, String fileName, byte[] data) {
        this.name = name;
        this.contentDisposition = contentDisposition;
        this.contentType = contentType;
        this.fileName = fileName;
        this.data = data;
    }

    /**
     * Skips bytes in the stream until the first boundary line ("--" +
     * boundary, terminated by LF) is consumed, then delegates to
     * {@link #parse}. CR (13) bytes are ignored; LF (10) ends a line.
     */
    static MultipartContent getInstance(InputStream in, String boundary) throws IOException {
        boundary = "--" + boundary;
        ISO_8859_1ByteBuffer lineBuffer = new ISO_8859_1ByteBuffer();
        int i = in.read();
        while (i > -1) {
            if (i != 13) {
                if (i == 10) {
                    // End of line: stop once the line matched the boundary.
                    if (lineBuffer.equals(boundary)) {
                        break;
                    }
                } else {
                    lineBuffer.write(i);
                }
            }
            i = in.read();
        }
        lineBuffer.reset();
        ISO_8859_1ByteBuffer buffer = new ISO_8859_1ByteBuffer();
        return MultipartContent.parse(in, boundary, lineBuffer, buffer);
    }

    /**
     * Parses one multipart section: first the header lines (up to the blank
     * line, i.e. two consecutive LFs), then the body bytes up to the next
     * boundary line. Incomplete: header values are only printed to stdout
     * and the method always returns null.
     */
    static MultipartContent parse(InputStream in, String boundary, ISO_8859_1ByteBuffer lineBuffer, ISO_8859_1ByteBuffer buffer) throws IOException {
        // Header phase: a second consecutive newline (blank line) ends headers.
        int newLineCount = 0;
        int i = in.read();
        while (i > -1) {
            if (i == 10) {
                if (++newLineCount > 1) break;
                // Dispatch on recognized header prefixes.
                lineBuffer.parseMeaningfulPrefix((prefix, result) -> {
                    if (prefix == null) {
                        System.out.println(lineBuffer.getAsString());
                    } else if (prefix.equals(CONTENT_DISPOSITION) || prefix.equals(CONTENT_TYPE)) {
                        // empty if block
                    }
                    System.out.println(prefix);
                    System.out.println(result);
                }
                , CONTENT_DISPOSITION, CONTENT_TYPE);
                lineBuffer.reset();
            } else if (i != 13) {
                // Any non-CR, non-LF byte restarts the blank-line detection.
                newLineCount = 0;
                lineBuffer.write(i);
            }
            i = in.read();
        }
        System.out.println(newLineCount);
        lineBuffer.reset();

        // Body phase: accumulate bytes into buffer while tracking the current
        // line in lineBuffer to detect the terminating boundary line.
        i = in.read();
        while (i > -1) {
            if (i == 10) {
                if (lineBuffer.equals(boundary)) {
                    System.out.println("DONE");
                    break;
                }
                lineBuffer.reset();
            } else if (i != 13) {
                lineBuffer.write(i);
            }
            // Note: buffer also receives the boundary bytes; they are trimmed
            // off below via reduce(). CR bytes are NOT written to lineBuffer
            // but ARE written to buffer — presumably intentional; confirm.
            buffer.write(i);
            i = in.read();
        }

        // Strip trailing newlines, then the boundary itself, then the
        // newlines that preceded the boundary.
        while (buffer.endsWith('\n') || buffer.endsWith('\r')) {
            buffer.reduce(1);
        }
        buffer.reduce(lineBuffer.toByteArray().length);
        while (buffer.endsWith('\n') || buffer.endsWith('\r')) {
            buffer.reduce(1);
        }
        System.out.println("[" + buffer.toString() + "]");
        System.out.println("GOT HERE!");
        // TODO: construct and return a MultipartContent from the parsed data.
        return null;
    }
}
|
package com.badoo.chateau.ui.chat.typing;
import android.support.annotation.NonNull;
import com.badoo.barf.mvp.BaseRxPresenter;
import com.badoo.barf.rx.ScheduleOn;
import com.badoo.chateau.core.model.User;
import com.badoo.chateau.core.usecases.istyping.SendUserIsTyping;
import com.badoo.chateau.core.usecases.istyping.SubscribeToUsersTyping;
import rx.android.schedulers.AndroidSchedulers;
/**
 * Presenter for "is typing" indicators in a single conversation: it forwards
 * local typing events to the backend and surfaces other users' typing
 * notifications to the view.
 */
public class BaseIsTypingPresenter<U extends User> extends BaseRxPresenter implements IsTypingPresenter<U> {
    @NonNull
    private final IsTypingView<U> mTypingView;
    @NonNull
    private final String mChatId;
    @NonNull
    private final SubscribeToUsersTyping<U> mTypingUpdates;
    @NonNull
    private final SendUserIsTyping mTypingNotifier;

    public BaseIsTypingPresenter(@NonNull IsTypingView<U> view,
                                 @NonNull String conversationId,
                                 @NonNull SubscribeToUsersTyping<U> subscribeToUsersTyping,
                                 @NonNull SendUserIsTyping sendUserIsTyping) {
        mTypingView = view;
        mChatId = conversationId;
        mTypingUpdates = subscribeToUsersTyping;
        mTypingNotifier = sendUserIsTyping;
    }

    @Override
    public void onStart() {
        // Typing notifications are delivered on the main thread so the view
        // can update UI directly; the subscription is tied to the presenter
        // lifecycle via manage(...).
        manage(mTypingUpdates.execute(mChatId)
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(typingUser -> mTypingView.showOtherUserTyping(typingUser)));
    }

    @Override
    public void onUserTyping() {
        // Fire-and-forget notification to the backend, off the main thread.
        manage(mTypingNotifier.execute(mChatId)
            .compose(ScheduleOn.io())
            .subscribe());
    }
}
|
package cord.roles;
import cord.common.BaseNodeLabels;
import cord.common.RoleNames;
import cord.model.*;
/**
 * Permission table for the "Regional Director Global" role.
 *
 * Maps every (node label, property) pair to a permission. For this role almost
 * every property is read-only ({@code Perm.RO}); only {@code File} and
 * {@code FileVersion} properties are read-write ({@code Perm.RW}). Any label
 * or property not listed below falls through to {@code Perm.NO}.
 */
public class RegionalDirectorGlobal extends BaseRole {
    public RegionalDirectorGlobal(){
        // Safe to reference the static field here: static (final) fields are
        // initialized during class initialization, before any instance exists.
        super(RoleNames.RegionalDirectorGlobalRole, RegionalDirectorGlobal.permission);
    }

    /**
     * Label-level dispatch to the per-label property tables below.
     * Declared {@code final} so the shared permission table cannot be swapped
     * out at runtime.
     */
    public static final IPermission permission = (BaseNodeLabels label, String property) -> {
        switch(label){
            case Budget: return RegionalDirectorGlobal.Budget( Budget.valueOf(property));
            case BudgetRecord: return RegionalDirectorGlobal.BudgetRecord( BudgetRecord.valueOf(property));
            case Ceremony: return RegionalDirectorGlobal.Ceremony( Ceremony.valueOf(property));
            case Directory: return RegionalDirectorGlobal.Directory( Directory.valueOf(property));
            case Education: return RegionalDirectorGlobal.Education( Education.valueOf(property));
            case EthnologueLanguage: return RegionalDirectorGlobal.EthnologueLanguage( EthnologueLanguage.valueOf(property));
            case FieldRegion: return RegionalDirectorGlobal.FieldRegion( FieldRegion.valueOf(property));
            case FieldZone: return RegionalDirectorGlobal.FieldZone( FieldZone.valueOf(property));
            case File: return RegionalDirectorGlobal.File( File.valueOf(property));
            case FileVersion: return RegionalDirectorGlobal.FileVersion( FileVersion.valueOf(property));
            case Film: return RegionalDirectorGlobal.Film( Film.valueOf(property));
            case FundingAccount: return RegionalDirectorGlobal.FundingAccount( FundingAccount.valueOf(property));
            case InternshipEngagement: return RegionalDirectorGlobal.InternshipEngagement( InternshipEngagement.valueOf(property));
            case Language: return RegionalDirectorGlobal.Language( Language.valueOf(property));
            case LanguageEngagement: return RegionalDirectorGlobal.LanguageEngagement( LanguageEngagement.valueOf(property));
            case LiteracyMaterial: return RegionalDirectorGlobal.LiteracyMaterial( LiteracyMaterial.valueOf(property));
            case Location: return RegionalDirectorGlobal.Location( Location.valueOf(property));
            case Organization: return RegionalDirectorGlobal.Organization( Organization.valueOf(property));
            case Partner: return RegionalDirectorGlobal.Partner( Partner.valueOf(property));
            case Partnership: return RegionalDirectorGlobal.Partnership( Partnership.valueOf(property));
            case Project: return RegionalDirectorGlobal.Project( Project.valueOf(property));
            case ProjectMember: return RegionalDirectorGlobal.ProjectMember( ProjectMember.valueOf(property));
            case Product: return RegionalDirectorGlobal.Product( Product.valueOf(property));
            case Song: return RegionalDirectorGlobal.Song( Song.valueOf(property));
            case Story: return RegionalDirectorGlobal.Story( Story.valueOf(property));
            case Unavailability: return RegionalDirectorGlobal.Unavailability( Unavailability.valueOf(property));
            case User: return RegionalDirectorGlobal.User( User.valueOf(property));
            default: return Perm.NO;
        }
    };

    // Each helper below groups the granted properties as fall-through case
    // labels; enum constants not listed fall out of the switch to Perm.NO.

    /** Budget properties: read-only. */
    private static Perm Budget(Budget property){
        switch(property){
            case universalTemplateFile:
            case records:
            case status:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** BudgetRecord properties: read-only. */
    private static Perm BudgetRecord(BudgetRecord property){
        switch(property){
            case amount:
            case fiscalYear:
            case organization:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Ceremony properties: read-only. */
    private static Perm Ceremony(Ceremony property){
        switch(property){
            case actualDate:
            case estimatedDate:
            case planned:
            case type:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Directory properties: read-only. */
    private static Perm Directory(Directory property){
        switch(property){
            case name:
            case createdBy:
            case parent:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Education properties: read-only. */
    private static Perm Education(Education property){
        switch(property){
            case degree:
            case institution:
            case major:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** EthnologueLanguage properties: read-only. */
    private static Perm EthnologueLanguage(EthnologueLanguage property){
        switch(property){
            case code:
            case name:
            case population:
            case provisionalCode:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** FieldRegion properties: read-only. */
    private static Perm FieldRegion(FieldRegion property){
        switch(property){
            case director:
            case name:
            case fieldZone:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** FieldZone properties: read-only. */
    private static Perm FieldZone(FieldZone property){
        switch(property){
            case director:
            case name:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** File properties: read-write — the exception for this role. */
    private static Perm File(File property){
        switch(property){
            case name:
            case createdBy:
            case parent:
            case mimeType:
                return Perm.RW;
        }
        return Perm.NO;
    }

    /** FileVersion properties: read-write — the exception for this role. */
    private static Perm FileVersion(FileVersion property){
        switch(property){
            case name:
            case createdBy:
            case parent:
            case mimeType:
            case size:
                return Perm.RW;
        }
        return Perm.NO;
    }

    /** Film properties: read-only. */
    private static Perm Film(Film property){
        switch(property){
            case name:
            case scriptureReferences:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** FundingAccount properties: read-only. */
    private static Perm FundingAccount(FundingAccount property){
        switch(property){
            case name:
            case accountNumber:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** InternshipEngagement properties: read-only. */
    private static Perm InternshipEngagement(InternshipEngagement property){
        switch(property){
            case ceremony:
            case communicationsCompleteDate:
            case completeDate:
            case countryOfOrigin:
            case disbursementCompleteDate:
            case endDate:
            case endDateOverride:
            case growthPlan:
            case initialEndDate:
            case intern:
            case lastReactivatedAt:
            case lastSuspendedAt:
            case mentor:
            case methodologies:
            case position:
            case startDate:
            case startDateOverride:
            case statusModifiedAt:
            case modifiedAt:
            case status:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Language properties: read-only. */
    private static Perm Language(Language property){
        switch(property){
            case displayName:
            case displayNamePronunciation:
            case isDialect:
            case isSignLanguage:
            case leastOfThese:
            case name:
            case leastOfTheseReason:
            case populationOverride:
            case registryOfDialectsCode:
            case signLanguageCode:
            case sponsorEstimatedEndDate:
            case ethnologue:
            case sensitivity:
            case hasExternalFirstScripture:
            case locations:
            case tags:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** LanguageEngagement properties: read-only. */
    private static Perm LanguageEngagement(LanguageEngagement property){
        switch(property){
            case ceremony:
            case communicationsCompleteDate:
            case completeDate:
            case disbursementCompleteDate:
            case endDate:
            case endDateOverride:
            case firstScripture:
            case initialEndDate:
            case language:
            case lastReactivatedAt:
            case lastSuspendedAt:
            case lukePartnership:
            case paraTextRegistryId:
            case pnp:
            case sentPrintingDate:
            case startDate:
            case startDateOverride:
            case statusModifiedAt:
            case modifiedAt:
            case product:
            case status:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** LiteracyMaterial properties: read-only. */
    private static Perm LiteracyMaterial(LiteracyMaterial property){
        switch(property){
            case name:
            case scriptureReferences:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Location properties: read-only. */
    private static Perm Location(Location property){
        switch(property){
            case name:
            case type:
            case sensitivity:
            case isoAlpha3:
            case fundingAccount:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Organization properties: read-only. */
    private static Perm Organization(Organization property){
        switch(property){
            case name:
            case address:
            case locations:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Partner properties: read-only. */
    private static Perm Partner(Partner property){
        switch(property){
            case organization:
            case pointOfContact:
            case types:
            case financialReportingTypes:
            case pmcEntityCode:
            case globalInnovationsClient:
            case active:
            case address:
            case modifiedAt:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Partnership properties: read-only. */
    private static Perm Partnership(Partnership property){
        switch(property){
            case agreement:
            case agreementStatus:
            case financialReportingType:
            case mou:
            case mouEnd:
            case mouEndOverride:
            case mouStart:
            case mouStartOverride:
            case mouStatus:
            case types:
            case organization:
            case partner:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Product properties: read-only. */
    private static Perm Product(Product property){
        switch(property){
            case mediums:
            case methodology:
            case purposes:
            case scriptureReferences:
            case produces:
            case scriptureReferencesOverride:
            case isOverriding:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Project properties: read-only. */
    private static Perm Project(Project property){
        switch(property){
            case estimatedSubmission:
            case step:
            case name:
            case status:
            case departmentId:
            case mouStart:
            case mouEnd:
            case rootDirectory:
            case member:
            case otherLocations:
            case primaryLocation:
            case marketingLocation:
            case partnership:
            case budget:
            case modifiedAt:
            case fieldRegion:
            case engagement:
            case sensitivity:
            case stepChangedAt:
            case owningOrganization:
            case initialMouEnd:
            case tags:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** ProjectMember properties: read-only. */
    private static Perm ProjectMember(ProjectMember property){
        switch(property){
            case roles:
            case user:
            case modifiedAt:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Song properties: read-only. */
    private static Perm Song(Song property){
        switch(property){
            case name:
            case scriptureReferences:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Story properties: read-only. */
    private static Perm Story(Story property){
        switch(property){
            case name:
            case scriptureReferences:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** Unavailability properties: read-only. */
    private static Perm Unavailability(Unavailability property){
        switch(property){
            case description:
            case end:
            case start:
                return Perm.RO;
        }
        return Perm.NO;
    }

    /** User properties: read-only. */
    private static Perm User(User property){
        switch(property){
            case about:
            case displayFirstName:
            case displayLastName:
            case email:
            case phone:
            case realFirstName:
            case realLastName:
            case roles:
            case status:
            case timezone:
            case title:
            case education:
            case organization:
            case unavailability:
            case locations:
            case partners:
                return Perm.RO;
        }
        return Perm.NO;
    }
}
|
package pl.kozmatteo.finance.app.report;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import pl.kozmatteo.finance.app.report.jpa.JpaTransactionRepository;
import pl.kozmatteo.finance.support.Specification;
import pl.kozmatteo.finance.transactions.Transaction;
import pl.kozmatteo.finance.transactions.TransactionRepository;
import java.util.List;
import static java.util.stream.Collectors.toList;
/**
 * {@link TransactionRepository} adapter backed by Spring Data JPA.
 *
 * The specification-based query is evaluated in memory: all transactions are
 * loaded via the JPA repository and then filtered against the specification.
 */
@Primary
@Component
public class TransactionRepositoryJpaImpl implements TransactionRepository {
    private final JpaTransactionRepository jpaRepository;

    @Autowired
    public TransactionRepositoryJpaImpl(JpaTransactionRepository transactionJpaRepository) {
        this.jpaRepository = transactionJpaRepository;
    }

    /** Returns every stored transaction. */
    @Override
    public List<Transaction> findAll() {
        return jpaRepository.findAll();
    }

    /** Returns the transactions satisfying the given specification. */
    @Override
    public List<Transaction> findAll(final Specification<Transaction> transactionSpecification) {
        List<Transaction> allTransactions = findAll();
        return allTransactions.stream()
            .filter(candidate -> transactionSpecification.isSatisfiedBy(candidate))
            .collect(toList());
    }
}
|
package com.androfast.server.appgrabaraudio20;
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Environment;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.view.View;
import android.widget.Toast;
import java.io.IOException;
/**
 * Minimal audio recorder screen: grabar() starts a 3GP/AMR-NB recording,
 * detener() stops it, reproducir() plays the last recording.
 */
public class MainActivity extends Activity {
    /** Active recorder while a capture is in progress; null otherwise. */
    private MediaRecorder miGrabacion;
    /** Absolute path of the last recording; null until grabar() has run. */
    private String outputFile = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // BUG FIX: was '&&' — if only ONE of the two permissions was missing the
        // dialog was never shown. Request whenever either is not yet granted.
        if (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO}, 1000);
        }
    }

    /**
     * Starts recording from the microphone into /Grabacion.3gp on external
     * storage. The success toast is only shown once start() has succeeded.
     */
    public void grabar(View view) {
        outputFile = Environment.getExternalStorageDirectory().
                getAbsolutePath() + "/Grabacion.3gp";
        miGrabacion = new MediaRecorder();
        miGrabacion.setAudioSource(MediaRecorder.AudioSource.MIC);
        miGrabacion.setOutputFormat(MediaRecorder.OutputFormat.
                THREE_GPP);
        // BUG FIX: the encoder constant must come from MediaRecorder.AudioEncoder,
        // not MediaRecorder.OutputFormat (the two enums have different values).
        miGrabacion.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        miGrabacion.setOutputFile(outputFile);
        try {
            miGrabacion.prepare();
            miGrabacion.start();
            Toast.makeText(getApplicationContext(), "La grabación comenzó", Toast.LENGTH_LONG).show();
        } catch (IllegalStateException | IOException e) {
            e.printStackTrace();
        }
    }

    /** Stops and releases the recorder if one is active. */
    public void detener(View view) {
        if (miGrabacion != null) {
            // NOTE(review): stop() can throw if called before any data was
            // captured — left unguarded to preserve existing behavior.
            miGrabacion.stop();
            miGrabacion.release();
            miGrabacion = null;
            Toast.makeText(getApplicationContext(), "El audio grabado con éxito", Toast.LENGTH_LONG).show();
        }
    }

    /** Plays back the last recording, if there is one. */
    public void reproducir(View view) {
        if (outputFile == null) {
            // Nothing has been recorded yet — avoid the NullPointerException
            // setDataSource(null) would throw.
            return;
        }
        MediaPlayer m = new MediaPlayer();
        try {
            m.setDataSource(outputFile);
            m.prepare();
            // Only play/announce once the player was prepared successfully
            // (previously start() ran even after setDataSource/prepare failed).
            m.start();
            Toast.makeText(getApplicationContext(), "reproducción de audio", Toast.LENGTH_LONG).show();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.compress.colgroup;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.data.SparseRow;
import org.apache.sysds.runtime.functionobjects.Builtin;
import org.apache.sysds.runtime.matrix.data.IJV;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysds.runtime.matrix.operators.BinaryOperator;
import org.apache.sysds.runtime.matrix.operators.ScalarOperator;
import edu.emory.mathcs.backport.java.util.Arrays;
/**
* Class that stores information about a column group within a compressed matrix block. There are subclasses specific to
* each compression type.
*/
public abstract class ColGroup implements Serializable {
protected static final Log LOG = LogFactory.getLog(ColGroup.class.getName());
private static final long serialVersionUID = 2439785418908671481L;
/**
* Public Group types supported
*
 * Note: for instance, DDC is exposed simply as DDC, not as DDC1 or DDC2, which are specific subtypes of DDC. That
 * differentiation is hidden from the user.
*
* Includes Uncompressed for sparse/dense representation RLE for Run length encoding OLE for Offset Length encoding
* DDC for Dense dictionary encoding
*/
public enum CompressionType {
UNCOMPRESSED, RLE, OLE, DDC, CONST
}
/**
* Concrete ColGroupType
*
* Protected such that outside the ColGroup package it should be unknown which specific subtype is used.
*/
protected enum ColGroupType {
UNCOMPRESSED, RLE, OLE, DDC1, DDC2, CONST
}
/** The ColGroup Indexes 0 offset, contained in the ColGroup */
protected int[] _colIndexes;
/** Number of rows in the matrix, for use by child classes. */
protected int _numRows;
/**
* ColGroup Implementation Contains zero row. Note this is not if it contains a zero value. If false then the stored
* values are filling the ColGroup making it a dense representation, that can be leveraged in operations.
*/
protected boolean _zeros;
/** boolean specifying if the column group is encoded lossy */
protected boolean _lossy;
/** Empty constructor, used for serializing into an empty new object of ColGroup. */
protected ColGroup() {
    // Sentinel state: no column indexes and an invalid row count. A subsequent
    // readFields(...) call is expected to populate the real state.
    this._colIndexes = null;
    this._numRows = -1;
}
/**
* Main constructor.
*
* @param colIndices offsets of the columns in the matrix block that make up the group
* @param numRows total number of rows in the block
*/
/**
 * Validating constructor: rejects null/empty column index arrays and
 * non-positive row counts, then stores the references directly (no copy).
 *
 * @param colIndices offsets of the columns in the matrix block that make up the group
 * @param numRows total number of rows in the block
 */
protected ColGroup(int[] colIndices, int numRows) {
    // Guard clauses — checked in the same order as before so the same
    // exception is raised when several arguments are invalid at once.
    if(colIndices == null)
        throw new DMLRuntimeException("null input to ColGroup is invalid");
    if(colIndices.length == 0)
        throw new DMLRuntimeException("0 is an invalid number of columns in a ColGroup");
    if(numRows < 1)
        throw new DMLRuntimeException(numRows + " is an invalid number of rows in a ColGroup");
    _colIndexes = colIndices;
    _numRows = numRows;
}
/**
* Obtain the offsets of the columns in the matrix block that make up the group
*
* @return offsets of the columns in the matrix block that make up the group
*/
public int[] getColIndices() {
    // NOTE(review): returns the internal array without a defensive copy —
    // callers can (and shiftColIndices does) mutate it in place.
    return _colIndexes;
}
/**
* Obtain a column index value.
*
* @param colNum column number
* @return column index value
*/
public int getColIndex(int colNum) {
    // No explicit bounds check: an out-of-range colNum surfaces as an
    // ArrayIndexOutOfBoundsException.
    return _colIndexes[colNum];
}
/**
* Set the column indexes of the column group.
*
 * @param colIndexes the new column index array to associate with this group
*/
protected void setColIndices(int[] colIndexes) {
    // Stores the caller's array directly (no defensive copy, no validation —
    // unlike the main constructor).
    _colIndexes = colIndexes;
}
/**
* Get number of rows contained in the ColGroup.
*
* @return An integer that is the number of rows.
*/
public int getNumRows() {
    // -1 until initialized via the main constructor or readFields(...).
    return _numRows;
}
/**
* Obtain the number of columns in this column group.
*
* @return number of columns in this column group
*/
public int getNumCols() {
    // Derived from the index array; throws NPE if the empty constructor was
    // used and readFields(...) has not run yet.
    return _colIndexes.length;
}
/**
* Obtain the compression type.
*
* @return How the elements of the column group are compressed.
*/
public abstract CompressionType getCompType();
/**
* Internally get the specific type of ColGroup, this could be extracted from the object but that does not allow for
* nice switches in the code.
*
* @return ColGroupType of the object.
*/
protected abstract ColGroupType getColGroupType();
/**
 * Translates every column index of this group by the same offset, mutating
 * the internal index array in place.
 *
 * @param offset amount (positive or negative) to add to each column index
 */
public void shiftColIndices(int offset) {
    for(int ix = 0; ix < _colIndexes.length; ix++)
        _colIndexes[ix] += offset;
}
/**
* Note: Must be overridden by child classes to account for additional data and metadata
*
* @return an upper bound on the number of bytes used to store this ColGroup in memory.
*/
public abstract long estimateInMemorySize();
/**
* Decompress the contents of this column group into the specified full matrix block.
*
* @param target a matrix block where the columns covered by this column group have not yet been filled in.
* @param rl row lower
* @param ru row upper
*/
public abstract void decompressToBlock(MatrixBlock target, int rl, int ru);
/**
* Decompress the contents of this column group into the specified full matrix block.
*
* @param target a matrix block where the columns covered by this column group have not yet been filled in.
* @param rl row lower
* @param ru row upper
* @param offT The offset into the target matrix block to decompress to.
*/
public abstract void decompressToBlock(MatrixBlock target, int rl, int ru, int offT);
/**
* Decompress the contents of this column group into the specified full matrix block.
*
* @param target a matrix block where the columns covered by this column group have not yet been filled in.
* @param rl row lower
* @param ru row upper
* @param offT The offset into the target matrix block to decompress to.
* @param values The Values materialized in the dictionary
*/
public abstract void decompressToBlock(MatrixBlock target, int rl, int ru, int offT, double[] values);
/**
* Decompress the contents of this column group into uncompressed packed columns
*
* @param target a dense matrix block. The block must have enough space to hold the contents of this column
* group.
* @param colIndexTargets array that maps column indices in the original matrix block to columns of target.
*/
public abstract void decompressToBlock(MatrixBlock target, int[] colIndexTargets);
public static void decompressToBlock(MatrixBlock target, int colIndex, List<ColGroup> colGroups) {
    // Decompress a single global column: find the group that contains
    // colIndex and delegate to that group's single-column decompression.
    for(ColGroup g : colGroups) {
        // binarySearch requires _colIndexes to be sorted ascending — assumed
        // construction invariant, TODO confirm. NOTE(review): 'Arrays' here is
        // the edu.emory backport class (see imports), not java.util.Arrays;
        // binarySearch semantics are the same.
        int groupColIndex = Arrays.binarySearch(g._colIndexes, colIndex);
        if(groupColIndex >= 0) {
            g.decompressToBlock(target, groupColIndex);
        }
    }
}
/**
* Decompress to block.
*
* @param target dense output vector
* @param colpos column to decompress, error if larger or equal numCols
*/
public abstract void decompressToBlock(MatrixBlock target, int colpos);
/**
* Serializes column group to data output.
*
* @param out data output
* @throws IOException if IOException occurs
*/
public abstract void write(DataOutput out) throws IOException;
/**
* Deserialize column group from data input.
*
* @param in data input
* @throws IOException if IOException occurs
*/
public abstract void readFields(DataInput in) throws IOException;
/**
* Returns the exact serialized size of column group. This can be used for example for buffer preallocation.
*
* @return exact serialized size for column group
*/
public abstract long getExactSizeOnDisk();
/**
* Get the value at a global row/column position.
*
* @param r row
* @param c column
* @return value at the row/column position
*/
public abstract double get(int r, int c);
/**
* Get all the values in the colGroup. Note that this is only the stored values not the way they are stored. Making
* the output a list of values used in that colGroup not the actual full column.
*
* @return a double list of values.
*/
public abstract double[] getValues();
/**
 * Returns the ColGroup as a MatrixBlock. Used as a fallback solution in case an operation is not supported. Use in
* connection to getIfCountsType to get if the values are repeated.
*
* @return Matrix Block of the contained Values. Possibly contained in groups.
*/
public abstract MatrixBlock getValuesAsBlock();
/**
* Returns true if in the getValuesAsBlock method returns values in groups (that needs to be counted) or
* individually potentially repeated values
*
* @return boolean
*/
public abstract boolean getIfCountsType();
/**
* Multiply the slice of the matrix that this column group represents by a vector on the right.
*
* @param vector Vector to multiply by (tall vector)
* @param c Accumulator for holding the result
* @param rl Row to start at
* @param ru Row to stop at
* @param dictVals The dictionary values materialized
*/
public abstract void rightMultByVector(double[] vector, double[] c, int rl, int ru, double[] dictVals);
/**
* Right multiply by matrix. for which the compressed matrix is on the left and the uncompressed is on the right.
* Note that there is no b argument, but the b is aggregated into the values needed for assignment and addition into
* output.
*
* @param preAggregatedB The preAggregated values that is to be put into c
* @param c The output matrix
* @param thatNrColumns The number of columns in B (before aggregation)
* @param rl The row index to start the multiplication from
* @param ru The row index to stop the multiplication at
* @param cl The column index to start from
* @param cu The row index to stop at.
*/
public abstract void rightMultByMatrix(double[] preAggregatedB, double[] c, int thatNrColumns, int rl, int ru,
int cl, int cu);
/**
* Sparse right multiply by matrix, for which the compressed matrix is on the left and the uncompressed sparse is on
* the right. This call differ from the other right multiply by not having a preAggregation phase.
*
* This should only be called in very sparse situations.
*
* @param rows The sparse rows
* @param c The output matrix linearized
* @param numVals The number of values in the dictionary
* @param dictVals The materialized dictionary
* @param nrColumns The number of columns in the matrix to multiply with and also in the output
* @param rl The row index to start at
* @param ru The row index to stop at.
*/
public abstract void rightMultBySparseMatrix(SparseRow[] rows, double[] c, int numVals, double[] dictVals,
int nrColumns, int rl, int ru);
/**
* Multiply the slice of the matrix that this column group represents by a row vector on the left (the original
* column vector is assumed to be transposed already i.e. its size now is 1xn).
*
* @param vector row vector
* @param result matrix block result
* @param numVals The Number of values contained in the Column.
*/
public abstract void leftMultByRowVector(double[] vector, double[] result, int numVals);
/**
* Multiply the slice of the matrix that this column group represents by a row vector on the left (the original
* column vector is assumed to be transposed already i.e. its size now is 1xn).
*
* @param vector row vector
* @param result matrix block result
* @param numVals The Number of values contained in the Column.
* @param values The materialized list of values contained in the dictionary.
*/
public abstract void leftMultByRowVector(double[] vector, double[] result, int numVals, double[] values);
/**
* Multiply with a matrix on the left.
*
* @param matrix matrix to left multiply
* @param result matrix block result
* @param values The materialized list of values contained in the dictionary.
* @param numRows The number of rows in the matrix input
* @param numCols The number of columns in the colGroups parent matrix.
* @param rl The row to start the matrix multiplication from
* @param ru The row to stop the matrix multiplication at.
* @param vOff The offset into the first argument matrix to start at.
*/
public abstract void leftMultByMatrix(double[] matrix, double[] result, double[] values, int numRows, int numCols,
int rl, int ru, int vOff);
/**
* Multiply with a sparse matrix on the left hand side, and add the values to the output result
*
* @param spNrVals the Number of sparse values (since the number of indexes does not align with number of
* values)
* @param indexes the indexes for the sparse values in the given row.
* @param sparseV the sparse values.
* @param result the linearized output matrix
* @param numVals the number of values in the dictionary
* @param values the dictionary values materialized
* @param numRows the number of rows in the left hand side input matrix (the sparse one)
* @param numCols the number of columns in the compression.
* @param row the row index of the sparse row to multiply with.
* @param MaterializedRow The sparse row materialized (should only be done if needed for the specific type of
* ColumnGroup)
*/
public abstract void leftMultBySparseMatrix(int spNrVals, int[] indexes, double[] sparseV, double[] result,
int numVals, double[] values, int numRows, int numCols, int row, double[] MaterializedRow);
/**
* Perform the specified scalar operation directly on the compressed column group, without decompressing individual
* cells if possible.
*
* @param op operation to perform
* @return version of this column group with the operation applied
*/
public abstract ColGroup scalarOperation(ScalarOperator op);
/**
* Perform a binary row operation.
*
* @param op The operation to execute
* @param v The vector of values to apply, should be same length as dictionary length.
* @param sparseSafe True if the operation return 0 on all instances of values in v -- op(v[?], 0)
* @return A updated column group with the new values.
*/
public abstract ColGroup binaryRowOp(BinaryOperator op, double[] v, boolean sparseSafe);
/**
* Unary Aggregate operator, since aggregate operators require new object output, the output becomes an uncompressed
* matrix.
*
* @param op The operator used
 * @param c The output matrix block.
*/
public abstract void unaryAggregateOperations(AggregateUnaryOperator op, double[] c);
	/**
	 * Compute the max / min value contained in the dictionary.
	 *
	 * @param c       Initial value
	 * @param builtin The builtin function to use
	 * @return The result value
	 */
	public abstract double computeMxx(double c, Builtin builtin);
	/**
	 * Unary Aggregate operator over a row range; since aggregate operators require new object output, the output
	 * becomes an uncompressed matrix.
	 *
	 * @param op The operator used
	 * @param c  The output matrix block.
	 * @param rl The starting row to aggregate from (inclusive)
	 * @param ru The last row to aggregate to (exclusive)
	 */
	public abstract void unaryAggregateOperations(AggregateUnaryOperator op, double[] c, int rl, int ru);
	/**
	 * Create a column group iterator for a row index range.
	 *
	 * @param rl        row lower index, inclusive
	 * @param ru        row upper index, exclusive
	 * @param inclZeros include zero values into the scope of the iterator
	 * @param rowMajor  use a row major iteration order
	 * @return an iterator instance
	 */
	public abstract Iterator<IJV> getIterator(int rl, int ru, boolean inclZeros, boolean rowMajor);
	/**
	 * Create a dense row iterator for a row index range. This iterator implies the inclusion of zeros and row-major
	 * iteration order.
	 *
	 * @param rl row lower index, inclusive
	 * @param ru row upper index, exclusive
	 * @return an iterator instance
	 */
	public abstract ColGroupRowIterator getRowIterator(int rl, int ru);
	/**
	 * Count the number of non-zeros per row and accumulate them into the given array.
	 *
	 * @param rnnz output array of non-zero counts per row (accumulated into, indexed relative to rl)
	 * @param rl   row lower bound, inclusive
	 * @param ru   row upper bound, exclusive
	 */
	public abstract void countNonZerosPerRow(int[] rnnz, int rl, int ru);
	/**
	 * Base class for column group row iterators. We do not implement the default Iterator interface in order to avoid
	 * unnecessary value copies per group.
	 */
	protected abstract class ColGroupRowIterator {
		/**
		 * Produce the values of the next row into the given buffer.
		 *
		 * @param buff  output buffer to fill with this row's values
		 * @param rowIx row index of the row being produced
		 * @param segIx segment index within the row -- NOTE(review): exact semantics depend on the implementing
		 *              column group; confirm against concrete subclasses
		 * @param last  true when this is the last row of the iteration
		 */
		public abstract void next(double[] buff, int rowIx, int segIx, boolean last);
	}
	/**
	 * Is Lossy.
	 *
	 * @return true if the ColGroup is compressed in a lossy manner
	 */
	public abstract boolean isLossy();
}
|
package org.infinispan.server.configuration.security;
import java.util.function.Supplier;
import org.infinispan.commons.configuration.Builder;
import org.infinispan.commons.configuration.attributes.AttributeSet;
import org.infinispan.commons.util.InstanceSupplier;
/**
 * Builder for the OAuth2 token-introspection settings of a security realm: client
 * credentials, the introspection endpoint, TLS context and connection timeouts.
 *
 * @since 10.0
 */
public class OAuth2ConfigurationBuilder implements Builder<OAuth2Configuration> {
   // Backing attribute set holding every OAuth2 configuration value.
   private final AttributeSet attributes;
   OAuth2ConfigurationBuilder() {
      this.attributes = OAuth2Configuration.attributeDefinitionSet();
   }
   // True when at least one attribute has been explicitly set on this builder.
   boolean isModified() {
      return this.attributes.isModified();
   }
   /** Sets the OAuth2 client identifier used when calling the introspection endpoint. */
   public OAuth2ConfigurationBuilder clientId(String clientId) {
      attributes.attribute(OAuth2Configuration.CLIENT_ID).set(clientId);
      return this;
   }
   /** Sets the client secret directly; wrapped in a supplier for deferred access. */
   public OAuth2ConfigurationBuilder clientSecret(char[] clientSecret) {
      attributes.attribute(OAuth2Configuration.CLIENT_SECRET).set(new InstanceSupplier<>(clientSecret));
      return this;
   }
   /** Sets a supplier that lazily provides the client secret. */
   public OAuth2ConfigurationBuilder clientSecret(Supplier<char[]> clientSecret) {
      attributes.attribute(OAuth2Configuration.CLIENT_SECRET).set(clientSecret);
      return this;
   }
   /** Sets the URL of the OAuth2 token introspection endpoint. */
   public OAuth2ConfigurationBuilder introspectionUrl(String introspectionUrl) {
      attributes.attribute(OAuth2Configuration.INTROSPECTION_URL).set(introspectionUrl);
      return this;
   }
   /** Sets the name of the SSL context used for the client connection to the endpoint. */
   public OAuth2ConfigurationBuilder clientSSLContext(String value) {
      attributes.attribute(OAuth2Configuration.CLIENT_SSL_CONTEXT).set(value);
      return this;
   }
   /** Sets the hostname verification policy applied to the endpoint's TLS certificate. */
   public OAuth2ConfigurationBuilder hostVerificationPolicy(String value) {
      attributes.attribute(OAuth2Configuration.HOST_VERIFICATION_POLICY).set(value);
      return this;
   }
   /** Sets the connection timeout (milliseconds, presumably -- confirm against schema docs). */
   public OAuth2ConfigurationBuilder connectionTimeout(int timeout) {
      attributes.attribute(OAuth2Configuration.CONNECTION_TIMEOUT).set(timeout);
      return this;
   }
   /** Sets the read timeout (same unit as the connection timeout). */
   public OAuth2ConfigurationBuilder readTimeout(int timeout) {
      attributes.attribute(OAuth2Configuration.READ_TIMEOUT).set(timeout);
      return this;
   }
   @Override
   public OAuth2Configuration create() {
      return new OAuth2Configuration(attributes.protect());
   }
   @Override
   public OAuth2ConfigurationBuilder read(OAuth2Configuration template) {
      attributes.read(template.attributes());
      return this;
   }
}
|
/**
* 五五海淘返利APP新版接口
* 更新日志<br> 相对于上一build的变更: <br/> APP5.4接口
*
* OpenAPI spec version: 1.3 build20170808-6
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.swagger.client.model;
import io.swagger.client.model.ShoppedStoresListIfModelDataRows;
import java.util.*;
import io.swagger.annotations.*;
import com.google.gson.annotations.SerializedName;
@ApiModel(description = "")
public class ShoppedStoresListIfModelData {
  @SerializedName("page_num")
  private String pageNum = null;
  @SerializedName("page_size")
  private String pageSize = null;
  @SerializedName("page_length")
  private String pageLength = null;
  @SerializedName("has_more")
  private String hasMore = null;
  @SerializedName("rows")
  private List<ShoppedStoresListIfModelDataRows> rows = null;
  /**
   * Page number (页码).
   **/
  @ApiModelProperty(value = "页码")
  public String getPageNum() {
    return pageNum;
  }
  public void setPageNum(String pageNum) {
    this.pageNum = pageNum;
  }
  /**
   * Number of rows shown per page (每页显示的行数).
   **/
  @ApiModelProperty(value = "每页显示的行数")
  public String getPageSize() {
    return pageSize;
  }
  public void setPageSize(String pageSize) {
    this.pageSize = pageSize;
  }
  /**
   * Actual number of data rows on this page (每页实际有多少行数据).
   **/
  @ApiModelProperty(value = "每页实际有多少行数据")
  public String getPageLength() {
    return pageLength;
  }
  public void setPageLength(String pageLength) {
    this.pageLength = pageLength;
  }
  /**
   * Whether more data is available (是否有更多数据).
   **/
  @ApiModelProperty(value = "是否有更多数据")
  public String getHasMore() {
    return hasMore;
  }
  public void setHasMore(String hasMore) {
    this.hasMore = hasMore;
  }
  /**
   * The data rows of this page.
   **/
  @ApiModelProperty(value = "")
  public List<ShoppedStoresListIfModelDataRows> getRows() {
    return rows;
  }
  public void setRows(List<ShoppedStoresListIfModelDataRows> rows) {
    this.rows = rows;
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ShoppedStoresListIfModelData other = (ShoppedStoresListIfModelData) o;
    // Objects.equals replaces the hand-rolled null-safe ternary comparisons.
    return Objects.equals(pageNum, other.pageNum)
        && Objects.equals(pageSize, other.pageSize)
        && Objects.equals(pageLength, other.pageLength)
        && Objects.equals(hasMore, other.hasMore)
        && Objects.equals(rows, other.rows);
  }
  @Override
  public int hashCode() {
    // Consistent with equals(); hash values differ from the previous 17/31 scheme,
    // which the hashCode contract permits (values are not persisted anywhere).
    return Objects.hash(pageNum, pageSize, pageLength, hasMore, rows);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ShoppedStoresListIfModelData {\n");
    sb.append("  pageNum: ").append(pageNum).append("\n");
    sb.append("  pageSize: ").append(pageSize).append("\n");
    sb.append("  pageLength: ").append(pageLength).append("\n");
    sb.append("  hasMore: ").append(hasMore).append("\n");
    sb.append("  rows: ").append(rows).append("\n");
    sb.append("}\n");
    return sb.toString();
  }
}
|
package scrame.helper;
import java.util.Comparator;
import scrame.entity.Record;
/**
 * Comparator that orders {@link Record}s alphabetically by the name of their course.
 */
public class SortRecordByCourseName implements Comparator<Record> {
  /**
   * Compares two records by their course name.
   *
   * @param a the first record
   * @param b the second record
   * @return a negative integer, zero, or a positive integer as {@code a}'s course name is
   *         lexicographically less than, equal to, or greater than {@code b}'s
   */
  @Override
  public int compare(Record a, Record b) {
    return a.getCourse().getCourseName().compareTo(b.getCourse().getCourseName());
  }
}
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.glue.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * JobBookmarkEntryMarshaller
 * <p>
 * Binds each field of a {@code JobBookmarkEntry} to its JSON payload location and member name and
 * feeds them to a {@code ProtocolMarshaller}. Auto-generated by the AWS SDK code generator; do not
 * edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class JobBookmarkEntryMarshaller {
    // One MarshallingInfo per JobBookmarkEntry field: payload location plus JSON member name.
    private static final MarshallingInfo<String> JOBNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("JobName").build();
    private static final MarshallingInfo<Integer> VERSION_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Version").build();
    private static final MarshallingInfo<Integer> RUN_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Run").build();
    private static final MarshallingInfo<Integer> ATTEMPT_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Attempt").build();
    private static final MarshallingInfo<String> PREVIOUSRUNID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("PreviousRunId").build();
    private static final MarshallingInfo<String> RUNID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("RunId").build();
    private static final MarshallingInfo<String> JOBBOOKMARK_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("JobBookmark").build();
    // Stateless, so a single shared instance suffices.
    private static final JobBookmarkEntryMarshaller instance = new JobBookmarkEntryMarshaller();
    public static JobBookmarkEntryMarshaller getInstance() {
        return instance;
    }
    /**
     * Marshall the given parameter object.
     * Any failure during marshalling is wrapped in an SdkClientException with its cause preserved.
     */
    public void marshall(JobBookmarkEntry jobBookmarkEntry, ProtocolMarshaller protocolMarshaller) {
        if (jobBookmarkEntry == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(jobBookmarkEntry.getJobName(), JOBNAME_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getVersion(), VERSION_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getRun(), RUN_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getAttempt(), ATTEMPT_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getPreviousRunId(), PREVIOUSRUNID_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getRunId(), RUNID_BINDING);
            protocolMarshaller.marshall(jobBookmarkEntry.getJobBookmark(), JOBBOOKMARK_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
/*
* Copyright 2002-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.util.pattern;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.http.server.PathContainer.PathSegment;
import org.springframework.web.util.pattern.PathPattern.MatchingContext;
/**
 * A regex path element. Used to represent any complicated element of the path.
 * For example in '<tt>/foo/*_*/*_{foobar}</tt>' both <tt>*_*</tt> and <tt>*_{foobar}</tt>
 * are {@link RegexPathElement} path elements. Derived from the general
 * {@link org.springframework.util.AntPathMatcher} approach.
 *
 * @author Andy Clement
 * @since 5.0
 */
class RegexPathElement extends PathElement {

	private static final Pattern GLOB_PATTERN = Pattern.compile("\\?|\\*|\\{((?:\\{[^/]+?\\}|[^/{}]|\\\\[{}])+?)\\}");

	private static final String DEFAULT_VARIABLE_PATTERN = "(.*)";

	/** Raw text of this element; never reassigned after construction, hence final. */
	private final char[] regex;

	private final boolean caseSensitive;

	private final Pattern pattern;

	/** Number of wildcard usages counted while building the pattern. */
	private int wildcardCount;

	/** URI template variable names captured by this element, in declaration order. */
	private final List<String> variableNames = new LinkedList<>();

	RegexPathElement(int pos, char[] regex, boolean caseSensitive, char[] completePattern, char separator) {
		super(pos, separator);
		this.regex = regex;
		this.caseSensitive = caseSensitive;
		this.pattern = buildPattern(regex, completePattern);
	}

	/**
	 * Translate the glob-style element text into a compiled {@link Pattern}, registering
	 * captured variable names and counting wildcard usages along the way.
	 * @param regex the element text
	 * @param completePattern the full pattern, used only for error reporting
	 * @return the compiled pattern (case-insensitive when so configured)
	 * @throws PatternParseException if the same variable name is captured twice
	 */
	public Pattern buildPattern(char[] regex, char[] completePattern) {
		StringBuilder patternBuilder = new StringBuilder();
		String text = new String(regex);
		Matcher matcher = GLOB_PATTERN.matcher(text);
		int end = 0;
		while (matcher.find()) {
			// Literal text between glob constructs must be regex-quoted.
			patternBuilder.append(quote(text, end, matcher.start()));
			String match = matcher.group();
			if ("?".equals(match)) {
				patternBuilder.append('.');
			}
			else if ("*".equals(match)) {
				patternBuilder.append(".*");
				int pos = matcher.start();
				if (pos < 1 || text.charAt(pos - 1) != '.') {
					// To be compatible with the AntPathMatcher comparator,
					// '.*' is not considered a wildcard usage
					this.wildcardCount++;
				}
			}
			else if (match.startsWith("{") && match.endsWith("}")) {
				int colonIdx = match.indexOf(':');
				if (colonIdx == -1) {
					// '{name}' - capture with the default '(.*)' pattern.
					patternBuilder.append(DEFAULT_VARIABLE_PATTERN);
					String variableName = matcher.group(1);
					if (this.variableNames.contains(variableName)) {
						throw new PatternParseException(this.pos, completePattern,
								PatternParseException.PatternMessage.ILLEGAL_DOUBLE_CAPTURE, variableName);
					}
					this.variableNames.add(variableName);
				}
				else {
					// '{name:regex}' - capture with the user-supplied constraint pattern.
					String variablePattern = match.substring(colonIdx + 1, match.length() - 1);
					patternBuilder.append('(');
					patternBuilder.append(variablePattern);
					patternBuilder.append(')');
					String variableName = match.substring(1, colonIdx);
					if (this.variableNames.contains(variableName)) {
						throw new PatternParseException(this.pos, completePattern,
								PatternParseException.PatternMessage.ILLEGAL_DOUBLE_CAPTURE, variableName);
					}
					this.variableNames.add(variableName);
				}
			}
			end = matcher.end();
		}
		// Quote any trailing literal text after the last glob construct.
		patternBuilder.append(quote(text, end, text.length()));
		if (this.caseSensitive) {
			return Pattern.compile(patternBuilder.toString());
		}
		else {
			return Pattern.compile(patternBuilder.toString(), Pattern.CASE_INSENSITIVE);
		}
	}

	/** Return the names of the URI template variables captured by this element. */
	public List<String> getVariableNames() {
		return this.variableNames;
	}

	/** Regex-quote the given substring, or return "" when the range is empty. */
	private String quote(String s, int start, int end) {
		if (start == end) {
			return "";
		}
		return Pattern.quote(s.substring(start, end));
	}

	@Override
	public boolean matches(int pathIndex, MatchingContext matchingContext) {
		String textToMatch = matchingContext.pathElementValue(pathIndex);
		Matcher matcher = this.pattern.matcher(textToMatch);
		boolean matches = matcher.matches();
		if (matches) {
			if (isNoMorePattern()) {
				// If capturing variables, there must be some actual text to bind to them.
				if (matchingContext.determineRemainingPath &&
						(this.variableNames.isEmpty() || textToMatch.length() > 0)) {
					matchingContext.remainingPathIndex = pathIndex + 1;
					// 'matches' is already true here; no reassignment needed.
				}
				else {
					// No more pattern, is there more data?
					// If pattern is capturing variables there must be some actual data to bind to them
					matches = (pathIndex + 1) >= matchingContext.pathLength &&
							(this.variableNames.isEmpty() || textToMatch.length() > 0);
					if (!matches && matchingContext.isMatchOptionalTrailingSeparator()) {
						matches = (this.variableNames.isEmpty() || textToMatch.length() > 0) &&
								(pathIndex + 2) >= matchingContext.pathLength &&
								matchingContext.isSeparator(pathIndex + 1);
					}
				}
			}
			else {
				matches = (this.next != null && this.next.matches(pathIndex + 1, matchingContext));
			}
		}
		if (matches && matchingContext.extractingVariables) {
			// Process captures
			if (this.variableNames.size() != matcher.groupCount()) { // SPR-8455
				throw new IllegalArgumentException("The number of capturing groups in the pattern segment "
						+ this.pattern + " does not match the number of URI template variables it defines, "
						+ "which can occur if capturing groups are used in a URI template regex. "
						+ "Use non-capturing groups instead.");
			}
			for (int i = 1; i <= matcher.groupCount(); i++) {
				String name = this.variableNames.get(i - 1);
				String value = matcher.group(i);
				// Only the final variable of the segment carries the segment's path parameters.
				matchingContext.set(name, value,
						(i == this.variableNames.size()) ?
								((PathSegment) matchingContext.pathElements.get(pathIndex)).parameters() :
								NO_PARAMETERS);
			}
		}
		return matches;
	}

	@Override
	public int getNormalizedLength() {
		int varsLength = 0;
		for (String variableName : this.variableNames) {
			varsLength += variableName.length();
		}
		return (this.regex.length - varsLength - this.variableNames.size());
	}

	public int getCaptureCount() {
		return this.variableNames.size();
	}

	@Override
	public int getWildcardCount() {
		return this.wildcardCount;
	}

	@Override
	public int getScore() {
		return (getCaptureCount() * CAPTURE_VARIABLE_WEIGHT + getWildcardCount() * WILDCARD_WEIGHT);
	}

	@Override
	public String toString() {
		return "Regex(" + String.valueOf(this.regex) + ")";
	}

	@Override
	public char[] getChars() {
		return this.regex;
	}

}
|
/*
* Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents the properties of a global secondary index for the table when the backup was created.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/GlobalSecondaryIndexInfo" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GlobalSecondaryIndexInfo implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The name of the global secondary index.
     * </p>
     */
    private String indexName;
    /**
     * <p>
     * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and
     * key types:
     * </p>
     * <ul>
     * <li>
     * <p>
     * <code>HASH</code> - partition key
     * </p>
     * </li>
     * <li>
     * <p>
     * <code>RANGE</code> - sort key
     * </p>
     * </li>
     * </ul>
     * <note>
     * <p>
     * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from
     * DynamoDB's usage of an internal hash function to evenly distribute data items across partitions, based on their
     * partition key values.
     * </p>
     * <p>
     * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the
     * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key
     * value.
     * </p>
     * </note>
     */
    private java.util.List<KeySchemaElement> keySchema;
    /**
     * <p>
     * Represents attributes that are copied (projected) from the table into the global secondary index. These are in
     * addition to the primary key attributes and index key attributes, which are automatically projected.
     * </p>
     */
    private Projection projection;
    /**
     * <p>
     * Represents the provisioned throughput settings for the specified global secondary index.
     * </p>
     */
    private ProvisionedThroughput provisionedThroughput;
/**
* <p>
* The name of the global secondary index.
* </p>
*
* @param indexName
* The name of the global secondary index.
*/
public void setIndexName(String indexName) {
this.indexName = indexName;
}
/**
* <p>
* The name of the global secondary index.
* </p>
*
* @return The name of the global secondary index.
*/
public String getIndexName() {
return this.indexName;
}
/**
* <p>
* The name of the global secondary index.
* </p>
*
* @param indexName
* The name of the global secondary index.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GlobalSecondaryIndexInfo withIndexName(String indexName) {
setIndexName(indexName);
return this;
}
/**
* <p>
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and
* key types:
* </p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from
* DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their
* partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the
* way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key
* value.
* </p>
* </note>
*
* @return The complete key schema for a global secondary index, which consists of one or more pairs of attribute
* names and key types:</p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute"
* derives from DynamoDB' usage of an internal hash function to evenly distribute data items across
* partitions, based on their partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives
* from the way DynamoDB stores items with the same partition key physically close together, in sorted order
* by the sort key value.
* </p>
*/
public java.util.List<KeySchemaElement> getKeySchema() {
return keySchema;
}
/**
* <p>
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and
* key types:
* </p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from
* DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their
* partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the
* way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key
* value.
* </p>
* </note>
*
* @param keySchema
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute
* names and key types:</p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives
* from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based
* on their partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives
* from the way DynamoDB stores items with the same partition key physically close together, in sorted order
* by the sort key value.
* </p>
*/
public void setKeySchema(java.util.Collection<KeySchemaElement> keySchema) {
if (keySchema == null) {
this.keySchema = null;
return;
}
this.keySchema = new java.util.ArrayList<KeySchemaElement>(keySchema);
}
/**
* <p>
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and
* key types:
* </p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from
* DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their
* partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the
* way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key
* value.
* </p>
* </note>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setKeySchema(java.util.Collection)} or {@link #withKeySchema(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param keySchema
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute
* names and key types:</p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives
* from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based
* on their partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives
* from the way DynamoDB stores items with the same partition key physically close together, in sorted order
* by the sort key value.
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GlobalSecondaryIndexInfo withKeySchema(KeySchemaElement... keySchema) {
if (this.keySchema == null) {
setKeySchema(new java.util.ArrayList<KeySchemaElement>(keySchema.length));
}
for (KeySchemaElement ele : keySchema) {
this.keySchema.add(ele);
}
return this;
}
/**
* <p>
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and
* key types:
* </p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from
* DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their
* partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the
* way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key
* value.
* </p>
* </note>
*
* @param keySchema
* The complete key schema for a global secondary index, which consists of one or more pairs of attribute
* names and key types:</p>
* <ul>
* <li>
* <p>
* <code>HASH</code> - partition key
* </p>
* </li>
* <li>
* <p>
* <code>RANGE</code> - sort key
* </p>
* </li>
* </ul>
* <note>
* <p>
* The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives
* from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based
* on their partition key values.
* </p>
* <p>
* The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives
* from the way DynamoDB stores items with the same partition key physically close together, in sorted order
* by the sort key value.
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GlobalSecondaryIndexInfo withKeySchema(java.util.Collection<KeySchemaElement> keySchema) {
setKeySchema(keySchema);
return this;
}
/**
* <p>
* Represents attributes that are copied (projected) from the table into the global secondary index. These are in
* addition to the primary key attributes and index key attributes, which are automatically projected.
* </p>
*
* @param projection
* Represents attributes that are copied (projected) from the table into the global secondary index. These
* are in addition to the primary key attributes and index key attributes, which are automatically projected.
*/
public void setProjection(Projection projection) {
this.projection = projection;
}
/**
* <p>
* Represents attributes that are copied (projected) from the table into the global secondary index. These are in
* addition to the primary key attributes and index key attributes, which are automatically projected.
* </p>
*
* @return Represents attributes that are copied (projected) from the table into the global secondary index. These
* are in addition to the primary key attributes and index key attributes, which are automatically
* projected.
*/
public Projection getProjection() {
return this.projection;
}
/**
* <p>
* Represents attributes that are copied (projected) from the table into the global secondary index. These are in
* addition to the primary key attributes and index key attributes, which are automatically projected.
* </p>
*
* @param projection
* Represents attributes that are copied (projected) from the table into the global secondary index. These
* are in addition to the primary key attributes and index key attributes, which are automatically projected.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GlobalSecondaryIndexInfo withProjection(Projection projection) {
setProjection(projection);
return this;
}
/**
* <p>
* Represents the provisioned throughput settings for the specified global secondary index.
* </p>
*
* @param provisionedThroughput
* Represents the provisioned throughput settings for the specified global secondary index.
*/
public void setProvisionedThroughput(ProvisionedThroughput provisionedThroughput) {
this.provisionedThroughput = provisionedThroughput;
}
/**
* <p>
* Represents the provisioned throughput settings for the specified global secondary index.
* </p>
*
* @return Represents the provisioned throughput settings for the specified global secondary index.
*/
public ProvisionedThroughput getProvisionedThroughput() {
return this.provisionedThroughput;
}
/**
* <p>
* Represents the provisioned throughput settings for the specified global secondary index.
* </p>
*
* @param provisionedThroughput
* Represents the provisioned throughput settings for the specified global secondary index.
* @return Returns a reference to this object so that method calls can be chained together.
*/
    public GlobalSecondaryIndexInfo withProvisionedThroughput(ProvisionedThroughput provisionedThroughput) {
        // Fluent variant of setProvisionedThroughput(ProvisionedThroughput); returns this for chaining.
        setProvisionedThroughput(provisionedThroughput);
        return this;
    }
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
    // Render only the non-null members, SDK-style: "{Name: value,...}".
    // Note: every present member except the last one gets a trailing comma,
    // matching the generated-model formatting exactly.
    StringBuilder buf = new StringBuilder("{");
    if (getIndexName() != null)
        buf.append("IndexName: ").append(getIndexName()).append(",");
    if (getKeySchema() != null)
        buf.append("KeySchema: ").append(getKeySchema()).append(",");
    if (getProjection() != null)
        buf.append("Projection: ").append(getProjection()).append(",");
    if (getProvisionedThroughput() != null)
        buf.append("ProvisionedThroughput: ").append(getProvisionedThroughput());
    return buf.append("}").toString();
}
@Override
public boolean equals(Object obj) {
    // Identity fast-path, then type check (instanceof also rejects null),
    // then null-safe field-wise comparison. Objects.equals(a, b) is equivalent
    // to the original null-xor + equals pattern.
    if (this == obj)
        return true;
    if (!(obj instanceof GlobalSecondaryIndexInfo))
        return false;
    GlobalSecondaryIndexInfo other = (GlobalSecondaryIndexInfo) obj;
    return java.util.Objects.equals(other.getIndexName(), this.getIndexName())
            && java.util.Objects.equals(other.getKeySchema(), this.getKeySchema())
            && java.util.Objects.equals(other.getProjection(), this.getProjection())
            && java.util.Objects.equals(other.getProvisionedThroughput(), this.getProvisionedThroughput());
}
@Override
public int hashCode() {
    // Objects.hash performs exactly the same 31-based accumulation
    // (seed 1, null -> 0) as the original handwritten loop, so the
    // produced hash values are identical.
    return java.util.Objects.hash(getIndexName(), getKeySchema(), getProjection(), getProvisionedThroughput());
}
@Override
public GlobalSecondaryIndexInfo clone() {
    try {
        return (GlobalSecondaryIndexInfo) super.clone();
    } catch (CloneNotSupportedException cause) {
        // Unreachable in practice ("we're Cloneable", per the message); surfaced as an
        // unchecked error with the cause preserved.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", cause);
    }
}
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // SDK-internal: delegates wire serialization to the generated marshaller singleton.
        com.amazonaws.services.dynamodbv2.model.transform.GlobalSecondaryIndexInfoMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
package com.fsck.k9.mail;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import androidx.annotation.NonNull;
import com.fsck.k9.mail.filter.CountingOutputStream;
import com.fsck.k9.mail.filter.EOLConvertingOutputStream;
import timber.log.Timber;
/**
 * Abstract base type for an email message: a MIME {@link Part} that is also a
 * {@link Body}, carrying envelope state (UID, flags, internal date) plus
 * abstract accessors that concrete message implementations provide.
 */
public abstract class Message implements Part, Body {
    protected static final String DEFAULT_MIME_TYPE = "text/plain";

    /** Recipient header categories a message can carry. */
    public enum RecipientType {
        TO, CC, BCC, X_ORIGINAL_TO, DELIVERED_TO, X_ENVELOPE_TO
    }

    // Server-assigned unique identifier of this message within its folder.
    protected String mUid;

    // EnumSet-backed flag storage; exposed read-only through getFlags().
    private Set<Flag> mFlags = EnumSet.noneOf(Flag.class);

    // Date the server recorded for the message, distinct from the sent date in the headers.
    private Date mInternalDate;

    /**
     * Returns {@code true} when this message predates {@code earliestDate}.
     * Uses the sent date when available, otherwise falls back to the internal
     * date; returns {@code false} when no cutoff is given or neither date is known.
     *
     * @param earliestDate cutoff date, may be {@code null}
     */
    public boolean olderThan(Date earliestDate) {
        if (earliestDate == null) {
            return false;
        }
        Date myDate = getSentDate();
        if (myDate == null) {
            myDate = getInternalDate();
        }
        return myDate != null && myDate.before(earliestDate);
    }

    public String getUid() {
        return mUid;
    }

    public void setUid(String uid) {
        this.mUid = uid;
    }

    public abstract String getSubject();

    public abstract void setSubject(String subject);

    public Date getInternalDate() {
        return mInternalDate;
    }

    public void setInternalDate(Date internalDate) {
        this.mInternalDate = internalDate;
    }

    public abstract Date getSentDate();

    public abstract void setSentDate(Date sentDate, boolean hideTimeZone);

    public abstract Address[] getRecipients(RecipientType type);

    public abstract Address[] getFrom();

    public abstract void setFrom(Address from);

    public abstract Address[] getSender();

    public abstract void setSender(Address sender);

    public abstract Address[] getReplyTo();

    public abstract void setReplyTo(Address[] from);

    public abstract String getMessageId();

    public abstract void setInReplyTo(String inReplyTo);

    public abstract String[] getReferences();

    public abstract void setReferences(String references);

    @Override
    public abstract Body getBody();

    @Override
    public abstract void addHeader(String name, String value);

    @Override
    public abstract void addRawHeader(String name, String raw);

    @Override
    public abstract void setHeader(String name, String value);

    @NonNull
    @Override
    public abstract String[] getHeader(String name);

    public abstract List<Header> getHeaders();

    @Override
    public abstract void removeHeader(String name);

    @Override
    public abstract void setBody(Body body);

    public abstract boolean hasAttachments();

    public abstract long getSize();

    /*
     * TODO Refactor Flags at some point to be able to store user defined flags.
     */
    /** Returns an unmodifiable view of this message's flags. */
    public Set<Flag> getFlags() {
        return Collections.unmodifiableSet(mFlags);
    }

    /**
     * @param flag
     *            Flag to set. Never <code>null</code>.
     * @param set
     *            If <code>true</code>, the flag is added. If <code>false</code>
     *            , the flag is removed.
     * @throws MessagingException
     */
    public void setFlag(Flag flag, boolean set) throws MessagingException {
        if (set) {
            mFlags.add(flag);
        } else {
            mFlags.remove(flag);
        }
    }

    /**
     * Applies {@link #setFlag(Flag, boolean)} to every flag in the given set.
     *
     * @param flags flags to add or remove
     * @param set   {@code true} to add all, {@code false} to remove all
     */
    public void setFlags(final Set<Flag> flags, boolean set) throws MessagingException {
        for (Flag flag : flags) {
            setFlag(flag, set);
        }
    }

    public boolean isSet(Flag flag) {
        return mFlags.contains(flag);
    }

    // No-op by default; subclasses backed by real resources may override.
    public void destroy() throws MessagingException {}

    @Override
    public abstract void setEncoding(String encoding) throws MessagingException;

    /**
     * Computes the serialized size of this message by writing it through an
     * EOL-converting, byte-counting sink.
     *
     * @return the byte count, or 0 if serialization fails (the failure is logged)
     */
    public long calculateSize() {
        try {
            CountingOutputStream out = new CountingOutputStream();
            EOLConvertingOutputStream eolOut = new EOLConvertingOutputStream(out);
            writeTo(eolOut);
            eolOut.flush();
            return out.getCount();
        } catch (IOException | MessagingException e) {
            // Multi-catch: both branches previously had identical handling.
            Timber.e(e, "Failed to calculate a message size");
        }
        return 0;
    }
}
|
package com.app.linj.util;
import android.annotation.SuppressLint;
import android.os.Environment;
/**
* Created by zhangshenglan on 16/4/29.
*/
@SuppressLint("SdCardPath")
public class BmobConstants {
    /**
     * Directory where outgoing (sent) images are stored.
     */
    // NOTE(review): non-final mutable "constant" — consider making these fields final
    // if no caller reassigns them (verify before changing).
    public static String BMOB_PICTURE_PATH = Environment.getExternalStorageDirectory() + "/Ibaidutieba/image/";
    /**
     * Directory where the user's own avatar is saved.
     */
    // NOTE(review): hard-coded "/sdcard" path (hence the SdCardPath lint suppression);
    // presumably should use Environment.getExternalStorageDirectory() like the field above — confirm.
    public static String MyAvatarDir = "/sdcard/Ibaidutieba/avatar/";
    /**
     * Request codes for camera/gallery/crop callbacks.
     */
    public static final int REQUESTCODE_UPLOADAVATAR_CAMERA = 1;// take a photo to update the avatar
    public static final int REQUESTCODE_UPLOADAVATAR_LOCATION = 2;// pick from the local gallery to update the avatar
    public static final int REQUESTCODE_UPLOADAVATAR_CROP = 3;// crop the avatar with the system cropper
    public static final int REQUESTCODE_TAKE_CAMERA = 0x000001;// take a photo
    public static final int REQUESTCODE_TAKE_LOCAL = 0x000002;// pick a local image
    public static final int REQUESTCODE_TAKE_LOCATION = 0x000003;// pick a location
    public static final String EXTRA_STRING = "extra_string";
    public static final int NUMBERS_PER_PAGE = 25;// number of comments returned per page request
    public static final String ACTION_REGISTER_SUCCESS_FINISH ="register.success.finish";// close the login screen after a successful registration
}
|
package com.sahaab.hijri.caldroid;
import java.util.Date;
import android.view.View;
/**
* CaldroidListener inform when user clicks on a valid date (not within disabled
* dates, and valid between min/max dates)
*
* The method onChangeMonth is optional, user can always override this to listen
* to month change event
*
* @author thomasdao
*
*/
/**
 * Listener for Caldroid calendar events. Only {@link #onSelectDate} is
 * mandatory; the remaining callbacks are no-op hooks that subclasses may
 * override (month changes, long clicks, view creation).
 */
public abstract class CaldroidListener {
    /**
     * Inform client user has clicked on a date.
     *
     * @param date the selected date
     * @param view the calendar cell view that was clicked
     */
    public abstract void onSelectDate(Date date, View view);

    /**
     * Inform client user has long clicked on a date.
     *
     * @param date the long-clicked date
     * @param view the calendar cell view that was long-clicked
     */
    public void onLongClickDate(Date date, View view) {
        // Do nothing
    }

    /**
     * Inform client that calendar has changed month.
     * (Fixed: removed a stray ';' after this method body — an empty declaration.)
     *
     * @param month the newly displayed month
     * @param year  the newly displayed year
     */
    public void onChangeMonth(int month, int year) {
        // Do nothing
    }

    /**
     * Inform client that CaldroidFragment view has been created and views are
     * no longer null. Useful for customization of button and text views.
     */
    public void onCaldroidViewCreated() {
        // Do nothing
    }
}
|
// Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package com.apollographql.apollo.internal.interceptor;
import com.apollographql.apollo.exception.ApolloException;
import com.apollographql.apollo.interceptor.ApolloInterceptorChain;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Executor;
// Referenced classes of package com.apollographql.apollo.internal.interceptor:
// ApolloCacheInterceptor
// =====================================================================================
// NOTE(review): This is raw Jad 1.5.8g decompiler output and does NOT compile as Java:
//   - the class name "ApolloCacheInterceptor$1" contains '$' (a synthetic inner-class
//     binary name, not a legal source identifier);
//   - several type names were truncated by the decompiler ("urceType", "ptorResponse",
//     "ptorRequest", "interceptor.k", "_cls1");
//   - the constructor references parameters that were never declared
//     ("final_apollocacheinterceptor", "ptorrequest", "k", "apollointerceptorchain")
//     and calls super() after field writes;
//   - one catch clause catches InterceptorResponse, which is not a Throwable.
// Judging from the bytecode annotations ("//* n m:..." lines, emitted by the decompiler),
// this was the anonymous Runnable inside ApolloCacheInterceptor: it either serves the
// request from the cache (when request.fetchFromCache) or writes optimistic updates and
// proceeds down the interceptor chain, rolling the optimistic updates back on failure.
// Recover the original source rather than maintaining this text; the code below is kept
// byte-identical so the bytecode annotations stay aligned.
// =====================================================================================
class ApolloCacheInterceptor$1
    implements Runnable
{

    // Cache-first path when fetchFromCache, otherwise optimistic-write + chain.proceedAsync.
    public void run()
    {
        if(disposed)
//* 0 0:aload_0
//* 1 1:getfield #27 <Field ApolloCacheInterceptor this$0>
//* 2 4:getfield #46 <Field boolean ApolloCacheInterceptor.disposed>
//* 3 7:ifeq 11
            return;
// 4 10:return
        if(val$request.fetchFromCache)
//* 5 11:aload_0
//* 6 12:getfield #29 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest val$request>
//* 7 15:getfield #51 <Field boolean com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest.fetchFromCache>
//* 8 18:ifeq 77
        {
            val$callBack.onFetch(com.apollographql.apollo.interceptor.urceType.CACHE);
// 9 21:aload_0
// 10 22:getfield #31 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack val$callBack>
// 11 25:getstatic #57 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$FetchSourceType com.apollographql.apollo.interceptor.ApolloInterceptor$FetchSourceType.CACHE>
// 12 28:invokeinterface #63 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onFetch(com.apollographql.apollo.interceptor.ApolloInterceptor$FetchSourceType)>
            try
            {
                com.apollographql.apollo.interceptor.ptorResponse ptorresponse = resolveFromCache(val$request);
// 13 33:aload_0
// 14 34:getfield #27 <Field ApolloCacheInterceptor this$0>
// 15 37:aload_0
// 16 38:getfield #29 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest val$request>
// 17 41:invokevirtual #67 <Method com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorResponse ApolloCacheInterceptor.resolveFromCache(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
// 18 44:astore_1
                val$callBack.onResponse(ptorresponse);
// 19 45:aload_0
// 20 46:getfield #31 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack val$callBack>
// 21 49:aload_1
// 22 50:invokeinterface #71 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onResponse(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorResponse)>
                val$callBack.onCompleted();
// 23 55:aload_0
// 24 56:getfield #31 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack val$callBack>
// 25 59:invokeinterface #74 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onCompleted()>
                return;
// 26 64:return
            }
            catch(ApolloException apolloexception)
//* 27 65:astore_1
            {
                val$callBack.onFailure(apolloexception);
// 28 66:aload_0
// 29 67:getfield #31 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack val$callBack>
// 30 70:aload_1
// 31 71:invokeinterface #78 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onFailure(ApolloException)>
            }
            return;
// 32 76:return
        } else
        {
            writeOptimisticUpdatesAndPublish(val$request);
// 33 77:aload_0
// 34 78:getfield #27 <Field ApolloCacheInterceptor this$0>
// 35 81:aload_0
// 36 82:getfield #29 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest val$request>
// 37 85:invokevirtual #82 <Method void ApolloCacheInterceptor.writeOptimisticUpdatesAndPublish(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
            val$chain.proceedAsync(val$request, val$dispatcher, new com.apollographql.apollo.interceptor.ApolloInterceptor.CallBack() {

                public void onCompleted()
                {
// 0 0:return
                }

                public void onFailure(ApolloException apolloexception1)
                {
                    rollbackOptimisticUpdatesAndPublish(request);
// 0 0:aload_0
// 1 1:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 2 4:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
// 3 7:aload_0
// 4 8:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 5 11:getfield #32 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest ApolloCacheInterceptor$1.val$request>
// 6 14:invokevirtual #38 <Method void ApolloCacheInterceptor.rollbackOptimisticUpdatesAndPublish(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
                    callBack.onFailure(apolloexception1);
// 7 17:aload_0
// 8 18:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 9 21:getfield #42 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack ApolloCacheInterceptor$1.val$callBack>
// 10 24:aload_1
// 11 25:invokeinterface #44 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onFailure(ApolloException)>
// 12 30:return
                }

                public void onFetch(com.apollographql.apollo.interceptor.ApolloInterceptor.FetchSourceType fetchsourcetype)
                {
                    callBack.onFetch(fetchsourcetype);
// 0 0:aload_0
// 1 1:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 2 4:getfield #42 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack ApolloCacheInterceptor$1.val$callBack>
// 3 7:aload_1
// 4 8:invokeinterface #49 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onFetch(com.apollographql.apollo.interceptor.ApolloInterceptor$FetchSourceType)>
// 5 13:return
                }

                public void onResponse(com.apollographql.apollo.interceptor.ApolloInterceptor.InterceptorResponse interceptorresponse)
                {
                    if(disposed)
//* 0 0:aload_0
//* 1 1:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
//* 2 4:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
//* 3 7:getfield #57 <Field boolean ApolloCacheInterceptor.disposed>
//* 4 10:ifeq 14
                        return;
// 5 13:return
                    try
                    {
                        Set set = cacheResponse(interceptorresponse, request);
// 6 14:aload_0
// 7 15:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 8 18:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
// 9 21:aload_1
// 10 22:aload_0
// 11 23:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 12 26:getfield #32 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest ApolloCacheInterceptor$1.val$request>
// 13 29:invokevirtual #61 <Method Set ApolloCacheInterceptor.cacheResponse(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorResponse, com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
// 14 32:astore_2
                        Set set1 = rollbackOptimisticUpdates(request);
// 15 33:aload_0
// 16 34:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 17 37:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
// 18 40:aload_0
// 19 41:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 20 44:getfield #32 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest ApolloCacheInterceptor$1.val$request>
// 21 47:invokevirtual #65 <Method Set ApolloCacheInterceptor.rollbackOptimisticUpdates(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
// 22 50:astore_3
                        HashSet hashset = new HashSet();
// 23 51:new #67 <Class HashSet>
// 24 54:dup
// 25 55:invokespecial #68 <Method void HashSet()>
// 26 58:astore 4
                        ((Set) (hashset)).addAll(((java.util.Collection) (set1)));
// 27 60:aload 4
// 28 62:aload_3
// 29 63:invokeinterface #74 <Method boolean Set.addAll(java.util.Collection)>
// 30 68:pop
                        ((Set) (hashset)).addAll(((java.util.Collection) (set)));
// 31 69:aload 4
// 32 71:aload_2
// 33 72:invokeinterface #74 <Method boolean Set.addAll(java.util.Collection)>
// 34 77:pop
                        publishCacheKeys(((Set) (hashset)));
// 35 78:aload_0
// 36 79:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 37 82:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
// 38 85:aload 4
// 39 87:invokevirtual #78 <Method void ApolloCacheInterceptor.publishCacheKeys(Set)>
                    }
//* 40 90:aload_0
//* 41 91:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
//* 42 94:getfield #42 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack ApolloCacheInterceptor$1.val$callBack>
//* 43 97:aload_1
//* 44 98:invokeinterface #80 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onResponse(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorResponse)>
//* 45 103:aload_0
//* 46 104:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
//* 47 107:getfield #42 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack ApolloCacheInterceptor$1.val$callBack>
//* 48 110:invokeinterface #82 <Method void com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack.onCompleted()>
//* 49 115:return
// Misplaced declaration of an exception variable
// NOTE(review): InterceptorResponse is not a Throwable; the original catch type was
// almost certainly Exception (see the decompiler's own "Misplaced declaration" warning).
                    catch(com.apollographql.apollo.interceptor.ApolloInterceptor.InterceptorResponse interceptorresponse)
//* 50 116:astore_1
                    {
                        rollbackOptimisticUpdatesAndPublish(request);
// 51 117:aload_0
// 52 118:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 53 121:getfield #28 <Field ApolloCacheInterceptor ApolloCacheInterceptor$1.this$0>
// 54 124:aload_0
// 55 125:getfield #17 <Field ApolloCacheInterceptor$1 this$1>
// 56 128:getfield #32 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest ApolloCacheInterceptor$1.val$request>
// 57 131:invokevirtual #38 <Method void ApolloCacheInterceptor.rollbackOptimisticUpdatesAndPublish(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest)>
                        throw interceptorresponse;
// 58 134:aload_1
// 59 135:athrow
                    }
                    callBack.onResponse(interceptorresponse);
                    callBack.onCompleted();
                }

                final ApolloCacheInterceptor._cls1 this$1;

                {
                    this$1 = ApolloCacheInterceptor._cls1.this;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #17 <Field ApolloCacheInterceptor$1 this$1>
                    super();
// 3 5:aload_0
// 4 6:invokespecial #19 <Method void Object()>
// 5 9:return
                }
            }
);
// 38 88:aload_0
// 39 89:getfield #33 <Field ApolloInterceptorChain val$chain>
// 40 92:aload_0
// 41 93:getfield #29 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest val$request>
// 42 96:aload_0
// 43 97:getfield #35 <Field Executor val$dispatcher>
// 44 100:new #13 <Class ApolloCacheInterceptor$1$1>
// 45 103:dup
// 46 104:aload_0
// 47 105:invokespecial #85 <Method void ApolloCacheInterceptor$1$1(ApolloCacheInterceptor$1)>
// 48 108:invokeinterface #91 <Method void ApolloInterceptorChain.proceedAsync(com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest, Executor, com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack)>
            return;
// 49 113:return
        }
    }

    // Captured variables of the anonymous class (decompiler-synthesized val$ fields).
    final ApolloCacheInterceptor this$0;
    final com.apollographql.apollo.interceptor.k val$callBack;
    final ApolloInterceptorChain val$chain;
    final Executor val$dispatcher;
    final com.apollographql.apollo.interceptor.ptorRequest val$request;

    // NOTE(review): the decompiler dropped this constructor's parameter list;
    // the names below ("final_apollocacheinterceptor", "ptorrequest", "k",
    // "apollointerceptorchain") are unresolved.
    ApolloCacheInterceptor$1()
    {
        this$0 = final_apollocacheinterceptor;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #27 <Field ApolloCacheInterceptor this$0>
        val$request = ptorrequest;
// 3 5:aload_0
// 4 6:aload_2
// 5 7:putfield #29 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$InterceptorRequest val$request>
        val$callBack = k;
// 6 10:aload_0
// 7 11:aload_3
// 8 12:putfield #31 <Field com.apollographql.apollo.interceptor.ApolloInterceptor$CallBack val$callBack>
        val$chain = apollointerceptorchain;
// 9 15:aload_0
// 10 16:aload 4
// 11 18:putfield #33 <Field ApolloInterceptorChain val$chain>
        val$dispatcher = Executor.this;
// 12 21:aload_0
// 13 22:aload 5
// 14 24:putfield #35 <Field Executor val$dispatcher>
        super();
// 15 27:aload_0
// 16 28:invokespecial #38 <Method void Object()>
// 17 31:return
    }
}
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth.tokenvaluegenerator;
import org.apache.oltu.oauth2.as.issuer.ValueGenerator;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.bouncycastle.util.encoders.Hex;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.UUID;
/**
 * Token value generator class to generate SHA-256 hash as a token value (256 bits, 64 Hex Characters).
 *
 * @deprecated retained for backward compatibility; prefer the framework's current
 *             token value generator (TODO: confirm the designated replacement).
 */
@Deprecated
public class SHA256Generator implements ValueGenerator {

    /**
     * Generates a token value from two concatenated random UUIDs.
     *
     * @return 64-character lowercase hex SHA-256 digest
     * @throws OAuthSystemException if the digest cannot be computed
     */
    @Override
    public String generateValue() throws OAuthSystemException {
        // UUID is a 36 (32 + 4) digit string directly hashing it to SHA-256 does not make sense since SHA-256 is a
        // 64 digit string. We are combining two UUIDs to generate a long string.
        return this.generateValue(UUID.randomUUID().toString() + UUID.randomUUID().toString());
    }

    /**
     * Hashes the given value with SHA-256.
     *
     * @param value input to hash (encoded as UTF-8)
     * @return 64-character lowercase hex SHA-256 digest of {@code value}
     * @throws OAuthSystemException if the SHA-256 algorithm is unavailable
     */
    @Override
    public String generateValue(String value) throws OAuthSystemException {
        try {
            // A fresh MessageDigest instance is already in its initial state, so no reset() is needed.
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            digest.update(value.getBytes(StandardCharsets.UTF_8));
            byte[] messageDigest = digest.digest();
            // Return the hex representation of the hash.
            return Hex.toHexString(messageDigest);
        } catch (NoSuchAlgorithmException e) {
            // Narrowed from catch (Exception): getInstance is the only checked-exception source here.
            // SHA-256 is mandated by the JCA baseline, so this indicates a broken JVM/provider setup.
            throw new OAuthSystemException("Error while generating the token value.", e);
        }
    }
}
|
package com.ruoyi.common.core.utils.file;
/**
 * Media (MIME) type utility class: well-known image MIME constants, allowed
 * upload extension whitelists, and a MIME-type-to-file-extension lookup.
 *
 * @author king
 */
public class MimeTypeUtils
{
    public static final String IMAGE_PNG = "image/png";

    public static final String IMAGE_JPG = "image/jpg";

    public static final String IMAGE_JPEG = "image/jpeg";

    public static final String IMAGE_BMP = "image/bmp";

    public static final String IMAGE_GIF = "image/gif";

    public static final String[] IMAGE_EXTENSION = { "bmp", "gif", "jpg", "jpeg", "png" };

    public static final String[] FLASH_EXTENSION = { "swf", "flv" };

    public static final String[] MEDIA_EXTENSION = { "swf", "flv", "mp3", "wav", "wma", "wmv", "mid", "avi", "mpg",
            "asf", "rm", "rmvb" };

    public static final String[] VIDEO_EXTENSION = { "mp4", "avi", "rmvb" };

    public static final String[] DEFAULT_ALLOWED_EXTENSION = {
            // images
            "bmp", "gif", "jpg", "jpeg", "png",
            // word / excel / powerpoint and plain documents
            "doc", "docx", "xls", "xlsx", "ppt", "pptx", "html", "htm", "txt",
            // archives
            "rar", "zip", "gz", "bz2",
            // video formats
            "mp4", "avi", "rmvb",
            // pdf
            "pdf" };

    /** Utility class: not meant to be instantiated. */
    private MimeTypeUtils()
    {
    }

    /**
     * Maps a known image MIME type to its canonical file extension.
     *
     * @param prefix the MIME type string, e.g. {@code "image/png"}
     * @return the file extension without a leading dot, or an empty string when
     *         the MIME type is not one of the known image types
     */
    public static String getExtension(String prefix)
    {
        switch (prefix)
        {
            case IMAGE_PNG:
                return "png";
            case IMAGE_JPG:
                return "jpg";
            case IMAGE_JPEG:
                return "jpeg";
            case IMAGE_BMP:
                return "bmp";
            case IMAGE_GIF:
                return "gif";
            default:
                return "";
        }
    }
}
|
public class InvokeDemo implements Runnable {
    public static void main(String[] args){
        new InvokeDemo().test();
    }
    /**
     * Demonstrates which JVM invocation instruction javac emits for each call shape
     * (inspect with {@code javap -c InvokeDemo}).
     */
    public void test(){
        InvokeDemo.staticMethod(); // invokestatic: static method, resolved without a receiver
        InvokeDemo demo = new InvokeDemo(); // invokespecial: constructor (<init>) call
        demo.instanceMethod(); // invokespecial on older javac; NOTE(review): since Java 11 nestmates (JEP 181), private member calls may compile to invokevirtual — verify with javap on your JDK
        super.equals(null); // invokespecial: superclass method, dispatched non-virtually
        this.run(); // invokevirtual: ordinary virtual dispatch
        ((Runnable) demo).run(); // invokeinterface: call through an interface-typed reference
    }
    public static void staticMethod() {}
    private void instanceMethod() {}
    @Override public void run() {}
}
|
package org.smartregister.chw.hf.activity;
import android.app.Activity;
import android.view.Menu;
import android.view.MenuItem;
import androidx.fragment.app.Fragment;
import androidx.viewpager.widget.ViewPager;
import org.smartregister.chw.anc.activity.BaseAncMemberProfileActivity;
import org.smartregister.chw.core.activity.CoreAboveFiveChildProfileActivity;
import org.smartregister.chw.core.activity.CoreChildProfileActivity;
import org.smartregister.chw.core.activity.CoreFamilyProfileActivity;
import org.smartregister.chw.core.activity.CoreFamilyProfileMenuActivity;
import org.smartregister.chw.core.activity.CoreFamilyRemoveMemberActivity;
import org.smartregister.chw.core.utils.CoreConstants;
import org.smartregister.chw.fp.dao.FpDao;
import org.smartregister.chw.hf.fragment.FamilyProfileMemberFragment;
import org.smartregister.chw.hf.model.FamilyProfileModel;
import org.smartregister.chw.hf.presenter.FamilyProfilePresenter;
import org.smartregister.chw.pnc.activity.BasePncMemberProfileActivity;
import org.smartregister.commonregistry.CommonPersonObject;
import org.smartregister.family.adapter.ViewPagerAdapter;
import org.smartregister.family.util.Constants;
import java.util.HashMap;
/**
 * Health-facility family profile screen. Wires up the family profile presenter
 * and the member-list fragment, hides unsupported menu options, and resolves
 * the concrete activity classes used for navigation to member/child/ANC/PNC
 * profiles.
 */
public class FamilyProfileActivity extends CoreFamilyProfileActivity {

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        // Hide menu entries this screen does not support (remove member, change head/caregiver).
        setupMenuOptions(menu);
        return true;
    }

    /** (Re)creates the presenter from the current family identifiers. */
    @Override
    protected void refreshPresenter() {
        presenter = new FamilyProfilePresenter(this, new FamilyProfileModel(familyName), familyBaseEntityId, familyHead, primaryCaregiver, familyName);
    }

    @Override
    protected void refreshList(Fragment fragment) {
        // Only the member-list fragment is refreshable; guard against fragments
        // whose presenter has not been attached yet.
        if (fragment instanceof FamilyProfileMemberFragment) {
            FamilyProfileMemberFragment familyProfileMemberFragment = ((FamilyProfileMemberFragment) fragment);
            if (familyProfileMemberFragment.presenter() != null) {
                familyProfileMemberFragment.refreshListView();
            }
        }
    }

    @Override
    protected Class<? extends CoreFamilyRemoveMemberActivity> getFamilyRemoveMemberClass() {
        return FamilyRemoveMemberActivity.class;
    }

    @Override
    protected Class<? extends CoreFamilyProfileMenuActivity> getFamilyProfileMenuClass() {
        return FamilyProfileMenuActivity.class;
    }

    @Override
    protected void initializePresenter() {
        super.initializePresenter();
        // Delegate to refreshPresenter() instead of duplicating the identical
        // FamilyProfilePresenter construction here (keeps the two code paths in sync).
        refreshPresenter();
    }

    @Override
    protected ViewPager setupViewPager(ViewPager viewPager) {
        adapter = new ViewPagerAdapter(getSupportFragmentManager());
        adapter.addFragment(FamilyProfileMemberFragment.newInstance(this.getIntent().getExtras()),
                this.getString(org.smartregister.family.R.string.member).toUpperCase());
        viewPager.setAdapter(adapter);
        // Jump straight to the "due" tab when the launching intent asks for it.
        if (getIntent().getBooleanExtra(CoreConstants.INTENT_KEY.SERVICE_DUE, false) ||
                getIntent().getBooleanExtra(Constants.INTENT_KEY.GO_TO_DUE_PAGE, false)) {
            viewPager.setCurrentItem(1);
        }
        return viewPager;
    }

    @Override
    protected Class<?> getFamilyOtherMemberProfileActivityClass() {
        return FamilyOtherMemberProfileActivity.class;
    }

    @Override
    protected Class<? extends CoreAboveFiveChildProfileActivity> getAboveFiveChildProfileActivityClass() {
        return AboveFiveChildProfileActivity.class;
    }

    @Override
    protected Class<? extends CoreChildProfileActivity> getChildProfileActivityClass() {
        return ChildProfileActivity.class;
    }

    @Override
    protected Class<? extends BaseAncMemberProfileActivity> getAncMemberProfileActivityClass() {
        return AncMemberProfileActivity.class;
    }

    @Override
    protected Class<? extends BasePncMemberProfileActivity> getPncMemberProfileActivityClass() {
        return PncMemberProfileActivity.class;
    }

    @Override
    protected void goToFpProfile(String baseEntityId, Activity activity) {
        FamilyPlanningMemberProfileActivity.startFpMemberProfileActivity(activity, FpDao.getMember(baseEntityId));
    }

    @Override
    protected boolean isAncMember(String baseEntityId) {
        return getFamilyProfilePresenter().isAncMember(baseEntityId);
    }

    @Override
    protected HashMap<String, String> getAncFamilyHeadNameAndPhone(String baseEntityId) {
        return getFamilyProfilePresenter().getAncFamilyHeadNameAndPhone(baseEntityId);
    }

    @Override
    protected CommonPersonObject getAncCommonPersonObject(String baseEntityId) {
        return getFamilyProfilePresenter().getAncCommonPersonObject(baseEntityId);
    }

    @Override
    protected CommonPersonObject getPncCommonPersonObject(String baseEntityId) {
        return getFamilyProfilePresenter().getPncCommonPersonObject(baseEntityId);
    }

    @Override
    protected boolean isPncMember(String baseEntityId) {
        return getFamilyProfilePresenter().isPncMember(baseEntityId);
    }

    /** Hides the remove-member / change-head / change-caregiver options when present. */
    private void setupMenuOptions(Menu menu) {
        MenuItem removeMember = menu.findItem(org.smartregister.chw.core.R.id.action_remove_member);
        MenuItem changeFamHead = menu.findItem(org.smartregister.chw.core.R.id.action_change_head);
        MenuItem changeCareGiver = menu.findItem(org.smartregister.chw.core.R.id.action_change_care_giver);
        if (removeMember != null) {
            removeMember.setVisible(false);
        }
        if (changeFamHead != null) {
            changeFamHead.setVisible(false);
        }
        if (changeCareGiver != null) {
            changeCareGiver.setVisible(false);
        }
    }

    public FamilyProfilePresenter getFamilyProfilePresenter() {
        return (FamilyProfilePresenter) presenter;
    }
}
|
package io.quarkus.oidc;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import io.quarkus.oidc.common.runtime.OidcCommonConfig;
import io.quarkus.oidc.common.runtime.OidcConstants;
import io.quarkus.oidc.runtime.OidcConfig;
import io.quarkus.runtime.annotations.ConfigGroup;
import io.quarkus.runtime.annotations.ConfigItem;
@ConfigGroup
public class OidcTenantConfig extends OidcCommonConfig {
/**
* A unique tenant identifier. It must be set by {@code TenantConfigResolver} providers which
* resolve the tenant configuration dynamically and is optional in all other cases.
*/
@ConfigItem
public Optional<String> tenantId = Optional.empty();
/**
 * If this tenant configuration is enabled.
 */
@ConfigItem(defaultValue = "true")
public boolean tenantEnabled = true;
/**
 * The application type, which can be one of the following values from enum {@link ApplicationType}.
 */
@ConfigItem(defaultValue = "service")
public ApplicationType applicationType = ApplicationType.SERVICE;
/**
 * Relative path or absolute URL of the OIDC authorization endpoint which authenticates the users.
 * This property must be set for the 'web-app' applications if OIDC discovery is disabled.
 * This property will be ignored if the discovery is enabled.
 */
@ConfigItem
public Optional<String> authorizationPath = Optional.empty();
/**
 * Relative path or absolute URL of the OIDC userinfo endpoint.
 * This property must only be set for the 'web-app' applications if OIDC discovery is disabled
 * and 'authentication.user-info-required' property is enabled.
 * This property will be ignored if the discovery is enabled.
 */
@ConfigItem
public Optional<String> userInfoPath = Optional.empty();
/**
 * Relative path or absolute URL of the OIDC RFC7662 introspection endpoint which can introspect both opaque and JWT tokens.
 * This property must be set if OIDC discovery is disabled and 1) the opaque bearer access tokens have to be verified
 * or 2) JWT tokens have to be verified while the cached JWK verification set with no matching JWK is being refreshed.
 * This property will be ignored if the discovery is enabled.
 */
@ConfigItem
public Optional<String> introspectionPath = Optional.empty();
/**
 * Relative path or absolute URL of the OIDC JWKS endpoint which returns a JSON Web Key Verification Set.
 * This property should be set if OIDC discovery is disabled and the local JWT verification is required.
 * This property will be ignored if the discovery is enabled.
 */
@ConfigItem
public Optional<String> jwksPath = Optional.empty();
/**
 * Relative path or absolute URL of the OIDC end_session_endpoint.
 * This property must be set if OIDC discovery is disabled and RP Initiated Logout support for the 'web-app' applications is
 * required.
 * This property will be ignored if the discovery is enabled.
 */
@ConfigItem
public Optional<String> endSessionPath = Optional.empty();
/**
 * Public key for the local JWT token verification.
 * OIDC server connection will not be created when this property is set.
 */
@ConfigItem
public Optional<String> publicKey = Optional.empty();
/**
 * Configuration to find and parse a custom claim containing the roles information.
 */
@ConfigItem
public Roles roles = new Roles();
/**
 * Configuration how to validate the token claims.
 */
@ConfigItem
public Token token = new Token();
/**
 * Logout configuration
 */
@ConfigItem
public Logout logout = new Logout();
/**
 * Different options to configure authorization requests
 */
// Consistency fix: this was the only config-group field in this class declared
// without @ConfigItem; every sibling group (roles, token, logout, tokenStateManager)
// carries the annotation so the nested 'authentication.*' properties are mapped
// the same way.
@ConfigItem
public Authentication authentication = new Authentication();
/**
 * Default token state manager configuration
 */
@ConfigItem
public TokenStateManager tokenStateManager = new TokenStateManager();
/**
 * Allow caching the token introspection data.
 * Note enabling this property does not enable the cache itself but only permits to cache the token introspection
 * for a given tenant. If the default token cache can be used then please see {@link OidcConfig.TokenCache} how to enable
 * it.
 */
@ConfigItem(defaultValue = "true")
public boolean allowTokenIntrospectionCache = true;
/**
 * Allow caching the user info data.
 * Note enabling this property does not enable the cache itself but only permits to cache the user info data
 * for a given tenant. If the default token cache can be used then please see {@link OidcConfig.TokenCache} how to enable
 * it.
 */
@ConfigItem(defaultValue = "true")
public boolean allowUserInfoCache = true;
/** Configuration of the OpenID Connect RP-Initiated Logout flow (paths and redirect parameters). */
@ConfigGroup
public static class Logout {
    /**
     * The relative path of the logout endpoint at the application. If provided, the application is able to initiate the
     * logout through this endpoint in conformance with the OpenID Connect RP-Initiated Logout specification.
     */
    @ConfigItem
    public Optional<String> path = Optional.empty();
    /**
     * Relative path of the application endpoint where the user should be redirected to after logging out from the OpenID
     * Connect Provider.
     * This endpoint URI must be properly registered at the OpenID Connect Provider as a valid redirect URI.
     */
    @ConfigItem
    public Optional<String> postLogoutPath = Optional.empty();
    /**
     * Name of the post logout URI parameter which will be added as a query parameter to the logout redirect URI.
     */
    @ConfigItem(defaultValue = OidcConstants.POST_LOGOUT_REDIRECT_URI)
    public String postLogoutUriParam;
    /**
     * Additional properties which will be added as the query parameters to the logout redirect URI.
     */
    @ConfigItem
    public Map<String, String> extraParams;
    public void setPath(Optional<String> path) {
        this.path = path;
    }
    // NOTE(review): unlike the other accessors in this group, this unwraps the
    // Optional directly; Optional.get() throws NoSuchElementException when no
    // logout path has been configured. Callers are expected to check first.
    public String getPath() {
        return path.get();
    }
    public void setPostLogoutPath(Optional<String> postLogoutPath) {
        this.postLogoutPath = postLogoutPath;
    }
    public Optional<String> getPostLogoutPath() {
        return postLogoutPath;
    }
    public Map<String, String> getExtraParams() {
        return extraParams;
    }
    public void setExtraParams(Map<String, String> extraParams) {
        this.extraParams = extraParams;
    }
    public String getPostLogoutUriParam() {
        return postLogoutUriParam;
    }
    public void setPostLogoutUriParam(String postLogoutUriParam) {
        this.postLogoutUriParam = postLogoutUriParam;
    }
}
/**
 * Default Authorization Code token state manager configuration.
 * Controls which of the tokens returned by the authorization code grant are kept
 * in the user session and whether they are stored in one or several cookies.
 */
@ConfigGroup
public static class TokenStateManager {
    /** Which tokens from the authorization code grant response are retained in the session. */
    public enum Strategy {
        /**
         * Keep ID, access and refresh tokens.
         */
        KEEP_ALL_TOKENS,
        /**
         * Keep ID token only
         */
        ID_TOKEN,
        /**
         * Keep ID and refresh tokens only
         */
        ID_REFRESH_TOKENS
    }
    /**
     * Default TokenStateManager strategy.
     */
    @ConfigItem(defaultValue = "keep_all_tokens")
    public Strategy strategy = Strategy.KEEP_ALL_TOKENS;
    /**
     * Default TokenStateManager keeps all tokens (ID, access and refresh)
     * returned in the authorization code grant response in a single session cookie by default.
     *
     * Enable this property to minimize a session cookie size
     */
    @ConfigItem(defaultValue = "false")
    public boolean splitTokens;
    public boolean isSplitTokens() {
        return splitTokens;
    }
    // Fix: parameter was misspelled 'spliTokens'; renamed to 'splitTokens'
    // (parameter names are not part of the binary interface, so callers are unaffected).
    public void setSplitTokens(boolean splitTokens) {
        this.splitTokens = splitTokens;
    }
    public Strategy getStrategy() {
        return strategy;
    }
    public void setStrategy(Strategy strategy) {
        this.strategy = strategy;
    }
}
// --- Accessors for the tenant-level properties declared above. ---
// NOTE(review): every String-argument setter wraps its value with Optional.of(),
// which throws NullPointerException for a null argument; pass non-null values only.
public Optional<String> getAuthorizationPath() {
    return authorizationPath;
}
public void setAuthorizationPath(String authorizationPath) {
    this.authorizationPath = Optional.of(authorizationPath);
}
public Optional<String> getUserInfoPath() {
    return userInfoPath;
}
public void setUserInfoPath(String userInfoPath) {
    this.userInfoPath = Optional.of(userInfoPath);
}
public Optional<String> getIntrospectionPath() {
    return introspectionPath;
}
public void setIntrospectionPath(String introspectionPath) {
    this.introspectionPath = Optional.of(introspectionPath);
}
public Optional<String> getJwksPath() {
    return jwksPath;
}
public void setJwksPath(String jwksPath) {
    this.jwksPath = Optional.of(jwksPath);
}
public Optional<String> getEndSessionPath() {
    return endSessionPath;
}
public void setEndSessionPath(String endSessionPath) {
    this.endSessionPath = Optional.of(endSessionPath);
}
public Optional<String> getPublicKey() {
    return publicKey;
}
public void setPublicKey(String publicKey) {
    this.publicKey = Optional.of(publicKey);
}
public Roles getRoles() {
    return roles;
}
public void setRoles(Roles roles) {
    this.roles = roles;
}
public Token getToken() {
    return token;
}
public void setToken(Token token) {
    this.token = token;
}
public Authentication getAuthentication() {
    return authentication;
}
public void setAuthentication(Authentication authentication) {
    this.authentication = authentication;
}
public Optional<String> getTenantId() {
    return tenantId;
}
public void setTenantId(String tenantId) {
    this.tenantId = Optional.of(tenantId);
}
public boolean isTenantEnabled() {
    return tenantEnabled;
}
public void setTenantEnabled(boolean enabled) {
    this.tenantEnabled = enabled;
}
public void setLogout(Logout logout) {
    this.logout = logout;
}
public Logout getLogout() {
    return logout;
}
/** Configuration describing where and how principal roles are extracted from tokens. */
@ConfigGroup
public static class Roles {
    // Convenience factory: roles read from the given claim path, split on the default separator.
    public static Roles fromClaimPath(String path) {
        return fromClaimPathAndSeparator(path, null);
    }
    // Convenience factory: null arguments are allowed and map to "not configured"
    // (Optional.ofNullable), unlike the Optional.of-based setters below.
    public static Roles fromClaimPathAndSeparator(String path, String sep) {
        Roles roles = new Roles();
        roles.roleClaimPath = Optional.ofNullable(path);
        roles.roleClaimSeparator = Optional.ofNullable(sep);
        return roles;
    }
    /**
     * Path to the claim containing an array of groups. It starts from the top level JWT JSON object and
     * can contain multiple segments where each segment represents a JSON object name only, example: "realm/groups".
     * Use double quotes with the namespace qualified claim names.
     * This property can be used if a token has no 'groups' claim but has the groups set in a different claim.
     */
    @ConfigItem
    public Optional<String> roleClaimPath = Optional.empty();
    /**
     * Separator for splitting a string which may contain multiple group values.
     * It will only be used if the "role-claim-path" property points to a custom claim whose value is a string.
     * A single space will be used by default because the standard 'scope' claim may contain a space separated sequence.
     */
    @ConfigItem
    public Optional<String> roleClaimSeparator = Optional.empty();
    /**
     * Source of the principal roles.
     */
    @ConfigItem
    public Optional<Source> source = Optional.empty();
    public Optional<String> getRoleClaimPath() {
        return roleClaimPath;
    }
    public void setRoleClaimPath(String roleClaimPath) {
        this.roleClaimPath = Optional.of(roleClaimPath);
    }
    public Optional<String> getRoleClaimSeparator() {
        return roleClaimSeparator;
    }
    public void setRoleClaimSeparator(String roleClaimSeparator) {
        this.roleClaimSeparator = Optional.of(roleClaimSeparator);
    }
    public Optional<Source> getSource() {
        return source;
    }
    public void setSource(Source source) {
        this.source = Optional.of(source);
    }
    // Source of the principal roles
    public static enum Source {
        /**
         * ID Token - the default value for the 'web-app' applications.
         */
        idtoken,
        /**
         * Access Token - the default value for the 'service' applications;
         * can also be used as the source of roles for the 'web-app' applications.
         */
        accesstoken,
        /**
         * User Info
         */
        userinfo
    }
}
/**
 * Defines the authorization request properties when authenticating
 * users using the Authorization Code Grant Type.
 */
@ConfigGroup
public static class Authentication {
    /**
     * Relative path for calculating a "redirect_uri" query parameter.
     * It has to start from a forward slash and will be appended to the request URI's host and port.
     * For example, if the current request URI is 'https://localhost:8080/service' then a 'redirect_uri' parameter
     * will be set to 'https://localhost:8080/' if this property is set to '/' and be the same as the request URI
     * if this property has not been configured.
     * Note the original request URI will be restored after the user has authenticated if 'restorePathAfterRedirect' is set
     * to 'true'.
     */
    @ConfigItem
    public Optional<String> redirectPath = Optional.empty();
    /**
     * If this property is set to 'true' then the original request URI which was used before
     * the authentication will be restored after the user has been redirected back to the application.
     *
     * Note if `redirectPath` property is not set then the original request URI will be restored even if this property is
     * disabled.
     */
    @ConfigItem(defaultValue = "false")
    public boolean restorePathAfterRedirect;
    /**
     * Remove the query parameters such as 'code' and 'state' set by the OIDC server on the redirect URI
     * after the user has authenticated by redirecting a user to the same URI but without the query parameters.
     */
    @ConfigItem(defaultValue = "true")
    public boolean removeRedirectParameters = true;
    /**
     * Both ID and access tokens are fetched from the OIDC provider as part of the authorization code flow.
     * ID token is always verified on every user request as the primary token which is used
     * to represent the principal and extract the roles.
     * Access token is not verified by default since it is meant to be propagated to the downstream services.
     * The verification of the access token should be enabled if it is injected as a JWT token.
     *
     * Access tokens obtained as part of the code flow will always be verified if `quarkus.oidc.roles.source`
     * property is set to `accesstoken` which means the authorization decision will be based on the roles extracted from the
     * access token.
     *
     * Bearer access tokens are always verified.
     */
    @ConfigItem(defaultValue = "false")
    public boolean verifyAccessToken;
    /**
     * Force 'https' as the 'redirect_uri' parameter scheme when running behind an SSL terminating reverse proxy.
     * This property, if enabled, will also affect the logout `post_logout_redirect_uri` and the local redirect requests.
     */
    @ConfigItem(defaultValue = "false")
    public boolean forceRedirectHttpsScheme;
    /**
     * List of scopes
     */
    @ConfigItem
    public Optional<List<String>> scopes = Optional.empty();
    /**
     * Additional properties which will be added as the query parameters to the authentication redirect URI.
     */
    @ConfigItem
    public Map<String, String> extraParams;
    /**
     * If enabled the state, session and post logout cookies will have their 'secure' parameter set to 'true'
     * when HTTP is used. It may be necessary when running behind an SSL terminating reverse proxy.
     * The cookies will always be secure if HTTPS is used even if this property is set to false.
     */
    @ConfigItem(defaultValue = "false")
    public boolean cookieForceSecure;
    /**
     * Cookie name suffix.
     * For example, a session cookie name for the default OIDC tenant is 'q_session' but can be changed to 'q_session_test'
     * if this property is set to 'test'.
     */
    @ConfigItem
    public Optional<String> cookieSuffix = Optional.empty();
    /**
     * Cookie path parameter value which, if set, will be used to set a path parameter for the session, state and post
     * logout cookies.
     * The `cookie-path-header` property, if set, will be checked first.
     */
    @ConfigItem(defaultValue = "/")
    public String cookiePath = "/";
    /**
     * Cookie path header parameter value which, if set, identifies the incoming HTTP header
     * whose value will be used to set a path parameter for the session, state and post logout cookies.
     * If the header is missing then the `cookie-path` property will be checked.
     */
    @ConfigItem
    public Optional<String> cookiePathHeader = Optional.empty();
    /**
     * Cookie domain parameter value which, if set, will be used for the session, state and post logout cookies.
     */
    @ConfigItem
    public Optional<String> cookieDomain = Optional.empty();
    /**
     * If this property is set to 'true' then an OIDC UserInfo endpoint will be called
     */
    @ConfigItem(defaultValue = "false")
    public boolean userInfoRequired;
    /**
     * Session age extension in minutes.
     * The user session age property is set to the value of the ID token life-span by default and
     * the user will be redirected to the OIDC provider to re-authenticate once the session has expired.
     * If this property is set to a non-zero value then the expired ID token can be refreshed before
     * the session has expired.
     * This property will be ignored if the `token.refresh-expired` property has not been enabled.
     */
    @ConfigItem(defaultValue = "5M")
    public Duration sessionAgeExtension = Duration.ofMinutes(5);
    /**
     * If this property is set to 'true' then a normal 302 redirect response will be returned
     * if the request was initiated via JavaScript API such as XMLHttpRequest or Fetch and the current user needs to be
     * (re)authenticated, which may not be desirable for Single Page Applications since
     * automatically following the redirect may not work given that OIDC authorization endpoints typically do not support
     * CORS.
     * If this property is set to `false` then a status code of '499' will be returned to allow
     * the client to handle the redirect manually
     */
    @ConfigItem(defaultValue = "true")
    public boolean javaScriptAutoRedirect = true;
    /**
     * Requires that ID token is available when the authorization code flow completes. In most case this property
     * should be enabled. Disable this property only when you need to use the authorization code flow with OAuth2 providers.
     */
    @ConfigItem(defaultValue = "true")
    public boolean idTokenRequired = true;
    public boolean isJavaScriptAutoRedirect() {
        return javaScriptAutoRedirect;
    }
    // NOTE(review): method name uses lowercase 'redirect' ('Autoredirect') unlike the
    // getter; renaming would break existing callers, so the inconsistency is kept.
    public void setJavaScriptAutoredirect(boolean autoRedirect) {
        this.javaScriptAutoRedirect = autoRedirect;
    }
    public Optional<String> getRedirectPath() {
        return redirectPath;
    }
    public void setRedirectPath(String redirectPath) {
        this.redirectPath = Optional.of(redirectPath);
    }
    public Optional<List<String>> getScopes() {
        return scopes;
    }
    public void setScopes(Optional<List<String>> scopes) {
        this.scopes = scopes;
    }
    public Map<String, String> getExtraParams() {
        return extraParams;
    }
    public void setExtraParams(Map<String, String> extraParams) {
        this.extraParams = extraParams;
    }
    public boolean isForceRedirectHttpsScheme() {
        return forceRedirectHttpsScheme;
    }
    public void setForceRedirectHttpsScheme(boolean forceRedirectHttpsScheme) {
        this.forceRedirectHttpsScheme = forceRedirectHttpsScheme;
    }
    public boolean isRestorePathAfterRedirect() {
        return restorePathAfterRedirect;
    }
    public void setRestorePathAfterRedirect(boolean restorePathAfterRedirect) {
        this.restorePathAfterRedirect = restorePathAfterRedirect;
    }
    public boolean isCookieForceSecure() {
        return cookieForceSecure;
    }
    public void setCookieForceSecure(boolean cookieForceSecure) {
        this.cookieForceSecure = cookieForceSecure;
    }
    public String getCookiePath() {
        return cookiePath;
    }
    public void setCookiePath(String cookiePath) {
        this.cookiePath = cookiePath;
    }
    public Optional<String> getCookieDomain() {
        return cookieDomain;
    }
    public void setCookieDomain(String cookieDomain) {
        this.cookieDomain = Optional.of(cookieDomain);
    }
    public boolean isUserInfoRequired() {
        return userInfoRequired;
    }
    public void setUserInfoRequired(boolean userInfoRequired) {
        this.userInfoRequired = userInfoRequired;
    }
    public boolean isRemoveRedirectParameters() {
        return removeRedirectParameters;
    }
    public void setRemoveRedirectParameters(boolean removeRedirectParameters) {
        this.removeRedirectParameters = removeRedirectParameters;
    }
    public boolean isVerifyAccessToken() {
        return verifyAccessToken;
    }
    public void setVerifyAccessToken(boolean verifyAccessToken) {
        this.verifyAccessToken = verifyAccessToken;
    }
    public Duration getSessionAgeExtension() {
        return sessionAgeExtension;
    }
    public void setSessionAgeExtension(Duration sessionAgeExtension) {
        this.sessionAgeExtension = sessionAgeExtension;
    }
    public Optional<String> getCookiePathHeader() {
        return cookiePathHeader;
    }
    public void setCookiePathHeader(String cookiePathHeader) {
        this.cookiePathHeader = Optional.of(cookiePathHeader);
    }
    public boolean isIdTokenRequired() {
        return idTokenRequired;
    }
    public void setIdTokenRequired(boolean idTokenRequired) {
        this.idTokenRequired = idTokenRequired;
    }
    public Optional<String> getCookieSuffix() {
        return cookieSuffix;
    }
    public void setCookieSuffix(String cookieSuffix) {
        this.cookieSuffix = Optional.of(cookieSuffix);
    }
}
/** Configuration of how token claims are validated and how tokens are refreshed/introspected. */
@ConfigGroup
public static class Token {
    // Factory for a Token configuration that only pins the expected issuer.
    public static Token fromIssuer(String issuer) {
        Token tokenClaims = new Token();
        tokenClaims.issuer = Optional.of(issuer);
        // Fix: Optional.ofNullable(null) is just a roundabout Optional.empty().
        tokenClaims.audience = Optional.empty();
        return tokenClaims;
    }
    // Factory for a Token configuration that only pins the expected audience(s).
    public static Token fromAudience(String... audience) {
        Token tokenClaims = new Token();
        // Fix: Optional.ofNullable(null) is just a roundabout Optional.empty().
        tokenClaims.issuer = Optional.empty();
        tokenClaims.audience = Optional.of(Arrays.asList(audience));
        return tokenClaims;
    }
    /**
     * Expected issuer 'iss' claim value.
     * Note this property overrides the `issuer` property which may be set in OpenId Connect provider's well-known
     * configuration.
     * If the `iss` claim value varies depending on the host/IP address or tenant id of the provider then you may skip the
     * issuer verification by setting this property to 'any' but it should be done only when other options (such as
     * configuring
     * the provider to use the fixed `iss` claim value) are not possible.
     */
    @ConfigItem
    public Optional<String> issuer = Optional.empty();
    /**
     * Expected audience 'aud' claim value which may be a string or an array of strings.
     */
    @ConfigItem
    public Optional<List<String>> audience = Optional.empty();
    /**
     * Expected token type
     */
    @ConfigItem
    public Optional<String> tokenType = Optional.empty();
    /**
     * Life span grace period in seconds.
     * When checking token expiry, current time is allowed to be later than token expiration time by at most the configured
     * number of seconds.
     * When checking token issuance, current time is allowed to be sooner than token issue time by at most the configured
     * number of seconds.
     */
    @ConfigItem
    public OptionalInt lifespanGrace = OptionalInt.empty();
    /**
     * Name of the claim which contains a principal name. By default, the 'upn', 'preferred_username' and `sub` claims are
     * checked.
     */
    @ConfigItem
    public Optional<String> principalClaim = Optional.empty();
    /**
     * Refresh expired ID tokens.
     * If this property is enabled then a refresh token request will be performed if the ID token has expired
     * and, if successful, the local session will be updated with the new set of tokens.
     * Otherwise, the local session will be invalidated and the user redirected to the OpenID Provider to re-authenticate.
     * In this case the user may not be challenged again if the OIDC provider session is still active.
     *
     * For this option be effective the `authentication.session-age-extension` property should also be set to a non-zero
     * value since the refresh token is currently kept in the user session.
     *
     * This option is valid only when the application is of type {@link ApplicationType#WEB_APP}.
     */
    @ConfigItem
    public boolean refreshExpired;
    /**
     * Refresh token time skew in seconds.
     * If this property is enabled then the configured number of seconds is added to the current time
     * when checking whether the access token should be refreshed. If the sum is greater than this access token's
     * expiration time then a refresh is going to happen.
     *
     * This property will be ignored if the 'refresh-expired' property is not enabled.
     */
    @ConfigItem
    public Optional<Duration> refreshTokenTimeSkew = Optional.empty();
    /**
     * Forced JWK set refresh interval in minutes.
     */
    @ConfigItem(defaultValue = "10M")
    public Duration forcedJwkRefreshInterval = Duration.ofMinutes(10);
    /**
     * Custom HTTP header that contains a bearer token.
     * This option is valid only when the application is of type {@link ApplicationType#SERVICE}.
     */
    @ConfigItem
    public Optional<String> header = Optional.empty();
    /**
     * Allow the remote introspection of JWT tokens when no matching JWK key is available.
     *
     * Note this property is set to 'true' by default for backward-compatibility reasons and will be set to `false`
     * instead in one of the next releases.
     *
     * Also note this property will be ignored if JWK endpoint URI is not available and introspecting the tokens is
     * the only verification option.
     */
    @ConfigItem(defaultValue = "true")
    public boolean allowJwtIntrospection = true;
    /**
     * Allow the remote introspection of the opaque tokens.
     *
     * Set this property to 'false' if only JWT tokens are expected.
     */
    @ConfigItem(defaultValue = "true")
    public boolean allowOpaqueTokenIntrospection = true;
    public Optional<String> getIssuer() {
        return issuer;
    }
    public void setIssuer(String issuer) {
        this.issuer = Optional.of(issuer);
    }
    public Optional<String> getHeader() {
        return header;
    }
    public void setHeader(String header) {
        this.header = Optional.of(header);
    }
    public Optional<List<String>> getAudience() {
        return audience;
    }
    public void setAudience(List<String> audience) {
        this.audience = Optional.of(audience);
    }
    public OptionalInt getLifespanGrace() {
        return lifespanGrace;
    }
    public void setLifespanGrace(int lifespanGrace) {
        this.lifespanGrace = OptionalInt.of(lifespanGrace);
    }
    public Optional<String> getPrincipalClaim() {
        return principalClaim;
    }
    public void setPrincipalClaim(String principalClaim) {
        this.principalClaim = Optional.of(principalClaim);
    }
    public boolean isRefreshExpired() {
        return refreshExpired;
    }
    public void setRefreshExpired(boolean refreshExpired) {
        this.refreshExpired = refreshExpired;
    }
    public Duration getForcedJwkRefreshInterval() {
        return forcedJwkRefreshInterval;
    }
    public void setForcedJwkRefreshInterval(Duration forcedJwkRefreshInterval) {
        this.forcedJwkRefreshInterval = forcedJwkRefreshInterval;
    }
    public Optional<String> getTokenType() {
        return tokenType;
    }
    public void setTokenType(String tokenType) {
        this.tokenType = Optional.of(tokenType);
    }
    public Optional<Duration> getRefreshTokenTimeSkew() {
        return refreshTokenTimeSkew;
    }
    public void setRefreshTokenTimeSkew(Duration refreshTokenTimeSkew) {
        this.refreshTokenTimeSkew = Optional.of(refreshTokenTimeSkew);
    }
    public boolean isAllowJwtIntrospection() {
        return allowJwtIntrospection;
    }
    public void setAllowJwtIntrospection(boolean allowJwtIntrospection) {
        this.allowJwtIntrospection = allowJwtIntrospection;
    }
    public boolean isAllowOpaqueTokenIntrospection() {
        return allowOpaqueTokenIntrospection;
    }
    public void setAllowOpaqueTokenIntrospection(boolean allowOpaqueTokenIntrospection) {
        this.allowOpaqueTokenIntrospection = allowOpaqueTokenIntrospection;
    }
}
// Client application flavour; selects the authentication mechanism applied to requests.
public static enum ApplicationType {
    /**
     * A {@code WEB_APP} is a client that serves pages, usually a frontend application. For this type of client the
     * Authorization Code Flow is defined as the preferred method for authenticating users.
     */
    WEB_APP,
    /**
     * A {@code SERVICE} is a client that has a set of protected HTTP resources, usually a backend application following the
     * RESTful Architectural Design. For this type of client, the Bearer Authorization method is defined as the preferred
     * method for authenticating and authorizing users.
     */
    SERVICE,
    /**
     * A combined {@code SERVICE} and {@code WEB_APP} client.
     * For this type of client, the Bearer Authorization method will be used if the Authorization header is set
     * and Authorization Code Flow - if not.
     */
    HYBRID
}
// --- Accessors for the application type and the tenant-level cache permissions. ---
public ApplicationType getApplicationType() {
    return applicationType;
}
public void setApplicationType(ApplicationType type) {
    this.applicationType = type;
}
public boolean isAllowTokenIntrospectionCache() {
    return allowTokenIntrospectionCache;
}
public void setAllowTokenIntrospectionCache(boolean allowTokenIntrospectionCache) {
    this.allowTokenIntrospectionCache = allowTokenIntrospectionCache;
}
public boolean isAllowUserInfoCache() {
    return allowUserInfoCache;
}
public void setAllowUserInfoCache(boolean allowUserInfoCache) {
    this.allowUserInfoCache = allowUserInfoCache;
}
}
|
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Reads application settings from the properties file at {@code CAMINHO_PROPERTIES}.
 *
 * @author Tuelho 2009
 */
public class Configuracao {
    // Relative path of the properties file; resolved against the working directory.
    private static final String CAMINHO_PROPERTIES = "configuracao/sistema.properties";

    public Configuracao() {
    }

    /**
     * Returns the value of the given property key.
     *
     * @param campo the property key to look up
     * @return the property value, {@code null} if the key is absent from the file,
     *         or {@code ""} if the file could not be read
     */
    public static String getProperties(String campo) {
        Properties prop = new Properties();
        // Fix: the FileInputStream was never closed, leaking a file descriptor on
        // every call; try-with-resources guarantees it is closed on all paths.
        try (FileInputStream in = new FileInputStream(CAMINHO_PROPERTIES)) {
            prop.load(in);
            return prop.getProperty(campo);
        } catch (IOException ex) {
            Logger.getLogger(Configuracao.class.getName()).log(Level.SEVERE, null, ex);
            return "";
        }
    }
}
|
// ORM class for table 'article'
// WARNING: This class is AUTO-GENERATED. Modify at your own risk.
//
// Debug information:
// Generated date: Tue Jul 09 16:21:47 CST 2019
// For connector: org.apache.sqoop.manager.MySQLManager
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.lib.db.DBWritable;
import com.cloudera.sqoop.lib.JdbcWritableBridge;
import com.cloudera.sqoop.lib.DelimiterSet;
import com.cloudera.sqoop.lib.FieldFormatter;
import com.cloudera.sqoop.lib.RecordParser;
import com.cloudera.sqoop.lib.BooleanParser;
import com.cloudera.sqoop.lib.BlobRef;
import com.cloudera.sqoop.lib.ClobRef;
import com.cloudera.sqoop.lib.LargeObjectLoader;
import com.cloudera.sqoop.lib.SqoopRecord;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
public class article extends SqoopRecord implements DBWritable, Writable {
private final int PROTOCOL_VERSION = 3;
public int getClassFormatVersion() { return PROTOCOL_VERSION; }
public static interface FieldSetterCommand { void setField(Object value); } protected ResultSet __cur_result_set;
private Map<String, FieldSetterCommand> setters = new HashMap<String, FieldSetterCommand>();
private void init0() {
setters.put("markdown", new FieldSetterCommand() {
@Override
public void setField(Object value) {
article.this.markdown = (String)value;
}
});
}
public article() {
init0();
}
private String markdown;
public String get_markdown() {
return markdown;
}
public void set_markdown(String markdown) {
this.markdown = markdown;
}
public article with_markdown(String markdown) {
this.markdown = markdown;
return this;
}
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof article)) {
return false;
}
article that = (article) o;
boolean equal = true;
equal = equal && (this.markdown == null ? that.markdown == null : this.markdown.equals(that.markdown));
return equal;
}
public boolean equals0(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof article)) {
return false;
}
article that = (article) o;
boolean equal = true;
equal = equal && (this.markdown == null ? that.markdown == null : this.markdown.equals(that.markdown));
return equal;
}
public void readFields(ResultSet __dbResults) throws SQLException {
this.__cur_result_set = __dbResults;
this.markdown = JdbcWritableBridge.readString(1, __dbResults);
}
public void readFields0(ResultSet __dbResults) throws SQLException {
this.markdown = JdbcWritableBridge.readString(1, __dbResults);
}
public void loadLargeObjects(LargeObjectLoader __loader)
throws SQLException, IOException, InterruptedException {
}
public void loadLargeObjects0(LargeObjectLoader __loader)
throws SQLException, IOException, InterruptedException {
}
public void write(PreparedStatement __dbStmt) throws SQLException {
write(__dbStmt, 0);
}
public int write(PreparedStatement __dbStmt, int __off) throws SQLException {
JdbcWritableBridge.writeString(markdown, 1 + __off, -1, __dbStmt);
return 1;
}
public void write0(PreparedStatement __dbStmt, int __off) throws SQLException {
JdbcWritableBridge.writeString(markdown, 1 + __off, -1, __dbStmt);
}
public void readFields(DataInput __dataIn) throws IOException {
this.readFields0(__dataIn); }
public void readFields0(DataInput __dataIn) throws IOException {
if (__dataIn.readBoolean()) {
this.markdown = null;
} else {
this.markdown = Text.readString(__dataIn);
}
}
public void write(DataOutput __dataOut) throws IOException {
if (null == this.markdown) {
__dataOut.writeBoolean(true);
} else {
__dataOut.writeBoolean(false);
Text.writeString(__dataOut, markdown);
}
}
public void write0(DataOutput __dataOut) throws IOException {
if (null == this.markdown) {
__dataOut.writeBoolean(true);
} else {
__dataOut.writeBoolean(false);
Text.writeString(__dataOut, markdown);
}
}
private static final DelimiterSet __outputDelimiters = new DelimiterSet((char) 9, (char) 10, (char) 0, (char) 0, false);
public String toString() {
return toString(__outputDelimiters, true);
}
public String toString(DelimiterSet delimiters) {
return toString(delimiters, true);
}
public String toString(boolean useRecordDelim) {
return toString(__outputDelimiters, useRecordDelim);
}
public String toString(DelimiterSet delimiters, boolean useRecordDelim) {
StringBuilder __sb = new StringBuilder();
char fieldDelim = delimiters.getFieldsTerminatedBy();
__sb.append(FieldFormatter.escapeAndEnclose(markdown==null?"null":markdown, delimiters));
if (useRecordDelim) {
__sb.append(delimiters.getLinesTerminatedBy());
}
return __sb.toString();
}
public void toString0(DelimiterSet delimiters, StringBuilder __sb, char fieldDelim) {
__sb.append(FieldFormatter.escapeAndEnclose(markdown==null?"null":markdown, delimiters));
}
private static final DelimiterSet __inputDelimiters = new DelimiterSet((char) 9, (char) 10, (char) 0, (char) 0, false);
private RecordParser __parser;
/** Returns the cached record parser, creating it on first use with the input delimiters. */
private RecordParser __ensureParser() {
    if (null == this.__parser) {
        this.__parser = new RecordParser(__inputDelimiters);
    }
    return this.__parser;
}
/** Parses a delimited record held in a Hadoop Text object and loads this record's fields. */
public void parse(Text __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/** Parses a delimited record held in a CharSequence and loads this record's fields. */
public void parse(CharSequence __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/** Parses a delimited record held in a byte array and loads this record's fields. */
public void parse(byte [] __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/** Parses a delimited record held in a char array and loads this record's fields. */
public void parse(char [] __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/** Parses a delimited record held in a ByteBuffer and loads this record's fields. */
public void parse(ByteBuffer __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/** Parses a delimited record held in a CharBuffer and loads this record's fields. */
public void parse(CharBuffer __record) throws RecordParser.ParseError {
    __loadFromFields(__ensureParser().parseRecord(__record));
}
/**
 * Populates this record's fields from a parsed field list.
 * A missing field, or the literal string "null", maps to a null value.
 *
 * @param fields parsed field values, in column order
 */
private void __loadFromFields(List<String> fields) {
    Iterator<String> __it = fields.listIterator();
    String __cur_str = null;
    try {
        __cur_str = __it.hasNext() ? __it.next() : "null";
        this.markdown = "null".equals(__cur_str) ? null : __cur_str;
    } catch (RuntimeException e) {
        throw new RuntimeException("Can't parse input data: '" + __cur_str + "'", e);
    }
}
/**
 * Continuation loader: consumes this record's field from an iterator shared
 * across generated split methods. Same null convention as __loadFromFields.
 *
 * @param __it iterator positioned at this record's next field
 */
private void __loadFromFields0(Iterator<String> __it) {
    String __cur_str = null;
    try {
        __cur_str = __it.hasNext() ? __it.next() : "null";
        this.markdown = "null".equals(__cur_str) ? null : __cur_str;
    } catch (RuntimeException e) {
        throw new RuntimeException("Can't parse input data: '" + __cur_str + "'", e);
    }
}
/** Returns a shallow copy of this record produced by Object.clone(). */
public Object clone() throws CloneNotSupportedException {
    final article copy = (article) super.clone();
    return copy;
}
// Generated hook for copying per-split fields during clone; this record has no
// deep-copyable state, so the body is intentionally empty.
public void clone0(article o) throws CloneNotSupportedException {
}
/**
 * Returns a freshly allocated map of column name to current field value.
 *
 * @return mutable map containing the "markdown" column
 */
public Map<String, Object> getFieldMap() {
    Map<String, Object> __sqoop$field_map = new HashMap<String, Object>();
    getFieldMap0(__sqoop$field_map);
    return __sqoop$field_map;
}
/**
 * Adds this record's fields to an existing field map (generated split helper).
 *
 * @param __sqoop$field_map map receiving the "markdown" column value
 */
public void getFieldMap0(Map<String, Object> __sqoop$field_map) {
__sqoop$field_map.put("markdown", this.markdown);
}
/**
 * Sets a field by column name via the generated setter registry.
 *
 * @param __fieldName column name; must exist in the setter map
 * @param __fieldVal new value for that field
 * @throws RuntimeException if the field name is unknown
 */
public void setField(String __fieldName, Object __fieldVal) {
    if (setters.containsKey(__fieldName)) {
        setters.get(__fieldName).setField(__fieldVal);
    } else {
        throw new RuntimeException("No such field:"+__fieldName);
    }
}
}
|
/*
* Copyright (C) 2016-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.cii.d16b.supplementary.tools;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.helger.cii.testfiles.supplementary.tools.JAXBBindingCreator;
/**
* Utility class to create JAXB binding stuff.
*
* @see JAXBBindingCreator
* @author Philip Helger
*/
public final class MainCreateJAXBBindingD16B
{
private static final Logger LOGGER = LoggerFactory.getLogger (MainCreateJAXBBindingD16B.class);
/**
 * Entry point: generates the JAXB binding artifacts for the CII D16B schema set.
 *
 * @param args ignored
 */
public static void main (final String [] args)
{
LOGGER.info ("D16B");
// Second argument toggles an alternate creation mode — TODO confirm against JAXBBindingCreator.
JAXBBindingCreator.runCIIBindingCreation ("d16b", false);
LOGGER.info ("Done");
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.inlong.tubemq.manager.service.tube;
import lombok.Data;
@Data
public class IpIdRelation {
// IP-address half of the ip<->id mapping; getters/setters/equals/hashCode/toString
// are generated by Lombok's @Data.
private String ip;
// Numeric id associated with the IP — presumably a broker/node id; confirm with callers.
private Integer id;
}
|
import org.openqa.selenium.By;
/**
 * Central registry of Selenium locators for the romanceabroad.com UI tests.
 * Grouped by page/feature; all locators are immutable {@link By} constants.
 * Note the mix of CSS and XPath selectors reflects the original page markup.
 */
public class Locators {
//Media page
public static final By LINK_MEDIA = By.cssSelector("a[href='https://romanceabroad.com/media/index']");
//Registration flow
public static final By BUTTON_REGISTRATION = By.xpath("//button[@id='show-registration-block']");
public static final By BUTTON_NEXT = By.xpath("//button[@data-action='next-page'][text()='Next']");
public static final By TEXT_FIELD_EMAIL = By.cssSelector("input#email");
public static final By TEXT_FIELD_PASSWORD = By.cssSelector("input#password");
public static final By TEXT_FIELD_NICKNAME = By.cssSelector("#nickname");
public static final By TEXT_FIELD_PHONE = By.cssSelector("input[name='data[phone]']");
//Search and top-navigation links
public static final By LINK_SEARCH = By.xpath("//a[@href='https://romanceabroad.com/users/search']");
public static final By LINK_TOURS = By.xpath("//a[@href='https://romanceabroad.com/store/sweets/20-tour_to_ukraine']");
public static final By LINK_STORE = By.xpath("//a[@href='https://romanceabroad.com/store/category-sweets']");
public static final By LINK_HOW_WORKS = By.xpath("//a[@href='https://romanceabroad.com/content/view/how-it-works']");
public static final By LINK_BLOG = By.cssSelector("a[href='https://romanceabroad.com/content/view/blog']");
}
|
/*-
* #%L
* BigDataViewer-Playground
* %%
* Copyright (C) 2019 - 2021 Nicolas Chiaruttini, EPFL - Robert Haase, MPI CBG - Christian Tischer, EMBL
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package sc.fiji.bdvpg.sourceandconverter.transform;
import bdv.AbstractSpimSource;
import bdv.tools.transformation.TransformedSource;
import bdv.viewer.SourceAndConverter;
import mpicbg.spim.data.registration.ViewRegistration;
import mpicbg.spim.data.registration.ViewTransform;
import mpicbg.spim.data.registration.ViewTransformAffine;
import net.imglib2.realtransform.AffineTransform3D;
import sc.fiji.bdvpg.scijava.services.SourceAndConverterService;
import sc.fiji.bdvpg.services.SourceAndConverterServices;
import sc.fiji.bdvpg.sourceandconverter.SourceAndConverterAndTimeRange;
import java.lang.reflect.Method;
import static sc.fiji.bdvpg.scijava.services.SourceAndConverterService.SPIM_DATA_INFO;
/**
* Helper class that helps to apply an affinetransform to a {@link SourceAndConverter}
*
* Because there are many ways the affinetransform can be applied to a source depending
* on the the spimsource class and on how you want to deal with the previous already existing
* transforms
*/
public class SourceTransformHelper {
/**
*
* branch between mutateTransformedSourceAndConverter and mutateLastSpimdataTransformation depending on the source class
*
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter mutate(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
if (sacTR.sac.getSpimSource() instanceof AbstractSpimSource) {
if (SourceAndConverterServices.getSourceAndConverterService().getMetadata(sacTR.sac,SPIM_DATA_INFO)!=null) {
return mutateLastSpimdataTransformation(affineTransform3D, sacTR);
} else {
if (sacTR.sac.getSpimSource() instanceof TransformedSource) {
return mutateTransformedSourceAndConverter(affineTransform3D,sacTR);
} else {
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
}
} else if (sacTR.sac.getSpimSource() instanceof TransformedSource) {
return mutateTransformedSourceAndConverter(affineTransform3D,sacTR);
} else {
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
}
/**
* branch between createNewTransformedSourceAndConverter and appendNewSpimdataTransformation depending on the source class
*
* @param affineTransform3D affine transform to append
* @param sacTR source to transform
* @return a transformed source ( same as the input for append, unless it's not possible )
*/
public static SourceAndConverter append(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
if (sacTR.sac.getSpimSource() instanceof AbstractSpimSource) {
if (SourceAndConverterServices.getSourceAndConverterService().getMetadata(sacTR.sac,SPIM_DATA_INFO)!=null) {
return appendNewSpimdataTransformation(affineTransform3D, sacTR);
} else {
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
} else {
System.err.println("Cannot append a transformation to a source of class : "+sacTR.sac.getSpimSource().getClass().getSimpleName());
System.err.println("You can try 'mutate' or wrap as transformed Source");
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
}
/**
*
* branch between setTransformedSourceAndConverter and setLastSpimdataTransformation depending on the source class
*
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter set(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
if (sacTR.sac.getSpimSource() instanceof AbstractSpimSource) {
if (SourceAndConverterServices.getSourceAndConverterService().getMetadata(sacTR.sac,SPIM_DATA_INFO)!=null) {
return setLastSpimdataTransformation(affineTransform3D, sacTR);
} else {
if (sacTR.sac.getSpimSource() instanceof TransformedSource) {
return setTransformedSourceAndConverter(affineTransform3D,sacTR);
} else {
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
}
} else if (sacTR.sac.getSpimSource() instanceof TransformedSource) {
return setTransformedSourceAndConverter(affineTransform3D,sacTR);
} else {
return createNewTransformedSourceAndConverter(affineTransform3D,sacTR);
}
}
/**
* Ignores registration
* @param affineTransform3D affine transform 3D
* @param sacTR the source and a time range, combined in a single class {@link SourceAndConverterAndTimeRange}
* @return the untransformed source, because the transformation has been canceled
*/
public static SourceAndConverter cancel(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
return sacTR.sac;
}
/**
* if a source has a linked spimdata, mutates the last registration to account for changes
* @param affineTransform3D affine transform
* @param sacTR source to transform
* @return the transformed source (equals to the input, the underlying spimdata object has been modified)
*/
public static SourceAndConverter mutateLastSpimdataTransformation(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
assert SourceAndConverterServices
.getSourceAndConverterService()
.containsMetadata(sacTR.sac,SPIM_DATA_INFO);
assert SourceAndConverterServices
.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO) instanceof SourceAndConverterService.SpimDataInfo;
SourceAndConverterService.SpimDataInfo sdi = ((SourceAndConverterService.SpimDataInfo)
SourceAndConverterServices.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO));
sacTR.getTimePoints().forEach( timePoint -> {
ViewRegistration vr = sdi.asd.getViewRegistrations().getViewRegistration(timePoint, sdi.setupId);
ViewTransform vt = vr.getTransformList().get(vr.getTransformList().size() - 1);
AffineTransform3D at3D = new AffineTransform3D();
at3D.concatenate(vt.asAffine3D());
at3D.preConcatenate(affineTransform3D);
ViewTransform newvt = new ViewTransformAffine(vt.getName(), at3D);
vr.getTransformList().remove(vt);
vr.getTransformList().add(newvt);
vr.updateModel();
try {
Method updateBdvSource = Class.forName("bdv.AbstractSpimSource").getDeclaredMethod("loadTimepoint", int.class);
updateBdvSource.setAccessible(true);
AbstractSpimSource ass = (AbstractSpimSource) sacTR.sac.getSpimSource();
updateBdvSource.invoke(ass, timePoint);
if (sacTR.sac.asVolatile() != null) {
ass = (AbstractSpimSource) sacTR.sac.asVolatile().getSpimSource();
updateBdvSource.invoke(ass, timePoint);
}
} catch (ClassCastException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
});
return sacTR.sac;
}
/**
* if a source has a linked spimdata, mutates the last registration to account for changes
*
* contrary to mutate, the previous transform is erased and not preconcatenates
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter setLastSpimdataTransformation(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
assert SourceAndConverterServices
.getSourceAndConverterService()
.containsMetadata(sacTR.sac,SPIM_DATA_INFO);
assert SourceAndConverterServices
.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO) instanceof SourceAndConverterService.SpimDataInfo;
SourceAndConverterService.SpimDataInfo sdi = ((SourceAndConverterService.SpimDataInfo)
SourceAndConverterServices.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO));
sacTR.getTimePoints().forEach( timePoint -> {
ViewRegistration vr = sdi.asd.getViewRegistrations().getViewRegistration(timePoint, sdi.setupId);
ViewTransform vt = vr.getTransformList().get(vr.getTransformList().size() - 1);
ViewTransform newvt = new ViewTransformAffine(vt.getName(), affineTransform3D);
vr.getTransformList().remove(vt);
vr.getTransformList().add(newvt);
vr.updateModel();
try {
Method updateBdvSource = Class.forName("bdv.AbstractSpimSource").getDeclaredMethod("loadTimepoint", int.class);
updateBdvSource.setAccessible(true);
AbstractSpimSource ass = (AbstractSpimSource) sacTR.sac.getSpimSource();
updateBdvSource.invoke(ass, timePoint);
if (sacTR.sac.asVolatile() != null) {
ass = (AbstractSpimSource) sacTR.sac.asVolatile().getSpimSource();
updateBdvSource.invoke(ass, timePoint);
}
} catch (ClassCastException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
});
return sacTR.sac;
}
/**
* if a source has a linked spimdata, appends a new transformation in the registration model
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter appendNewSpimdataTransformation(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
assert SourceAndConverterServices
.getSourceAndConverterService()
.containsMetadata(sacTR.sac,SPIM_DATA_INFO);
assert SourceAndConverterServices
.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO) instanceof SourceAndConverterService.SpimDataInfo;
SourceAndConverterService.SpimDataInfo sdi = ((SourceAndConverterService.SpimDataInfo)
SourceAndConverterServices.getSourceAndConverterService()
.getMetadata(sacTR.sac,SPIM_DATA_INFO));
ViewTransform newvt = new ViewTransformAffine("Manual transform", affineTransform3D);
sacTR.getTimePoints().forEach( timePoint -> {
sdi.asd.getViewRegistrations().getViewRegistration(timePoint, sdi.setupId).preconcatenateTransform(newvt);
sdi.asd.getViewRegistrations().getViewRegistration(timePoint, sdi.setupId).updateModel();
try {
Method updateBdvSource = Class.forName("bdv.AbstractSpimSource").getDeclaredMethod("loadTimepoint", int.class);
updateBdvSource.setAccessible(true);
AbstractSpimSource ass = (AbstractSpimSource) sacTR.sac.getSpimSource();
updateBdvSource.invoke(ass, timePoint);
if (sacTR.sac.asVolatile() != null) {
ass = (AbstractSpimSource) sacTR.sac.asVolatile().getSpimSource();
updateBdvSource.invoke(ass, timePoint);
}
} catch (ClassCastException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
});
return sacTR.sac;
}
/**
* Wraps into transformed sources the registered sources
* Note : time range is ignored (using TransformedSource)
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter createNewTransformedSourceAndConverter(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
SourceAndConverter transformedSac = new SourceAffineTransformer(sacTR.sac, affineTransform3D).getSourceOut();
return transformedSac;
}
/**
* provided a source was already a transformed source, updates the inner affineTransform3D
* Note : timerange ignored
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return mutated transformed source, if possible
*/
public static SourceAndConverter mutateTransformedSourceAndConverter(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
assert sacTR.sac.getSpimSource() instanceof TransformedSource;
AffineTransform3D at3D = new AffineTransform3D();
((TransformedSource)sacTR.sac.getSpimSource()).getFixedTransform(at3D);
((TransformedSource)sacTR.sac.getSpimSource()).setFixedTransform(at3D.preConcatenate(affineTransform3D));
return sacTR.sac;
}
/**
* provided a source was already a transformed source, sets the inner affineTransform3D
* Contrary to mutateTransformedSourceAndConverter, the original transform is not preconcatenated
* Note : timerange ignored
* @param affineTransform3D affine transform 3d
* @param sacTR source to transform
* @return transformed source
*/
public static SourceAndConverter setTransformedSourceAndConverter(AffineTransform3D affineTransform3D, SourceAndConverterAndTimeRange sacTR) {
assert sacTR.sac.getSpimSource() instanceof TransformedSource;
//AffineTransform3D at3D = new AffineTransform3D();
((TransformedSource)sacTR.sac.getSpimSource()).setFixedTransform(affineTransform3D);
//.getIncrementalTransform(at3D);
//((TransformedSource)sacTR.sac.getSpimSource()).getIncrementalTransform(at3D);//.getFixedTransform(at3D);
//((TransformedSource)sacTR.sac.getSpimSource()).setFixedTransform(at3D.inverse().preConcatenate(affineTransform3D));
return sacTR.sac;
}
}
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2008, 2009, 2010, 2014 Etudes, Inc.
*
* Portions completed before September 1, 2008
* Copyright (c) 2007, 2008 The Regents of the University of Michigan & Foothill College, ETUDES Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.etudes.mneme.impl;
import org.etudes.mneme.api.Ent;
/**
* EntImpl implements Ent
*/
public class EntImpl implements Ent
{
	/** Display description. */
	protected String description = null;

	/** Unique id; equality and hashing are based solely on this field. */
	protected String id = null;

	/** Marked flag; defaults to FALSE. */
	protected Boolean marked = Boolean.FALSE;

	/** Associated term id. */
	protected long termId;

	/** Associated term description. */
	protected String termDescription;

	/**
	 * Construct as a copy of another.
	 *
	 * @param other
	 *        The other to copy.
	 */
	public EntImpl(EntImpl other)
	{
		set(other);
	}

	/**
	 * Construct.
	 */
	public EntImpl(String id, String description)
	{
		setId(id);
		setDescription(description);
	}

	/**
	 * Construct.
	 */
	public EntImpl(String id, String description, long termId, String termDescription)
	{
		setId(id);
		setDescription(description);
		setTermId(termId);
		setTermDescription(termDescription);
	}

	/**
	 * {@inheritDoc} Two EntImpl are equal if they have the same non-null id.
	 */
	@Override
	public boolean equals(Object obj)
	{
		if (this == obj) return true;
		if ((obj == null) || (obj.getClass() != this.getClass())) return false;
		if ((this.id == null) || (((EntImpl) obj).id == null)) return false;
		return this.id.equals(((EntImpl) obj).id);
	}

	/**
	 * {@inheritDoc} BUGFIX: equals() was overridden without hashCode(), breaking the
	 * Object contract and use in hash-based collections. Hash on the id, consistent
	 * with equals(); a null id falls back to the identity hash, matching equals()'s
	 * rule that null-id instances only equal themselves.
	 */
	@Override
	public int hashCode()
	{
		return (this.id == null) ? super.hashCode() : this.id.hashCode();
	}

	/**
	 * {@inheritDoc}
	 */
	public String getDescription()
	{
		return this.description;
	}

	/**
	 * {@inheritDoc}
	 */
	public String getId()
	{
		return this.id;
	}

	/**
	 * {@inheritDoc}
	 */
	public Boolean getMarked()
	{
		return this.marked;
	}

	/**
	 * {@inheritDoc}
	 */
	public long getTermId()
	{
		return termId;
	}

	/**
	 * {@inheritDoc}
	 */
	public String getTermDescription()
	{
		return termDescription;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setDescription(String description)
	{
		this.description = description;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setId(String id)
	{
		this.id = id;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setMarked(Boolean marked)
	{
		this.marked = marked;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setTermId(long termId)
	{
		this.termId = termId;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setTermDescription(String termDescription)
	{
		this.termDescription = termDescription;
	}

	/**
	 * Set as a copy of the other.
	 *
	 * @param other
	 *        The other to copy.
	 */
	protected void set(EntImpl other)
	{
		this.description = other.description;
		this.id = other.id;
		this.marked = other.marked;
		// BUGFIX: the term fields were previously not copied, so the copy constructor
		// produced instances with default (0 / null) term values.
		this.termId = other.termId;
		this.termDescription = other.termDescription;
	}
}
|
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.phone;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.TypedArray;
import android.preference.EditTextPreference;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.telephony.PhoneNumberUtils;
import android.text.TextUtils;
import android.text.method.ArrowKeyMovementMethod;
import android.text.method.DialerKeyListener;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.TextView;
public class EditPhoneNumberPreference extends EditTextPreference {
//allowed modes for this preference.
/** simple confirmation (OK / CANCEL) */
private static final int CM_CONFIRM = 0;
/** toggle [(ENABLE / CANCEL) or (DISABLE / CANCEL)], use isToggled() to see requested state.*/
private static final int CM_ACTIVATION = 1;
// Which of the two modes above this preference instance operates in (read from XML attrs).
private int mConfirmationMode;
//String constants used in storing the value of the preference
// The preference is backed by a string that holds the encoded value, which reads:
// <VALUE_ON | VALUE_OFF><VALUE_SEPARATOR><mPhoneNumber>
// for example, an enabled preference with a number of 6502345678 would read:
// "1:6502345678"
private static final String VALUE_SEPARATOR = ":";
private static final String VALUE_OFF = "0";
private static final String VALUE_ON = "1";
//UI layout
// Button in the dialog that launches the contact picker.
private ImageButton mContactPickButton;
//Listeners
/** Called when focus is changed between fields */
private View.OnFocusChangeListener mDialogFocusChangeListener;
/** Called when the Dialog is closed. */
private OnDialogClosedListener mDialogOnClosedListener;
/**
 * Used to indicate that we are going to request for a
 * default number. for the dialog.
 */
private GetDefaultNumberListener mGetDefaultNumberListener;
//Activity values
// Parent activity used to launch the contact picker via startActivityForResult.
private Activity mParentActivity;
private Intent mContactListIntent;
/** Arbitrary activity-assigned preference id value; doubles as the picker request code. */
private int mPrefId;
//similar to toggle preference
private CharSequence mEnableText;
private CharSequence mDisableText;
private CharSequence mChangeNumberText;
private CharSequence mSummaryOn;
private CharSequence mSummaryOff;
// button that was clicked on dialog close.
private int mButtonClicked;
//relevant (parsed) value of the mText
private String mPhoneNumber;
// Toggle state in CM_ACTIVATION mode (the VALUE_ON/VALUE_OFF half of the stored string).
private boolean mChecked;
/**
 * Interface for the dialog closed listener, related to
 * DialogPreference.onDialogClosed(), except we also pass in a buttonClicked
 * value indicating which of the three possible buttons were pressed.
 */
interface OnDialogClosedListener {
void onDialogClosed(EditPhoneNumberPreference preference, int buttonClicked);
}
/**
 * Interface for the default number setting listener. Handles requests for
 * the default display number for the dialog.
 */
interface GetDefaultNumberListener {
/**
 * Notify that we are looking for a default display value.
 * @return null if there is no contribution from this interface,
 * indicating that the original value of mPhoneNumber should be
 * displayed unchanged.
 */
String onGetDefaultNumber(EditPhoneNumberPreference preference);
}
/*
 * Constructors
 */
/**
 * Constructs the preference from XML attributes: sets the custom dialog layout,
 * prepares the contact-picker intent, then reads the button labels and confirmation
 * mode, plus the standard CheckBoxPreference summaryOn/summaryOff strings.
 *
 * @param context the context
 * @param attrs the XML attribute set
 */
public EditPhoneNumberPreference(Context context, AttributeSet attrs) {
super(context, attrs);
setDialogLayoutResource(R.layout.pref_dialog_editphonenumber);
//create intent to bring up contact list
mContactListIntent = new Intent(Intent.ACTION_GET_CONTENT);
mContactListIntent.setType(Phone.CONTENT_ITEM_TYPE);
//get the edit phone number default settings
TypedArray a = context.obtainStyledAttributes(attrs,
R.styleable.EditPhoneNumberPreference, 0, R.style.EditPhoneNumberPreference);
mEnableText = a.getString(R.styleable.EditPhoneNumberPreference_enableButtonText);
mDisableText = a.getString(R.styleable.EditPhoneNumberPreference_disableButtonText);
mChangeNumberText = a.getString(R.styleable.EditPhoneNumberPreference_changeNumButtonText);
mConfirmationMode = a.getInt(R.styleable.EditPhoneNumberPreference_confirmMode, 0);
// TypedArrays must be recycled after use.
a.recycle();
//get the summary settings, use CheckBoxPreference as the standard.
a = context.obtainStyledAttributes(attrs, android.R.styleable.CheckBoxPreference, 0, 0);
mSummaryOn = a.getString(android.R.styleable.CheckBoxPreference_summaryOn);
mSummaryOff = a.getString(android.R.styleable.CheckBoxPreference_summaryOff);
a.recycle();
}
/** Convenience constructor with no attribute set; delegates to the two-arg constructor. */
public EditPhoneNumberPreference(Context context) {
this(context, null);
}
/*
* Methods called on UI bindings
*/
/**
 * Binds this preference to its list-item view and keeps the summary line in
 * sync with the current mode/toggle state.
 */
@Override
protected void onBindView(View view) {
    super.onBindView(view);

    TextView summaryView = (TextView) view.findViewById(android.R.id.summary);
    if (summaryView == null) {
        return;
    }

    // Pick the summary text depending upon the mode and toggle state.
    CharSequence summary;
    if (mConfirmationMode == CM_ACTIVATION && mChecked) {
        summary = (mSummaryOn == null) ? getSummary() : mSummaryOn;
    } else if (mConfirmationMode == CM_ACTIVATION) {
        summary = (mSummaryOff == null) ? getSummary() : mSummaryOff;
    } else {
        summary = getSummary();
    }

    // Show the summary only when there is text to show.
    int visibility = View.GONE;
    if (summary != null) {
        summaryView.setText(summary);
        visibility = View.VISIBLE;
    }
    if (visibility != summaryView.getVisibility()) {
        summaryView.setVisibility(visibility);
    }
}
//called when we're binding the dialog to the preference's view.
/**
 * Prepares the dialog view: resets the clicked-button state, seeds the number
 * field (possibly via the default-number listener), wires up dialer key and
 * focus handling, and hooks the contact-picker button to the parent activity.
 */
@Override
protected void onBindDialogView(View view) {
// default the button clicked to be the cancel button.
mButtonClicked = DialogInterface.BUTTON_NEGATIVE;
super.onBindDialogView(view);
//get the edittext component within the number field
EditText editText = getEditText();
//get the contact pick button within the number field
mContactPickButton = (ImageButton) view.findViewById(R.id.select_contact);
//setup number entry
if (editText != null) {
// see if there is a means to get a default number,
// and set it accordingly.
if (mGetDefaultNumberListener != null) {
String defaultNumber = mGetDefaultNumberListener.onGetDefaultNumber(this);
if (defaultNumber != null) {
mPhoneNumber = defaultNumber;
}
}
editText.setText(mPhoneNumber);
editText.setMovementMethod(ArrowKeyMovementMethod.getInstance());
editText.setKeyListener(DialerKeyListener.getInstance());
editText.setOnFocusChangeListener(mDialogFocusChangeListener);
}
//set contact picker; the result comes back via onPickActivityResult with mPrefId as request code.
if (mContactPickButton != null) {
mContactPickButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (mParentActivity != null) {
mParentActivity.startActivityForResult(mContactListIntent, mPrefId);
}
}
});
}
}
/**
 * Overriding EditTextPreference's onAddEditTextToDialogView.
 *
 * Attaches the EditText to the container dedicated to it in this
 * preference's custom dialog layout; does nothing if the container
 * is absent from the layout.
 */
@Override
protected void onAddEditTextToDialogView(View dialogView, EditText editText) {
    ViewGroup container = (ViewGroup) dialogView.findViewById(R.id.edit_container);
    if (container == null) {
        return;
    }
    container.addView(editText, ViewGroup.LayoutParams.MATCH_PARENT,
            ViewGroup.LayoutParams.WRAP_CONTENT);
}
//control the appearance of the dialog depending upon the mode.
/**
 * In activation mode, swaps the dialog buttons to reflect the toggle state:
 * enabled shows "change number" + "disable"; disabled shows only "enable".
 * The edittext field is always displayed, so only buttons need adjusting.
 */
@Override
protected void onPrepareDialogBuilder(AlertDialog.Builder builder) {
// modified so that we just worry about the buttons being
// displayed, since there is no need to hide the edittext
// field anymore.
if (mConfirmationMode == CM_ACTIVATION) {
if (mChecked) {
builder.setPositiveButton(mChangeNumberText, this);
builder.setNeutralButton(mDisableText, this);
} else {
builder.setPositiveButton(null, null);
builder.setNeutralButton(mEnableText, this);
}
}
// set the call icon on the title.
builder.setIcon(R.drawable.ic_dialog_call);
}
/*
 * Listeners and other state setting methods
 */
//set the on focus change listener to be assigned to the Dialog's edittext field.
public void setDialogOnFocusChangeListener(View.OnFocusChangeListener l) {
mDialogFocusChangeListener = l;
}
//set the listener to be called when the dialog is closed.
public void setDialogOnClosedListener(OnDialogClosedListener l) {
mDialogOnClosedListener = l;
}
//set the link back to the parent activity, so that we may run the contact picker.
/**
 * Links the parent activity and preference id without a default-number listener.
 * Delegates to the three-arg overload (passing null clears any previously set
 * listener, matching the original behavior of this overload).
 *
 * @param parent activity that will receive the contact-picker result
 * @param identifier activity-assigned preference id, used as the request code
 */
public void setParentActivity(Activity parent, int identifier) {
setParentActivity(parent, identifier, null);
}
//set the link back to the parent activity, so that we may run the contact picker.
//also set the default number listener.
/**
 * Links the parent activity and preference id, and registers the listener
 * consulted for the dialog's default number.
 *
 * @param parent activity that will receive the contact-picker result
 * @param identifier activity-assigned preference id, used as the request code
 * @param l listener supplying the default display number (may be null)
 */
public void setParentActivity(Activity parent, int identifier, GetDefaultNumberListener l) {
mParentActivity = parent;
mPrefId = identifier;
mGetDefaultNumberListener = l;
}
/*
 * Notification handlers
 */
/**
 * Notifies the preference that the contact pick activity completed; copies the
 * picked number into the dialog's edit field (if the dialog is showing one).
 */
public void onPickActivityResult(String pickedValue) {
    EditText editText = getEditText();
    if (editText == null) {
        return;
    }
    editText.setText(pickedValue);
}
//called when the dialog is clicked.
/**
 * Handles dialog button clicks: in activation mode the neutral button acts as
 * the enable/disable toggle; the clicked button is remembered for
 * onDialogClosed().
 */
@Override
public void onClick(DialogInterface dialog, int which) {
    boolean toggleRequested = (mConfirmationMode == CM_ACTIVATION)
            && (which == DialogInterface.BUTTON_NEUTRAL);
    if (toggleRequested) {
        // The neutral button (button3) is always the toggle.
        setToggled(!isToggled());
    }
    // record the button that was clicked.
    mButtonClicked = which;
    super.onClick(dialog, which);
}
@Override
//When the dialog is closed, perform the relevant actions, including setting
// phone numbers and calling the close action listener.
/**
 * On dialog close: a positive or neutral button commits the typed number and
 * re-encodes the stored value (setText runs after super so persistence sees the
 * committed state); any close listener is then told which button was pressed.
 */
protected void onDialogClosed(boolean positiveResult) {
// A positive result is technically either button1 or button3.
if ((mButtonClicked == DialogInterface.BUTTON_POSITIVE) ||
(mButtonClicked == DialogInterface.BUTTON_NEUTRAL)){
setPhoneNumber(getEditText().getText().toString());
super.onDialogClosed(positiveResult);
setText(getStringValue());
} else {
super.onDialogClosed(positiveResult);
}
// send the clicked button over to the listener.
if (mDialogOnClosedListener != null) {
mDialogOnClosedListener.onDialogClosed(this, mButtonClicked);
}
}
/*
* Toggle handling code.
*/
/** @return the current toggle state (relevant in activation mode). */
public boolean isToggled() {
return mChecked;
}
/**
 * Sets the toggle value, re-encodes the stored text and notifies listeners.
 *
 * @param checked the new toggle state
 * @return this preference, to allow chaining
 */
public EditPhoneNumberPreference setToggled(boolean checked) {
mChecked = checked;
setText(getStringValue());
notifyChanged();
return this;
}
/**
 * Returns the phone number with all separator/formatting characters stripped.
 *
 * @return the bare dialable number
 */
public String getPhoneNumber() {
// Strip formatting so callers always see a bare number.
return PhoneNumberUtils.stripSeparators(mPhoneNumber);
}
/** @return the phone number including any formatting characters, unmodified. */
protected String getRawPhoneNumber() {
return mPhoneNumber;
}
/**
 * Sets the phone number, re-encodes the stored text and notifies listeners.
 *
 * @param number the new phone number (may contain formatting characters)
 * @return this preference, to allow chaining
 */
public EditPhoneNumberPreference setPhoneNumber(String number) {
mPhoneNumber = number;
setText(getStringValue());
notifyChanged();
return this;
}
/*
* Other code relevant to preference framework
*/
/**
 * Applies the initial state from either the persisted value or the XML default.
 */
@Override
protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
if (restoreValue) {
setValueFromString(getPersistedString(getStringValue()));
} else {
setValueFromString((String) defaultValue);
}
}
/**
 * Decides whether dependent preferences should be disabled.
 *
 * In activation mode (CF) the encoded toggle value stored in the preference
 * string drives the decision; otherwise, in confirm mode (VM), an empty
 * number disables dependents. The toggle is stored encoded in the string
 * (see setValueFromString and getStringValue).
 */
@Override
public boolean shouldDisableDependents() {
if ((mConfirmationMode == CM_ACTIVATION) && (mEncodedText != null)) {
// First token of "<toggle>:<number>" carries the on/off state.
final String[] parts = mEncodedText.split(":", 2);
return VALUE_ON.equals(parts[0]);
}
return (mConfirmationMode == CM_CONFIRM) && TextUtils.isEmpty(mPhoneNumber);
}
/**
 * Override persistString so that we can get a hold of the EditTextPreference's
 * text field: the last persisted encoded value is cached for
 * shouldDisableDependents().
 */
private String mEncodedText = null; // cached "<toggle>:<number>" encoded value
@Override
protected boolean persistString(String value) {
mEncodedText = value;
return super.persistString(value);
}
/*
* Summary On handling code
*/
/**
 * Sets the summary shown in the "on" state (relevant only in CM_ACTIVATION mode).
 *
 * @return this preference, to allow chaining
 */
public EditPhoneNumberPreference setSummaryOn(CharSequence summary) {
mSummaryOn = summary;
if (isToggled()) {
notifyChanged();
}
return this;
}
/**
 * Sets the "on"-state summary from a string resource id
 * (relevant only in CM_ACTIVATION mode).
 */
public EditPhoneNumberPreference setSummaryOn(int summaryResId) {
return setSummaryOn(getContext().getString(summaryResId));
}
/** @return the summary shown while the toggle is on. */
public CharSequence getSummaryOn() {
return mSummaryOn;
}
/*
* Summary Off handling code
*/
/**
 * Sets the summary shown in the "off" state (relevant only in CM_ACTIVATION mode).
 *
 * @return this preference, to allow chaining
 */
public EditPhoneNumberPreference setSummaryOff(CharSequence summary) {
mSummaryOff = summary;
if (!isToggled()) {
notifyChanged();
}
return this;
}
/**
 * Sets the "off"-state summary from a string resource id
 * (relevant only in CM_ACTIVATION mode).
 */
public EditPhoneNumberPreference setSummaryOff(int summaryResId) {
return setSummaryOff(getContext().getString(summaryResId));
}
/** @return the summary shown while the toggle is off. */
public CharSequence getSummaryOff() {
return mSummaryOff;
}
/*
* Methods to get and set from encoded strings.
*/
/**
 * Restores this preference's state from an encoded "&lt;toggle&gt;:&lt;number&gt;" string.
 *
 * Robustness fix: the original threw a NullPointerException when no persisted
 * value and no XML default existed (onSetInitialValue may pass null), and an
 * ArrayIndexOutOfBoundsException for a value without a ':' separator.
 *
 * @param value encoded state, e.g. "1:5551212"; may be null or malformed
 */
protected void setValueFromString(String value) {
if (value == null) {
// Nothing persisted and no default supplied: leave state untouched.
return;
}
String[] inValues = value.split(":", 2);
setToggled(inValues[0].equals(VALUE_ON));
// A malformed value without a separator carries no number portion.
setPhoneNumber(inValues.length > 1 ? inValues[1] : "");
}
/**
 * Encodes the current state as "&lt;toggle&gt;:&lt;number&gt;" for persistence.
 *
 * @return the encoded state string
 */
protected String getStringValue() {
return ((isToggled() ? VALUE_ON : VALUE_OFF) + VALUE_SEPARATOR + getPhoneNumber());
}
/**
 * Externally visible method to bring up the phone-number dialog.
 *
 * Generally used when navigating the user directly to this preference.
 */
public void showPhoneNumberDialog() {
showDialog(null);
}
}
|
package com.nexuslink.alphrye.helper;
import android.content.Context;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
/**
 * Soft-keyboard utility helpers.
 *
 * <p>Adapted from https://github.com/getActivity/AndroidProject (2018/10/18).
 * All methods are null-safe: a null view is a no-op.
 */
public final class KeyboardUtils {

    /** Utility class; not instantiable. */
    private KeyboardUtils() {
        throw new AssertionError("No instances");
    }

    /**
     * Shows the soft keyboard for the given view.
     *
     * @param view the view the keyboard should attach to; no-op when null
     */
    public static void showKeyboard(View view) {
        InputMethodManager imm = getInputMethodManager(view);
        if (imm != null) {
            imm.showSoftInput(view, 0);
        }
    }

    /**
     * Hides the soft keyboard associated with the given view's window.
     *
     * @param view the view whose window token identifies the keyboard; no-op when null
     */
    public static void hideKeyboard(View view) {
        InputMethodManager imm = getInputMethodManager(view);
        if (imm != null) {
            imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
        }
    }

    /**
     * Toggles the soft keyboard's visibility.
     *
     * @param view the view providing the context; no-op when null
     */
    public static void toggleSoftInput(View view) {
        InputMethodManager imm = getInputMethodManager(view);
        if (imm != null) {
            imm.toggleSoftInput(0, 0);
        }
    }

    /** Resolves the InputMethodManager from the view's context, or null. */
    private static InputMethodManager getInputMethodManager(View view) {
        if (view == null) {
            return null;
        }
        return (InputMethodManager) view.getContext()
                .getSystemService(Context.INPUT_METHOD_SERVICE);
    }
}
|
package me.kristoffer.multicastmessenger;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Scanner;
/**
 * Maps arbitrary seed strings onto deterministic IPv4 multicast addresses
 * in the 224.0.0.0/8 range.
 */
public class SeedUtil {

    /**
     * Interactive test driver: repeatedly reads seeds from stdin and prints
     * the derived multicast address.
     */
    public static void main(String[] args) {
        SeedUtil seed = new SeedUtil();
        while (true) {
            seed.open();
        }
    }

    /**
     * Testing method: reads one line from stdin and prints the address
     * hashed from it.
     */
    public void open() {
        Scanner scanner = new Scanner(System.in);
        String inputSeed = "";
        if (scanner.hasNextLine()) {
            inputSeed = scanner.nextLine();
        }
        // Fix: compute once instead of calling getHashedAddress twice.
        InetAddress address = getHashedAddress(inputSeed);
        System.out.println(address.getHostAddress());
    }

    /**
     * Deterministically derives an IPv4 multicast address from a seed string.
     *
     * @param seed non-empty seed string (an empty seed makes BigInteger throw
     *             NumberFormatException, as in the original implementation)
     * @return the derived 224.x.x.x address, or null if construction fails
     */
    public static InetAddress getHashedAddress(String seed) {
        int seeds = new BigInteger(seed.getBytes()).intValue();
        if (seeds == Integer.MIN_VALUE) {
            // Fix: -Integer.MIN_VALUE overflows back to a negative value,
            // which could later make a modulo divisor zero or negative.
            seeds = Integer.MAX_VALUE;
        } else if (seeds < 0) {
            seeds = -seeds; // ensure non-negative so octet math stays in range
        }
        byte a = (byte) 224;                // multicast /8 prefix
        byte b = (byte) (seeds % 127);      // 0..126
        byte c = (byte) (seeds % (b + 33)); // divisor in 33..159, never zero
        byte d = (byte) (seeds % (b + 71)); // divisor in 71..197, never zero
        try {
            return InetAddress.getByAddress(new byte[] {a, b, c, d});
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
/*
* Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the DescribeScalingProcessTypes operation; it carries no
 * parameters.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-2011-01-01/DescribeScalingProcessTypes"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeScalingProcessTypesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging. Since this request has no fields, the result is an empty map.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // All instances of this parameterless request are equal; instanceof
        // also rejects null.
        return obj instanceof DescribeScalingProcessTypesRequest;
    }

    @Override
    public int hashCode() {
        // No fields contribute to the hash, so every instance shares it.
        return 1;
    }

    @Override
    public DescribeScalingProcessTypesRequest clone() {
        return (DescribeScalingProcessTypesRequest) super.clone();
    }
}
|
package com.softroniiks.digid.model;
import androidx.room.Embedded;
import androidx.room.Relation;
import java.util.List;
/**
 * Room relation POJO modeling the one-to-many relationship between a user and
 * the driver licenses that belong to it.
 *
 * NOTE(review): Room populates these public fields reflectively; do not rename.
 */
public class UserAndDriverLicense {
// Parent entity; its columns are embedded into the query result row.
@Embedded
public User user;
// Children: all DriverLicense rows whose ownerId equals the user's userId.
@Relation(
parentColumn = "userId",
entityColumn = "ownerId"
)
public List<DriverLicense> driverLicense;
}
|
/*
* Copyright 2016-2018 Axioma srl.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.holonplatform.datastore.mongo.sync.test.data;
/**
 * Nested test bean with two simple properties, used as MongoDB datastore test data.
 *
 * NOTE(review): bean properties v1/v2 are presumably mapped reflectively by the
 * datastore; renaming fields or accessors would change the stored document — confirm
 * before refactoring.
 */
public class TestNestedBean3 {
// String-valued property "v1".
private String v1;
// Double-valued property "v2".
private Double v2;
/** @return the value of property v1 */
public String getV1() {
return v1;
}
/** Sets property v1. */
public void setV1(String v1) {
this.v1 = v1;
}
/** @return the value of property v2 */
public Double getV2() {
return v2;
}
/** Sets property v2. */
public void setV2(Double v2) {
this.v2 = v2;
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.malhar.sql.sample;
import org.apache.apex.malhar.sql.SQLExecEnvironment;
import org.apache.apex.malhar.sql.table.CSVMessageFormat;
import org.apache.apex.malhar.sql.table.FileEndpoint;
import org.apache.apex.malhar.sql.table.KafkaEndpoint;
import org.apache.hadoop.conf.Configuration;
import com.datatorrent.api.DAG;
import com.datatorrent.api.StreamingApplication;
import com.datatorrent.api.annotation.ApplicationAnnotation;
/**
 * Sample application demonstrating a pure-SQL Apex pipeline: CSV records are
 * read from a Kafka topic, transformed by a configured SQL statement, and
 * written as CSV to a file.
 *
 * @since 3.6.0
 */
@ApplicationAnnotation(name = "PureStyleSQLApplication")
public class PureStyleSQLApplication implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // Source: Kafka topic carrying CSV messages with the configured schema.
    String inputTable = conf.get("schemaInName");
    KafkaEndpoint source = new KafkaEndpoint(conf.get("broker"), conf.get("topic"),
        new CSVMessageFormat(conf.get("schemaInDef")));

    // Sink: CSV file written into the configured output folder.
    String outputTable = conf.get("schemaOutName");
    FileEndpoint sink = new FileEndpoint(conf.get("outputFolder"), conf.get("destFileName"),
        new CSVMessageFormat(conf.get("schemaOutDef")));

    // Register both tables and the APEXCONCAT scalar UDF, then run the SQL.
    SQLExecEnvironment.getEnvironment()
        .registerTable(inputTable, source)
        .registerTable(outputTable, sink)
        .registerFunction("APEXCONCAT", this.getClass(), "apex_concat_str")
        .executeSQL(dag, conf.get("sql"));
  }

  /** Scalar UDF registered as APEXCONCAT: concatenates two strings. */
  public static String apex_concat_str(String s1, String s2)
  {
    return s1 + s2;
  }
}
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.mysql.v2017_12_01;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.annotation.JsonSubTypes;
/**
 * Base set of properties used to create a new server; the concrete subtype is
 * chosen by the {@code createMode} JSON discriminator.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "createMode")
@JsonTypeName("ServerPropertiesForCreate")
@JsonSubTypes({
    @JsonSubTypes.Type(name = "Default", value = ServerPropertiesForDefaultCreate.class),
    @JsonSubTypes.Type(name = "PointInTimeRestore", value = ServerPropertiesForRestore.class),
    @JsonSubTypes.Type(name = "GeoRestore", value = ServerPropertiesForGeoRestore.class),
    @JsonSubTypes.Type(name = "Replica", value = ServerPropertiesForReplica.class)
})
public class ServerPropertiesForCreate {
    /** Server version. Possible values include: '5.6', '5.7'. */
    @JsonProperty(value = "version")
    private ServerVersion version;

    /**
     * Whether SSL enforcement is enabled when connecting to the server.
     * Possible values include: 'Enabled', 'Disabled'.
     */
    @JsonProperty(value = "sslEnforcement")
    private SslEnforcementEnum sslEnforcement;

    /** Storage profile of the server. */
    @JsonProperty(value = "storageProfile")
    private StorageProfile storageProfile;

    /**
     * Gets the server version ('5.6' or '5.7').
     *
     * @return the version value
     */
    public ServerVersion version() {
        return version;
    }

    /**
     * Sets the server version ('5.6' or '5.7').
     *
     * @param version the version value to set
     * @return the ServerPropertiesForCreate object itself.
     */
    public ServerPropertiesForCreate withVersion(ServerVersion version) {
        this.version = version;
        return this;
    }

    /**
     * Gets whether SSL enforcement is enabled ('Enabled' or 'Disabled').
     *
     * @return the sslEnforcement value
     */
    public SslEnforcementEnum sslEnforcement() {
        return sslEnforcement;
    }

    /**
     * Sets whether SSL enforcement is enabled ('Enabled' or 'Disabled').
     *
     * @param sslEnforcement the sslEnforcement value to set
     * @return the ServerPropertiesForCreate object itself.
     */
    public ServerPropertiesForCreate withSslEnforcement(SslEnforcementEnum sslEnforcement) {
        this.sslEnforcement = sslEnforcement;
        return this;
    }

    /**
     * Gets the storage profile of the server.
     *
     * @return the storageProfile value
     */
    public StorageProfile storageProfile() {
        return storageProfile;
    }

    /**
     * Sets the storage profile of the server.
     *
     * @param storageProfile the storageProfile value to set
     * @return the ServerPropertiesForCreate object itself.
     */
    public ServerPropertiesForCreate withStorageProfile(StorageProfile storageProfile) {
        this.storageProfile = storageProfile;
        return this;
    }
}
|
package aop.pattern.proxy.dynamic;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
//动态代理实现
public class DynamicProxy implements InvocationHandler {
// 这个就是我们要代理的真实对象
private Object obj;
// 构造方法,给我们要代理的真实对象赋初值
public DynamicProxy(Object object) {
this.obj = object;
}
//相比静态代理,动态代理减只需要实现一个接口即可完成,而静态代理每次都要实现新加的方法以及维护被代理方法
@Override
public Object invoke(Object object, Method method, Object[] args)
throws Throwable {
//********************方法前增强***************************
// 反射调用目标方法
method.invoke(obj, args);
return null;
//********************方法后增强***************************
}
}
|
package com.co.wedding.bo;
import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import com.co.wedding.info.test.TestService;
/**
 * BO (back office) Controller.
 *
 * @author Chopin
 * @date 2019-02-03
 * @since 1.0
 */
@Controller
@RequestMapping(value=BoConstant.BO_MAIN)
public class BoController {
// Injected test service (not used by the handlers below yet).
@Resource
TestService service;
/**
 * Dashboard entry point; resolves the view mapped for NR_index.
 */
@RequestMapping(value="/NR_index.do")
public void index() {}
/** Root of this controller's mapping: redirect to the dashboard. */
@RequestMapping(value="/")
public String index2() {
return "redirect:NR_index.do";
}
}
|
/*
* Copyright (c) 1997 - 2016
* Actelion Pharmaceuticals Ltd.
* Gewerbestrasse 16
* CH-4123 Allschwil, Switzerland
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package com.actelion.research.chem.properties.fractaldimension;
import com.actelion.research.util.Formatter;
/**
* ResultFracDimCalc
* <p>Modest v. Korff</p>
* <p>
* Created by korffmo1 on 28.08.18.
*/
public class ResultFracDimCalc extends InputObjectFracDimCalc {
public static final String TAG_SMILES = "SMILES";
public static final String TAG_ID = "MoleculeId";
public static final String TAG_SUM_UNIQUE_FRAGMENTS_CALC = "SumUniqueFragmentsCalculated";
public static final String TAG_BONDS_AT_MAX_FRAGS_CALC = "BondNumberAtMaxNumFragCalculated";
public static final String TAG_MAX_NUM_UNIQUE_FRAGMENTS_CALC = "MaxNumUniqueFragmentsCalculated";
public static final String TAG_FRACTAL_DIM = "FractalDimension";
public static final String TAG_MESSAGE = "Message";
public static final String [] ARR_TAGS = {
TAG_SMILES,
TAG_ID,
TAG_SUM_UNIQUE_FRAGMENTS_CALC,
TAG_BONDS_AT_MAX_FRAGS_CALC,
TAG_MAX_NUM_UNIQUE_FRAGMENTS_CALC,
TAG_FRACTAL_DIM,
TAG_MESSAGE};
public static final String SEP = "\t";
int idMolecule;
double fractalDimension;
int bondsAtMaxFrag;
int maxNumUniqueFrags;
int sumUniqueFrags;
String message;
public ResultFracDimCalc(InputObjectFracDimCalc inputObjectFracDimCalc) {
super(inputObjectFracDimCalc);
idMolecule = -1;
fractalDimension = Double.NaN;
bondsAtMaxFrag = -1;
maxNumUniqueFrags = -1;
sumUniqueFrags = -1;
message = "";
}
public double getFractalDimension() {
return fractalDimension;
}
public int getBondsAtMaxFrag() {
return bondsAtMaxFrag;
}
public int getMaxNumUniqueFrags() {
return maxNumUniqueFrags;
}
public int getSumUniqueFrags() {
return sumUniqueFrags;
}
public String getMessage() {
return message;
}
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getSmiles());
sb.append(SEP);
sb.append(getId());
sb.append(SEP);
sb.append(getSumUniqueFrags());
sb.append(SEP);
sb.append(getBondsAtMaxFrag());
sb.append(SEP);
sb.append(getMaxNumUniqueFrags());
sb.append(SEP);
sb.append(Formatter.format3(getFractalDimension()));
sb.append(SEP);
sb.append(getMessage());
return sb.toString();
}
public static String toStringHeader() {
StringBuilder sb = new StringBuilder();
sb.append(TAG_SMILES);
sb.append(SEP);
sb.append(TAG_ID);
sb.append(SEP);
sb.append(TAG_SUM_UNIQUE_FRAGMENTS_CALC);
sb.append(SEP);
sb.append(TAG_BONDS_AT_MAX_FRAGS_CALC);
sb.append(SEP);
sb.append(TAG_MAX_NUM_UNIQUE_FRAGMENTS_CALC);
sb.append(SEP);
sb.append(TAG_FRACTAL_DIM);
sb.append(SEP);
sb.append(TAG_MESSAGE);
return sb.toString();
}
}
|
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
/**
 * Three-stage MapReduce pipeline that counts triangles in the Twitter follower graph.
 *
 * Stage 1 normalises the directed edge list into de-duplicated undirected edges
 * (smaller id first). Stage 2 expands each node's neighbour list into two-paths
 * "a#b". Stage 3 joins two-paths against the original edges and sums the triangles.
 */
public class TwitterTriangleCount {
    private static final String INTERMEDIATE_OUTPUT_PATH = "intermediate_output";
    // Fix: a job cannot write into its own input directory, so stage 2 gets its own.
    private static final String TWOPATH_OUTPUT_PATH = "intermediate_output_2";

    /** Emits each edge with the smaller user id first; self-loops are dropped. */
    public static class UndirectedGraphMapper extends Mapper<LongWritable, Text, LongWritable, LongWritable> {
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            LongWritable mapKey = new LongWritable();
            LongWritable mapValue = new LongWritable();
            StringTokenizer stringTokenizer = new StringTokenizer(value.toString());
            long userOneID = Long.parseLong(stringTokenizer.nextToken());
            long userTwoID = Long.parseLong(stringTokenizer.nextToken());
            // Fix: removed references to the undefined variables emited/toBeEmited.
            if (userOneID < userTwoID) {
                mapKey.set(userOneID);
                mapValue.set(userTwoID);
                context.write(mapKey, mapValue);
            } else if (userOneID > userTwoID) {
                mapKey.set(userTwoID);
                mapValue.set(userOneID);
                context.write(mapKey, mapValue);
            }
        }
    }

    /** De-duplicates neighbours and writes "node TAB neighbour" text records. */
    public static class UndirectedGraphReducer extends Reducer<LongWritable, LongWritable, Text, Text> {
        public void reduce(LongWritable key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            // Fix: Hadoop reuses the same LongWritable instance across the
            // iteration, so the set must hold the primitive values.
            Set<Long> uniqueFollowers = new HashSet<Long>();
            for (LongWritable value : values) {
                uniqueFollowers.add(value.get());
            }
            for (Long uniqueFollower : uniqueFollowers) {
                // Fix: Text has no LongWritable constructor; convert via String.
                context.write(new Text(key.toString()), new Text(uniqueFollower.toString()));
            }
        }
    }

    /** Re-parses "node TAB neighbour" records produced by stage 1. */
    public static class FollowersMapper extends Mapper<Text, Text, LongWritable, LongWritable> {
        // Fix: the signature must match the declared (Text, Text) input types,
        // otherwise the framework silently falls back to the identity map.
        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            long outKey = Long.parseLong(key.toString().trim());
            long outValue = Long.parseLong(value.toString().trim());
            context.write(new LongWritable(outKey), new LongWritable(outValue));
        }
    }

    /** Emits every two-path "a#b" (a &lt; b) centred on the key node. */
    public static class FollowersReducer extends Reducer<LongWritable, LongWritable, Text, Text> {
        public void reduce(LongWritable key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            // Fix: a reducer's Iterable can only be traversed once and the
            // value object is reused, so cache the primitives first.
            List<Long> followers = new ArrayList<Long>();
            for (LongWritable value : values) {
                followers.add(value.get());
            }
            for (Long followerOne : followers) {
                for (Long followerTwo : followers) {
                    if (followerOne.compareTo(followerTwo) < 0) {
                        // Fix: followerOne.get().toString() does not compile (primitive).
                        String outValue = followerOne + "#" + followerTwo;
                        context.write(new Text(key.toString()), new Text(outValue));
                    }
                }
            }
        }
    }

    /** Keys both edges ("$"-marked) and two-paths by their endpoint pair "a#b". */
    public static class TriangleMapper extends Mapper<Text, Text, Text, Text> {
        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            // Fix: Text has no split(); operate on its String form.
            String[] splittedValue = value.toString().split("#");
            if (splittedValue.length == 1) {
                // Original edge record: mark the endpoint pair as connected.
                String newKey = key.toString() + "#" + value.toString();
                // Fix: the value must be a Text, not a bare String literal.
                context.write(new Text(newKey), new Text("$"));
            } else if (splittedValue.length == 2) {
                // Two-path record: key by its endpoints, value is the centre node.
                context.write(value, key);
            }
        }
    }

    /** Counts two-paths whose endpoints are directly connected. */
    public static class TriangleReducer extends Reducer<Text, Text, Text, LongWritable> {
        static long totalTriangles = 0;
        public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            boolean connected = false;
            long triangles = 0;
            for (Text value : values) {
                if (value.toString().equals("$")) {
                    connected = true;   // the endpoints share an edge
                } else {
                    triangles += 1;     // each two-path then closes a triangle
                }
            }
            if (connected) {
                totalTriangles += triangles;
            }
        }
        // Fix: emit the total once per reducer instead of a running value per key.
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            context.write(new Text("total_triangles"), new LongWritable(totalTriangles));
        }
    }

    public static void main(String[] args) throws Exception {
        // Job 1: make the graph undirected and de-duplicated.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "GraphUndirecterMapReduceJob");
        job.setJarByClass(TwitterTriangleCount.class);
        job.setMapperClass(UndirectedGraphMapper.class);
        job.setReducerClass(UndirectedGraphReducer.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // Fix: paths must be configured before the job runs (the original
        // called waitForCompletion first), and setNumReduceTasks(0) would have
        // skipped the configured reducer entirely.
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(INTERMEDIATE_OUTPUT_PATH));
        if (!job.waitForCompletion(true)) {
            System.exit(1);
        }

        // Job 2: expand each node's neighbour list into two-paths.
        Configuration conf2 = new Configuration();
        Job job2 = Job.getInstance(conf2, "TwoPathMapReduceJob");
        job2.setJarByClass(TwitterTriangleCount.class);
        // Fix: stage-1 output is "key TAB value" text, so read it as key/value pairs.
        job2.setInputFormatClass(KeyValueTextInputFormat.class);
        job2.setMapperClass(FollowersMapper.class);
        job2.setReducerClass(FollowersReducer.class);
        job2.setMapOutputKeyClass(LongWritable.class);
        job2.setMapOutputValueClass(LongWritable.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job2, new Path(INTERMEDIATE_OUTPUT_PATH));
        FileOutputFormat.setOutputPath(job2, new Path(TWOPATH_OUTPUT_PATH));
        if (!job2.waitForCompletion(true)) {
            System.exit(1);
        }

        // Job 3: join two-paths with edges and count closed triangles.
        Configuration conf3 = new Configuration();
        Job job3 = Job.getInstance(conf3, "TriangleCounterJob");
        job3.setJarByClass(TwitterTriangleCount.class);
        job3.setInputFormatClass(KeyValueTextInputFormat.class);
        job3.setMapperClass(TriangleMapper.class);
        job3.setReducerClass(TriangleReducer.class);
        job3.setMapOutputKeyClass(Text.class);
        job3.setMapOutputValueClass(Text.class);
        job3.setOutputKeyClass(Text.class);
        job3.setOutputValueClass(LongWritable.class);
        // The join needs both the edges (job 1) and the two-paths (job 2).
        FileInputFormat.addInputPath(job3, new Path(INTERMEDIATE_OUTPUT_PATH));
        FileInputFormat.addInputPath(job3, new Path(TWOPATH_OUTPUT_PATH));
        // Fix: the output path belongs to FileOutputFormat, not FileInputFormat.
        FileOutputFormat.setOutputPath(job3, new Path(args[1]));
        System.exit(job3.waitForCompletion(true) ? 0 : 1);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.encryption;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.AlgorithmParameterGenerator;
import java.security.AlgorithmParameters;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Iterator;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyGenerator;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSString;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.bouncycastle.asn1.ASN1Encoding;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Set;
import org.bouncycastle.asn1.DEROctetString;
import org.bouncycastle.asn1.DERSet;
import org.bouncycastle.asn1.cms.ContentInfo;
import org.bouncycastle.asn1.cms.EncryptedContentInfo;
import org.bouncycastle.asn1.cms.EnvelopedData;
import org.bouncycastle.asn1.cms.IssuerAndSerialNumber;
import org.bouncycastle.asn1.cms.KeyTransRecipientInfo;
import org.bouncycastle.asn1.cms.RecipientIdentifier;
import org.bouncycastle.asn1.cms.RecipientInfo;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.TBSCertificate;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cms.CMSEnvelopedData;
import org.bouncycastle.cms.CMSException;
import org.bouncycastle.cms.KeyTransRecipientId;
import org.bouncycastle.cms.RecipientId;
import org.bouncycastle.cms.RecipientInformation;
import org.bouncycastle.cms.jcajce.JceKeyTransEnvelopedRecipient;
import org.bouncycastle.util.Arrays;
/**
* This class implements the public key security handler described in the PDF specification.
*
* @see PublicKeyProtectionPolicy to see how to protect document with this security handler.
* @author Benoit Guillon
*/
public final class PublicKeySecurityHandler extends SecurityHandler<PublicKeyProtectionPolicy>
{
/** The filter name. */
public static final String FILTER = "Adobe.PubSec";
// PKCS#7 subfilter identifiers accepted/written in the encryption dictionary.
private static final String SUBFILTER4 = "adbe.pkcs7.s4";
private static final String SUBFILTER5 = "adbe.pkcs7.s5";
/**
 * Constructor. Creates a handler without a protection policy, as used when
 * decrypting an existing document.
 */
public PublicKeySecurityHandler()
{
}
/**
 * Constructor used for encryption: the given policy describes the recipients
 * and permissions to apply when protecting a document.
 *
 * @param publicKeyProtectionPolicy The protection policy.
 */
public PublicKeySecurityHandler(PublicKeyProtectionPolicy publicKeyProtectionPolicy)
{
super(publicKeyProtectionPolicy);
}
/**
* Prepares everything to decrypt the document.
*
* @param encryption encryption dictionary, can be retrieved via
* {@link PDDocument#getEncryption()}
* @param documentIDArray document id which is returned via
* {@link org.apache.pdfbox.cos.COSDocument#getDocumentID()} (not used by
* this handler)
* @param decryptionMaterial Information used to decrypt the document.
*
* @throws IOException If there is an error accessing data. If verbose mode
* is enabled, the exception message will provide more details why the
* match wasn't successful.
*/
@Override
public void prepareForDecryption(PDEncryption encryption, COSArray documentIDArray,
DecryptionMaterial decryptionMaterial)
throws IOException
{
if (!(decryptionMaterial instanceof PublicKeyDecryptionMaterial))
{
throw new IOException(
"Provided decryption material is not compatible with the document - "
+ "did you pass a null keyStore?");
}
PDCryptFilterDictionary defaultCryptFilterDictionary = encryption.getDefaultCryptFilterDictionary();
if (defaultCryptFilterDictionary != null && defaultCryptFilterDictionary.getLength() != 0)
{
setKeyLength(defaultCryptFilterDictionary.getLength());
setDecryptMetadata(defaultCryptFilterDictionary.isEncryptMetaData());
}
else if (encryption.getLength() != 0)
{
setKeyLength(encryption.getLength());
setDecryptMetadata(encryption.isEncryptMetaData());
}
PublicKeyDecryptionMaterial material = (PublicKeyDecryptionMaterial) decryptionMaterial;
try
{
boolean foundRecipient = false;
X509Certificate certificate = material.getCertificate();
X509CertificateHolder materialCert = null;
if (certificate != null)
{
materialCert = new X509CertificateHolder(certificate.getEncoded());
}
// the decrypted content of the enveloped data that match
// the certificate in the decryption material provided
byte[] envelopedData = null;
// the bytes of each recipient in the recipients array
COSArray array = encryption.getCOSObject().getCOSArray(COSName.RECIPIENTS);
if (array == null && defaultCryptFilterDictionary != null)
{
array = defaultCryptFilterDictionary.getCOSObject().getCOSArray(COSName.RECIPIENTS);
}
if (array == null)
{
throw new IOException("/Recipients entry is missing in encryption dictionary");
}
byte[][] recipientFieldsBytes = new byte[array.size()][];
//TODO encryption.getRecipientsLength() and getRecipientStringAt() should be deprecated
int recipientFieldsLength = 0;
StringBuilder extraInfo = new StringBuilder();
for (int i = 0; i < array.size(); i++)
{
COSString recipientFieldString = (COSString) array.getObject(i);
byte[] recipientBytes = recipientFieldString.getBytes();
CMSEnvelopedData data = new CMSEnvelopedData(recipientBytes);
Collection<RecipientInformation> recipCertificatesIt = data.getRecipientInfos()
.getRecipients();
int j = 0;
for (RecipientInformation ri : recipCertificatesIt)
{
// Impl: if a matching certificate was previously found it is an error,
// here we just don't care about it
RecipientId rid = ri.getRID();
if (!foundRecipient && rid.match(materialCert))
{
foundRecipient = true;
PrivateKey privateKey = (PrivateKey) material.getPrivateKey();
// might need to call setContentProvider() if we use PKI token, see
// http://bouncy-castle.1462172.n4.nabble.com/CMSException-exception-unwrapping-key-key-invalid-unknown-key-type-passed-to-RSA-td4658109.html
envelopedData = ri.getContent(new JceKeyTransEnvelopedRecipient(privateKey));
break;
}
j++;
if (certificate != null)
{
extraInfo.append('\n');
extraInfo.append(j);
extraInfo.append(": ");
if (rid instanceof KeyTransRecipientId)
{
appendCertInfo(extraInfo, (KeyTransRecipientId) rid, certificate, materialCert);
}
}
}
recipientFieldsBytes[i] = recipientBytes;
recipientFieldsLength += recipientBytes.length;
}
if (!foundRecipient || envelopedData == null)
{
throw new IOException("The certificate matches none of " + array.size()
+ " recipient entries" + extraInfo.toString());
}
if (envelopedData.length != 24)
{
throw new IOException("The enveloped data does not contain 24 bytes");
}
// now envelopedData contains:
// - the 20 bytes seed
// - the 4 bytes of permission for the current user
byte[] accessBytes = new byte[4];
System.arraycopy(envelopedData, 20, accessBytes, 0, 4);
AccessPermission currentAccessPermission = new AccessPermission(accessBytes);
currentAccessPermission.setReadOnly();
setCurrentAccessPermission(currentAccessPermission);
// what we will put in the SHA1 = the seed + each byte contained in the recipients array
byte[] sha1Input = new byte[recipientFieldsLength + 20];
// put the seed in the sha1 input
System.arraycopy(envelopedData, 0, sha1Input, 0, 20);
// put each bytes of the recipients array in the sha1 input
int sha1InputOffset = 20;
for (byte[] recipientFieldsByte : recipientFieldsBytes)
{
System.arraycopy(recipientFieldsByte, 0, sha1Input, sha1InputOffset,
recipientFieldsByte.length);
sha1InputOffset += recipientFieldsByte.length;
}
byte[] mdResult;
if (encryption.getVersion() == 4 || encryption.getVersion() == 5)
{
if (!isDecryptMetadata())
{
// "4 bytes with the value 0xFF if the key being generated is intended for use in
// document-level encryption and the document metadata is being left as plaintext"
sha1Input = Arrays.copyOf(sha1Input, sha1Input.length + 4);
System.arraycopy(new byte[]{ (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff}, 0, sha1Input, sha1Input.length - 4, 4);
}
if (encryption.getVersion() == 4)
{
mdResult = MessageDigests.getSHA1().digest(sha1Input);
}
else
{
mdResult = MessageDigests.getSHA256().digest(sha1Input);
}
// detect whether AES encryption is used. This assumes that the encryption algo is
// stored in the PDCryptFilterDictionary
// However, crypt filters are used only when V is 4 or 5.
if (defaultCryptFilterDictionary != null)
{
COSName cryptFilterMethod = defaultCryptFilterDictionary.getCryptFilterMethod();
setAES(COSName.AESV2.equals(cryptFilterMethod) ||
COSName.AESV3.equals(cryptFilterMethod));
}
}
else
{
mdResult = MessageDigests.getSHA1().digest(sha1Input);
}
// we have the encryption key ...
setEncryptionKey(new byte[getKeyLength() / 8]);
System.arraycopy(mdResult, 0, getEncryptionKey(), 0, getKeyLength() / 8);
}
catch (CMSException | KeyStoreException | CertificateEncodingException e)
{
throw new IOException(e);
}
}
private void appendCertInfo(StringBuilder extraInfo, KeyTransRecipientId ktRid,
X509Certificate certificate, X509CertificateHolder materialCert)
{
BigInteger ridSerialNumber = ktRid.getSerialNumber();
if (ridSerialNumber != null)
{
String certSerial = "unknown";
BigInteger certSerialNumber = certificate.getSerialNumber();
if (certSerialNumber != null)
{
certSerial = certSerialNumber.toString(16);
}
extraInfo.append("serial-#: rid ");
extraInfo.append(ridSerialNumber.toString(16));
extraInfo.append(" vs. cert ");
extraInfo.append(certSerial);
extraInfo.append(" issuer: rid \'");
extraInfo.append(ktRid.getIssuer());
extraInfo.append("\' vs. cert \'");
extraInfo.append(materialCert == null ? "null" : materialCert.getIssuer());
extraInfo.append("\' ");
}
}
/**
* Prepare the document for encryption.
*
* @param doc The document that will be encrypted.
*
* @throws IOException If there is an error while encrypting.
*/
@Override
public void prepareDocumentForEncryption(PDDocument doc) throws IOException
{
try
{
PDEncryption dictionary = doc.getEncryption();
if (dictionary == null)
{
dictionary = new PDEncryption();
}
dictionary.setFilter(FILTER);
dictionary.setLength(getKeyLength());
int version = computeVersionNumber();
dictionary.setVersion(version);
// remove CF, StmF, and StrF entries that may be left from a previous encryption
dictionary.removeV45filters();
// create the 20 bytes seed
byte[] seed = new byte[20];
KeyGenerator key;
try
{
key = KeyGenerator.getInstance("AES");
}
catch (NoSuchAlgorithmException e)
{
// should never happen
throw new RuntimeException(e);
}
key.init(192, new SecureRandom());
SecretKey sk = key.generateKey();
// create the 20 bytes seed
System.arraycopy(sk.getEncoded(), 0, seed, 0, 20);
byte[][] recipientsFields = computeRecipientsField(seed);
int shaInputLength = seed.length;
for (byte[] field : recipientsFields)
{
shaInputLength += field.length;
}
byte[] shaInput = new byte[shaInputLength];
System.arraycopy(seed, 0, shaInput, 0, 20);
int shaInputOffset = 20;
for (byte[] recipientsField : recipientsFields)
{
System.arraycopy(recipientsField, 0, shaInput, shaInputOffset, recipientsField.length);
shaInputOffset += recipientsField.length;
}
byte[] mdResult;
switch (version)
{
case 4:
dictionary.setSubFilter(SUBFILTER5);
mdResult = MessageDigests.getSHA1().digest(shaInput);
prepareEncryptionDictAES(dictionary, COSName.AESV2, recipientsFields);
break;
case 5:
dictionary.setSubFilter(SUBFILTER5);
mdResult = MessageDigests.getSHA256().digest(shaInput);
prepareEncryptionDictAES(dictionary, COSName.AESV3, recipientsFields);
break;
default:
dictionary.setSubFilter(SUBFILTER4);
mdResult = MessageDigests.getSHA1().digest(shaInput);
dictionary.setRecipients(recipientsFields);
break;
}
setEncryptionKey(new byte[getKeyLength() / 8]);
System.arraycopy(mdResult, 0, getEncryptionKey(), 0, getKeyLength() / 8);
doc.setEncryptionDictionary(dictionary);
doc.getDocument().setEncryptionDictionary(dictionary.getCOSObject());
}
catch(GeneralSecurityException e)
{
throw new IOException(e);
}
}
private void prepareEncryptionDictAES(PDEncryption encryptionDictionary, COSName aesVName, byte[][] recipients)
{
PDCryptFilterDictionary cryptFilterDictionary = new PDCryptFilterDictionary();
cryptFilterDictionary.setCryptFilterMethod(aesVName);
cryptFilterDictionary.setLength(getKeyLength());
COSArray array = new COSArray();
for (byte[] recipient : recipients)
{
array.add(new COSString(recipient));
}
cryptFilterDictionary.getCOSObject().setItem(COSName.RECIPIENTS, array);
array.setDirect(true);
encryptionDictionary.setDefaultCryptFilterDictionary(cryptFilterDictionary);
encryptionDictionary.setStreamFilterName(COSName.DEFAULT_CRYPT_FILTER);
encryptionDictionary.setStringFilterName(COSName.DEFAULT_CRYPT_FILTER);
cryptFilterDictionary.getCOSObject().setDirect(true);
setAES(true);
}
private byte[][] computeRecipientsField(byte[] seed) throws GeneralSecurityException, IOException
{
PublicKeyProtectionPolicy protectionPolicy = getProtectionPolicy();
byte[][] recipientsField = new byte[protectionPolicy.getNumberOfRecipients()][];
Iterator<PublicKeyRecipient> it = protectionPolicy.getRecipientsIterator();
int i = 0;
while(it.hasNext())
{
PublicKeyRecipient recipient = it.next();
X509Certificate certificate = recipient.getX509();
int permission = recipient.getPermission().getPermissionBytesForPublicKey();
byte[] pkcs7input = new byte[24];
byte one = (byte)(permission);
byte two = (byte)(permission >>> 8);
byte three = (byte)(permission >>> 16);
byte four = (byte)(permission >>> 24);
// put this seed in the pkcs7 input
System.arraycopy(seed, 0, pkcs7input, 0, 20);
pkcs7input[20] = four;
pkcs7input[21] = three;
pkcs7input[22] = two;
pkcs7input[23] = one;
ASN1Primitive obj = createDERForRecipient(pkcs7input, certificate);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
obj.encodeTo(baos, ASN1Encoding.DER);
recipientsField[i] = baos.toByteArray();
i++;
}
return recipientsField;
}
private ASN1Primitive createDERForRecipient(byte[] in, X509Certificate cert)
throws IOException, GeneralSecurityException
{
String algorithm = PKCSObjectIdentifiers.RC2_CBC.getId();
AlgorithmParameterGenerator apg;
KeyGenerator keygen;
Cipher cipher;
try
{
Provider provider = SecurityProvider.getProvider();
apg = AlgorithmParameterGenerator.getInstance(algorithm, provider);
keygen = KeyGenerator.getInstance(algorithm, provider);
cipher = Cipher.getInstance(algorithm, provider);
}
catch (NoSuchAlgorithmException e)
{
// happens when using the command line app .jar file
throw new IOException("Could not find a suitable javax.crypto provider for algorithm " +
algorithm + "; possible reason: using an unsigned .jar file", e);
}
catch (NoSuchPaddingException e)
{
// should never happen, if this happens throw IOException instead
throw new RuntimeException("Could not find a suitable javax.crypto provider", e);
}
AlgorithmParameters parameters = apg.generateParameters();
ASN1Primitive object;
try (ASN1InputStream input = new ASN1InputStream(parameters.getEncoded("ASN.1")))
{
object = input.readObject();
}
keygen.init(128);
SecretKey secretkey = keygen.generateKey();
cipher.init(1, secretkey, parameters);
byte[] bytes = cipher.doFinal(in);
KeyTransRecipientInfo recipientInfo = computeRecipientInfo(cert, secretkey.getEncoded());
DERSet set = new DERSet(new RecipientInfo(recipientInfo));
AlgorithmIdentifier algorithmId = new AlgorithmIdentifier(new ASN1ObjectIdentifier(algorithm), object);
EncryptedContentInfo encryptedInfo =
new EncryptedContentInfo(PKCSObjectIdentifiers.data, algorithmId, new DEROctetString(bytes));
EnvelopedData enveloped = new EnvelopedData(null, set, encryptedInfo, (ASN1Set) null);
ContentInfo contentInfo = new ContentInfo(PKCSObjectIdentifiers.envelopedData, enveloped);
return contentInfo.toASN1Primitive();
}
private KeyTransRecipientInfo computeRecipientInfo(X509Certificate x509certificate, byte[] abyte0)
throws IOException, CertificateEncodingException, InvalidKeyException,
BadPaddingException, IllegalBlockSizeException
{
TBSCertificate certificate;
try (ASN1InputStream input = new ASN1InputStream(x509certificate.getTBSCertificate()))
{
certificate = TBSCertificate.getInstance(input.readObject());
}
AlgorithmIdentifier algorithmId = certificate.getSubjectPublicKeyInfo().getAlgorithm();
IssuerAndSerialNumber serial = new IssuerAndSerialNumber(
certificate.getIssuer(),
certificate.getSerialNumber().getValue());
Cipher cipher;
try
{
cipher = Cipher.getInstance(algorithmId.getAlgorithm().getId(),
SecurityProvider.getProvider());
}
catch (NoSuchAlgorithmException | NoSuchPaddingException e)
{
// should never happen, if this happens throw IOException instead
throw new RuntimeException("Could not find a suitable javax.crypto provider", e);
}
cipher.init(1, x509certificate.getPublicKey());
DEROctetString octets = new DEROctetString(cipher.doFinal(abyte0));
RecipientIdentifier recipientId = new RecipientIdentifier(serial);
return new KeyTransRecipientInfo(recipientId, algorithmId, octets);
}
}
|
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author Gencturk
*/
public class VehicleSys {
private static ArrayList<Vehicle> vehicles = new ArrayList<Vehicle>();
public static boolean addVehicle(Vehicle vehicle)
{
for(int i = 0; i < vehicles.size(); i++)
if(vehicles.get(i).getLicencePlate()== vehicle.getLicencePlate())
return false;
vehicles.add(vehicle);
return true;
}
public static Vehicle searchVehicle(String licencePlate)
{
for(int i = 0; i < vehicles.size(); i++)
if(vehicles.get(i).getLicencePlate().equals(licencePlate))
return vehicles.get(i);
return null;
}
public static ArrayList<Vehicle> getVehicles()
{
return vehicles;
}
public static ArrayList<Car> getCars()
{
ArrayList<Car> cars = new ArrayList<Car>();
for(int i = 0; i < vehicles.size(); i++)
if(vehicles.get(i) instanceof Car)
cars.add((Car)vehicles.get(i));
return cars;
}
public static ArrayList<Motorcycle> getMotorcycles()
{
ArrayList<Motorcycle> motors = new ArrayList<Motorcycle>();
for(int i = 0; i < vehicles.size(); i++)
if(vehicles.get(i) instanceof Motorcycle)
motors.add((Motorcycle)vehicles.get(i));
return motors;
}
public static ArrayList<Minivan> getMinivans()
{
ArrayList<Minivan> minivans = new ArrayList<Minivan>();
for(int i = 0; i < vehicles.size(); i++)
if(vehicles.get(i) instanceof Minivan)
minivans.add((Minivan)vehicles.get(i));
return minivans;
}
public static String getVehiclesInfo()
{
String result = "";
for(int i = 0; i < vehicles.size(); i++)
result += vehicles.get(i).toString();
return result;
}
public static String getCarsInfo(boolean checkAvailibility)
{
String result = "";
for(int i = 0; i < vehicles.size(); i++)
{
if(checkAvailibility && vehicles.get(i).isRented)
continue;
if(vehicles.get(i) instanceof Car)
result += vehicles.get(i).toString();
}
return result;
}
public static String getMotorsInfo(boolean checkAvailibility)
{
String result = "";
for(int i = 0; i < vehicles.size(); i++)
{
if(checkAvailibility && vehicles.get(i).isRented)
continue;
if(vehicles.get(i) instanceof Motorcycle)
result += vehicles.get(i).toString();
}
return result;
}
public static String getMinivansInfo(boolean checkAvailibility)
{
String result = "";
for(int i = 0; i < vehicles.size(); i++)
{
if(checkAvailibility && vehicles.get(i).isRented)
continue;
if(vehicles.get(i) instanceof Minivan)
result += vehicles.get(i).toString();
}
return result;
}
public static boolean readTextFile()
{
/*File Structure
--Car--
licencePlate,brand,model,year,dailyPrice,isRented,minAgeToRent,numOfSeats,fuelType
--Motor--
licencePlate,brand,model,year,dailyPrice,isRented,minAgeToRent,helmetType,motorPower
--Minivan--
licencePlate,brand,model,year,dailyPrice,isRented,minAgeToRent,weightCapacity,numOfSeats
*/
Scanner scanner;
File file = new File("car.txt");
if(file.exists())
{
try {
scanner = new Scanner(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return false;
}
String input[];
while(scanner.hasNext())
{
input = scanner.nextLine().split(",");
addVehicle(new Car(input[0],input[1],input[2],
Integer.parseInt(input[3]),
Double.parseDouble(input[4]),
Boolean.parseBoolean(input[5]),
Integer.parseInt(input[6]),
Integer.parseInt(input[7]),
input[8]));
}
}
file = new File("motor.txt");
if(file.exists())
{
try {
scanner = new Scanner(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return false;
}
String input[];
while(scanner.hasNext())
{
input = scanner.nextLine().split(",");
addVehicle(new Motorcycle(input[0],input[1],input[2],
Integer.parseInt(input[3]),
Double.parseDouble(input[4]),
Boolean.parseBoolean(input[5]),
Integer.parseInt(input[6]),
input[7],
Integer.parseInt(input[8])));
}
}
file = new File("minivan.txt");
if(file.exists())
{
try {
scanner = new Scanner(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return false;
}
String input[];
while(scanner.hasNext())
{
input = scanner.nextLine().split(",");
addVehicle(new Minivan(input[0],input[1],input[2],
Integer.parseInt(input[3]),
Double.parseDouble(input[4]),
Boolean.parseBoolean(input[5]),
Integer.parseInt(input[6]),
Double.parseDouble(input[7]),
Integer.parseInt(input[8])));
}
}
return true;
}
public static void writeTextFile()
{
//CAR.TXT
File file = new File("car.txt");
PrintWriter writer;
try {
writer = new PrintWriter(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return;
}
ArrayList<Car> cars = getCars();
for(int i = 0; i < cars.size(); i++)
writer.println(cars.get(i).toStringForWriting());
writer.close();
//MOTOR.TXT
file = new File("motor.txt");
try {
writer = new PrintWriter(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return;
}
ArrayList<Motorcycle> motors = getMotorcycles();
for(int i = 0; i < motors.size(); i++)
writer.println(motors.get(i).toStringForWriting());
writer.close();
//MINIVAN.TXT
file = new File("minivan.txt");
try {
writer = new PrintWriter(file);
} catch (FileNotFoundException ex) {
Logger.getLogger(VehicleSys.class.getName()).log(Level.SEVERE, null, ex);
return;
}
ArrayList<Minivan> minivans = getMinivans();
for(int i = 0; i < minivans.size(); i++)
writer.println(minivans.get(i).toStringForWriting());
writer.close();
}
}
|
/*
* Copyright (c) 2015 The CCP project authors. All Rights Reserved.
*
* Use of this source code is governed by a Beijing Speedtong Information Technology Co.,Ltd license
* that can be found in the LICENSE file in the root of the web site.
*
* http://www.yuntongxun.com
*
* An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/package com.darly.im.common.view;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.darly.dlclent.R;
/**
* 网络提醒BannerView
* Created by Jorstin on 2015/3/18.
*/
public class NetWarnBannerView extends LinearLayout {
private View mContetLayout;
private ImageView mNetWarnIcon;
private TextView mNetDetail;
private TextView mNetDetailTips;
private TextView mNetHintTips;
private ProgressBar mProgressBar;
/**
* @param context
*/
public NetWarnBannerView(Context context) {
this(context , null);
}
/**
* @param context
* @param attrs
*/
public NetWarnBannerView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
/**
* @param context
* @param attrs
* @param defStyle
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public NetWarnBannerView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
/**
*
*/
private void init() {
View.inflate(getContext(), R.layout.net_warn_item, this);
mContetLayout = findViewById(R.id.nwview);
mNetWarnIcon = (ImageView) findViewById(R.id.nw_icon);
mNetDetail = (TextView) findViewById(R.id.nw_detail);
mNetDetailTips = (TextView) findViewById(R.id.nw_detail_tip);
mNetHintTips = (TextView) findViewById(R.id.nw_hint_tip);
mProgressBar = (ProgressBar) findViewById(R.id.nw_prog);
}
public final void setNetWarnText(CharSequence text) {
mNetDetail.setText(text);
mProgressBar.setVisibility(View.GONE);
mContetLayout.setVisibility(View.VISIBLE);
}
public final void setNetWarnDetailTips(CharSequence text) {
mNetDetailTips.setText(text);
mProgressBar.setVisibility(View.GONE);
mContetLayout.setVisibility(View.VISIBLE);
}
public final void setNetWarnHintText(CharSequence text) {
mNetHintTips.setText(text);
mProgressBar.setVisibility(View.GONE);
mContetLayout.setVisibility(View.VISIBLE);
}
/**
* set gone
*/
public void hideWarnBannerView() {
if(mContetLayout == null) {
return;
}
mContetLayout.setVisibility(View.GONE);
}
/**
* 重新连接
* @param reconnect
*/
public final void reconnect(boolean reconnect) {
mContetLayout.setVisibility(View.VISIBLE);
if(reconnect) {
mProgressBar.setVisibility(View.VISIBLE);
mNetWarnIcon.setVisibility(View.INVISIBLE);
return ;
}
mProgressBar.setVisibility(View.GONE);
mNetWarnIcon.setVisibility(View.VISIBLE);
}
}
|
/*
* Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencentcloudapi.vod.v20180717.models;
import com.tencentcloudapi.common.AbstractModel;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import java.util.HashMap;
public class HighlightsConfigureInfo extends AbstractModel{
/**
* 智能精彩片段任务开关,可选值:
<li>ON:开启智能精彩片段任务;</li>
<li>OFF:关闭智能精彩片段任务。</li>
*/
@SerializedName("Switch")
@Expose
private String Switch;
/**
* Get 智能精彩片段任务开关,可选值:
<li>ON:开启智能精彩片段任务;</li>
<li>OFF:关闭智能精彩片段任务。</li>
* @return Switch 智能精彩片段任务开关,可选值:
<li>ON:开启智能精彩片段任务;</li>
<li>OFF:关闭智能精彩片段任务。</li>
*/
public String getSwitch() {
return this.Switch;
}
/**
* Set 智能精彩片段任务开关,可选值:
<li>ON:开启智能精彩片段任务;</li>
<li>OFF:关闭智能精彩片段任务。</li>
* @param Switch 智能精彩片段任务开关,可选值:
<li>ON:开启智能精彩片段任务;</li>
<li>OFF:关闭智能精彩片段任务。</li>
*/
public void setSwitch(String Switch) {
this.Switch = Switch;
}
/**
* Internal implementation, normal users should not use it.
*/
public void toMap(HashMap<String, String> map, String prefix) {
this.setParamSimple(map, prefix + "Switch", this.Switch);
}
}
|
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.transform;
import com.google.common.collect.Maps;
import org.gradle.api.artifacts.component.ComponentArtifactIdentifier;
import org.gradle.api.artifacts.component.ComponentIdentifier;
import org.gradle.api.artifacts.component.ProjectComponentIdentifier;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.artifact.ResolvedArtifactSet;
import org.gradle.api.internal.attributes.AttributeContainerInternal;
import org.gradle.api.internal.attributes.ImmutableAttributes;
import org.gradle.api.internal.file.FileCollectionStructureVisitor;
import org.gradle.api.internal.tasks.TaskDependencyResolveContext;
import org.gradle.internal.Describables;
import org.gradle.internal.DisplayName;
import org.gradle.internal.operations.BuildOperationQueue;
import org.gradle.internal.operations.RunnableBuildOperation;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
/**
* Transformed artifact set that performs the transformation itself when requested.
*/
public class ConsumerProvidedResolvedVariant implements ResolvedArtifactSet, ConsumerProvidedVariantFiles {
private final ComponentIdentifier componentIdentifier;
private final ResolvedArtifactSet delegate;
private final AttributeContainerInternal attributes;
private final Transformation transformation;
private final ExtraExecutionGraphDependenciesResolverFactory resolverFactory;
private final TransformationNodeRegistry transformationNodeRegistry;
public ConsumerProvidedResolvedVariant(
ComponentIdentifier componentIdentifier,
ResolvedArtifactSet delegate,
AttributeContainerInternal target,
Transformation transformation,
ExtraExecutionGraphDependenciesResolverFactory dependenciesResolverFactory,
TransformationNodeRegistry transformationNodeRegistry
) {
this.componentIdentifier = componentIdentifier;
this.delegate = delegate;
this.attributes = target;
this.transformation = transformation;
this.resolverFactory = dependenciesResolverFactory;
this.transformationNodeRegistry = transformationNodeRegistry;
}
@Override
public ImmutableAttributes getTargetVariantAttributes() {
return attributes.asImmutable();
}
@Override
public DisplayName getTargetVariantName() {
return Describables.of(componentIdentifier, attributes);
}
@Override
public String toString() {
return getTargetVariantName().getCapitalizedDisplayName();
}
@Override
public Completion startVisit(BuildOperationQueue<RunnableBuildOperation> actions, AsyncArtifactListener listener) {
FileCollectionStructureVisitor.VisitType visitType = listener.prepareForVisit(this);
if (visitType == FileCollectionStructureVisitor.VisitType.NoContents) {
return visitor -> visitor.endVisitCollection(ConsumerProvidedResolvedVariant.this);
}
Map<ComponentArtifactIdentifier, TransformationResult> artifactResults = Maps.newConcurrentMap();
Completion result = delegate.startVisit(actions, new TransformingAsyncArtifactListener(transformation, actions, artifactResults, getDependenciesResolver(), transformationNodeRegistry));
return new TransformCompletion(result, attributes, artifactResults);
}
@Override
public void visitLocalArtifacts(LocalArtifactVisitor listener) {
// Cannot visit local artifacts until transform has been executed
}
@Override
public void visitDependencies(TaskDependencyResolveContext context) {
Collection<TransformationNode> scheduledNodes = transformationNodeRegistry.getOrCreate(delegate, transformation, getDependenciesResolver());
if (!scheduledNodes.isEmpty()) {
context.add(new DefaultTransformationDependency(scheduledNodes));
}
}
@Override
public Collection<TransformationNode> getScheduledNodes() {
// Only care about transformed project outputs. For everything else, calculate the value eagerly
AtomicReference<Boolean> hasProjectArtifacts = new AtomicReference<>(false);
delegate.visitLocalArtifacts(artifact -> {
if (artifact.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {
hasProjectArtifacts.set(true);
}
});
if (hasProjectArtifacts.get()) {
return transformationNodeRegistry.getOrCreate(delegate, transformation, getDependenciesResolver());
} else {
return Collections.emptySet();
}
}
private ExecutionGraphDependenciesResolver getDependenciesResolver() {
return resolverFactory.create(componentIdentifier);
}
}
|
package io.flowing.retail.order.flow.base;
import org.camunda.bpm.engine.impl.pvm.delegate.ActivityExecution;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import io.flowing.retail.order.domain.Order;
import io.flowing.retail.order.flow.FetchGoodsCommandPayload;
import io.flowing.retail.order.messages.Message;
import io.flowing.retail.order.messages.MessageSender;
import io.flowing.retail.order.persistence.OrderRepository;
/**
* Alternative implementation if you prefer having send/receive in one single ServiceTask
* which is often easier understood by "normal people"
*
*/
@Component
public class FetchGoodsPubSubAdapter extends PublishSubscribeAdapter {
@Autowired
private MessageSender messageSender;
@Autowired
private OrderRepository orderRepository;
@Override
public void execute(ActivityExecution context) throws Exception {
Order order = orderRepository.findById( //
(String)context.getVariable("orderId")).get();
String traceId = context.getProcessBusinessKey();
// publish
messageSender.send(new Message<FetchGoodsCommandPayload>( //
"FetchGoodsCommand", //
traceId, //
new FetchGoodsCommandPayload() //
.setRefId(order.getId()) //
.setItems(order.getItems())));
addMessageSubscription(context, "GoodsFetchedEvent");
}
}
|
package com.dzonesoft.exam.viewmodel.admin.file;
import lombok.Data;
@Data
public class UploadResultVM {
private String original;
private String name;
private String url;
private Long size;
private String type;
private String state;
}
|
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
import org.bouncycastle.asn1.x509.KeyPurposeId;
import org.bouncycastle.asn1.x509.X509Extensions;
import org.bouncycastle.asn1.x509.X509Name;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.x509.X509V3CertificateGenerator;
import javax.security.auth.x500.X500Principal;
import java.io.FileOutputStream;
import java.math.BigInteger;
import java.security.*;
import java.security.cert.X509Certificate;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.util.Date;
public class Main{
public static void main(String args[]) throws Exception{
X509Certificate selfSignedX509Certificate = generate();
System.out.println(selfSignedX509Certificate);
}
public static X509Certificate generate() throws Exception {
Security.addProvider(new BouncyCastleProvider());
// generate a key pair
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA", "BC");
keyPairGenerator.initialize(4096, new SecureRandom());
KeyPair keyPair = keyPairGenerator.generateKeyPair();
// build a certificate generator
X509V3CertificateGenerator certGen = new X509V3CertificateGenerator();
X500Principal dnName = new X500Principal("cn=example");
// add some options
certGen.setSerialNumber(BigInteger.valueOf(System.currentTimeMillis()));
certGen.setSubjectDN(new X509Name("dc=name"));
certGen.setIssuerDN(dnName); // use the same
// from
certGen.setNotBefore(Date.from(LocalDate.of(2021, 1, 1).
atStartOfDay(ZoneOffset.UTC).toInstant()));
// in 10 years
certGen.setNotAfter(Date.from(LocalDate.of(2031, 1, 1).
atStartOfDay(ZoneOffset.UTC).toInstant()));
certGen.setPublicKey(keyPair.getPublic());
certGen.setSignatureAlgorithm("SHA256WithRSAEncryption");
certGen.addExtension(X509Extensions.ExtendedKeyUsage,
true,new ExtendedKeyUsage(KeyPurposeId.id_kp_timeStamping));
// finally, sign the certificate with the private key of the same KeyPair
X509Certificate cert = certGen.generate(keyPair.getPrivate(), "BC");
KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
char[] pwdArray = "pass".toCharArray();
ks.load(null, pwdArray);
ks.setKeyEntry("app_key", keyPair.getPrivate(), pwdArray,new X509Certificate[]{cert});
ks.setCertificateEntry("app_cert",cert);
try (FileOutputStream fos = new FileOutputStream("KeyStore.jks")) {
ks.store(fos, pwdArray);
}
return cert;
}
}
|
package microDon.clients.models;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/**
 * Generic wrapper for a paginated API list response: one page of resources
 * plus the pagination links to the neighbouring pages.
 *
 * @param <T> the resource type contained in the response
 */
public class ListResponse<T> {

    // the resources on this page
    private List<T> resources;
    // links to previous/next pages; may be null when the API omits them
    private Pagination pagination;

    public ListResponse() {
    }

    public List<T> getResources() {
        return resources;
    }

    public void setResources(List<T> resources) {
        this.resources = resources;
    }

    public Pagination getPagination() {
        return pagination;
    }

    public void setPagination(Pagination pagination) {
        this.pagination = pagination;
    }

    /**
     * Pagination links of a list response.
     *
     * <p>FIX: declared {@code public static} — the original was a
     * {@code private} non-static inner class. Jackson cannot instantiate a
     * non-static inner class during deserialization, and the private type made
     * the value returned by {@link #getPagination()} unusable to callers.
     */
    public static class Pagination {

        @JsonProperty("previous_uri")
        private String previousUri;

        @JsonProperty("next_uri")
        private String nextUri;

        public Pagination() {
        }

        public String getPreviousUri() {
            return previousUri;
        }

        public void setPreviousUri(String previousUri) {
            this.previousUri = previousUri;
        }

        public String getNextUri() {
            return nextUri;
        }

        public void setNextUri(String nextUri) {
            this.nextUri = nextUri;
        }
    }
}
|
package org.csanchez.jenkins.plugins.kubernetes;
import com.cloudbees.plugins.credentials.CredentialsMatchers;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardCredentials;
import com.cloudbees.plugins.credentials.common.StandardListBoxModel;
import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder;
import hudson.AbortException;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.model.AbstractProject;
import hudson.model.Item;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.security.ACL;
import hudson.tasks.BuildWrapperDescriptor;
import hudson.util.ListBoxModel;
import jenkins.authentication.tokens.api.AuthenticationTokens;
import jenkins.model.Jenkins;
import jenkins.tasks.SimpleBuildWrapper;
import org.jenkinsci.Symbol;
import org.jenkinsci.plugins.kubernetes.auth.KubernetesAuthConfig;
import org.jenkinsci.plugins.kubernetes.auth.KubernetesAuthException;
import org.jenkinsci.plugins.kubernetes.auth.KubernetesAuth;
import org.kohsuke.stapler.AncestorInPath;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import javax.annotation.Nonnull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* @author <a href="mailto:nicolas.deloof@gmail.com">Nicolas De Loof</a>
*/
public class KubectlBuildWrapper extends SimpleBuildWrapper {

    // Kubernetes API server URL written into the generated kubeconfig.
    private String serverUrl;

    // Id of the Jenkins credentials used to authenticate against the cluster;
    // normalized to null when empty.
    private String credentialsId;

    // Cluster CA certificate (PEM); normalized to null when blank. When null,
    // the kubeconfig is built with certificate verification skipped (see setUp).
    private String caCertificate;

    @DataBoundConstructor
    public KubectlBuildWrapper(@Nonnull String serverUrl, @Nonnull String credentialsId,
                               @Nonnull String caCertificate) {
        this.serverUrl = serverUrl;
        // normalize empty/blank form inputs to null so later checks can use == null
        this.credentialsId = Util.fixEmpty(credentialsId);
        this.caCertificate = Util.fixEmptyAndTrim(caCertificate);
    }

    public String getServerUrl() {
        return serverUrl;
    }

    public String getCredentialsId() {
        return credentialsId;
    }

    public String getCaCertificate() {
        return caCertificate;
    }

    // Re-apply empty/blank normalization when configurations saved by older
    // plugin versions are deserialized.
    protected Object readResolve() {
        this.credentialsId = Util.fixEmpty(credentialsId);
        this.caCertificate = Util.fixEmptyAndTrim(caCertificate);
        return this;
    }

    /**
     * Writes a kubeconfig for the configured credentials into a temp file in
     * the workspace, exports it via the {@code KUBECONFIG} environment
     * variable, registers a disposer that deletes it after the build, and
     * sanity-checks the setup by running {@code kubectl version}.
     */
    @Override
    public void setUp(Context context, Run<?, ?> build, FilePath workspace, Launcher launcher, TaskListener listener, EnvVars initialEnvironment) throws IOException, InterruptedException {
        if (credentialsId == null) {
            throw new AbortException("No credentials defined to setup Kubernetes CLI");
        }
        workspace.mkdirs();
        FilePath configFile = workspace.createTempFile(".kube", "config");
        Set<String> tempFiles = new HashSet<>(Arrays.asList(configFile.getRemote()));
        context.env("KUBECONFIG", configFile.getRemote());
        // ensure the temporary kubeconfig is removed when the build finishes
        context.setDisposer(new CleanupDisposer(tempFiles));
        StandardCredentials credentials = CredentialsProvider.findCredentialById(credentialsId, StandardCredentials.class, build, Collections.emptyList());
        if (credentials == null) {
            throw new AbortException("No credentials found for id \"" + credentialsId + "\"");
        }
        // convert the generic Jenkins credentials into a Kubernetes auth provider
        KubernetesAuth auth = AuthenticationTokens.convert(KubernetesAuth.class, credentials);
        if (auth == null) {
            throw new AbortException("Unsupported Credentials type " + credentials.getClass().getName());
        }
        try (Writer w = new OutputStreamWriter(configFile.write(), StandardCharsets.UTF_8)) {
            // third argument: skip TLS verification when no CA certificate is configured
            w.write(auth.buildKubeConfig(new KubernetesAuthConfig(getServerUrl(), getCaCertificate(), getCaCertificate() == null)));
        } catch (KubernetesAuthException e) {
            throw new AbortException(e.getMessage());
        }
        // smoke-test the generated config; fail the build early (with captured
        // stdout/stderr) if kubectl cannot talk to the server
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteArrayOutputStream err = new ByteArrayOutputStream();
        String cmd = "kubectl version";
        int status = launcher.launch().cmdAsSingleString(cmd).stdout(out).stderr(err).quiet(true).envs("KUBECONFIG="+configFile.getRemote()).join();
        if (status != 0) {
            StringBuilder msgBuilder = new StringBuilder("Failed to run \"").append(cmd).append("\". Returned status code ").append(status).append(".\n");
            msgBuilder.append("stdout:\n").append(out).append("\n");
            msgBuilder.append("stderr:\n").append(err);
            throw new AbortException(msgBuilder.toString());
        }
    }

    @Extension
    @Symbol("kubeconfig")
    public static class DescriptorImpl extends BuildWrapperDescriptor {

        @Override
        public boolean isApplicable(AbstractProject<?, ?> item) {
            return true;
        }

        @Override
        public String getDisplayName() {
            return "Setup Kubernetes CLI (kubectl)";
        }

        // Populates the credentials drop-down; users without the required
        // permission only see the currently selected value.
        public ListBoxModel doFillCredentialsIdItems(@AncestorInPath Item item, @QueryParameter String serverUrl, @QueryParameter String credentialsId) {
            if (item == null
                    ? !Jenkins.get().hasPermission(Jenkins.ADMINISTER)
                    : !item.hasPermission(Item.EXTENDED_READ)) {
                return new StandardListBoxModel().includeCurrentValue(credentialsId);
            }
            StandardListBoxModel result = new StandardListBoxModel();
            result.includeEmptyValue();
            result.includeMatchingAs(
                    ACL.SYSTEM,
                    item,
                    StandardCredentials.class,
                    URIRequirementBuilder.fromUri(serverUrl).build(),
                    CredentialsMatchers.anyOf(
                            // legacy token-producer credentials and anything convertible
                            // to KubernetesAuth are both accepted
                            CredentialsMatchers.instanceOf(org.jenkinsci.plugins.kubernetes.credentials.TokenProducer.class),
                            AuthenticationTokens.matcher(KubernetesAuth.class)
                    )
            );
            return result;
        }
    }

    // Deletes the generated kubeconfig file(s) once the build completes.
    private static class CleanupDisposer extends Disposer {

        private static final long serialVersionUID = 3006113419319201358L;

        // remote paths (within the workspace) of files to delete at tear-down
        private Set<String> configFiles;

        public CleanupDisposer(Set<String> tempFiles) {
            this.configFiles = tempFiles;
        }

        @Override
        public void tearDown(Run<?, ?> build, FilePath workspace, Launcher launcher, TaskListener listener) throws IOException, InterruptedException {
            for (String configFile : configFiles) {
                workspace.child(configFile).delete();
            }
        }
    }
}
|
/*
* Copyright 2012-2018 Chronicle Map Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.map;
import com.google.common.collect.HashBiMap;
import com.google.common.primitives.Ints;
import net.openhft.chronicle.bytes.BytesMarshallable;
import net.openhft.chronicle.bytes.BytesStore;
import net.openhft.chronicle.core.values.IntValue;
import net.openhft.chronicle.core.values.LongValue;
import net.openhft.chronicle.set.Builder;
import net.openhft.chronicle.threads.NamedThreadFactory;
import net.openhft.chronicle.values.Values;
import org.junit.Ignore;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import static java.util.stream.Collectors.toSet;
import static org.junit.Assert.*;
@SuppressWarnings({"unchecked", "ResultOfMethodCallIgnored"})
public class ChronicleMapTest {
// Shared heap-allocated LongValue holding the constant 1 (set in the static block).
static final LongValue ONE = Values.newHeapInstance(LongValue.class);
// Monotonic suffix making each persistence-file name unique within this JVM.
static long count = 0;

static {
    ONE.setValue(1);
}

// Scratch builder; NOTE(review): apparently unused by the tests visible here
// (testLargerEntries shadows it with a local of the same name).
private StringBuilder sb = new StringBuilder();
/** Asserts that {@code keySet} contains exactly the keys in {@code expectedKeys}. */
static void assertKeySet(Set<Integer> keySet, int[] expectedKeys) {
    // box the expected keys into a set and compare whole collections
    Set<Integer> want = Arrays.stream(expectedKeys).boxed().collect(toSet());
    assertEquals(want, keySet);
}
/** Asserts that {@code values}, order-insensitively, equals {@code expectedValues}. */
static void assertValues(Collection<CharSequence> values, CharSequence[] expectedValues) {
    // materialize both sides as String lists, then compare after sorting
    List<String> want = new ArrayList<String>(expectedValues.length);
    for (CharSequence cs : expectedValues) {
        want.add(cs.toString());
    }
    List<String> got = new ArrayList<String>(values.size());
    for (CharSequence cs : values) {
        got.add(cs.toString());
    }
    Collections.sort(want);
    Collections.sort(got);
    assertEquals(want, got);
}
/**
 * Asserts that {@code entrySet} holds exactly the (expectedKeys[i], expectedValues[i])
 * pairs, comparing values by their String form.
 */
static void assertEntrySet(Set<Map.Entry<Integer, CharSequence>> entrySet, int[] expectedKeys, CharSequence[] expectedValues) {
    Set<Map.Entry<Integer, CharSequence>> want = new HashSet<Map.Entry<Integer, CharSequence>>();
    for (int i = 0; i < expectedKeys.length; i++) {
        want.add(new AbstractMap.SimpleEntry<>(expectedKeys[i], expectedValues[i].toString()));
    }
    // normalize the actual entries: CharSequence values become Strings so that
    // Map.Entry.equals compares them correctly
    Set<Map.Entry<Integer, CharSequence>> got = new HashSet<Map.Entry<Integer, CharSequence>>();
    for (Map.Entry<Integer, CharSequence> e : entrySet) {
        got.add(new AbstractMap.SimpleImmutableEntry<Integer, CharSequence>(
                e.getKey(), e.getValue().toString()));
    }
    assertEquals(want, got);
}
/** Asserts that {@code map} maps each expectedKeys[i] to expectedValues[i]. */
static void assertMap(Map<Integer, CharSequence> map, int[] expectedKeys, CharSequence[] expectedValues) {
    // check the size first, then each pair so a mismatch reports its position
    assertEquals(expectedKeys.length, map.size());
    int idx = 0;
    for (int key : expectedKeys) {
        assertEquals("On position " + idx,
                expectedValues[idx].toString(), map.get(key).toString());
        idx++;
    }
}
/** Returns a fresh native-reference {@link LongValue} flyweight (Chronicle Values). */
public static LongValue nativeLongValue() {
    return Values.newNativeReference(LongValue.class);
}
/** Returns a fresh native-reference {@link IntValue} flyweight (Chronicle Values). */
public static IntValue nativeIntValue() {
    return Values.newNativeReference(IntValue.class);
}
/**
 * Returns a unique path in the system temp directory for a persisted map;
 * the file (once created) is deleted on JVM exit.
 */
static File getPersistenceFile() {
    String tmpDir = System.getProperty("java.io.tmpdir");
    // nanoTime plus the static counter keeps names unique within this JVM
    File persisted = new File(tmpDir + "/chm-test" + System.nanoTime() + count++);
    persisted.deleteOnExit();
    return persisted;
}
/**
 * Prints the "Vm*" memory lines from /proc/self/status (Linux only; silently
 * does nothing elsewhere). Used by the performance tests to report footprint.
 */
private static void printStatus() {
    if (!new File("/proc/self/status").exists()) return;
    // FIX: use try-with-resources — the original leaked the reader when
    // readLine() threw, since close() was only reached on the success path
    try (BufferedReader br = new BufferedReader(new FileReader("/proc/self/status"))) {
        for (String line; (line = br.readLine()) != null; )
            if (line.startsWith("Vm"))
                System.out.print(line.replaceAll(" +", " ") + ", ");
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Builds an in-memory map with keys 1..noOfElements, each mapped to its
 * decimal-string form, for the keySet/values/entrySet view tests.
 *
 * @param noOfElements number of entries to insert
 * @return the populated map (caller is responsible for closing it)
 * @throws IOException declared for call-site compatibility
 */
private ChronicleMap<Integer, CharSequence> getViewTestMap(int noOfElements) throws IOException {
    ChronicleMap<Integer, CharSequence> map =
            ChronicleMapBuilder.of(Integer.class, CharSequence.class)
                    .entries(noOfElements * 2 + 100)
                    .averageValueSize((noOfElements + "").length())
                    .putReturnsNull(true)
                    .removeReturnsNull(true).create();
    // FIX: the original also filled expectedKeys/expectedValues arrays here and
    // then discarded them — dead code, removed.
    for (int i = 1; i <= noOfElements; i++) {
        map.put(i, "" + i);
    }
    return map;
}
@Test
public void testRemoveWithKey() {
    // In the default mode, remove(key) returns the removed value and shrinks the map.
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder
                         .of(CharSequence.class, CharSequence.class)
                         .entries(10)
                         .averageKey("key1").averageValue("one")
                         .minSegments(2).create()) {
        assertFalse(map.containsKey("key3"));
        map.put("key1", "one");
        map.put("key2", "two");
        assertEquals(2, map.size());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertFalse(map.containsKey("key3"));
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        // remove returns the previous value; the entry disappears
        final CharSequence result = map.remove("key1");
        assertEquals(1, map.size());
        assertEquals("one", result.toString());
        assertFalse(map.containsKey("key1"));
        assertEquals(null, map.get("key1"));
        assertEquals("two", map.get("key2").toString());
        assertFalse(map.containsKey("key3"));
        // lets add one more item for luck !
        map.put("key3", "three");
        assertEquals("three", map.get("key3").toString());
        assertTrue(map.containsKey("key3"));
        assertEquals(2, map.size());
        // and just for kicks we'll overwrite what we have
        map.put("key3", "overwritten");
        assertEquals("overwritten", map.get("key3").toString());
        assertTrue(map.containsKey("key3"));
        assertEquals(2, map.size());
    }
}
@Test
public void testByteArrayPersistenceFileReuse() throws IOException {
    // Re-open the same persisted file three times: an entry written on one
    // iteration must still be visible on the next.
    final File persistenceFile = Builder.getPersistenceFile();
    for (int i = 0; i < 3; i++) {
        try (ChronicleMap<byte[], byte[]> map = ChronicleMap.of(byte[].class, byte[].class)
                .entries(1)
                .averageKey("hello".getBytes()).averageValue("world".getBytes())
                .createPersistedTo(persistenceFile)) {
            // null on the first pass, "world" on subsequent passes
            byte[] o = map.get("hello".getBytes());
            System.out.println(o == null ? "null" : new String(o));
            map.put("hello".getBytes(), "world".getBytes());
        }
    }
    persistenceFile.delete();
}
@Test
public void testEqualsCharSequence() {
    // Two independently created maps with identical CharSequence contents
    // must compare equal.
    ChronicleMapBuilder<CharSequence, CharSequence> builder = ChronicleMapBuilder
            .of(CharSequence.class, CharSequence.class)
            .entries(1)
            .averageKey("hello").averageValue("world");
    try (final ChronicleMap<CharSequence, CharSequence> map1 = builder.create()) {
        map1.put("hello", "world");
        try (final ChronicleMap<CharSequence, CharSequence> map2 = builder.create()) {
            map2.put("hello", "world");
            assertEquals(map1, map2);
        }
    }
}
@Test
public void testEqualsCharArray() {
    // Same as testEqualsCharSequence but with char[] values, which need
    // content-based (not identity) comparison.
    char[] value = new char[5];
    Arrays.fill(value, 'X');
    ChronicleMapBuilder<CharSequence, char[]> builder = ChronicleMapBuilder
            .of(CharSequence.class, char[].class)
            .entries(1)
            .averageKey("hello").averageValue(value);
    try (final ChronicleMap<CharSequence, char[]> map1 = builder.create()) {
        map1.put("hello", value);
        try (final ChronicleMap<CharSequence, char[]> map2 = builder.create()) {
            map2.put("hello", value);
            assertEquals(map1, map2);
        }
    }
}
@Test
public void testEqualsByteArray() {
    // Same as testEqualsCharArray but with byte[] values.
    byte[] value = new byte[5];
    Arrays.fill(value, (byte) 'X');
    ChronicleMapBuilder<CharSequence, byte[]> builder = ChronicleMapBuilder
            .of(CharSequence.class, byte[].class)
            .entries(1)
            .averageKey("hello").averageValue(value);
    try (final ChronicleMap<CharSequence, byte[]> map1 = builder.create()) {
        map1.put("hello", value);
        try (final ChronicleMap<CharSequence, byte[]> map2 = builder.create()) {
            map2.put("hello", value);
            assertEquals(map1, map2);
        }
    }
}
@Test
public void testSize() {
    // size() must track every put and remove exactly, even when entries are
    // spread across many segments.
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMap.of(CharSequence.class, CharSequence.class)
                         .averageKey("key-1024").averageValue("value")
                         .minSegments(1024)
                         .entries(1024)
                         .removeReturnsNull(true).create()) {
        for (int i = 1; i < 1024; i++) {
            map.put("key" + i, "value");
            assertEquals(i, map.size());
        }
        // remove in reverse order, checking the size after each removal
        for (int i = 1023; i >= 1; ) {
            map.remove("key" + i);
            i--;
            assertEquals(i, map.size());
        }
    }
}
@Test
public void testRemoveInteger() throws IOException {
    // With the Object-typed interface and boxed Integers, put() must return
    // the previous value and remove() the removed value.
    int count = 300;
    try (final ChronicleMap<Object, Object> map = ChronicleMapBuilder
            .of(Object.class, Object.class)
            .averageKey(1).averageValue(1)
            .entries(count)
            .minSegments(2).create()) {
        for (int i = 1; i < count; i++) {
            map.put(i, i);
            assertEquals(i, map.size());
        }
        for (int i = count - 1; i >= 1; ) {
            // overwrite: returns the previous mapping for the key
            Integer j = (Integer) map.put(i, i);
            assertEquals(i, j.intValue());
            Integer j2 = (Integer) map.remove(i);
            assertEquals(i, j2.intValue());
            i--;
            assertEquals(i, map.size());
        }
    }
}
@Test
public void testRemoveWithKeyAndRemoveReturnsNull() {
    // With removeReturnsNull(true), remove() returns null even when an entry
    // was actually removed (avoids deserializing the old value).
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
                         .entries(10)
                         .averageKey("key1").averageValue("one")
                         .minSegments(2)
                         .removeReturnsNull(true).create()) {
        assertFalse(map.containsKey("key3"));
        map.put("key1", "one");
        map.put("key2", "two");
        assertEquals(2, map.size());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertFalse(map.containsKey("key3"));
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        // the entry is gone, but the return value is null by configuration
        final CharSequence result = map.remove("key1");
        assertEquals(null, result);
        assertEquals(1, map.size());
        assertFalse(map.containsKey("key1"));
        assertEquals(null, map.get("key1"));
        assertEquals("two", map.get("key2").toString());
        assertFalse(map.containsKey("key3"));
        // lets add one more item for luck !
        map.put("key3", "three");
        assertEquals("three", map.get("key3").toString());
        assertTrue(map.containsKey("key3"));
        assertEquals(2, map.size());
        // and just for kicks we'll overwrite what we have
        map.put("key3", "overwritten");
        assertEquals("overwritten", map.get("key3").toString());
        assertTrue(map.containsKey("key3"));
        assertEquals(2, map.size());
    }
}
@Test
public void testReplaceWithKey() {
    // replace(key, value) only replaces existing mappings and returns the
    // previous value, or null when the key is absent.
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
                         .entries(10)
                         .averageKey("key1").averageValue("one")
                         .minSegments(2).create()) {
        map.put("key1", "one");
        map.put("key2", "two");
        assertEquals(2, map.size());
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        final CharSequence result = map.replace("key1", "newValue");
        assertEquals("one", result.toString());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertEquals(2, map.size());
        assertEquals("newValue", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertFalse(map.containsKey("key3"));
        assertEquals(2, map.size());
        // lets add one more item for luck !
        map.put("key3", "three");
        assertEquals(3, map.size());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertTrue(map.containsKey("key3"));
        assertEquals("three", map.get("key3").toString());
        // and just for kicks we'll overwrite what we have
        map.put("key3", "overwritten");
        assertEquals("overwritten", map.get("key3").toString());
        assertTrue(map.containsKey("key1"));
        assertTrue(map.containsKey("key2"));
        assertTrue(map.containsKey("key3"));
        final CharSequence result2 = map.replace("key2", "newValue");
        assertEquals("two", result2.toString());
        assertEquals("newValue", map.get("key2").toString());
        // replacing an absent key is a no-op that returns null
        final CharSequence result3 = map.replace("rubbish", "newValue");
        assertEquals(null, result3);
        assertFalse(map.containsKey("rubbish"));
        assertEquals(3, map.size());
    }
}
@Test
public void testReplaceWithKeyAnd2Params() {
    // replace(key, oldValue, newValue) only succeeds when the current value
    // matches oldValue, returning whether the replacement happened.
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
                         .entries(10)
                         .averageKey("key1").averageValue("one")
                         .minSegments(2).create()) {
        map.put("key1", "one");
        map.put("key2", "two");
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        final boolean result = map.replace("key1", "one", "newValue");
        assertEquals(true, result);
        assertEquals("newValue", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        // lets add one more item for luck !
        map.put("key3", "three");
        assertEquals("three", map.get("key3").toString());
        // and just for kicks we'll overwrite what we have
        map.put("key3", "overwritten");
        assertEquals("overwritten", map.get("key3").toString());
        final boolean result2 = map.replace("key2", "two", "newValue2");
        assertEquals(true, result2);
        assertEquals("newValue2", map.get("key2").toString());
        // absent key: no replacement
        final boolean result3 = map.replace("newKey", "", "newValue");
        assertEquals(false, result3);
        // current value matches: replacement (to the same value) succeeds
        final boolean result4 = map.replace("key2", "newValue2", "newValue2");
        assertEquals(true, result4);
    }
}
// i7-3970X CPU @ 3.50GHz, hex core: -verbose:gc -Xmx64m
// to tmpfs file system
// 10M users, updated 12 times. Throughput 19.3 M ops/sec, no GC!
// 50M users, updated 12 times. Throughput 19.8 M ops/sec, no GC!
// 100M users, updated 12 times. Throughput 19.0M ops/sec, no GC!
// 200M users, updated 12 times. Throughput 18.4 M ops/sec, no GC!
// 400M users, updated 12 times. Throughput 18.4 M ops/sec, no GC!
// to ext4 file system.
// 10M users, updated 12 times. Throughput 17.7 M ops/sec, no GC!
// 50M users, updated 12 times. Throughput 16.5 M ops/sec, no GC!
// 100M users, updated 12 times. Throughput 15.9 M ops/sec, no GC!
// 200M users, updated 12 times. Throughput 15.4 M ops/sec, no GC!
// 400M users, updated 12 times. Throughput 7.8 M ops/sec, no GC!
// 600M users, updated 12 times. Throughput 5.8 M ops/sec, no GC!
// dual E5-2650v2 @ 2.6 GHz, 128 GB: -verbose:gc -Xmx32m
// to tmpfs
// TODO small GC on startup should be tidied up, [GC 9216K->1886K(31744K), 0.0036750 secs]
// 10M users, updated 16 times. Throughput 33.0M ops/sec, VmPeak: 5373848 kB, VmRSS: 544252 kB
// 50M users, updated 16 times. Throughput 31.2 M ops/sec, VmPeak: 9091804 kB, VmRSS: 3324732 kB
// 250M users, updated 16 times. Throughput 30.0 M ops/sec, VmPeak: 24807836 kB, VmRSS: 14329112 kB
// 1000M users, updated 16 times, Throughput 24.1 M ops/sec, VmPeak: 85312732 kB, VmRSS: 57165952 kB
// 2500M users, updated 16 times, Throughput 23.5 M ops/sec, VmPeak: 189545308 kB, VmRSS: 126055868 kB
// to ext4
// 10M users, updated 16 times. Throughput 28.4 M ops/sec, VmPeak: 5438652 kB, VmRSS: 544624 kB
// 50M users, updated 16 times. Throughput 28.2 M ops/sec, VmPeak: 9091804 kB, VmRSS: 9091804 kB
// 250M users, updated 16 times. Throughput 26.1 M ops/sec, VmPeak: 24807836 kB, VmRSS: 24807836 kB
// 1000M users, updated 16 times, Throughput 1.3 M ops/sec, TODO FIX this
@Test
public void testRemoveWithKeyAndValue() {
    // remove(key, value) only removes when the current value matches.
    try (final ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
                         .entries(10)
                         .averageKey("key1").averageValue("one")
                         .minSegments(2).create()) {
        map.put("key1", "one");
        map.put("key2", "two");
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        // a false remove: the value does not match, so nothing is removed
        final boolean wasRemoved1 = map.remove("key1", "three");
        assertFalse(wasRemoved1);
        // FIX: the original accidentally used the three-argument
        // assertEquals(message, expected, actual) overload with null / the
        // expected value in the message slot; assert the values directly.
        assertEquals("one", map.get("key1").toString());
        assertEquals("two", map.get("key2").toString());
        map.put("key1", "one");
        final boolean wasRemoved2 = map.remove("key1", "three");
        assertFalse(wasRemoved2);
        // lets add one more item for luck !
        map.put("key3", "three");
        assertEquals("three", map.get("key3").toString());
        // and just for kicks we'll overwrite what we have
        map.put("key3", "overwritten");
        assertEquals("overwritten", map.get("key3").toString());
    }
}
@Test
public void testAcquireWithNullContainer() {
    // acquireUsing() creates a zero-initialized entry when absent; acquiring
    // again with a null "using" container must still yield a readable value.
    try (ChronicleMap<CharSequence, LongValue> map =
                 ChronicleMapBuilder.of(CharSequence.class, LongValue.class)
                         .averageKey("key")
                         .entries(1000)
                         .entryAndValueOffsetAlignment(4)
                         .create()) {
        map.acquireUsing("key", Values.newNativeReference(LongValue.class));
        assertEquals(0, map.acquireUsing("key", null).getValue());
    }
}
// i7-3970X CPU @ 3.50GHz, hex core: -Xmx30g -verbose:gc
// 10M users, updated 12 times. Throughput 16.2 M ops/sec, longest [Full GC 853669K->852546K(3239936K), 0.8255960 secs]
// 50M users, updated 12 times. Throughput 13.3 M ops/sec, longest [Full GC 5516214K->5511353K(13084544K), 3.5752970 secs]
// 100M users, updated 12 times. Throughput 11.8 M ops/sec, longest [Full GC 11240703K->11233711K(19170432K), 5.8783010 secs]
// 200M users, updated 12 times. Throughput 4.2 M ops/sec, longest [Full GC 25974721K->22897189K(27962048K), 21.7962600 secs]
// dual E5-2650v2 @ 2.6 GHz, 128 GB: -verbose:gc -Xmx100g
// 10M users, updated 16 times. Throughput 155.3 M ops/sec, VmPeak: 113291428 kB, VmRSS: 9272176 kB, [Full GC 1624336K->1616457K(7299072K), 2.5381610 secs]
// 50M users, updated 16 times. Throughput 120.4 M ops/sec, VmPeak: 113291428 kB, VmRSS: 28436248 kB [Full GC 6545332K->6529639K(18179584K), 6.9053810 secs]
// 250M users, updated 16 times. Throughput 114.1 M ops/sec, VmPeak: 113291428 kB, VmRSS: 76441464 kB [Full GC 41349527K->41304543K(75585024K), 17.3217490 secs]
// 1000M users, OutOfMemoryError.
@Test
public void testGetWithNullContainer() {
    // getUsing() with a null "using" container must still return the value of
    // an existing (acquire-created, zero-initialized) entry.
    try (ChronicleMap<CharSequence, LongValue> map =
                 ChronicleMapBuilder.of(CharSequence.class, LongValue.class)
                         .averageKey("key")
                         .entries(10)
                         .entryAndValueOffsetAlignment(4)
                         .create()) {
        map.acquireUsing("key", Values.newNativeReference(LongValue.class));
        assertEquals(0, map.getUsing("key", null).getValue());
    }
}
@Test
public void testGetWithoutAcquireFirst() {
    // Unlike acquireUsing(), getUsing() must not create a missing entry:
    // it returns null for an absent key.
    try (ChronicleMap<CharSequence, LongValue> map =
                 ChronicleMapBuilder.of(CharSequence.class, LongValue.class)
                         .averageKey("key")
                         .entries(10)
                         .entryAndValueOffsetAlignment(4)
                         .create()) {
        assertNull(map.getUsing("key", Values.newNativeReference(LongValue.class)));
    }
}
@Test
public void testAcquireAndGet() throws IOException, ClassNotFoundException,
        IllegalAccessException, InstantiationException {
    // Exercises acquireUsing/getUsing with native value flyweights, repeated
    // three times with different alignment settings (8, default, 8). Each pass
    // increments every entry's value atomically and checks it round-trips.
    int entries = 3/*00 * 1000*/;
    try (ChronicleMap<CharSequence, LongValue> map2 = ChronicleMapBuilder.of(CharSequence.class,
            LongValue.class)
            .entries((long) entries)
            .minSegments(1)
            .averageKeySize(10)
            .entryAndValueOffsetAlignment(8)
            .create()) {
        LongValue value4 = Values.newNativeReference(LongValue.class);
        LongValue value22 = Values.newNativeReference(LongValue.class);
        LongValue value32 = Values.newNativeReference(LongValue.class);
        for (int j2 = 1; j2 <= 3; j2++) {
            for (int i2 = 0; i2 < entries; i2++) {
                CharSequence userCS2 = getUserCharSequence(i2);
                if (j2 > 1) {
                    // after the first pass the entry must already exist
                    assertNotNull(userCS2.toString(), map2.getUsing(userCS2, value4));
                } else {
                    // first pass: create the entry (zero-initialized)
                    map2.acquireUsing(userCS2, value4);
                }
                if (i2 >= 1)
                    assertTrue(userCS2.toString(), map2.containsKey(getUserCharSequence(1)));
                assertEquals(userCS2.toString(), j2 - 1, value4.getValue());
                value4.addAtomicValue(1);
                // acquire/get return the passed-in flyweight, now pointing at the entry
                assertEquals(value22, map2.acquireUsing(userCS2, value22));
                assertEquals(j2, value22.getValue());
                assertEquals(value32, map2.getUsing(userCS2, value32));
                assertEquals(j2, value32.getValue());
            }
        }
        // same scenario with default alignment
        try (ChronicleMap<CharSequence, LongValue> map1 = ChronicleMapBuilder.of(CharSequence.class,
                LongValue.class)
                .entries((long) entries)
                // .minSegments(1)
                .averageKeySize(10)
                // .entryAndValueOffsetAlignment(8)
                .create()) {
            LongValue value1 = Values.newNativeReference(LongValue.class);
            LongValue value21 = Values.newNativeReference(LongValue.class);
            LongValue value31 = Values.newNativeReference(LongValue.class);
            for (int j1 = 1; j1 <= 3; j1++) {
                for (int i1 = 0; i1 < entries; i1++) {
                    CharSequence userCS1 = getUserCharSequence(i1);
                    if (j1 > 1) {
                        assertNotNull(userCS1.toString(), map1.getUsing(userCS1, value1));
                    } else {
                        map1.acquireUsing(userCS1, value1);
                    }
                    if (i1 >= 1)
                        assertTrue(userCS1.toString(), map1.containsKey(getUserCharSequence(1)));
                    assertEquals(userCS1.toString(), j1 - 1, value1.getValue());
                    value1.addAtomicValue(1);
                    assertEquals(value21, map1.acquireUsing(userCS1, value21));
                    assertEquals(j1, value21.getValue());
                    assertEquals(value31, map1.getUsing(userCS1, value31));
                    assertEquals(j1, value31.getValue());
                }
            }
        }
        // same scenario again, alignment 8 on a fresh map
        try (ChronicleMap<CharSequence, LongValue> map = ChronicleMapBuilder.of(CharSequence
                .class, LongValue.class)
                .entries((long) entries)
                .minSegments(1)
                .averageKeySize(10)
                .entryAndValueOffsetAlignment(8)
                .create()) {
            LongValue value = Values.newNativeReference(LongValue.class);
            LongValue value2 = Values.newNativeReference(LongValue.class);
            LongValue value3 = Values.newNativeReference(LongValue.class);
            for (int j = 1; j <= 3; j++) {
                for (int i = 0; i < entries; i++) {
                    CharSequence userCS = getUserCharSequence(i);
                    if (j > 1) {
                        assertNotNull(userCS.toString(), map.getUsing(userCS, value));
                    } else {
                        map.acquireUsing(userCS, value);
                    }
                    if (i >= 1)
                        assertTrue(userCS.toString(), map.containsKey(getUserCharSequence(1)));
                    assertEquals(userCS.toString(), j - 1, value.getValue());
                    value.addAtomicValue(1);
                    assertEquals(value2, map.acquireUsing(userCS, value2));
                    assertEquals(j, value2.getValue());
                    assertEquals(value3, map.getUsing(userCS, value3));
                    assertEquals(j, value3.getValue());
                }
            }
        }
    }
}
@Test
public void testAcquireFromMultipleThreads() throws InterruptedException {
    // 10 threads each atomically increment the same acquired entry 10000
    // times; the final value must be threads * iterations (no lost updates).
    // The scenario is repeated with value alignments 1, 4 and 8.
    int entries = 1000 * 1000;
    try (ChronicleMap<CharSequence, LongValue> map2 = ChronicleMapBuilder.of(CharSequence.class,
            LongValue.class)
            .entries((long) entries)
            .minSegments(128)
            .averageKeySize(10)
            .entryAndValueOffsetAlignment(1)
            .create()) {
        CharSequence key2 = getUserCharSequence(0);
        map2.acquireUsing(key2, Values.newNativeReference(LongValue.class));
        int iterations2 = 10000;
        int noOfThreads2 = 10;
        // barrier makes all incrementers start at (roughly) the same instant
        CyclicBarrier barrier2 = new CyclicBarrier(noOfThreads2);
        Thread[] threads2 = new Thread[noOfThreads2];
        for (int t2 = 0; t2 < noOfThreads2; t2++) {
            threads2[t2] = new Thread(new IncrementRunnable(map2, key2, iterations2, barrier2));
            threads2[t2].start();
        }
        for (int t2 = 0; t2 < noOfThreads2; t2++) {
            threads2[t2].join();
        }
        assertEquals(noOfThreads2 * iterations2,
                map2.acquireUsing(key2, Values.newNativeReference(LongValue.class)).getValue());
        // alignment 4
        try (ChronicleMap<CharSequence, LongValue> map1 = ChronicleMapBuilder.of(CharSequence
                .class, LongValue.class)
                .entries((long) entries)
                .minSegments(128)
                .averageKeySize(10)
                .entryAndValueOffsetAlignment(4)
                .create()) {
            CharSequence key1 = getUserCharSequence(0);
            map1.acquireUsing(key1, Values.newNativeReference(LongValue.class));
            int iterations1 = 10000;
            int noOfThreads1 = 10;
            CyclicBarrier barrier1 = new CyclicBarrier(noOfThreads1);
            Thread[] threads1 = new Thread[noOfThreads1];
            for (int t1 = 0; t1 < noOfThreads1; t1++) {
                threads1[t1] = new Thread(new IncrementRunnable(map1, key1, iterations1, barrier1));
                threads1[t1].start();
            }
            for (int t1 = 0; t1 < noOfThreads1; t1++) {
                threads1[t1].join();
            }
            assertEquals(noOfThreads1 * iterations1,
                    map1.acquireUsing(key1, Values.newNativeReference(LongValue.class)).getValue());
            // alignment 8
            try (ChronicleMap<CharSequence, LongValue> map = ChronicleMapBuilder.of(CharSequence
                    .class, LongValue.class)
                    .entries((long) entries)
                    .minSegments(128)
                    .averageKeySize(10)
                    .entryAndValueOffsetAlignment(8)
                    .create()) {
                CharSequence key = getUserCharSequence(0);
                map.acquireUsing(key, Values.newNativeReference(LongValue.class));
                int iterations = 10000;
                int noOfThreads = 10;
                CyclicBarrier barrier = new CyclicBarrier(noOfThreads);
                Thread[] threads = new Thread[noOfThreads];
                for (int t = 0; t < noOfThreads; t++) {
                    threads[t] = new Thread(new IncrementRunnable(map, key, iterations, barrier));
                    threads[t].start();
                }
                for (int t = 0; t < noOfThreads; t++) {
                    threads[t].join();
                }
                assertEquals(noOfThreads * iterations,
                        map.acquireUsing(key, Values.newNativeReference(LongValue.class)).getValue());
            }
        }
    }
}
@Test
public void testLargerEntries() {
    // Fill maps with ~512-byte entries across several segment counts to
    // exercise larger-than-typical entry allocation.
    for (int segments : new int[]{128, 256, 512, 1024}) {
        int entries = 100000, entrySize = 512;
        ChronicleMapBuilder<CharSequence, CharSequence> builder = ChronicleMapBuilder
                .of(CharSequence.class, CharSequence.class)
                .entries(entries * 11 / 10)
                .actualSegments(segments)
                .averageKeySize(14)
                .averageValueSize(entrySize - 14 - 2);
        try (ChronicleMap<CharSequence, CharSequence> map = builder.create()) {
            // FIX: renamed from "sb", which shadowed the instance field of the
            // same name; pads each value to roughly entrySize bytes
            StringBuilder value = new StringBuilder();
            while (value.length() < entrySize - 14 - 2)
                value.append('+');
            for (int i = 0; i < entries; i++) {
                map.put("us:" + i, value);
            }
        }
    }
}
@Test
@Ignore("Performance test")
public void testAcquirePerf256() {
    // Throughput benchmark for ~240/256-byte entries with a 75%-read /
    // 25%-acquire mix. Kept @Ignore'd; run manually only.
    // NOTE(review): despite the "threads" variable, the work runs serially on
    // the calling thread here (compare testAcquirePerf, which uses an executor).
    throw new UnsupportedOperationException();
}
/**
 * Micro-benchmark: the executor-based variant of {@link #testAcquirePerf256()}.
 * Each of {@code threads} tasks walks a strided slice of the key space
 * (stride {@code independence}) doing 75% reads / 25% in-place updates, and
 * the aggregate throughput is printed once per pass.
 */
@Test
@Ignore("Performance test")
public void testAcquirePerf()
throws IOException, ClassNotFoundException, IllegalAccessException,
InstantiationException, InterruptedException, ExecutionException {
// int runs = Integer.getInteger("runs", 10);
int procs = 1; // Runtime.getRuntime().availableProcessors();
int threads = procs * 3;
ExecutorService es = Executors.newFixedThreadPool(procs, new NamedThreadFactory("test"));
for (int runs : new int[]{1, /*10, 250, 500, 1000, 2500*/}) {
for (int entrySize : new int[]{240, 256}) {
// Pad the value so key + value roughly fill the target entry size.
int valuePadding = entrySize - 16;
char[] chars = new char[valuePadding];
Arrays.fill(chars, 'x');
final StringBuilder value0 = new StringBuilder();
value0.append(chars);
for (int segments : new int[]{/*128, 256, */512/*, 1024, 2048*/}) {
final long entries = runs * 1000 * 1000L;
ChronicleMapBuilder<CharSequence, CharSequence> builder = ChronicleMapBuilder
.of(CharSequence.class, CharSequence.class)
.entries(entries)
.actualSegments(segments)
.averageKeySize(14)
.averageValueSize(value0.length() + 4);
// File tmpFile = File.createTempFile("testAcquirePerf", ".deleteme");
// tmpFile.deleteOnExit();
try (final ChronicleMap<CharSequence, CharSequence> map =
builder.create()) {
int count = runs > 500 ? 2 : 3;
final int independence = Math.min(procs, runs > 500 ? 8 : 4);
System.out.println("\nKey size: " + runs + " Million entries. " + builder);
for (int j = 0; j < count; j++) {
long start = System.currentTimeMillis();
List<Future> futures = new ArrayList<>();
for (int i = 0; i < threads; i++) {
final int t = i;
futures.add(es.submit(new Runnable() {
@Override
public void run() {
Random rand = new Random(t);
StringBuilder key = new StringBuilder();
StringBuilder value = new StringBuilder();
long next = 50 * 1000 * 1000; // NOTE(review): never read in this variant
// use a factor to give up to 10 digit numbers.
int factor = Math.max(1,
(int) ((10 * 1000 * 1000 * 1000L - 1) / entries));
// NOTE(review): upper bound 'entries + independence - 1' lets each
// strided walk slightly overrun 'entries' — confirm intentional.
for (long j = t % independence;
j < entries + independence - 1;
j += independence) {
key.setLength(0);
key.append("us:");
key.append(j * factor);
// 75% reads, 25% writes.
if (rand.nextInt(4) > 0) {
map.getUsing(key, value);
} else {
// Grow/shrink the value in place while holding the entry context.
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext(key, value)) {
if (value.length() < value0.length() - 1)
value.append(value0);
else if (value.length() > value0.length())
value.setLength(value0.length() - 1);
else
value.append('+');
}
}
}
}
}));
}
// Wait for all workers before reading the clock.
for (Future future : futures) {
future.get();
}
long time = System.currentTimeMillis() - start;
System.out.printf("EntrySize: %,d Entries: %,d M " +
"Segments: %,d Throughput %.1f M ops/sec%n",
entrySize, runs, segments,
threads * entries / independence / 1000.0 / time);
}
}
printStatus();
}
}
}
es.shutdown();
es.awaitTermination(1, TimeUnit.MINUTES);
}
/**
 * Micro-benchmark of query-context reads vs. update-locked increments on a
 * persisted {@code CharSequence -> LongValue} map. Roughly 75% of operations
 * read the counter without taking a lock; the rest take the update lock and
 * atomically add 1 (inserting {@code ONE} — a constant defined elsewhere in
 * this class — when the entry is absent).
 */
@Test
@Ignore("Performance test")
public void testAcquireLockedPerf()
throws IOException, InterruptedException, ExecutionException {
// int runs = Integer.getInteger("runs", 10);
int procs = Runtime.getRuntime().availableProcessors();
if (procs > 8) procs--;
int threads = procs * 3;
ExecutorService es = Executors.newFixedThreadPool(procs, new NamedThreadFactory("test"));
for (int runs : new int[]{1, 2, 5, 10, 25, 50, 100, 500, 1000, 2500}) {
final long entries = runs * 1000 * 1000L;
ChronicleMapBuilder<CharSequence, LongValue> builder = ChronicleMapBuilder
.of(CharSequence.class, LongValue.class)
.entries(entries)
.entryAndValueOffsetAlignment(8)
.actualSegments(256)
.averageKeySize(13);
File tmpFile = File.createTempFile("testAcquirePerf", ".deleteme");
tmpFile.deleteOnExit();
try (ChronicleMap<CharSequence, LongValue> map = builder.createPersistedTo(tmpFile)) {
int count = runs >= 5 ? 2 : 3;
final int independence = Math.min(procs, runs > 500 ? 8 : 4);
System.out.println("\nKey size: " + runs + " Million entries. " + builder);
for (int j = 0; j < count; j++) {
long start = System.currentTimeMillis();
List<Future> futures = new ArrayList<>();
for (int i = 0; i < threads; i++) {
final int t = i;
futures.add(es.submit(new Runnable() {
@Override
public void run() {
LongValue value = nativeLongValue();
StringBuilder sb = new StringBuilder();
long next = 50 * 1000 * 1000; // NOTE(review): never read in this variant
Random rand = new Random();
// use a factor to give up to 10 digit numbers.
int factor = Math.max(1,
(int) ((10 * 1000 * 1000 * 1000L - 1) / entries));
for (long j = t % independence; j < entries + independence - 1;
j += independence) {
sb.setLength(0);
sb.append("us:");
sb.append(j * factor);
long n;
// 75% read
if (rand.nextBoolean() || rand.nextBoolean()) {
// Lock-free read path: volatile-read the raw counter bytes.
try (ExternalMapQueryContext<?, LongValue, ?> c =
map.queryContext(sb)) {
MapEntry<?, LongValue> entry = c.entry();
if (entry != null) {
// Attempt to pass abstraction hierarchies
net.openhft.chronicle.hash.Data<LongValue> v =
entry.value();
n = v.bytes().readVolatileLong(v.offset()) + 1;
} else {
n = 1;
}
}
} else {
// Write path: take the update lock, then CAS-add in place
// or insert the initial value when absent.
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c =
map.queryContext(sb)) {
c.updateLock().lock();
MapEntry<?, LongValue> entry = c.entry();
if (entry != null) {
net.openhft.chronicle.hash.Data<LongValue> v = entry.value();
n = ((BytesStore) v.bytes()).addAndGetLong(
v.offset(), 1);
} else {
c.insert(c.absentEntry(), c.wrapValueAsData(ONE));
n = 1;
}
}
}
}
}
}));
}
for (Future future : futures) {
future.get();
}
long time = System.currentTimeMillis() - start;
System.out.printf("Throughput %.1f M ops/sec%n",
threads * entries / independence / 1000.0 / time);
}
}
printStatus();
tmpFile.delete();
}
es.shutdown();
es.awaitTermination(1, TimeUnit.MINUTES);
}
/**
 * Micro-benchmark of {@code acquireContext} increments on a persisted
 * {@code LongValue -> LongValue} map: every operation acquires the entry
 * (creating it on first touch) and adds 1 to the counter in place.
 * Thread 0 logs progress every 50M keys.
 */
@Test
@Ignore("Performance test")
public void testAcquireLockedLLPerf()
throws IOException, ClassNotFoundException, IllegalAccessException,
InstantiationException, InterruptedException, ExecutionException {
// int runs = Integer.getInteger("runs", 10);
int procs = Runtime.getRuntime().availableProcessors();
int threads = procs * 3; // runs > 100 ? procs / 2 : procs;
ExecutorService es = Executors.newFixedThreadPool(procs, new NamedThreadFactory("test"));
for (int runs : new int[]{10, 50, 100, 250, 500, 1000, 2500}) {
// JAVA 8 produces more garbage than previous versions for internal work.
// System.gc();
final long entries = runs * 1000 * 1000L;
ChronicleMapBuilder<LongValue, LongValue> builder = ChronicleMapBuilder
.of(LongValue.class, LongValue.class)
.entries(entries)
.actualSegments(8 * 1024);
File tmpFile = File.createTempFile("testAcquirePerf", ".deleteme");
tmpFile.deleteOnExit();
try (ChronicleMap<LongValue, LongValue> map = builder.createPersistedTo(tmpFile)) {
int count = runs > 500 ? runs > 1200 ? 3 : 5 : 5;
final int independence = Math.min(procs, runs > 500 ? 8 : 4);
System.out.println("\nKey size: " + runs + " Million entries. " + builder);
for (int j = 0; j < count; j++) {
long start = System.currentTimeMillis();
List<Future> futures = new ArrayList<Future>();
for (int i = 0; i < threads; i++) {
final int t = i;
futures.add(es.submit(new Runnable() {
@Override
public void run() {
// Heap key, native (off-heap-backed) value reference.
LongValue key = Values.newHeapInstance(LongValue.class);
LongValue value = nativeLongValue();
long next = 50 * 1000 * 1000;
// use a factor to give up to 10 digit numbers.
int factor = Math.max(1,
(int) ((10 * 1000 * 1000 * 1000L - 1) / entries));
for (long j = t % independence; j < entries + independence - 1;
j += independence) {
key.setValue(j * factor);
long n;
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext(key, value)) {
n = value.addValue(1);
}
// Sanity check: counter must stay small (bounded by passes
// * threads touching the same key) and never go negative.
assert n > 0 && n < 1000 : "Counter corrupted " + n;
if (t == 0 && j >= next) {
long size = map.longSize();
if (size < 0) throw new AssertionError("size: " + size);
System.out.println(j + ", size: " + size);
next += 50 * 1000 * 1000;
}
}
}
}));
}
for (Future future : futures) {
future.get();
}
long time = System.currentTimeMillis() - start;
System.out.printf("Throughput %.1f M ops/sec%n",
threads * entries / independence / 1000.0 / time);
}
}
printStatus();
tmpFile.delete();
}
es.shutdown();
es.awaitTermination(1, TimeUnit.MINUTES);
}
/**
 * Baseline micro-benchmark: the same keyed-counter workload as the Chronicle
 * variants, but against a plain {@link ConcurrentHashMap} of
 * {@link AtomicInteger} counters, for comparison.
 */
@Test
@Ignore("Performance test")
public void testCHMAcquirePerf() throws IOException, ClassNotFoundException, IllegalAccessException, InstantiationException, InterruptedException {
for (int runs : new int[]{10, 50, 250, 500, 1000, 2500}) {
System.out.println("Testing " + runs + " million entries");
final long entries = runs * 1000 * 1000L;
int procs = Runtime.getRuntime().availableProcessors();
int threads = procs * 2;
int count = runs > 500 ? runs > 1200 ? 1 : 2 : 5;
final int independence = Math.min(procs, runs > 500 ? 8 : 4);
for (int j = 0; j < count; j++) {
// Presized to avoid rehashing during the run.
final ConcurrentMap<String, AtomicInteger> map = new ConcurrentHashMap<String, AtomicInteger>((int) (entries * 5 / 4), 1.0f, 1024);
long start = System.currentTimeMillis();
ExecutorService es = Executors.newFixedThreadPool(procs, new NamedThreadFactory("test"));
for (int i = 0; i < threads; i++) {
final int t = i;
es.submit(new Runnable() {
@Override
public void run() {
StringBuilder sb = new StringBuilder();
int next = 50 * 1000 * 1000;
// use a factor to give up to 10 digit numbers.
int factor = Math.max(1, (int) ((10 * 1000 * 1000 * 1000L - 1) / entries));
for (long i = t % independence; i < entries; i += independence) {
sb.setLength(0);
sb.append("u:");
sb.append(i * factor);
String key = sb.toString();
// Racy get-then-putIfAbsent: safe here because counters are
// never removed, so the second get() cannot return null.
// NOTE(review): 'count' shadows the pass-count local above.
AtomicInteger count = map.get(key);
if (count == null) {
map.putIfAbsent(key, new AtomicInteger());
count = map.get(key);
}
count.getAndIncrement();
// NOTE(review): '==' (not '>=') means this progress line is
// skipped when the stride does not land exactly on 'next'.
if (t == 0 && i == next) {
System.out.println(i);
next += 50 * 1000 * 1000;
}
}
}
});
}
es.shutdown();
es.awaitTermination(10, TimeUnit.MINUTES);
printStatus();
long time = System.currentTimeMillis() - start;
System.out.printf("Throughput %.1f M ops/sec%n", threads * entries / 1000.0 / time);
}
}
}
/**
 * Formats a "u:&lt;n&gt;" key into the shared {@code sb} builder and returns it;
 * the multiplier spreads the ids so up to 10-digit user numbers are exercised.
 * The returned CharSequence is the shared builder — valid only until the next call.
 */
private CharSequence getUserCharSequence(int i) {
    sb.setLength(0);
    sb.append("u:").append(i * 9876); // test 10 digit user numbers.
    return sb;
}
/**
 * Exercises the put/get/remove round-trip with {@code putReturnsNull} and
 * {@code removeReturnsNull} enabled, so every mutation returns null and the
 * state is verified through getUsing instead. Three passes confirm reuse of
 * the freed entries.
 */
@Test
public void testPutAndRemove() throws IOException, ClassNotFoundException,
        IllegalAccessException, InstantiationException {
    final int entries = 100 * 1000;
    try (ChronicleMap<CharSequence, CharSequence> map =
                 ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
                         .entries(entries)
                         .minSegments(16)
                         .averageKeySize("user:".length() + 6)
                         .averageValueSize("value:".length() + 6)
                         .putReturnsNull(true)
                         .removeReturnsNull(true).create()) {
        StringBuilder key = new StringBuilder();
        StringBuilder expected = new StringBuilder();
        StringBuilder actual = new StringBuilder();
        for (int pass = 1; pass <= 3; pass++) {
            for (int i = 0; i < entries; i++) {
                key.setLength(0);
                key.append("user:").append(i);
                expected.setLength(0);
                expected.append("value:").append(i);
                // Absent before the put, present after, absent after the remove.
                assertNull(map.getUsing(key, expected));
                assertNull(map.put(key, expected));
                assertNotNull(map.getUsing(key, actual));
                assertEquals(expected.toString(), actual.toString());
                assertNull(map.remove(key));
                assertNull(map.getUsing(key, expected));
            }
        }
    }
}
/**
 * Removing a key directly on the map must be visible both through view
 * collections obtained before the removal and through freshly obtained views.
 * (Fixed: redundant trailing ';' in the try-with-resources list, for
 * consistency with the sibling view tests.)
 */
@Test
public void mapRemoveReflectedInViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entrySet = map.entrySet();
        Set<Integer> keySet = map.keySet();
        Collection<CharSequence> values = map.values();
        map.remove(2);
        assertMap(map, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertEntrySet(entrySet, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertEntrySet(map.entrySet(), new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertKeySet(keySet, new int[]{1, 3});
        assertKeySet(map.keySet(), new int[]{1, 3});
        assertValues(values, new CharSequence[]{"1", "3"});
        assertValues(map.values(), new CharSequence[]{"1", "3"});
    }
}
/**
 * A put() on the map must be visible through view collections obtained
 * before the put as well as through freshly obtained views.
 */
@Test
public void mapPutReflectedInViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entriesView = map.entrySet();
        Set<Integer> keysView = map.keySet();
        Collection<CharSequence> valuesView = map.values();
        map.put(4, "4");
        assertMap(map, new int[]{4, 2, 3, 1}, new CharSequence[]{"4", "2", "3", "1"});
        assertEntrySet(entriesView, new int[]{4, 2, 3, 1}, new CharSequence[]{"4", "2", "3", "1"});
        assertEntrySet(map.entrySet(), new int[]{4, 2, 3, 1}, new CharSequence[]{"4", "2", "3", "1"});
        assertKeySet(keysView, new int[]{4, 2, 3, 1});
        assertKeySet(map.keySet(), new int[]{4, 2, 3, 1});
        assertValues(valuesView, new CharSequence[]{"2", "1", "4", "3"});
        assertValues(map.values(), new CharSequence[]{"2", "1", "4", "3"});
    }
}
/** Removing an entry through the entry-set view must update the map and all other views. */
@Test
public void entrySetRemoveReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Drop the 2 -> "2" mapping via the entry-set view.
        entries.remove(new AbstractMap.SimpleEntry<>(2, "2"));
        assertMap(map, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertEntrySet(entries, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertKeySet(keys, new int[]{1, 3});
        assertValues(vals, new CharSequence[]{"1", "3"});
    }
}
/** Removing a key through the key-set view must update the map and all other views. */
@Test
public void keySetRemoveReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Drop key 2 via the key-set view.
        keys.remove(2);
        assertMap(map, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertEntrySet(entries, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertKeySet(keys, new int[]{1, 3});
        assertValues(vals, new CharSequence[]{"1", "3"});
    }
}
/** Removing a value through the values view must update the map and all other views. */
@Test
public void valuesRemoveReflectedInMap() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Drop the value "2" via the values view.
        vals.removeIf(v -> "2".contentEquals(v));
        assertMap(map, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertEntrySet(entries, new int[]{1, 3}, new CharSequence[]{"1", "3"});
        assertKeySet(keys, new int[]{1, 3});
        assertValues(vals, new CharSequence[]{"1", "3"});
    }
}
/**
 * Entry-iterator remove() must update the map and all other views. The
 * expected state is tracked in a reference map from which the same entry is
 * removed. (Fixed: raw {@code new HashMap(map)} replaced with the typed
 * diamond form.)
 */
@Test
public void entrySetIteratorRemoveReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Map<Integer, CharSequence> refMap = new HashMap<>(map);
        Set<Map.Entry<Integer, CharSequence>> entrySet = map.entrySet();
        Set<Integer> keySet = map.keySet();
        Collection<CharSequence> values = map.values();
        Iterator<Map.Entry<Integer, CharSequence>> entryIterator = entrySet.iterator();
        entryIterator.next();
        // Remove the second entry the iterator yields, from both maps.
        refMap.remove(entryIterator.next().getKey());
        entryIterator.remove();
        int[] expectedKeys = Ints.toArray(refMap.keySet());
        CharSequence[] expectedValues = refMap.values().toArray(new CharSequence[0]);
        assertMap(map, expectedKeys, expectedValues);
        assertEntrySet(entrySet, expectedKeys, expectedValues);
        assertKeySet(keySet, expectedKeys);
        assertValues(values, expectedValues);
    }
}
/**
 * Key-iterator remove() must update the map and all other views. The
 * expected state is tracked in a reference map from which the same key is
 * removed. (Fixed: raw {@code new HashMap(map)} replaced with the typed
 * diamond form.)
 */
@Test
public void keySetIteratorRemoveReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Map<Integer, CharSequence> refMap = new HashMap<>(map);
        Set<Map.Entry<Integer, CharSequence>> entrySet = map.entrySet();
        Set<Integer> keySet = map.keySet();
        Collection<CharSequence> values = map.values();
        Iterator<Integer> keyIterator = keySet.iterator();
        keyIterator.next();
        // Remove the second key the iterator yields, from both maps.
        refMap.remove(keyIterator.next());
        keyIterator.remove();
        int[] expectedKeys = Ints.toArray(refMap.keySet());
        CharSequence[] expectedValues = refMap.values().toArray(new CharSequence[0]);
        assertMap(map, expectedKeys, expectedValues);
        assertEntrySet(entrySet, expectedKeys, expectedValues);
        assertKeySet(keySet, expectedKeys);
        assertValues(values, expectedValues);
    }
}
/**
 * Value-iterator remove() must update the map and all other views. A bi-map
 * mirrors the contents so the removed value can be mapped back to its key.
 */
@Test
public void valuesIteratorRemoveReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        HashBiMap<Integer, CharSequence> reference = HashBiMap.create();
        map.forEach((k, v) -> reference.put(k, v.toString()));
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        Iterator<CharSequence> valueIterator = vals.iterator();
        valueIterator.next();
        // The second value the iterator yields is removed from both sides.
        reference.inverse().remove(valueIterator.next().toString());
        valueIterator.remove();
        int[] expectedKeys = Ints.toArray(reference.keySet());
        CharSequence[] expectedValues = new CharSequence[expectedKeys.length];
        for (int i = 0; i < expectedKeys.length; i++)
            expectedValues[i] = reference.get(expectedKeys[i]);
        assertMap(map, expectedKeys, expectedValues);
        assertEntrySet(entries, expectedKeys, expectedValues);
        assertKeySet(keys, expectedKeys);
        assertValues(vals, expectedValues);
    }
}
/** removeAll() on the entry-set view must update the map and all other views. */
@Test
public void entrySetRemoveAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Bulk-remove entries 1 -> "1" and 2 -> "2" through the entry-set view.
        entries.removeAll(Arrays.asList(
                new AbstractMap.SimpleEntry<>(1, "1"),
                new AbstractMap.SimpleEntry<>(2, "2")));
        assertMap(map, new int[]{3}, new CharSequence[]{"3"});
        assertEntrySet(entries, new int[]{3}, new CharSequence[]{"3"});
        assertKeySet(keys, new int[]{3});
        assertValues(vals, new CharSequence[]{"3"});
    }
}
/** removeAll() on the key-set view must update the map and all other views. */
@Test
public void keySetRemoveAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Bulk-remove keys 1 and 2 through the key-set view.
        keys.removeAll(Arrays.asList(1, 2));
        assertMap(map, new int[]{3}, new CharSequence[]{"3"});
        assertEntrySet(entries, new int[]{3}, new CharSequence[]{"3"});
        assertKeySet(keys, new int[]{3});
        assertValues(vals, new CharSequence[]{"3"});
    }
}
/** Bulk removal through the values view must update the map and all other views. */
@Test
public void valuesRemoveAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Bulk-remove values "1" and "2" through the values view.
        vals.removeIf(v -> "1".contentEquals(v) || "2".contentEquals(v));
        assertMap(map, new int[]{3}, new CharSequence[]{"3"});
        assertEntrySet(entries, new int[]{3}, new CharSequence[]{"3"});
        assertKeySet(keys, new int[]{3});
        assertValues(vals, new CharSequence[]{"3"});
    }
}
/**
 * Retain-style filtering through the entry-set view (implemented via a
 * negated removeIf predicate) must update the map and all other views.
 */
@Test
public void entrySetRetainAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Keep only the 1 -> "1" and 2 -> "2" entries.
        entries.removeIf(e -> {
            boolean isOne = e.getKey().equals(1) && "1".contentEquals(e.getValue());
            boolean isTwo = e.getKey().equals(2) && "2".contentEquals(e.getValue());
            return !(isOne || isTwo);
        });
        assertMap(map, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertEntrySet(entries, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertKeySet(keys, new int[]{2, 1});
        assertValues(vals, new CharSequence[]{"2", "1"});
    }
}
/** retainAll() on the key-set view must update the map and all other views. */
@Test
public void keySetRetainAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Keep only keys 1 and 2.
        keys.retainAll(Arrays.asList(1, 2));
        assertMap(map, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertEntrySet(entries, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertKeySet(keys, new int[]{2, 1});
        assertValues(vals, new CharSequence[]{"2", "1"});
    }
}
/**
 * Retain-style filtering through the values view (negated removeIf
 * predicate) must update the map and all other views.
 */
@Test
public void valuesRetainAllReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Keep only the values "1" and "2".
        vals.removeIf(v -> !("1".contentEquals(v) || "2".contentEquals(v)));
        assertMap(map, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertEntrySet(entries, new int[]{2, 1}, new CharSequence[]{"2", "1"});
        assertKeySet(keys, new int[]{2, 1});
        assertValues(vals, new CharSequence[]{"2", "1"});
    }
}
/** clear() on the entry-set view must empty the map and all other views. */
@Test
public void entrySetClearReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        entries.clear();
        org.junit.Assert.assertTrue(map.isEmpty());
        org.junit.Assert.assertTrue(entries.isEmpty());
        org.junit.Assert.assertTrue(keys.isEmpty());
        org.junit.Assert.assertTrue(vals.isEmpty());
    }
}
/** clear() on the key-set view must empty the map and all other views. */
@Test
public void keySetClearReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        keys.clear();
        org.junit.Assert.assertTrue(map.isEmpty());
        org.junit.Assert.assertTrue(entries.isEmpty());
        org.junit.Assert.assertTrue(keys.isEmpty());
        org.junit.Assert.assertTrue(vals.isEmpty());
    }
}
/** clear() on the values view must empty the map and all other views. */
@Test
public void valuesClearReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(3)) {
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        vals.clear();
        org.junit.Assert.assertTrue(map.isEmpty());
        org.junit.Assert.assertTrue(entries.isEmpty());
        org.junit.Assert.assertTrue(keys.isEmpty());
        org.junit.Assert.assertTrue(vals.isEmpty());
    }
}
/**
 * Draining the map via {@code entrySet().iterator().remove()} must visit and
 * remove exactly every entry. (Fixed: raw {@code Iterator} replaced with the
 * parameterized type.)
 */
@Test
public void clearMapViaEntryIteratorRemoves() throws IOException {
    int noOfElements = 16 * 1024;
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(noOfElements)) {
        int removed = 0;
        // Iterator.remove() is the only safe way to remove while iterating.
        for (Iterator<Map.Entry<Integer, CharSequence>> it = map.entrySet().iterator();
                it.hasNext(); ) {
            it.next();
            it.remove();
            ++removed;
        }
        assertEquals(noOfElements, removed);
    }
}
/**
 * Draining the map via {@code keySet().iterator().remove()} must visit and
 * remove exactly every key. (Fixed: raw {@code Iterator} replaced with the
 * parameterized type; the expected-key set was previously maintained but
 * never asserted — it is now checked to be fully consumed, relying on
 * getViewTestMap(n) producing keys 1..n as the sibling tests show.)
 */
@Test
public void clearMapViaKeyIteratorRemoves() throws IOException {
    int noOfElements = 16 * 1024;
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(noOfElements)) {
        Set<Integer> expectedKeys = new HashSet<Integer>();
        for (int i = 1; i <= noOfElements; i++) {
            expectedKeys.add(i);
        }
        int removed = 0;
        for (Iterator<Integer> it = map.keySet().iterator(); it.hasNext(); ) {
            expectedKeys.remove(it.next());
            it.remove();
            ++removed;
        }
        assertEquals(noOfElements, removed);
        // Every expected key must have been yielded exactly once.
        assertEquals(0, expectedKeys.size());
    }
}
/**
 * Calling {@code Iterator.remove()} before any {@code next()} must throw
 * {@link IllegalStateException}. (Fixed: the map is now closed via
 * try-with-resources — the expected exception still propagates after close.)
 */
@Test(expected = IllegalStateException.class)
public void testRemoveWhenNextIsNotCalled() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(2)) {
        Iterator<Integer> iterator = map.keySet().iterator();
        iterator.remove(); // must throw: next() has never been called
    }
}
/**
 * Draining the map via {@code values().iterator().remove()} must visit and
 * remove exactly every value. (Fixed: raw {@code Iterator} replaced with the
 * parameterized type.)
 */
@Test
public void clearMapViaValueIteratorRemoves() throws IOException {
    int noOfElements = 16 * 1024;
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(noOfElements)) {
        int removed = 0;
        for (Iterator<CharSequence> it = map.values().iterator(); it.hasNext(); ) {
            it.next();
            it.remove();
            ++removed;
        }
        assertEquals(noOfElements, removed);
    }
}
/** Entry.setValue() through the entry-set view must be visible in the map and all views. */
@Test
public void entrySetValueReflectedInMapAndOtherViews() throws IOException {
    try (ChronicleMap<Integer, CharSequence> map = getViewTestMap(0)) {
        map.put(1, "A");
        Set<Map.Entry<Integer, CharSequence>> entries = map.entrySet();
        Set<Integer> keys = map.keySet();
        Collection<CharSequence> vals = map.values();
        // Initial state: a single mapping 1 -> "A".
        assertMap(map, new int[]{1}, new CharSequence[]{"A"});
        assertEntrySet(entries, new int[]{1}, new CharSequence[]{"A"});
        assertKeySet(keys, new int[]{1});
        assertValues(vals, new String[]{"A"});
        // Mutate through the entry view; every other view must observe "B".
        entries.iterator().next().setValue("B");
        assertMap(map, new int[]{1}, new CharSequence[]{"B"});
        assertEntrySet(entries, new int[]{1}, new CharSequence[]{"B"});
        assertEntrySet(map.entrySet(), new int[]{1}, new CharSequence[]{"B"});
        assertKeySet(keys, new int[]{1});
        assertKeySet(map.keySet(), new int[]{1});
        assertValues(vals, new String[]{"B"});
        assertValues(map.values(), new String[]{"B"});
    }
}
/** Two independently built maps with identical contents must compare equal. */
@Test
public void equalsTest() throws IOException {
    try (final ChronicleMap<Integer, String> first = ChronicleMap.of(Integer.class, String.class)
            .averageValue("one").entries(2).create()) {
        first.put(1, "one");
        first.put(2, "two");
        try (ChronicleMap<Integer, String> second = ChronicleMap.of(Integer.class, String.class)
                .averageValue("one").entries(2).create()) {
            second.put(1, "one");
            second.put(2, "two");
            assertEquals(first, second);
        }
    }
}
/**
 * put() with an unbound native LongValue reference is expected to be
 * rejected with either IllegalStateException or NullPointerException.
 */
@Test
public void testPutLongValue() throws IOException {
    final ChronicleMapBuilder<CharSequence, LongValue> builder = ChronicleMapBuilder
            .of(CharSequence.class, LongValue.class)
            .entries(1000)
            .averageKeySize("x".length());
    try (final ChronicleMap<CharSequence, LongValue> map = builder.create()) {
        LongValue unbound = nativeLongValue();
        boolean rejected = false;
        try {
            map.put("x", unbound);
        } catch (IllegalStateException | NullPointerException expected) {
            rejected = true;
        }
        if (!rejected)
            throw new AssertionError("Should throw either IllegalStateException or " +
                    "NullPointerException, but succeed");
    }
}
/**
 * Full life-cycle test of {@code acquireContext} with an off-heap (native)
 * {@link LongValue}: create-on-acquire, verify through a query context,
 * remove under an update lock, then re-acquire from scratch.
 */
@Test
public void testOffheapAcquireUsingLocked() throws IOException {
ChronicleMapBuilder<CharSequence, LongValue> builder = ChronicleMapBuilder
.of(CharSequence.class, LongValue.class)
.entries(1000)
.averageKeySize("one".length());
try (final ChronicleMap<CharSequence, LongValue> map = builder.create()) {
LongValue value = nativeLongValue();
// this will add the entry
try (net.openhft.chronicle.core.io.Closeable c = map.acquireContext("one", value)) {
assertEquals(0, value.getValue());
value.addValue(1);
}
// check that the entry was added
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
MapEntry<CharSequence, LongValue> entry = c.entry();
assertNotNull(entry);
// getUsing should rebind the same native reference rather than allocate.
LongValue v = entry.value().getUsing(value);
assert v == value;
assertEquals(1, v.getValue());
}
// this will remove the entry
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
c.updateLock().lock();
c.remove(c.entry());
}
// check that the entry was removed
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
c.updateLock().lock();
assertNotNull(c.absentEntry());
}
// re-acquiring after removal must start from a fresh zero value
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
assertEquals(0, value.getValue());
}
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
value.addValue(1);
}
// check that the entry was re-added with value 1
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
LongValue v = c.entry().value().getUsing(value);
assert value == v;
assertEquals(1, c.entry().value().get().getValue());
}
}
}
/**
 * acquireContext with a String "using" value must be rejected with
 * IllegalArgumentException — presumably because an immutable String cannot
 * be filled in place (TODO confirm against the acquireContext contract).
 */
@Test(expected = IllegalArgumentException.class)
public void testAcquireUsingLockedWithString() throws IOException {
    ChronicleMapBuilder<CharSequence, String> builder = ChronicleMapBuilder
            .of(CharSequence.class, String.class)
            .averageKey("one").averageValue("")
            .entries(1000);
    try (final ChronicleMap<CharSequence, String> map = builder.create()) {
        // The acquireContext call itself is expected to throw.
        try (net.openhft.chronicle.core.io.Closeable ctx = map.acquireContext("one", "")) {
            // no-op
        }
    }
}
/**
 * acquireContext with a StringBuilder "using" value: text appended while the
 * context is held must be written through to the map entry, and the builder
 * retains the content afterwards.
 */
@Test
public void testOnheapAcquireUsingLockedStringBuilder() throws IOException {
    try (final ChronicleMap<CharSequence, CharSequence> map = ChronicleMapBuilder
            .of(CharSequence.class, CharSequence.class)
            .entries(1000)
            .averageKeySize("one".length())
            .averageValueSize("Hello World".length())
            .create()) {
        StringBuilder contents = new StringBuilder();
        try (net.openhft.chronicle.core.io.Closeable ctx =
                     map.acquireContext("one", contents)) {
            contents.append("Hello World");
        }
        assertEquals("Hello World", contents.toString());
    }
}
/**
 * Full life-cycle test of {@code acquireContext} on a persisted map with a
 * native {@link LongValue} reference: create-on-acquire, update, remove under
 * an update lock, re-acquire, and write through the still-bound reference.
 */
@Test
public void testOnheapAcquireUsingLocked() throws IOException {
File tmpFile = File.createTempFile("testAcquireUsingLocked", ".deleteme");
tmpFile.deleteOnExit();
try (final ChronicleMap<CharSequence, LongValue> map = ChronicleMapBuilder
.of(CharSequence.class, LongValue.class)
.entries(1000)
.averageKeySize("one".length()).createPersistedTo(tmpFile)) {
LongValue value = Values.newNativeReference(LongValue.class);
// the key must be absent before the first acquire
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
assertNotNull(c.absentEntry());
}
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
value.setValue(10);
}
// this will add the entry
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
value.addValue(1);
}
// check that the entry was added
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
MapEntry<CharSequence, LongValue> entry = c.entry();
assertNotNull(entry);
assertEquals(11, entry.value().get().getValue());
}
// this will remove the entry
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
c.updateLock().lock();
c.remove(c.entry());
}
// check that the entry was removed
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
assertNotNull(c.absentEntry());
}
// re-acquiring after removal must start from a fresh zero value
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
assertEquals(0, value.getValue());
}
// NOTE(review): this write goes through the native reference bound by the
// last acquire, directly mutating the stored entry — confirm intended.
value.setValue(1);
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
assertEquals(1, c.entry().value().get().getValue());
}
try (net.openhft.chronicle.core.io.Closeable c =
map.acquireContext("one", value)) {
value.addValue(1);
}
// check that the entry now holds the incremented value
try (ExternalMapQueryContext<CharSequence, LongValue, ?> c = map.queryContext("one")) {
LongValue value1 = c.entry().value().get();
assertEquals(2, value1.getValue());
}
}
tmpFile.delete();
}
/**
 * Declaring a map whose value type is the non-concrete BMSUper interface
 * must be rejected with IllegalArgumentException, even though the sample
 * values supplied are concrete BMClass instances.
 */
@Test(expected = IllegalArgumentException.class)
public void testBytesMarshallableMustBeConcreteValueType() {
    try (ChronicleMap<CharSequence, BMSUper> map = ChronicleMapBuilder
            .of(CharSequence.class, BMSUper.class)
            .entries(1)
            .averageKey("hello")
            .averageValue(new BMClass())
            .create()) {
        map.put("hi", new BMClass());
    }
}
// Marker super-interface (non-concrete value type) used by
// testBytesMarshallableMustBeConcreteValueType to show the builder rejects it.
interface BMSUper {
}
// Concrete BytesMarshallable implementation of BMSUper; used as the sample
// value in testBytesMarshallableMustBeConcreteValueType.
static class BMClass implements BytesMarshallable, BMSUper {
}
/**
 * Worker that increments a shared off-heap counter {@code iterations} times.
 * All workers rendezvous on the barrier first so their increments race.
 * (Fixed: failures were previously printStackTrace'd and swallowed; they now
 * propagate as unchecked exceptions so a broken worker fails loudly instead
 * of only surfacing as an off-by-N count, and the interrupt status is
 * restored.)
 */
private static final class IncrementRunnable implements Runnable {
    private final ChronicleMap<CharSequence, LongValue> map;
    private final CharSequence key;
    private final int iterations;
    private final CyclicBarrier barrier;

    private IncrementRunnable(ChronicleMap<CharSequence, LongValue> map, CharSequence key,
                              int iterations, CyclicBarrier barrier) {
        this.map = map;
        this.key = key;
        this.iterations = iterations;
        this.barrier = barrier;
    }

    @Override
    public void run() {
        try {
            LongValue value = Values.newNativeReference(LongValue.class);
            barrier.await();
            for (int i = 0; i < iterations; i++) {
                map.acquireUsing(key, value);
                value.addAtomicValue(1);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("increment worker interrupted", e);
        } catch (Exception e) {
            throw new IllegalStateException("increment worker failed", e);
        }
    }
}
}
|
package io.vertx.up.uca.marshal;
import io.vertx.core.json.JsonObject;
/**
 * Converts a {@link JsonObject} into an instance of {@code T}.
 *
 * <p>Annotated {@code @FunctionalInterface} so implementations can be supplied as
 * lambdas or method references, and so the single-abstract-method contract is
 * compiler-checked. Adding the annotation is fully backward compatible.</p>
 *
 * @param <T> the target type produced from the JSON input
 */
@FunctionalInterface
public interface Transformer<T> {

    /**
     * Transforms the given JSON object into a {@code T}.
     *
     * @param input the JSON object to transform
     * @return the transformed value
     */
    T transform(JsonObject input);
}
|
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchdefinition.derived;
import ai.vespa.rankingexpression.importer.configmodelview.ImportedMlModels;
import com.google.common.collect.ImmutableList;
import com.yahoo.collections.Pair;
import com.yahoo.compress.Compressor;
import com.yahoo.config.model.api.ModelContext;
import com.yahoo.search.query.profile.QueryProfileRegistry;
import com.yahoo.searchdefinition.OnnxModel;
import com.yahoo.searchdefinition.LargeRankExpressions;
import com.yahoo.searchdefinition.RankExpressionBody;
import com.yahoo.searchdefinition.document.RankType;
import com.yahoo.searchdefinition.RankProfile;
import com.yahoo.searchdefinition.expressiontransforms.OnnxModelTransformer;
import com.yahoo.searchlib.rankingexpression.ExpressionFunction;
import com.yahoo.searchlib.rankingexpression.RankingExpression;
import com.yahoo.searchlib.rankingexpression.parser.ParseException;
import com.yahoo.searchlib.rankingexpression.rule.ReferenceNode;
import com.yahoo.searchlib.rankingexpression.rule.SerializationContext;
import com.yahoo.tensor.TensorType;
import com.yahoo.vespa.config.search.RankProfilesConfig;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A rank profile derived from a search definition, containing exactly the features available natively in the server
*
* @author bratseth
*/
public class RawRankProfile implements RankProfilesConfig.Producer {

    /** A reusable compressor with default settings */
    private static final Compressor compressor = new Compressor();

    // Separators used when flattening the property list to one string before compression.
    // NOTE(review): the encoding assumes keys/values never contain these "\r"-based
    // markers — confirm that guarantee holds for all rank property values.
    private final String keyEndMarker = "\r=";
    private final String valueEndMarker = "\r\n";

    // TODO: These are to expose coupling between the strings used here and elsewhere
    public final static String summaryFeatureFefPropertyPrefix = "vespa.summary.feature";
    public final static String rankFeatureFefPropertyPrefix = "vespa.dump.feature";

    /** The name of the rank profile this was derived from. */
    private final String name;

    /** The derived rank properties, kept compressed to reduce memory footprint. */
    private final Compressor.Compression compressedProperties;

    /**
     * Creates a raw rank profile from the given rank profile
     */
    public RawRankProfile(RankProfile rankProfile, LargeRankExpressions largeExpressions,
                          QueryProfileRegistry queryProfiles, ImportedMlModels importedModels,
                          AttributeFields attributeFields, ModelContext.Properties deployProperties) {
        this.name = rankProfile.getName();
        // Compile the profile, derive all properties, then store them compressed.
        compressedProperties = compress(new Deriver(rankProfile.compile(queryProfiles, importedModels),
                                                    attributeFields, deployProperties).derive(largeExpressions));
    }

    /** Serializes the property list with the key/value end markers and compresses it. */
    private Compressor.Compression compress(List<Pair<String, String>> properties) {
        StringBuilder b = new StringBuilder();
        for (Pair<String, String> property : properties)
            b.append(property.getFirst()).append(keyEndMarker).append(property.getSecond()).append(valueEndMarker);
        return compressor.compress(b.toString().getBytes(StandardCharsets.UTF_8));
    }

    /** Decompresses and parses the marker-separated string back into an immutable property list. */
    private List<Pair<String, String>> decompress(Compressor.Compression compression) {
        String propertiesString = new String(compressor.decompress(compression), StandardCharsets.UTF_8);
        if (propertiesString.isEmpty()) return ImmutableList.of();
        ImmutableList.Builder<Pair<String, String>> properties = new ImmutableList.Builder<>();
        for (int pos = 0; pos < propertiesString.length();) {
            // Each entry is key + keyEndMarker + value + valueEndMarker
            int keyEndPos = propertiesString.indexOf(keyEndMarker, pos);
            String key = propertiesString.substring(pos, keyEndPos);
            pos = keyEndPos + keyEndMarker.length();
            int valueEndPos = propertiesString.indexOf(valueEndMarker, pos);
            String value = propertiesString.substring(pos, valueEndPos);
            pos = valueEndPos + valueEndMarker.length();
            properties.add(new Pair<>(key, value));
        }
        return properties.build();
    }

    public String getName() { return name; }

    @Override
    public String toString() {
        return " rank profile " + name;
    }

    @Override
    public void getConfig(RankProfilesConfig.Builder builder) {
        RankProfilesConfig.Rankprofile.Builder b = new RankProfilesConfig.Rankprofile.Builder().name(getName());
        getRankProperties(b);
        builder.rankprofile(b);
    }

    /** Decompresses the stored properties and adds them to the given fef config builder. */
    private void getRankProperties(RankProfilesConfig.Rankprofile.Builder b) {
        RankProfilesConfig.Rankprofile.Fef.Builder fefB = new RankProfilesConfig.Rankprofile.Fef.Builder();
        for (Pair<String, String> p : decompress(compressedProperties))
            fefB.property(new RankProfilesConfig.Rankprofile.Fef.Property.Builder().name(p.getFirst()).value(p.getSecond()));
        b.fef(fefB);
    }

    /**
     * Returns the properties of this as an unmodifiable list.
     * Note: This method is expensive.
     */
    public List<Pair<String, String>> configProperties() { return decompress(compressedProperties); }

    /** Derives the rank property list from a compiled rank profile. All work happens in the constructor and derive(). */
    private static class Deriver {

        /** Per-field rank settings, keyed by field name (insertion order preserved). */
        private final Map<String, FieldRankSettings> fieldRankSettings = new java.util.LinkedHashMap<>();
        private final Set<ReferenceNode> summaryFeatures;
        private final Set<ReferenceNode> rankFeatures;
        private final List<RankProfile.RankProperty> rankProperties;

        /**
         * Rank properties for weight settings to make these available to feature executors
         */
        private final List<RankProfile.RankProperty> boostAndWeightRankProperties = new ArrayList<>();

        private final boolean ignoreDefaultRankFeatures;
        private final RankProfile.MatchPhaseSettings matchPhaseSettings;
        private final int rerankCount;
        private final int keepRankCount;
        private final int numThreadsPerSearch;
        private final int minHitsPerThread;
        private final int numSearchPartitions;
        private final double termwiseLimit;
        private final double rankScoreDropLimit;
        private final int largeRankExpressionLimit;
        private final boolean distributeLargeRankExpressions;
        private final boolean useDistributedRankExpressions;

        /**
         * The rank type definitions used to derive settings for the native rank features
         */
        private final NativeRankTypeDefinitionSet nativeRankTypeDefinitions = new NativeRankTypeDefinitionSet("default");

        private final Map<String, String> attributeTypes;
        private final Map<String, String> queryFeatureTypes;
        private final Set<String> filterFields = new java.util.LinkedHashSet<>();
        private final String rankprofileName;

        // May be replaced later in derive() if the rank properties carry expanded expressions.
        private RankingExpression firstPhaseRanking;
        private RankingExpression secondPhaseRanking;

        /**
         * Creates a raw rank profile from the given rank profile
         */
        Deriver(RankProfile compiled, AttributeFields attributeFields, ModelContext.Properties deployProperties)
        {
            rankprofileName = compiled.getName();
            attributeTypes = compiled.getAttributeTypes();
            queryFeatureTypes = compiled.getQueryFeatureTypes();
            firstPhaseRanking = compiled.getFirstPhaseRanking();
            secondPhaseRanking = compiled.getSecondPhaseRanking();
            summaryFeatures = new LinkedHashSet<>(compiled.getSummaryFeatures());
            rankFeatures = compiled.getRankFeatures();
            rerankCount = compiled.getRerankCount();
            matchPhaseSettings = compiled.getMatchPhaseSettings();
            numThreadsPerSearch = compiled.getNumThreadsPerSearch();
            minHitsPerThread = compiled.getMinHitsPerThread();
            numSearchPartitions = compiled.getNumSearchPartitions();
            termwiseLimit = compiled.getTermwiseLimit().orElse(deployProperties.featureFlags().defaultTermwiseLimit());
            keepRankCount = compiled.getKeepRankCount();
            rankScoreDropLimit = compiled.getRankScoreDropLimit();
            ignoreDefaultRankFeatures = compiled.getIgnoreDefaultRankFeatures();
            largeRankExpressionLimit = deployProperties.featureFlags().largeRankExpressionLimit();
            distributeLargeRankExpressions = deployProperties.featureFlags().distributeExternalRankExpressions();
            useDistributedRankExpressions = deployProperties.featureFlags().useExternalRankExpressions();
            rankProperties = new ArrayList<>(compiled.getRankProperties());

            Map<String, RankProfile.RankingExpressionFunction> functions = compiled.getFunctions();
            List<ExpressionFunction> functionExpressions = functions.values().stream().map(f -> f.function()).collect(Collectors.toList());
            Map<String, String> functionProperties = new LinkedHashMap<>();
            SerializationContext functionSerializationContext = new SerializationContext(functionExpressions);

            // Serialize the phase expressions first so functions they reference are registered
            // in the serialization context before the remaining functions are derived.
            if (firstPhaseRanking != null) {
                functionProperties.putAll(firstPhaseRanking.getRankProperties(functionSerializationContext));
            }
            if (secondPhaseRanking != null) {
                functionProperties.putAll(secondPhaseRanking.getRankProperties(functionSerializationContext));
            }
            derivePropertiesAndSummaryFeaturesFromFunctions(functions, functionProperties, functionSerializationContext);
            deriveOnnxModelFunctionsAndSummaryFeatures(compiled);
            deriveRankTypeSetting(compiled, attributeFields);
            deriveFilterFields(compiled);
            deriveWeightProperties(compiled);
        }

        /** Collects all fields declared as filter fields for this profile. */
        private void deriveFilterFields(RankProfile rp) {
            filterFields.addAll(rp.allFilterFields());
        }

        /** Serializes all functions to rank properties and rewrites summary features that refer to them. */
        private void derivePropertiesAndSummaryFeaturesFromFunctions(Map<String, RankProfile.RankingExpressionFunction> functions,
                                                                     Map<String, String> functionProperties,
                                                                     SerializationContext functionContext) {
            if (functions.isEmpty()) return;

            replaceFunctionSummaryFeatures(functionContext);

            // First phase, second phase and summary features should add all required functions to the context.
            // However, we need to add any functions not referenced in those anyway for model-evaluation.
            deriveFunctionProperties(functions, functionProperties, functionContext);

            for (Map.Entry<String, String> e : functionProperties.entrySet()) {
                rankProperties.add(new RankProfile.RankProperty(e.getKey(), e.getValue()));
            }
        }

        /** Adds serialized bodies, argument types and return types of all not-yet-serialized functions. */
        private void deriveFunctionProperties(Map<String, RankProfile.RankingExpressionFunction> functions,
                                              Map<String, String> functionProperties,
                                              SerializationContext context) {
            for (Map.Entry<String, RankProfile.RankingExpressionFunction> e : functions.entrySet()) {
                String propertyName = RankingExpression.propertyName(e.getKey());
                if (context.serializedFunctions().containsKey(propertyName)) continue; // already pulled in by a phase expression
                String expressionString = e.getValue().function().getBody().getRoot().toString(context).toString();
                context.addFunctionSerialization(propertyName, expressionString);
                for (Map.Entry<String, TensorType> argumentType : e.getValue().function().argumentTypes().entrySet())
                    context.addArgumentTypeSerialization(e.getKey(), argumentType.getKey(), argumentType.getValue());
                if (e.getValue().function().returnType().isPresent())
                    context.addFunctionTypeSerialization(e.getKey(), e.getValue().function().returnType().get());
                // else if (e.getValue().function().arguments().isEmpty()) TODO: Enable this check when we resolve all types
                //     throw new IllegalStateException("Type of function '" + e.getKey() + "' is not resolved");
            }
            functionProperties.putAll(context.serializedFunctions());
        }

        /** Replaces each summary feature that names a function by its rankingExpression(...) wrapper form. */
        private void replaceFunctionSummaryFeatures(SerializationContext context) {
            if (summaryFeatures == null) return;
            Map<String, ReferenceNode> functionSummaryFeatures = new LinkedHashMap<>();
            for (Iterator<ReferenceNode> i = summaryFeatures.iterator(); i.hasNext(); ) {
                ReferenceNode referenceNode = i.next();
                // Is the feature a function?
                ExpressionFunction function = context.getFunction(referenceNode.getName());
                if (function != null) {
                    String propertyName = RankingExpression.propertyName(referenceNode.getName());
                    String expressionString = function.getBody().getRoot().toString(context).toString();
                    context.addFunctionSerialization(propertyName, expressionString);
                    ReferenceNode newReferenceNode = new ReferenceNode("rankingExpression(" + referenceNode.getName() + ")", referenceNode.getArguments().expressions(), referenceNode.getOutput());
                    functionSummaryFeatures.put(referenceNode.getName(), newReferenceNode);
                    i.remove(); // Will add the expanded one in next block
                }
            }
            // Then, replace the summary features that were functions
            for (Map.Entry<String, ReferenceNode> e : functionSummaryFeatures.entrySet()) {
                summaryFeatures.add(e.getValue());
            }
        }

        /** Exposes WEIGHT rank settings as vespa.fieldweight.* properties for feature executors. */
        private void deriveWeightProperties(RankProfile rankProfile) {
            for (RankProfile.RankSetting setting : rankProfile.rankSettings()) {
                if (!setting.getType().equals(RankProfile.RankSetting.Type.WEIGHT)) {
                    continue;
                }
                boostAndWeightRankProperties.add(new RankProfile.RankProperty("vespa.fieldweight." + setting.getFieldName(),
                                                                              String.valueOf(setting.getIntValue())));
            }
        }

        /**
         * Adds the type boosts from a rank profile
         */
        private void deriveRankTypeSetting(RankProfile rankProfile, AttributeFields attributeFields) {
            for (Iterator<RankProfile.RankSetting> i = rankProfile.rankSettingIterator(); i.hasNext(); ) {
                RankProfile.RankSetting setting = i.next();
                if (!setting.getType().equals(RankProfile.RankSetting.Type.RANKTYPE)) continue;
                deriveNativeRankTypeSetting(setting.getFieldName(), (RankType) setting.getValue(), attributeFields,
                                            hasDefaultRankTypeSetting(rankProfile, setting.getFieldName()));
            }
        }

        /** Translates one non-default rank type setting into native table settings for the field. */
        private void deriveNativeRankTypeSetting(String fieldName, RankType rankType, AttributeFields attributeFields, boolean isDefaultSetting) {
            if (isDefaultSetting) return;
            NativeRankTypeDefinition definition = nativeRankTypeDefinitions.getRankTypeDefinition(rankType);
            if (definition == null) throw new IllegalArgumentException("In field '" + fieldName + "': " +
                                                                       rankType + " is known but has no implementation. " +
                                                                       "Supported rank types: " +
                                                                       nativeRankTypeDefinitions.types().keySet());
            FieldRankSettings settings = deriveFieldRankSettings(fieldName);
            for (Iterator<NativeTable> i = definition.rankSettingIterator(); i.hasNext(); ) {
                NativeTable table = i.next();
                // only add index field tables if we are processing an index field and
                // only add attribute field tables if we are processing an attribute field
                if ((FieldRankSettings.isIndexFieldTable(table) && attributeFields.getAttribute(fieldName) == null) ||
                    (FieldRankSettings.isAttributeFieldTable(table) && attributeFields.getAttribute(fieldName) != null)) {
                    settings.addTable(table);
                }
            }
        }

        /** Returns whether the field's RANKTYPE setting is the DEFAULT rank type. */
        private boolean hasDefaultRankTypeSetting(RankProfile rankProfile, String fieldName) {
            RankProfile.RankSetting setting =
                    rankProfile.getRankSetting(fieldName, RankProfile.RankSetting.Type.RANKTYPE);
            return setting != null && setting.getValue().equals(RankType.DEFAULT);
        }

        /** Returns the (possibly newly created) FieldRankSettings entry for the given field. */
        private FieldRankSettings deriveFieldRankSettings(String fieldName) {
            FieldRankSettings settings = fieldRankSettings.get(fieldName);
            if (settings == null) {
                settings = new FieldRankSettings(fieldName);
                fieldRankSettings.put(fieldName, settings);
            }
            return settings;
        }

        /** Derives the properties this produces */
        public List<Pair<String, String>> derive(LargeRankExpressions largeRankExpressions) {
            List<Pair<String, String>> properties = new ArrayList<>();
            for (RankProfile.RankProperty property : rankProperties) {
                if (RankingExpression.propertyName(RankProfile.FIRST_PHASE).equals(property.getName())) {
                    // Could have been set by function expansion. Set expressions, then skip this property.
                    try {
                        firstPhaseRanking = new RankingExpression(property.getValue());
                    } catch (ParseException e) {
                        throw new IllegalArgumentException("Could not parse first phase expression", e);
                    }
                }
                else if (RankingExpression.propertyName(RankProfile.SECOND_PHASE).equals(property.getName())) {
                    try {
                        secondPhaseRanking = new RankingExpression(property.getValue());
                    } catch (ParseException e) {
                        throw new IllegalArgumentException("Could not parse second phase expression", e);
                    }
                }
                else {
                    properties.add(new Pair<>(property.getName(), property.getValue()));
                }
            }
            properties.addAll(deriveRankingPhaseRankProperties(firstPhaseRanking, RankProfile.FIRST_PHASE));
            properties.addAll(deriveRankingPhaseRankProperties(secondPhaseRanking, RankProfile.SECOND_PHASE));
            for (FieldRankSettings settings : fieldRankSettings.values()) {
                properties.addAll(settings.deriveRankProperties());
            }
            for (RankProfile.RankProperty property : boostAndWeightRankProperties) {
                properties.add(new Pair<>(property.getName(), property.getValue()));
            }
            for (ReferenceNode feature : summaryFeatures) {
                properties.add(new Pair<>(summaryFeatureFefPropertyPrefix, feature.toString()));
            }
            for (ReferenceNode feature : rankFeatures) {
                properties.add(new Pair<>(rankFeatureFefPropertyPrefix, feature.toString()));
            }
            // Emit tuning knobs only when they differ from "unset" sentinel values.
            if (numThreadsPerSearch > 0) {
                properties.add(new Pair<>("vespa.matching.numthreadspersearch", numThreadsPerSearch + ""));
            }
            if (minHitsPerThread > 0) {
                properties.add(new Pair<>("vespa.matching.minhitsperthread", minHitsPerThread + ""));
            }
            if (numSearchPartitions >= 0) {
                properties.add(new Pair<>("vespa.matching.numsearchpartitions", numSearchPartitions + ""));
            }
            if (termwiseLimit < 1.0) {
                properties.add(new Pair<>("vespa.matching.termwise_limit", termwiseLimit + ""));
            }
            if (matchPhaseSettings != null) {
                properties.add(new Pair<>("vespa.matchphase.degradation.attribute", matchPhaseSettings.getAttribute()));
                properties.add(new Pair<>("vespa.matchphase.degradation.ascendingorder", matchPhaseSettings.getAscending() + ""));
                properties.add(new Pair<>("vespa.matchphase.degradation.maxhits", matchPhaseSettings.getMaxHits() + ""));
                properties.add(new Pair<>("vespa.matchphase.degradation.maxfiltercoverage", matchPhaseSettings.getMaxFilterCoverage() + ""));
                properties.add(new Pair<>("vespa.matchphase.degradation.samplepercentage", matchPhaseSettings.getEvaluationPoint() + ""));
                properties.add(new Pair<>("vespa.matchphase.degradation.postfiltermultiplier", matchPhaseSettings.getPrePostFilterTippingPoint() + ""));
                RankProfile.DiversitySettings diversitySettings = matchPhaseSettings.getDiversity();
                if (diversitySettings != null) {
                    properties.add(new Pair<>("vespa.matchphase.diversity.attribute", diversitySettings.getAttribute()));
                    properties.add(new Pair<>("vespa.matchphase.diversity.mingroups", String.valueOf(diversitySettings.getMinGroups())));
                    properties.add(new Pair<>("vespa.matchphase.diversity.cutoff.factor", String.valueOf(diversitySettings.getCutoffFactor())));
                    properties.add(new Pair<>("vespa.matchphase.diversity.cutoff.strategy", String.valueOf(diversitySettings.getCutoffStrategy())));
                }
            }
            if (rerankCount > -1) {
                properties.add(new Pair<>("vespa.hitcollector.heapsize", rerankCount + ""));
            }
            if (keepRankCount > -1) {
                properties.add(new Pair<>("vespa.hitcollector.arraysize", keepRankCount + ""));
            }
            if (rankScoreDropLimit > -Double.MAX_VALUE) {
                properties.add(new Pair<>("vespa.hitcollector.rankscoredroplimit", rankScoreDropLimit + ""));
            }
            if (ignoreDefaultRankFeatures) {
                properties.add(new Pair<>("vespa.dump.ignoredefaultfeatures", String.valueOf(true)));
            }
            for (String fieldName : filterFields) {
                properties.add(new Pair<>("vespa.isfilterfield." + fieldName, String.valueOf(true)));
            }
            for (Map.Entry<String, String> attributeType : attributeTypes.entrySet()) {
                properties.add(new Pair<>("vespa.type.attribute." + attributeType.getKey(), attributeType.getValue()));
            }
            for (Map.Entry<String, String> queryFeatureType : queryFeatureTypes.entrySet()) {
                properties.add(new Pair<>("vespa.type.query." + queryFeatureType.getKey(), queryFeatureType.getValue()));
            }
            // Sanity cap on the number of derived properties.
            // NOTE(review): a raw RuntimeException — consider IllegalStateException.
            if (properties.size() >= 1000000) throw new RuntimeException("Too many rank properties");
            distributeLargeExpressionsAsFiles(properties, largeRankExpressions);
            return properties;
        }

        /**
         * Moves function expressions larger than largeRankExpressionLimit out to distributed
         * files, replacing the inline property with a file reference when the feature flag
         * useDistributedRankExpressions is enabled. No-op unless distribution is enabled.
         */
        private void distributeLargeExpressionsAsFiles(List<Pair<String, String>> properties, LargeRankExpressions largeRankExpressions) {
            if (!distributeLargeRankExpressions) return;
            for (ListIterator<Pair<String, String>> iter = properties.listIterator(); iter.hasNext();) {
                Pair<String, String> property = iter.next();
                String expression = property.getSecond();
                if (expression.length() > largeRankExpressionLimit) {
                    String propertyName = property.getFirst();
                    String functionName = RankingExpression.extractScriptName(propertyName);
                    if (functionName != null) {
                        // Prefix with the profile name to keep expression names globally unique.
                        String mangledName = rankprofileName + "." + functionName;
                        largeRankExpressions.add(new RankExpressionBody(mangledName, ByteBuffer.wrap(expression.getBytes(StandardCharsets.UTF_8))));
                        if (useDistributedRankExpressions) {
                            iter.set(new Pair<>(RankingExpression.propertyExpressionName(functionName), mangledName));
                        }
                    }
                }
            }
        }

        /**
         * Derives the vespa.rank.&lt;phase&gt; property for one ranking phase: a plain feature
         * reference is emitted directly, any other expression is emitted indirectly as a
         * named rankingExpression plus its serialized body.
         */
        private List<Pair<String, String>> deriveRankingPhaseRankProperties(RankingExpression expression, String phase) {
            List<Pair<String, String>> properties = new ArrayList<>();
            if (expression == null) return properties;

            String name = expression.getName();
            if ("".equals(name))
                name = phase;

            if (expression.getRoot() instanceof ReferenceNode) {
                properties.add(new Pair<>("vespa.rank." + phase, expression.getRoot().toString()));
            } else {
                properties.add(new Pair<>("vespa.rank." + phase, "rankingExpression(" + name + ")"));
                properties.add(new Pair<>(RankingExpression.propertyName(name), expression.getRoot().toString()));
            }
            return properties;
        }

        /** Rewrites ONNX model inputs and summary features; no-op when the search has no ONNX models. */
        private void deriveOnnxModelFunctionsAndSummaryFeatures(RankProfile rankProfile) {
            if (rankProfile.getSearch() == null) return;
            if (rankProfile.getSearch().onnxModels().asMap().isEmpty()) return;
            replaceOnnxFunctionInputs(rankProfile);
            replaceImplicitOnnxConfigSummaryFeatures(rankProfile);
        }

        /** Remaps ONNX model inputs that name a profile function to its rankingExpression(...) form. */
        private void replaceOnnxFunctionInputs(RankProfile rankProfile) {
            Set<String> functionNames = rankProfile.getFunctions().keySet();
            if (functionNames.isEmpty()) return;
            for (OnnxModel onnxModel: rankProfile.getSearch().onnxModels().asMap().values()) {
                for (Map.Entry<String, String> mapping : onnxModel.getInputMap().entrySet()) {
                    String source = mapping.getValue();
                    if (functionNames.contains(source)) {
                        onnxModel.addInputNameMapping(mapping.getKey(), "rankingExpression(" + source + ")");
                    }
                }
            }
        }

        /** Replaces summary features whose transformed (ONNX) form differs from the declared one. */
        private void replaceImplicitOnnxConfigSummaryFeatures(RankProfile rankProfile) {
            if (summaryFeatures == null || summaryFeatures.isEmpty()) return;
            Set<ReferenceNode> replacedSummaryFeatures = new HashSet<>();
            for (Iterator<ReferenceNode> i = summaryFeatures.iterator(); i.hasNext(); ) {
                ReferenceNode referenceNode = i.next();
                ReferenceNode replacedNode = (ReferenceNode) OnnxModelTransformer.transformFeature(referenceNode, rankProfile);
                // Reference comparison: the transformer returns the same instance when no change applies.
                if (referenceNode != replacedNode) {
                    replacedSummaryFeatures.add(replacedNode);
                    i.remove();
                }
            }
            summaryFeatures.addAll(replacedSummaryFeatures);
        }

    }

}
|
package uk.ac.rdg.resc.edal.dataset;
import uk.ac.rdg.resc.edal.util.GridCoordinates2D;
import java.io.Serializable;
import java.util.List;
/**
 * Element used in {@link HorizontalMesh4dDataset} cache.
 *
 * Pairs the grid positions where values should be written with the mesh
 * positions that must be read from the underlying data source.
 *
 * @author Jesse Lopez
 */
class MeshDatasetCacheElement implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Target grid positions for the values that will be read. */
    List<GridCoordinates2D> outputCoords;
    /** Mesh positions to read from the underlying source. */
    List<HZTDataSource.MeshCoordinates3D> coordsToRead;

    MeshDatasetCacheElement(List<GridCoordinates2D> outputCoords, List<HZTDataSource.MeshCoordinates3D> coordsToRead) {
        this.outputCoords = outputCoords;
        this.coordsToRead = coordsToRead;
    }

    /** Returns the grid positions corresponding to the read coordinates. */
    List<GridCoordinates2D> getOutputCoords() {
        return this.outputCoords;
    }

    /** Returns the mesh coordinates to read. */
    List<HZTDataSource.MeshCoordinates3D> getCoordsToRead() {
        return this.coordsToRead;
    }
}
|
/*
* Copyright (c) 2013 Tah Wei Hoon.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License Version 2.0,
* with full text available at http://www.apache.org/licenses/LICENSE-2.0.html
*
* This software is provided "as is". Use at your own risk.
*/
package com.myopicmobile.textwarrior3.common;
import java.util.LinkedList;
/**
* Implements undo/redo for insertion and deletion events of TextBuffer
*
* This class is tightly coupled to the implementation of TextBuffer, in
* particular the inner workings of the gap data structure to optimize
* undo/redo efficiency
*
* When text is inserted/deleted...
* 1. Before text is inserted/deleted, TextBuffer calls captureInsert()/captureDelete()
* 2. If the insertion/deletion is a continuation of the previous edit,
* the incoming edit is merged with the top entry of the undo stack.
* For 2 edits to be considered continuous, they must be the same type,
* (insert or delete), occur within a pre-defined time interval of MERGE_TIME,
* and the later edit must start off where the caret would have been after
* the earlier edit.
* 3. If the incoming edit is not continuous with the previous one, a new entry
* for it is pushed on the stack
*
* Batch mode:
* A client application can specify consecutive insert/delete operations to
* undo/redo as a group. Edits made between a call to beginBatchEdit()
* and a closing endBatchEdit() call are grouped as a unit.
*
* Undo/redo:
* Undo/redo commands merely move the stack pointer and do not delete or insert
* entries. Only when a new edit is made will the entries after the stack
* pointer be deleted.
*
* Optimizaton notes:
* Edited characters are copied lazily. When a new entry is pushed on the undo
* stack, only the starting position and length of the inserted/deleted segment
* is recorded. When another entry is pushed or when the entry is first undone,
* the affected characters are then copied over. This optimization exploits the
* non-destructive nature of continuous edits in TextBuffer -- deleted characters
* can be retrieved from the gap and inserted characters are trivially available.
* For undo/redo of the topmost entry, only the gap boundaries of TextBuffer
* need to be moved.
*/
public class UndoStack {

    /** The buffer whose edits are recorded and replayed. */
    private TextBuffer _buf;

    /** Undo/redo entries; entries at index >= _top are redoable. */
    private LinkedList<Command> _stack = new LinkedList<Command>();

    private boolean _isBatchEdit = false;

    /** for grouping batch operations */
    private int _groupId = 0;

    /** where new entries should go */
    private int _top = 0;

    /** timestamp for the previous edit operation */
    long _lastEditTime = -1;

    public UndoStack(TextBuffer buf){
        _buf = buf;
    }

    /**
     * Undo the previous insert/delete operation
     *
     * @return The suggested position of the caret after the undo, or -1 if
     *          there is nothing to undo
     */
    public int undo(){
        if(canUndo()){
            Command lastUndone = _stack.get(_top-1);
            int group = lastUndone._group;
            // Undo every consecutive command belonging to the same batch group
            do{
                Command c = _stack.get(_top-1);
                if(c._group != group){
                    break;
                }
                lastUndone = c;
                c.undo();
                --_top;
            }
            while(canUndo());
            return lastUndone.findUndoPosition();
        }
        return -1;
    }

    /**
     * Redo the previous insert/delete operation
     *
     * @return The suggested position of the caret after the redo, or -1 if
     *          there is nothing to redo
     */
    public int redo(){
        if(canRedo()){
            Command lastRedone = _stack.get(_top);
            int group = lastRedone._group;
            // Redo every consecutive command belonging to the same batch group
            do{
                Command c = _stack.get(_top);
                if(c._group != group){
                    break;
                }
                lastRedone = c;
                c.redo();
                ++_top;
            }
            while(canRedo());
            return lastRedone.findRedoPosition();
        }
        return -1;
    }

    //TODO extract common parts of captureInsert and captureDelete
    /**
     * Records an insert operation. Should be called before the insertion is
     * actually done.
     */
    public void captureInsert(int start, int length, long time){
        boolean mergeSuccess = false;

        if(canUndo()){
            Command c = _stack.get(_top - 1);

            // Merge into the previous insert if this edit is continuous with it;
            // otherwise the previous entry is about to stop being topmost, so
            // materialize its lazily-copied text now (see class doc).
            if(c instanceof InsertCommand
               && c.merge(start, length, time)){
                mergeSuccess = true;
            }
            else{
                c.recordData();
            }
        }

        if(!mergeSuccess){
            push(new InsertCommand(start, length, _groupId));

            if(!_isBatchEdit){
                _groupId++;
            }
        }

        _lastEditTime = time;
    }

    /**
     * Records a delete operation. Should be called before the deletion is
     * actually done.
     */
    public void captureDelete(int start, int length, long time){
        boolean mergeSuccess = false;

        if(canUndo()){
            Command c = _stack.get(_top - 1);

            // Same merge-or-materialize logic as captureInsert, for deletions.
            if(c instanceof DeleteCommand
               && c.merge(start, length, time)){
                mergeSuccess = true;
            }
            else{
                c.recordData();
            }
        }

        if(!mergeSuccess){
            push(new DeleteCommand(start, length, _groupId));

            if(!_isBatchEdit){
                _groupId++;
            }
        }

        _lastEditTime = time;
    }

    // Discards any redoable entries, then pushes c on top of the stack.
    private void push(Command c){
        trimStack();
        ++_top;
        _stack.add(c);
    }

    // Removes all entries above the stack pointer (the redo history).
    private void trimStack(){
        while(_stack.size() > _top){
            _stack.removeLast();
        }
    }

    public final boolean canUndo(){
        return _top > 0;
    }

    public final boolean canRedo(){
        return _top < _stack.size();
    }

    public boolean isBatchEdit(){
        return _isBatchEdit;
    }

    public void beginBatchEdit(){
        _isBatchEdit = true;
    }

    public void endBatchEdit(){
        _isBatchEdit = false;
        _groupId++;
    }

    private abstract class Command{
        // Merge window in nanoseconds: 1,000,000,000 ns = 1 second.
        // (A previous comment said "750ms", which did not match this value.)
        public final static long MERGE_TIME = 1000000000;
        /** Start position of the edit */
        public int _start;
        /** Length of the affected segment */
        public int _length;
        /** Contents of the affected segment; null until lazily recorded */
        public String _data;
        /** Group ID. Commands of the same group are undone/redone as a unit */
        public int _group;

        public abstract void undo();
        public abstract void redo();
        /** Populates _data with the affected text */
        public abstract void recordData();
        public abstract int findUndoPosition();
        public abstract int findRedoPosition();

        /**
         * Attempts to merge in an edit. This will only be successful if the new
         * edit is continuous. See {@link UndoStack} for the requirements
         * of a continuous edit.
         *
         * @param start Start position of the new edit
         * @param length Length of the newly edited segment
         * @param time Timestamp when the new edit was made. There are no
         *          restrictions on the units used, as long as it is consistently used
         *          in the whole program
         *
         * @return Whether the merge was successful
         */
        public abstract boolean merge(int start, int length, long time);
    }

    private class InsertCommand extends Command{

        /**
         * Corresponds to an insertion of text of size length just before
         * start position.
         */
        public InsertCommand(int start, int length, int groupNumber){
            _start = start;
            _length = length;
            _group = groupNumber;
        }

        @Override
        public boolean merge(int newStart, int length, long time) {
            if(_lastEditTime < 0){
                return false;
            }

            // Continuous if within the merge window and starting where the
            // caret would be after this insert (_start + _length).
            if((time - _lastEditTime) < MERGE_TIME
               && newStart == _start + _length){
                _length += length;
                trimStack();
                return true;
            }

            return false;
        }

        @Override
        public void recordData() {
            //TODO handle memory allocation failure
            _data = _buf.subSequence(_start, _length).toString();
        }

        @Override
        public void undo() {
            if(_data == null){
                // Topmost lazy entry: capture the text, then undo cheaply by
                // moving the gap boundary (see class doc "Optimizaton notes").
                recordData();
                _buf.shiftGapStart(-_length);
            }
            else{
                //dummy timestamp of 0
                _buf.delete(_start, _length, 0 ,false);
            }
        }

        @Override
        public void redo() {
            //dummy timestamp of 0
            _buf.insert(_data.toCharArray(), _start, 0, false);
        }

        @Override
        public int findRedoPosition() {
            return _start + _length;
        }

        @Override
        public int findUndoPosition() {
            return _start;
        }
    }

    private class DeleteCommand extends Command{

        /**
         * Corresponds to an deletion of text of size length starting from
         * start position, inclusive.
         */
        public DeleteCommand(int start, int length, int seqNumber){
            _start = start;
            _length = length;
            _group = seqNumber;
        }

        @Override
        public boolean merge(int newStart, int length, long time) {
            if(_lastEditTime < 0){
                return false;
            }

            // Continuous if within the merge window and deleting backwards,
            // ending just before the previously deleted segment.
            if((time - _lastEditTime) < MERGE_TIME
               && newStart == _start - _length - length + 1){
                _start = newStart;
                _length += length;
                trimStack();
                return true;
            }

            return false;
        }

        @Override
        public void recordData() {
            //TODO handle memory allocation failure
            // Deleted chars are still in the buffer gap; copy them from there.
            _data = new String(_buf.gapSubSequence(_length));
        }

        @Override
        public void undo() {
            if(_data == null){
                // Topmost lazy entry: capture the deleted text from the gap,
                // then restore it by moving the gap boundary back.
                recordData();
                _buf.shiftGapStart(_length);
            }
            else{
                //dummy timestamp of 0
                _buf.insert(_data.toCharArray(), _start, 0, false);
            }
        }

        @Override
        public void redo() {
            //dummy timestamp of 0
            _buf.delete(_start, _length, 0, false);
        }

        @Override
        public int findRedoPosition() {
            return _start;
        }

        @Override
        public int findUndoPosition() {
            return _start + _length;
        }
    }// end inner class
}
|
package com.diguits.domainmodeldesigner.templateapplyconfig.modelmappers;
import com.google.inject.Inject;
import com.diguits.common.mapping.MappingContext;
import com.diguits.domainmodeldesigner.templateapplyconfig.models.TemplateApplyConfigModel;
import com.diguits.templateengine.contract.model.TemplateApplyConfig;
import com.diguits.common.mapping.MapperBase;
import java.util.UUID;
import com.diguits.common.mapping.IMapperProvider;
/**
 * Maps between the template-engine contract type {@link TemplateApplyConfig}
 * and its designer model counterpart {@link TemplateApplyConfigModel}.
 */
public class TemplateApplyConfigToTemplateApplyConfigModelMapper extends MapperBase<TemplateApplyConfig, TemplateApplyConfigModel> {

    IMapperProvider mapperProvider;

    @Inject
    public TemplateApplyConfigToTemplateApplyConfigModelMapper(IMapperProvider mapperProvider) {
        this.mapperProvider = mapperProvider;
    }

    /**
     * Copies the scalar fields and model-id list from the contract object onto
     * the model. Does nothing when either argument is {@code null}.
     */
    public void map(TemplateApplyConfig source, TemplateApplyConfigModel target, MappingContext context) {
        if (source == null || target == null) {
            return;
        }
        target.setRootOutputDir(source.getRootOutputDir());
        target.setWaitForBefore(source.getWaitForBefore());
        target.setOutputPathExpression(source.getOutputPathExpression());
        target.setInMemory(source.getInMemory());
        target.setConditionExpression(source.getConditionExpression());
        target.setOutputFilenameExpression(source.getOutputFilenameExpression());
        if (source.getModelIds() != null) {
            target.getModelIds().clear();
            target.getModelIds().addAll(source.getModelIds());
        }
    }

    /**
     * Copies the scalar fields, the template id and the model-id list from the
     * model back onto the contract object. Does nothing when either argument is
     * {@code null}.
     */
    public void mapBack(TemplateApplyConfigModel source, TemplateApplyConfig target, MappingContext context) {
        if (source == null || target == null) {
            return;
        }
        target.setRootOutputDir(source.getRootOutputDir());
        target.setWaitForBefore(source.getWaitForBefore());
        target.setOutputPathExpression(source.getOutputPathExpression());
        target.setInMemory(source.getInMemory());
        // NOTE(review): unlike the other fields this dereferences getTemplate()
        // without a null check and will NPE when the model has no template —
        // confirm that a non-null template is guaranteed here.
        target.setTemplateId(source.getTemplate().getId());
        target.setConditionExpression(source.getConditionExpression());
        target.setOutputFilenameExpression(source.getOutputFilenameExpression());
        if (source.getModelIds() != null) {
            target.getModelIds().clear();
            target.getModelIds().addAll(source.getModelIds());
        }
    }

    /** Target (model) side of the mapping. */
    @Override
    protected Class<TemplateApplyConfigModel> getToClass() {
        return TemplateApplyConfigModel.class;
    }

    /** Source (contract) side of the mapping. */
    @Override
    protected Class<TemplateApplyConfig> getFromClass() {
        return TemplateApplyConfig.class;
    }
}
|
/**
* Copyright (C) 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.dashboard.ui.config.treeNodes;
import org.jboss.dashboard.LocaleManager;
import org.jboss.dashboard.ui.UIServices;
import org.jboss.dashboard.ui.config.AbstractNode;
import org.jboss.dashboard.ui.config.TreeNode;
import org.jboss.dashboard.ui.config.components.panelInstance.PanelInstanceHandler;
import org.jboss.dashboard.workspace.PanelInstance;
import org.jboss.dashboard.workspace.WorkspaceImpl;
import org.slf4j.Logger;
import java.util.Locale;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
/**
 * Configuration-tree node representing a single panel instance inside a
 * workspace. Children (panels node plus the two shared property editors) are
 * wired once after CDI injection completes.
 */
public class PanelInstanceNode extends AbstractNode {

    @Inject
    private transient Logger log;

    @Inject
    private PanelInstanceHandler instanceHandler;

    private String workspaceId;
    private Long panelInstanceId;

    @Inject
    private PanelsNode panelsNode;

    @Inject
    private PanelAllPropertiesNode panelAllPropertiesNode;

    @Inject
    private PanelI18nPropertiesNode panelI18nPropertiesNode;

    @Inject
    private PanelInstanceSpecificPropertiesNode panelInstanceSpecificPropertiesNode;

    @Inject
    private PanelInstanceI18nPropertiesNode panelInstanceI18nPropertiesNode;

    /** Registers the fixed set of child nodes once injection has completed. */
    @PostConstruct
    protected void init() {
        TreeNode[] children = {panelsNode, panelAllPropertiesNode, panelI18nPropertiesNode};
        super.setSubnodes(children);
    }

    public PanelInstanceHandler getInstanceHandler() {
        return instanceHandler;
    }

    public void setInstanceHandler(PanelInstanceHandler instanceHandler) {
        this.instanceHandler = instanceHandler;
    }

    public String getWorkspaceId() {
        return workspaceId;
    }

    public void setWorkspaceId(String workspaceId) {
        this.workspaceId = workspaceId;
    }

    public Long getPanelInstanceId() {
        return panelInstanceId;
    }

    public void setPanelInstanceId(Long panelInstanceId) {
        this.panelInstanceId = panelInstanceId;
    }

    /** The node id is simply the panel instance id rendered as text. */
    public String getId() {
        return panelInstanceId.toString();
    }

    public String getIconId() {
        return "16x16/ico-menu_panel.png";
    }

    protected PanelInstanceSpecificPropertiesNode getNewPanelInstanceSpecificPropertiesNode() {
        return panelInstanceSpecificPropertiesNode;
    }

    protected PanelInstanceI18nPropertiesNode getNewPanelInstanceI18nPropertiesNode() {
        return panelInstanceI18nPropertiesNode;
    }

    /** Resolves the panel instance this node points at from the workspace registry. */
    public PanelInstance getPanelInstance() throws Exception {
        WorkspaceImpl workspace =
                (WorkspaceImpl) UIServices.lookup().getWorkspacesManager().getWorkspace(workspaceId);
        return workspace.getPanelInstance(panelInstanceId);
    }

    /** Localized panel title, or null when the instance cannot be resolved. */
    public String getName(Locale l) {
        try {
            PanelInstance instance = getPanelInstance();
            return (String) LocaleManager.lookup().localize(instance.getTitle());
        } catch (Exception e) {
            log.error("Error: ", e);
            return null;
        }
    }

    public String getDescription(Locale l) {
        // The description simply mirrors the name.
        return getName(l);
    }

    /** Points the edit handler at this node's workspace/instance before editing. */
    public boolean onEdit() {
        PanelInstanceHandler handler = getInstanceHandler();
        handler.setWorkspaceId(workspaceId);
        handler.setPanelInstanceId(panelInstanceId);
        return super.onEdit();
    }
}
|
package com.klaytn.caver.feature;
import com.klaytn.caver.Caver;
import com.klaytn.caver.crypto.KlayCredentials;
import com.klaytn.caver.methods.response.KlayAccount;
import com.klaytn.caver.methods.response.KlayTransactionReceipt;
import com.klaytn.caver.tx.gas.DefaultGasProvider;
import com.klaytn.caver.kct.KIP17;
import com.klaytn.caver.kct.KIP7;
import com.klaytn.caver.tx.manager.TransactionManager;
import org.junit.BeforeClass;
import org.junit.Test;
import org.web3j.protocol.core.DefaultBlockParameterName;
import org.web3j.tx.gas.StaticGasProvider;
import org.web3j.utils.Numeric;
import java.math.BigInteger;
import java.util.List;
import static com.klaytn.caver.base.Accounts.*;
import static com.klaytn.caver.base.LocalValues.LOCAL_CHAIN_ID;
import static org.junit.Assert.*;
public class KIP17Test {
// Expected collection metadata for the shared test contract.
private static final String sContractName = "NFTTest";
private static final String sContractSymbol = "NFT";
private static final String sTokenURI = "https://game.example/item-id-8u5h2m.json";
// Klaytn receipt status "0x1" means the transaction succeeded.
private static final String sSTATUS_SUCCESS = "0x1";
// Running supply counter, updated by mint tests and also used to pick fresh token ids.
private static BigInteger sTotalSupply = BigInteger.ZERO;
// Mint events report transfers from the zero address.
private String sZeroAddr = "0x0000000000000000000000000000000000000000";
static Caver mCaver;
// LUMAN deploys the contract; BRANDON and WAYNE act as secondary accounts.
static KlayCredentials mDeployerCredential, mTestCredential, mTestCredential2;
static TransactionManager mDeployerTxManager, mTesterTxManger, mTesterTxManger2;
// Address of the KIP-17 contract deployed once in preSetup() and shared by all tests.
static String mContractAddress;
@BeforeClass
public static void preSetup() throws Exception {
// Connect to the local Klaytn node and build one TransactionManager per account.
mCaver = Caver.build(Caver.DEFAULT_URL);
mDeployerCredential = LUMAN;
mDeployerTxManager = new TransactionManager.Builder(mCaver, LUMAN).setChaindId(LOCAL_CHAIN_ID).build();
mTestCredential = BRANDON;
mTesterTxManger = new TransactionManager.Builder(mCaver, BRANDON).setChaindId(LOCAL_CHAIN_ID).build();
mTestCredential2 = WAYNE;
mTesterTxManger2 = new TransactionManager.Builder(mCaver, WAYNE).setChaindId(LOCAL_CHAIN_ID).build();
// Deploy the single KIP-17 contract that all tests in this class operate on.
deployKIP17Contract();
}
// Deploys a fresh KIP-17 (NFT) contract from the deployer account and stores
// its address in mContractAddress for the rest of the suite. Fails the suite
// if deployment throws.
public static void deployKIP17Contract() {
try {
KIP17 token = KIP17.deploy(
mCaver,
mDeployerTxManager,
// Fixed, generous gas limit for contract creation.
new StaticGasProvider(DefaultGasProvider.GAS_PRICE, BigInteger.valueOf(6_000_000)),
sContractName,
sContractSymbol
).send();
mContractAddress = token.getContractAddress();
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
/**
 * Asserts that the receipt reports transaction success ("0x1").
 *
 * Fix: JUnit's assertEquals(message, expected, actual) takes the expected
 * value before the actual one; the original had them swapped, producing
 * misleading "expected X but was Y" failure messages.
 */
private void checkTxStatus(KlayTransactionReceipt.TransactionReceipt receipt) {
    assertEquals("Error Status - " + receipt.getErrorMessage() + "\n",
            sSTATUS_SUCCESS,
            receipt.getStatus());
}
/**
 * Asserts a Transfer event carries the expected from/to/tokenId values.
 * Fix: caller-supplied expectations now come first (assertEquals(expected, actual)).
 */
private void checkTransferEventValue(KIP17.TransferEventResponse response, String from, String to, BigInteger tokenId) {
    assertEquals(from, response.from);
    assertEquals(to, response.to);
    assertEquals(tokenId, response.tokenId);
}
/**
 * Asserts an Approval event carries the expected owner/approved/tokenId values.
 * Fix: expected values now precede actuals in assertEquals.
 */
private void checkApprovalEventValue(KIP17.ApprovalEventResponse response, String owner, String approved, BigInteger tokenId) {
    assertEquals(owner, response.owner);
    assertEquals(approved, response.approved);
    assertEquals(tokenId, response.tokenId);
}
/**
 * Asserts an ApprovalForAll event carries the expected owner/operator/approved values.
 * Fix: expected values now precede actuals in assertEquals.
 */
private void checkApprovalForAllEventValue(KIP17.ApprovalForAllEventResponse response, String owner, String operator, boolean approved) {
    assertEquals(owner, response.owner);
    assertEquals(operator, response.operator);
    assertEquals(approved, response.approved);
}
/** Asserts a Paused event was emitted by the expected account (expected first). */
private void checkPausedEventValue(KIP17.PausedEventResponse response, String account) {
    assertEquals(account, response.account);
}
/** Asserts an Unpaused event was emitted by the expected account (expected first). */
private void checkUnPausedEventValue(KIP17.UnpausedEventResponse response, String account) {
    assertEquals(account, response.account);
}
/** Asserts a PauserAdded event names the expected account (expected first). */
private void checkPauserAddedEventValue(KIP17.PauserAddedEventResponse response, String account) {
    assertEquals(account, response.account);
}
/** Asserts a PauserRemoved event names the expected account (expected first). */
private void checkPauserRemovedEventValue(KIP17.PauserRemovedEventResponse response, String account) {
    assertEquals(account, response.account);
}
/** Asserts a MinterAdded event names the expected account (expected first). */
private void checkMinterAddedEventValue(KIP17.MinterAddedEventResponse response, String account) {
    assertEquals(account, response.account);
}
/** Asserts a MinterRemoved event names the expected account (expected first). */
private void checkMinterRemovedEventValue(KIP17.MinterRemovedEventResponse response, String account) {
    assertEquals(account, response.account);
}
//KCT-032
// Deploys a second contract instance and verifies the resulting on-chain
// account is a smart-contract account (accType 0x02).
@Test
public void deployContract() {
try {
KIP17 token = KIP17.deploy(
mCaver,
mDeployerTxManager,
new StaticGasProvider(DefaultGasProvider.GAS_PRICE, BigInteger.valueOf(6_000_000)),
sContractName,
sContractSymbol
).send();
String contractAddress = token.getContractAddress();
KlayAccount response = mCaver.klay().getAccount(contractAddress, DefaultBlockParameterName.LATEST).send();
KlayAccount.Account account = response.getResult();
// 0x02 is the smart-contract account type on Klaytn.
assertEquals(0x02, account.getAccType());
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-033
// name(): the contract must report the name it was deployed with.
@Test
public void name() {
KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
try {
String name = tokenHandler.name().send();
assertEquals(name, sContractName);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-034
// symbol(): the contract must report the symbol it was deployed with.
@Test
public void symbol() {
try {
KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String symbol = tokenHandler.symbol().send();
assertEquals(symbol, sContractSymbol);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-035
// totalSupply(): minting one token must increase the supply by exactly one.
// Also updates the shared sTotalSupply counter used by later mint tests.
@Test
public void totalSupply() {
KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
try {
BigInteger preTotalCount = tokenHandler.totalSupply().send();
BigInteger tokenId = BigInteger.valueOf(100000);
KlayTransactionReceipt.TransactionReceipt mintReceipt = tokenHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
// A mint emits Transfer(zeroAddress -> owner, tokenId).
List<KIP17.TransferEventResponse> transferEventResponses = tokenHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
sTotalSupply = tokenHandler.totalSupply().send();
assertEquals(preTotalCount.add(BigInteger.ONE), sTotalSupply);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-036
// balanceOf(): minting one token to an address must increase its balance by one.
@Test
public void balanceOf() {
KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
try {
BigInteger preBalance = tokenHandler.balanceOf(ownerAddress).send();
BigInteger tokenId = BigInteger.valueOf(100001);
KlayTransactionReceipt.TransactionReceipt mintReceipt = tokenHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = tokenHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
BigInteger currentBalance = tokenHandler.balanceOf(ownerAddress).send();
assertEquals(preBalance.add(BigInteger.ONE), currentBalance);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-037
// ownerOf(): after minting a token to a user, the contract must report that
// user as the token's owner.
@Test
public void ownerOf() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String userAddress = mTesterTxManger.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(6000);
KlayTransactionReceipt.TransactionReceipt receipt = ownerHandler.mint(userAddress, tokenId).send();
checkTxStatus(receipt);
List<KIP17.TransferEventResponse> list = ownerHandler.getTransferEvents(receipt);
checkTransferEventValue(list.get(0), sZeroAddr, userAddress, tokenId);
String address = ownerHandler.ownerOf(tokenId).send();
assertEquals(userAddress, address);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-038
// paused()/pause()/unpause(): toggles the contract pause flag both ways and
// checks the Paused/Unpaused events and the resulting state each time.
@Test
public void pausedFeature() {
KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
try {
// Precondition: start from an unpaused contract.
boolean isPaused = tokenHandler.paused().send();
if(isPaused) {
KlayTransactionReceipt.TransactionReceipt preUnpauseReceipt = tokenHandler.unpause().send();
checkTxStatus(preUnpauseReceipt);
assertFalse(tokenHandler.paused().send());
}
//Pause Test
KlayTransactionReceipt.TransactionReceipt pausedReceipt = tokenHandler.pause().send();
checkTxStatus(pausedReceipt);
List<KIP17.PausedEventResponse> pausedEventResponses = tokenHandler.getPausedEvents(pausedReceipt);
checkPausedEventValue(pausedEventResponses.get(0), ownerAddress);
isPaused = tokenHandler.paused().send();
assertTrue(isPaused);
//Unpause Test
KlayTransactionReceipt.TransactionReceipt unPausedReceipt = tokenHandler.unpause().send();
checkTxStatus(unPausedReceipt);
List<KIP17.UnpausedEventResponse> unPausedEventResponses = tokenHandler.getUnpausedEvents(unPausedReceipt);
checkUnPausedEventValue(unPausedEventResponses.get(0), ownerAddress);
isPaused = tokenHandler.paused().send();
assertFalse(isPaused);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-039
/**
 * addPauser(): the contract owner grants the pauser role to a second account,
 * verifies the PauserAdded event and state, then the second account renounces
 * the role again so later tests start from a clean state.
 *
 * Fix: in the precondition branch, renouncePauser() is sent from the *user*
 * account (pauserHandler wraps mTesterTxManger), so the removed pauser is
 * userAddress. The original asserted the event and the role-check against
 * ownerAddress, which would fail whenever that branch ran (the deployer
 * remains a pauser throughout).
 */
@Test
public void addPauser() {
    KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    KIP17 pauserHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String userAddress = mTesterTxManger.getDefaultAddress();
    try {
        // Precondition: the test account must not already hold the pauser role.
        boolean isPauser = pauserHandler.isPauser(userAddress).send();
        if (isPauser) {
            KlayTransactionReceipt.TransactionReceipt renouncePauserReceipt = pauserHandler.renouncePauser().send();
            checkTxStatus(renouncePauserReceipt);
            List<KIP17.PauserRemovedEventResponse> pauserRemovedEventResponses = pauserHandler.getPauserRemovedEvents(renouncePauserReceipt);
            checkPauserRemovedEventValue(pauserRemovedEventResponses.get(0), userAddress);
            assertFalse(pauserHandler.isPauser(userAddress).send());
        }
        //Test
        KlayTransactionReceipt.TransactionReceipt receipt = ownerHandler.addPauser(userAddress).send();
        checkTxStatus(receipt);
        List<KIP17.PauserAddedEventResponse> pauserAddedEventResponses = ownerHandler.getPauserAddedEvents(receipt);
        checkPauserAddedEventValue(pauserAddedEventResponses.get(0), userAddress);
        isPauser = ownerHandler.isPauser(mTesterTxManger.getDefaultAddress()).send();
        assertTrue(isPauser);
        //reset
        KlayTransactionReceipt.TransactionReceipt renounceReceipt = pauserHandler.renouncePauser().send();
        checkTxStatus(renounceReceipt);
        List<KIP17.PauserRemovedEventResponse> pauserRemovedEventResponses = ownerHandler.getPauserRemovedEvents(renounceReceipt);
        checkPauserRemovedEventValue(pauserRemovedEventResponses.get(0), userAddress);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
//KCT-040
// renouncePauser(): a pauser can remove its own pauser role; the PauserRemoved
// event and the role check must reflect the removal.
@Test
public void renouncePauser() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 pauserHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String pauserAddress = mTesterTxManger.getDefaultAddress();
try {
// Precondition: ensure the test account currently holds the pauser role.
boolean isPauser = ownerHandler.isPauser(pauserAddress).send();
if(!isPauser) {
KlayTransactionReceipt.TransactionReceipt addPauserReceipt = ownerHandler.addPauser(pauserAddress).send();
checkTxStatus(addPauserReceipt);
List<KIP17.PauserAddedEventResponse> list = ownerHandler.getPauserAddedEvents(addPauserReceipt);
checkPauserAddedEventValue(list.get(0), pauserAddress);
}
//Test
KlayTransactionReceipt.TransactionReceipt receipt = pauserHandler.renouncePauser().send();
checkTxStatus(receipt);
List<KIP17.PauserRemovedEventResponse> pauserRemovedEventResponses = ownerHandler.getPauserRemovedEvents(receipt);
checkPauserRemovedEventValue(pauserRemovedEventResponses.get(0), pauserAddress);
isPauser = ownerHandler.isPauser(mTesterTxManger.getDefaultAddress()).send();
assertFalse(isPauser);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-041
// addMinter(): the owner grants the minter role to a second account, verifies
// the MinterAdded event and state, then the account renounces the role again.
@Test
public void addMinter() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 minterHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String minterAddress = mTesterTxManger.getDefaultAddress();
try {
// Precondition: the test account must not already hold the minter role.
boolean isMinter = ownerHandler.isMinter(mTesterTxManger.getDefaultAddress()).send();
if(isMinter) {
KlayTransactionReceipt.TransactionReceipt renounceMinterReceipt = minterHandler.renounceMinter().send();
checkTxStatus(renounceMinterReceipt);
List<KIP17.MinterRemovedEventResponse> removedEventResponses = minterHandler.getMinterRemovedEvents(renounceMinterReceipt);
checkMinterRemovedEventValue(removedEventResponses.get(0), minterAddress);
}
//Test
KlayTransactionReceipt.TransactionReceipt addMinterReceipt = ownerHandler.addMinter(mTesterTxManger.getDefaultAddress()).send();
checkTxStatus(addMinterReceipt);
isMinter = ownerHandler.isMinter(mTesterTxManger.getDefaultAddress()).send();
assertTrue(isMinter);
List<KIP17.MinterAddedEventResponse> minterAddedEventResponses = ownerHandler.getMinterAddedEvents(addMinterReceipt);
checkMinterAddedEventValue(minterAddedEventResponses.get(0), minterAddress);
//reset
KlayTransactionReceipt.TransactionReceipt renounceMinterReceipt = minterHandler.renounceMinter().send();
checkTxStatus(renounceMinterReceipt);
List<KIP17.MinterRemovedEventResponse> removedEventResponses = minterHandler.getMinterRemovedEvents(renounceMinterReceipt);
checkMinterRemovedEventValue(removedEventResponses.get(0), minterAddress);
isMinter = ownerHandler.isMinter(mTesterTxManger.getDefaultAddress()).send();
assertFalse(isMinter);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-042
// renounceMinter(): a minter can remove its own minter role; the MinterRemoved
// event must name the renouncing account.
@Test
public void renounceMinter() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 minterHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String minterAddress = mTesterTxManger.getDefaultAddress();
try {
// Precondition: ensure the test account currently holds the minter role.
boolean isMinter = ownerHandler.isMinter(minterAddress).send();
if(!isMinter) {
KlayTransactionReceipt.TransactionReceipt addMinterReceipt = ownerHandler.addMinter(minterAddress).send();
checkTxStatus(addMinterReceipt);
assertTrue(ownerHandler.isMinter(minterAddress).send());
}
//Test
KlayTransactionReceipt.TransactionReceipt renounceMinterReceipt = minterHandler.renounceMinter().send();
checkTxStatus(renounceMinterReceipt);
List<KIP17.MinterRemovedEventResponse> minterRemovedEventResponses = minterHandler.getMinterRemovedEvents(renounceMinterReceipt);
checkMinterRemovedEventValue(minterRemovedEventResponses.get(0), minterAddress);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-043
// mint(): minting assigns ownership to the recipient and bumps total supply.
// NOTE(review): uses sTotalSupply as a "fresh" token id -- assumes no earlier
// test minted that id; confirm test ordering if this becomes flaky.
@Test
public void mint() {
KIP17 minterHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
try {
BigInteger tokenId = sTotalSupply;
BigInteger preTotalSupply = minterHandler.totalSupply().send();
//Test
KlayTransactionReceipt.TransactionReceipt receipt = minterHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(receipt);
List<KIP17.TransferEventResponse> transferEventResponses = minterHandler.getTransferEvents(receipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
String tokenOwnerAddr = minterHandler.ownerOf(tokenId).send();
assertEquals(ownerAddress, tokenOwnerAddr);
BigInteger total = minterHandler.totalSupply().send();
assertEquals(preTotalSupply.add(BigInteger.ONE), total);
sTotalSupply = total;
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-044
// mintWithTokenURI(): minting with a URI must store the URI, assign ownership,
// and bump total supply.
@Test
public void mintWithTokenURI() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddr = mDeployerTxManager.getDefaultAddress();
try {
// Uses the running supply counter as a fresh token id (see mint()).
BigInteger tokenId = sTotalSupply;
BigInteger preTotalSupply = ownerHandler.totalSupply().send();
//Test
KlayTransactionReceipt.TransactionReceipt receipt = ownerHandler.mintWithTokenURI(mDeployerTxManager.getDefaultAddress(), tokenId, sTokenURI).send();
checkTxStatus(receipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(receipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddr, tokenId);
String uri = ownerHandler.tokenURI(tokenId).send();
assertEquals(uri, sTokenURI);
String tokenOwnerAddr = ownerHandler.ownerOf(tokenId).send();
assertEquals(ownerAddr, tokenOwnerAddr);
BigInteger total = ownerHandler.totalSupply().send();
assertEquals(preTotalSupply.add(BigInteger.ONE), total);
sTotalSupply = total;
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-045
// tokenOfOwnerByIndex(): tokens minted to one owner must be enumerable per
// owner in mint order. Mints to a hard-coded address that no other test uses
// so the owner's token list is predictable.
@Test
public void tokenOfOwnerByIndex() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 tester2Handler = KIP17.load(mContractAddress, mCaver, mTesterTxManger2, new DefaultGasProvider());
String userAddress = "0x25925f77ea2c3b82a1ab45858558076fdc44fcc4";
BigInteger[] tokenIDArr = new BigInteger[] {BigInteger.valueOf(1000), BigInteger.valueOf(1001), BigInteger.valueOf(1002)};
try {
KlayTransactionReceipt.TransactionReceipt mintReceipt1 = ownerHandler.mint(userAddress, tokenIDArr[0]).send();
checkTxStatus(mintReceipt1);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt1);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, userAddress, tokenIDArr[0]);
KlayTransactionReceipt.TransactionReceipt mintReceipt2 = ownerHandler.mint(userAddress, tokenIDArr[1]).send();
checkTxStatus(mintReceipt2);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt2);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, userAddress, tokenIDArr[1]);
KlayTransactionReceipt.TransactionReceipt mintReceipt3 = ownerHandler.mint(userAddress, tokenIDArr[2]).send();
checkTxStatus(mintReceipt3);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt3);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, userAddress, tokenIDArr[2]);
//Test
BigInteger tokenID_1 = tester2Handler.tokenOfOwnerByIndex(userAddress, BigInteger.ZERO).send();
assertEquals(tokenID_1, tokenIDArr[0]);
BigInteger tokenID_2 = tester2Handler.tokenOfOwnerByIndex(userAddress, BigInteger.ONE).send();
assertEquals(tokenID_2, tokenIDArr[1]);
BigInteger tokenID_3 = tester2Handler.tokenOfOwnerByIndex(userAddress, BigInteger.valueOf(2)).send();
assertEquals(tokenID_3, tokenIDArr[2]);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-046
// tokenByIndex(): the last three global enumeration slots must hold the three
// tokens just minted, in mint order.
@Test
public void tokenByIndex() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
try {
// Derive three unused token ids from the current total supply.
BigInteger total = ownerHandler.totalSupply().send();
BigInteger[] tokenIdArr = new BigInteger[]{total, total.add(BigInteger.ONE), total.add(BigInteger.valueOf(2))};
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(mDeployerTxManager.getDefaultAddress(), tokenIdArr[0]).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[0]);
mintReceipt = ownerHandler.mint(mDeployerTxManager.getDefaultAddress(), tokenIdArr[1]).send();
checkTxStatus(mintReceipt);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[1]);
mintReceipt = ownerHandler.mint(mDeployerTxManager.getDefaultAddress(), tokenIdArr[2]).send();
checkTxStatus(mintReceipt);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[2]);
total = ownerHandler.totalSupply().send();
//Test
// Walk backwards from the last index: newest token first.
BigInteger findIndex = total.subtract(BigInteger.ONE);
BigInteger tokenId = ownerHandler.tokenByIndex(findIndex).send();
assertEquals(tokenId, tokenIdArr[2]);
findIndex = findIndex.subtract(BigInteger.ONE);
tokenId = ownerHandler.tokenByIndex(findIndex).send();
assertEquals(tokenId, tokenIdArr[1]);
findIndex = findIndex.subtract(BigInteger.ONE);
tokenId = ownerHandler.tokenByIndex(findIndex).send();
assertEquals(tokenId, tokenIdArr[0]);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-047
// transferFrom(): after the owner transfers a token, ownerOf must report the
// recipient, and the Transfer event must carry the right addresses.
@Test
public void transferFrom() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String userAddress = mTesterTxManger.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(2000);
KlayTransactionReceipt.TransactionReceipt receipt = ownerHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(receipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(receipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
KlayTransactionReceipt.TransactionReceipt transactionReceipt = ownerHandler.transferFrom(ownerAddress, userAddress, tokenId).send();
checkTxStatus(transactionReceipt);
transferEventResponses = ownerHandler.getTransferEvents(transactionReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenId);
String address = ownerHandler.ownerOf(tokenId).send();
assertEquals(address, userAddress);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-048
// safeTransferFrom() (no data payload): same expectations as transferFrom.
@Test
public void safeTransferFrom() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String userAddress = mTesterTxManger.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(2001);
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
KlayTransactionReceipt.TransactionReceipt transferReceipt = ownerHandler.safeTransferFrom(ownerAddress, userAddress, tokenId).send();
checkTxStatus(transferReceipt);
transferEventResponses= ownerHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenId);
String address = ownerHandler.ownerOf(tokenId).send();
assertEquals(address, userAddress);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-049
// safeTransferFrom() with an extra byte payload: ownership must still move to
// the recipient.
@Test
public void safeTransferFromWithData() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String userAddress = mTesterTxManger.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(2002);
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
// Arbitrary payload forwarded with the safe transfer.
byte[] data = "buffered data".getBytes();
KlayTransactionReceipt.TransactionReceipt transferReceipt = ownerHandler.safeTransferFrom(ownerAddress, userAddress, tokenId, data).send();
checkTxStatus(transferReceipt);
transferEventResponses = ownerHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenId);
String address = ownerHandler.ownerOf(tokenId).send();
assertEquals(address, userAddress);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-050
// getApproved(): after approving a user for a token, getApproved must return
// that user's address.
@Test
public void getApproved() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String userAddress = mTesterTxManger.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(6001);
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> list = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(list.get(0), sZeroAddr, ownerAddress, tokenId);
KlayTransactionReceipt.TransactionReceipt approvedReceipt = ownerHandler.approve(userAddress, tokenId).send();
checkTxStatus(approvedReceipt);
List<KIP17.ApprovalEventResponse> approvalEventResponses = ownerHandler.getApprovalEvents(approvedReceipt);
checkApprovalEventValue(approvalEventResponses.get(0), ownerAddress, userAddress, tokenId);
String address = ownerHandler.getApproved(tokenId).send();
assertEquals(userAddress, address);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-051
// isApprovedForAll(): setApprovalForAll toggles the operator flag both ways;
// state and ApprovalForAll events must match each toggle.
@Test
public void isApprovedForAll() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String userAddress = mTesterTxManger.getDefaultAddress();
try {
KlayTransactionReceipt.TransactionReceipt receipt = ownerHandler.setApprovalForAll(userAddress, true).send();
checkTxStatus(receipt);
List<KIP17.ApprovalForAllEventResponse> approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(receipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, userAddress, true);
boolean isApprovedAll = ownerHandler.isApprovedForAll(ownerAddress, userAddress).send();
assertTrue(isApprovedAll);
// Revoke and re-check so the contract returns to its initial state.
receipt = ownerHandler.setApprovalForAll(userAddress, false).send();
checkTxStatus(receipt);
approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(receipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, userAddress, false);
isApprovedAll = ownerHandler.isApprovedForAll(ownerAddress, userAddress).send();
assertFalse(isApprovedAll);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-052
/**
 * approve(): the token owner approves an operator for one token; the Approval
 * event and getApproved() must both report the operator.
 *
 * Fix: removed the unused local {@code operatorHandler} -- the test only ever
 * sends transactions through the owner's handler.
 */
@Test
public void approve() {
    KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String operatorAddress = mTesterTxManger.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(4000);
        //mint token to owner address
        KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenId).send();
        checkTxStatus(mintReceipt);
        List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
        checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
        //test
        KlayTransactionReceipt.TransactionReceipt approveReceipt = ownerHandler.approve(operatorAddress, tokenId).send();
        checkTxStatus(approveReceipt);
        List<KIP17.ApprovalEventResponse> approvalEventResponses = ownerHandler.getApprovalEvents(approveReceipt);
        checkApprovalEventValue(approvalEventResponses.get(0), ownerAddress, operatorAddress, tokenId);
        String approvedAddress = ownerHandler.getApproved(tokenId).send();
        assertEquals(operatorAddress, approvedAddress);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
//KCT-053
@Test
public void setApprovalForAll() {
    // setApprovalForAll() grants/revokes operator rights over ALL tokens of the owner;
    // verifies the ApprovalForAll event and isApprovedForAll() for both states.
    KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String operatorAddress = mTesterTxManger.getDefaultAddress();
    try {
        BigInteger[] tokenIdArr = new BigInteger[] {BigInteger.valueOf(4100), BigInteger.valueOf(4200)};
        // Mint two tokens to the owner so the operator approval spans multiple tokens.
        KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[0]).send();
        checkTxStatus(mintReceipt);
        List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
        checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[0]);
        mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[1]).send();
        checkTxStatus(mintReceipt);
        transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
        checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[1]);
        // Grant operator rights and verify event + state.
        KlayTransactionReceipt.TransactionReceipt approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, true).send();
        checkTxStatus(approvedReceipt);
        List<KIP17.ApprovalForAllEventResponse> approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
        checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, true);
        boolean isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
        assertTrue(isApproved);
        // Revoke again so later tests start from a clean approval state.
        approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, false).send();
        checkTxStatus(approvedReceipt);
        approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
        checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, false);
        isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
        assertFalse(isApproved);
    } catch (Exception e) {
        e.printStackTrace();
        // Surface the cause in the test report instead of a bare fail().
        fail(e.getMessage());
    }
}
//KCT-054
@Test
public void burn() {
    // burn() must emit an owner -> zero-address Transfer event and decrement the
    // owner's balance by exactly one.
    KIP17 handler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(9999);
        // Mint the token that will be burned and confirm it landed with the owner.
        KlayTransactionReceipt.TransactionReceipt receipt = handler.mint(ownerAddress, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.TransferEventResponse> events = handler.getTransferEvents(receipt);
        checkTransferEventValue(events.get(0), sZeroAddr, ownerAddress, tokenId);
        String address = handler.ownerOf(tokenId).send();
        assertEquals(ownerAddress, address);
        BigInteger balanceBefore = handler.balanceOf(ownerAddress).send();
        // Burn it and verify the Transfer-to-zero event plus the balance drop.
        receipt = handler.burn(tokenId).send();
        checkTxStatus(receipt);
        events = handler.getTransferEvents(receipt);
        checkTransferEventValue(events.get(0), ownerAddress, sZeroAddr, tokenId);
        BigInteger balanceAfter = handler.balanceOf(ownerAddress).send();
        assertEquals(balanceBefore.subtract(BigInteger.ONE), balanceAfter);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
//KCT-055
@Test
public void supportsInterface() {
    // KIP-13 interface detection: the deployed KIP17 token must report support for
    // every KIP17 sub-interface it implements and reject an unknown interface id.
    final String INTERFACE_ID_KIP13 = "0x01ffc9a7";
    final String INTERFACE_ID_KIP17 = "0x80ac58cd";
    final String INTERFACE_ID_KIP17_PAUSABLE = "0x4d5507ff";
    final String INTERFACE_ID_KIP17_BURNABLE = "0x42966c68";
    final String INTERFACE_ID_KIP17_MINTABLE = "0xeab83e20";
    final String INTERFACE_ID_KIP17_METADATA = "0x5b5e139f";
    final String INTERFACE_ID_KIP17_METADATA_MINTABLE = "0xfac27f46";
    final String INTERFACE_ID_KIP17_ENUMERABLE = "0x780e9d63";
    final String INTERFACE_ID_FALSE = "0xFFFFFFFF";
    try {
        // Bug fix: this KIP17 test previously loaded the contract through the KIP7
        // wrapper (copy-paste from the KIP7 suite); use the KIP17 handler instead.
        KIP17 tokenHandler_owner = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
        boolean isSupported_KIP13 = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP13)).send();
        assertTrue(isSupported_KIP13);
        boolean isSupported_KIP17_PAUSABLE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_PAUSABLE)).send();
        assertTrue(isSupported_KIP17_PAUSABLE);
        boolean isSupported_KIP17_BURNABLE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_BURNABLE)).send();
        assertTrue(isSupported_KIP17_BURNABLE);
        boolean isSupported_KIP17_MINTABLE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_MINTABLE)).send();
        assertTrue(isSupported_KIP17_MINTABLE);
        boolean isSupported_KIP17_METADATA = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_METADATA)).send();
        assertTrue(isSupported_KIP17_METADATA);
        boolean isSupported_KIP17_METADATA_MINTABLE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_METADATA_MINTABLE)).send();
        assertTrue(isSupported_KIP17_METADATA_MINTABLE);
        boolean isSupported_KIP17_ENUMERABLE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17_ENUMERABLE)).send();
        assertTrue(isSupported_KIP17_ENUMERABLE);
        boolean isSupported_KIP17 = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_KIP17)).send();
        assertTrue(isSupported_KIP17);
        // An arbitrary unknown id must be rejected (per KIP-13, 0xffffffff is always false).
        boolean isSupported_FALSE = tokenHandler_owner.supportsInterface(Numeric.hexStringToByteArray(INTERFACE_ID_FALSE)).send();
        assertFalse(isSupported_FALSE);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-056
@Test
public void getTransferEventTest() {
    // Verifies that getTransferEvents() decodes a mint receipt into exactly one
    // Transfer event with a fully populated log and the expected from/to/tokenId.
    KIP17 tokenHandler_owner = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    String toAddr = mTesterTxManger.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(2222);
        KlayTransactionReceipt.TransactionReceipt receipt = tokenHandler_owner.mint(toAddr, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.TransferEventResponse> event = tokenHandler_owner.getTransferEvents(receipt);
        // JUnit convention is assertEquals(expected, actual); the original had the
        // arguments swapped, which produces misleading failure messages.
        assertEquals(1, event.size());
        assertNotNull(event.get(0).log.getLogIndex());
        assertNotNull(event.get(0).log.getTransactionIndex());
        assertNotNull(event.get(0).log.getTransactionHash());
        assertNotNull(event.get(0).log.getBlockHash());
        assertNotNull(event.get(0).log.getBlockNumber());
        assertNotNull(event.get(0).log.getAddress());
        assertNotNull(event.get(0).log.getData());
        assertNotNull(event.get(0).log.getTopics());
        assertEquals(sZeroAddr, event.get(0).from);
        assertEquals(toAddr, event.get(0).to);
        assertEquals(tokenId, event.get(0).tokenId);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-057
@Test
public void getApprovalEventTest() {
    // Verifies that getApprovalEvents() decodes an approve receipt into exactly one
    // Approval event with a fully populated log and the expected owner/approved/tokenId.
    KIP17 tokenHandler_owner = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String userAddress = mTesterTxManger.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(2223);
        KlayTransactionReceipt.TransactionReceipt receipt = tokenHandler_owner.mint(ownerAddress, tokenId).send();
        checkTxStatus(receipt);
        KlayTransactionReceipt.TransactionReceipt approvedReceipt = tokenHandler_owner.approve(userAddress, tokenId).send();
        checkTxStatus(approvedReceipt);
        List<KIP17.ApprovalEventResponse> event = tokenHandler_owner.getApprovalEvents(approvedReceipt);
        // assertEquals(expected, actual) order fixed for meaningful failure messages.
        assertEquals(1, event.size());
        assertNotNull(event.get(0).log.getLogIndex());
        assertNotNull(event.get(0).log.getTransactionIndex());
        assertNotNull(event.get(0).log.getTransactionHash());
        assertNotNull(event.get(0).log.getBlockHash());
        assertNotNull(event.get(0).log.getBlockNumber());
        assertNotNull(event.get(0).log.getAddress());
        assertNotNull(event.get(0).log.getData());
        assertNotNull(event.get(0).log.getTopics());
        assertEquals(ownerAddress, event.get(0).owner);
        assertEquals(userAddress, event.get(0).approved);
        assertEquals(tokenId, event.get(0).tokenId);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-058
@Test
public void getPausedEventTest() {
    // Pauses then unpauses the token and verifies the Paused/Unpaused events,
    // including all raw log fields and the emitting account.
    KIP17 tokenHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    try {
        //Check Paused Event
        KlayTransactionReceipt.TransactionReceipt pausedReceipt = tokenHandler.pause().send();
        // Consistency fix: every other test asserts the receipt status; this one didn't.
        checkTxStatus(pausedReceipt);
        List<KIP17.PausedEventResponse> paused_events = tokenHandler.getPausedEvents(pausedReceipt);
        assertEquals(1, paused_events.size());
        assertNotNull(paused_events.get(0).log.getLogIndex());
        assertNotNull(paused_events.get(0).log.getTransactionIndex());
        assertNotNull(paused_events.get(0).log.getTransactionHash());
        assertNotNull(paused_events.get(0).log.getBlockHash());
        assertNotNull(paused_events.get(0).log.getBlockNumber());
        assertNotNull(paused_events.get(0).log.getAddress());
        assertNotNull(paused_events.get(0).log.getData());
        assertNotNull(paused_events.get(0).log.getTopics());
        assertEquals(mDeployerTxManager.getDefaultAddress(), paused_events.get(0).account);
        //Check UnPaused Event
        KlayTransactionReceipt.TransactionReceipt unPausedReceipt = tokenHandler.unpause().send();
        checkTxStatus(unPausedReceipt);
        List<KIP17.UnpausedEventResponse> unpaused_events = tokenHandler.getUnpausedEvents(unPausedReceipt);
        assertEquals(1, unpaused_events.size());
        assertNotNull(unpaused_events.get(0).log.getLogIndex());
        assertNotNull(unpaused_events.get(0).log.getTransactionIndex());
        assertNotNull(unpaused_events.get(0).log.getTransactionHash());
        assertNotNull(unpaused_events.get(0).log.getBlockHash());
        assertNotNull(unpaused_events.get(0).log.getBlockNumber());
        assertNotNull(unpaused_events.get(0).log.getAddress());
        assertNotNull(unpaused_events.get(0).log.getData());
        assertNotNull(unpaused_events.get(0).log.getTopics());
        assertEquals(mDeployerTxManager.getDefaultAddress(), unpaused_events.get(0).account);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-059
@Test
public void getPauserRoleEvents() {
    // PauserAdded / PauserRemoved events: the owner grants the pauser role, the
    // grantee renounces it, and every event/log field must be populated.
    KIP17 tokenHandler_owner = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    KIP17 tokenHandler_pauser = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
    String userAddr = mTestCredential.getAddress();
    try {
        //Check PauserAdded Event
        KlayTransactionReceipt.TransactionReceipt addPauserReceipt = tokenHandler_owner.addPauser(userAddr).send();
        // Consistency fix: assert transaction status like the rest of the suite.
        checkTxStatus(addPauserReceipt);
        List<KIP17.PauserAddedEventResponse> addedEvents = tokenHandler_owner.getPauserAddedEvents(addPauserReceipt);
        assertEquals(1, addedEvents.size());
        assertNotNull(addedEvents.get(0).log.getLogIndex());
        assertNotNull(addedEvents.get(0).log.getTransactionIndex());
        assertNotNull(addedEvents.get(0).log.getTransactionHash());
        assertNotNull(addedEvents.get(0).log.getBlockHash());
        assertNotNull(addedEvents.get(0).log.getBlockNumber());
        assertNotNull(addedEvents.get(0).log.getAddress());
        assertNotNull(addedEvents.get(0).log.getData());
        assertNotNull(addedEvents.get(0).log.getTopics());
        assertEquals(userAddr, addedEvents.get(0).account);
        //Check PauserRemoved Event
        KlayTransactionReceipt.TransactionReceipt renounceReceipt = tokenHandler_pauser.renouncePauser().send();
        checkTxStatus(renounceReceipt);
        List<KIP17.PauserRemovedEventResponse> removedEvents = tokenHandler_owner.getPauserRemovedEvents(renounceReceipt);
        assertEquals(1, removedEvents.size());
        assertNotNull(removedEvents.get(0).log.getLogIndex());
        assertNotNull(removedEvents.get(0).log.getTransactionIndex());
        assertNotNull(removedEvents.get(0).log.getTransactionHash());
        assertNotNull(removedEvents.get(0).log.getBlockHash());
        assertNotNull(removedEvents.get(0).log.getBlockNumber());
        assertNotNull(removedEvents.get(0).log.getAddress());
        assertNotNull(removedEvents.get(0).log.getData());
        assertNotNull(removedEvents.get(0).log.getTopics());
        assertEquals(userAddr, removedEvents.get(0).account);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-060
@Test
public void getMinterRoleEvents() {
    // MinterAdded / MinterRemoved events: the owner grants the minter role, the
    // grantee renounces it, and every event/log field must be populated.
    KIP17 tokenHandler_owner = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    KIP17 tokenHandler_minter = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
    String minter = mTesterTxManger.getDefaultAddress();
    try {
        //Check MinterAdded Event
        KlayTransactionReceipt.TransactionReceipt addMinterReceipt = tokenHandler_owner.addMinter(minter).send();
        // Consistency fix: assert transaction status like the rest of the suite.
        checkTxStatus(addMinterReceipt);
        List<KIP17.MinterAddedEventResponse> minterAddEvent = tokenHandler_owner.getMinterAddedEvents(addMinterReceipt);
        assertEquals(1, minterAddEvent.size());
        assertNotNull(minterAddEvent.get(0).log.getLogIndex());
        assertNotNull(minterAddEvent.get(0).log.getTransactionIndex());
        assertNotNull(minterAddEvent.get(0).log.getTransactionHash());
        assertNotNull(minterAddEvent.get(0).log.getBlockHash());
        assertNotNull(minterAddEvent.get(0).log.getBlockNumber());
        assertNotNull(minterAddEvent.get(0).log.getAddress());
        assertNotNull(minterAddEvent.get(0).log.getData());
        assertNotNull(minterAddEvent.get(0).log.getTopics());
        assertEquals(minter, minterAddEvent.get(0).account);
        //Check MinterRemoved Event
        KlayTransactionReceipt.TransactionReceipt renounceMinterReceipt = tokenHandler_minter.renounceMinter().send();
        checkTxStatus(renounceMinterReceipt);
        List<KIP17.MinterRemovedEventResponse> renounceMinterEvent = tokenHandler_minter.getMinterRemovedEvents(renounceMinterReceipt);
        assertEquals(1, renounceMinterEvent.size());
        assertNotNull(renounceMinterEvent.get(0).log.getLogIndex());
        assertNotNull(renounceMinterEvent.get(0).log.getTransactionIndex());
        assertNotNull(renounceMinterEvent.get(0).log.getTransactionHash());
        assertNotNull(renounceMinterEvent.get(0).log.getBlockHash());
        assertNotNull(renounceMinterEvent.get(0).log.getBlockNumber());
        assertNotNull(renounceMinterEvent.get(0).log.getAddress());
        assertNotNull(renounceMinterEvent.get(0).log.getData());
        assertNotNull(renounceMinterEvent.get(0).log.getTopics());
        assertEquals(minter, renounceMinterEvent.get(0).account);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
//KCT-061
@Test
public void transferFrom_Approve() {
    // A spender that received approve() for a single token can transferFrom() that
    // token out of the owner's account to a third party.
    KIP17 ownerContract = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    KIP17 spenderContract = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String spenderAddress = mTesterTxManger.getDefaultAddress();
    String userAddress = mTesterTxManger2.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(7777);
        // Mint the token to the owner.
        KlayTransactionReceipt.TransactionReceipt receipt = ownerContract.mint(ownerAddress, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.TransferEventResponse> transferEvents = ownerContract.getTransferEvents(receipt);
        checkTransferEventValue(transferEvents.get(0), sZeroAddr, ownerAddress, tokenId);
        String queriedAddress = ownerContract.ownerOf(tokenId).send();
        assertEquals(queriedAddress, ownerAddress);
        // Owner approves the spender for exactly this token.
        receipt = ownerContract.approve(spenderAddress, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.ApprovalEventResponse> approvalEvents = ownerContract.getApprovalEvents(receipt);
        checkApprovalEventValue(approvalEvents.get(0), ownerAddress, spenderAddress, tokenId);
        queriedAddress = ownerContract.getApproved(tokenId).send();
        assertEquals(queriedAddress, spenderAddress);
        // Spender moves the token from the owner to the third party.
        receipt = spenderContract.transferFrom(ownerAddress, userAddress, tokenId).send();
        checkTxStatus(receipt);
        transferEvents = spenderContract.getTransferEvents(receipt);
        checkTransferEventValue(transferEvents.get(0), ownerAddress, userAddress, tokenId);
        queriedAddress = spenderContract.ownerOf(tokenId).send();
        assertEquals(queriedAddress, userAddress);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
//KCT-062
@Test
public void safeTransferFrom_Approve() {
    // Same flow as transferFrom_Approve, but the spender uses the three-argument
    // safeTransferFrom(from, to, tokenId) overload.
    KIP17 ownerContract = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
    KIP17 spenderContract = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
    String ownerAddress = mDeployerTxManager.getDefaultAddress();
    String spenderAddress = mTesterTxManger.getDefaultAddress();
    String userAddress = mTesterTxManger2.getDefaultAddress();
    try {
        BigInteger tokenId = BigInteger.valueOf(7778);
        // Mint the token to the owner.
        KlayTransactionReceipt.TransactionReceipt receipt = ownerContract.mint(ownerAddress, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.TransferEventResponse> transferEvents = ownerContract.getTransferEvents(receipt);
        checkTransferEventValue(transferEvents.get(0), sZeroAddr, ownerAddress, tokenId);
        String queriedAddress = ownerContract.ownerOf(tokenId).send();
        assertEquals(queriedAddress, ownerAddress);
        // Owner approves the spender for exactly this token.
        receipt = ownerContract.approve(spenderAddress, tokenId).send();
        checkTxStatus(receipt);
        List<KIP17.ApprovalEventResponse> approvalEvents = ownerContract.getApprovalEvents(receipt);
        checkApprovalEventValue(approvalEvents.get(0), ownerAddress, spenderAddress, tokenId);
        queriedAddress = ownerContract.getApproved(tokenId).send();
        assertEquals(queriedAddress, spenderAddress);
        // Spender safely moves the token from the owner to the third party.
        receipt = spenderContract.safeTransferFrom(ownerAddress, userAddress, tokenId).send();
        checkTxStatus(receipt);
        transferEvents = spenderContract.getTransferEvents(receipt);
        checkTransferEventValue(transferEvents.get(0), ownerAddress, userAddress, tokenId);
        queriedAddress = spenderContract.ownerOf(tokenId).send();
        assertEquals(queriedAddress, userAddress);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
//KCT-063
@Test
// Verifies that a per-token approved spender can move the token with the
// safeTransferFrom(from, to, tokenId, data) overload that carries a byte payload.
public void safeTransferFromWithData_Approve() {
KIP17 owner_handler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 spender_handler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String spenderAddress = mTesterTxManger.getDefaultAddress();
String userAddress = mTesterTxManger2.getDefaultAddress();
try {
BigInteger tokenId = BigInteger.valueOf(7779);
// Mint a fresh token to the owner and confirm the zero-address -> owner Transfer event.
KlayTransactionReceipt.TransactionReceipt mintReceipt = owner_handler.mint(ownerAddress, tokenId).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = owner_handler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenId);
String address = owner_handler.ownerOf(tokenId).send();
assertEquals(address , ownerAddress);
// Owner approves the spender for this single token; verify event and stored approval.
KlayTransactionReceipt.TransactionReceipt approveReceipt = owner_handler.approve(spenderAddress, tokenId).send();
checkTxStatus(approveReceipt);
List<KIP17.ApprovalEventResponse> approvalEventResponses = owner_handler.getApprovalEvents(approveReceipt);
checkApprovalEventValue(approvalEventResponses.get(0), ownerAddress, spenderAddress, tokenId);
address = owner_handler.getApproved(tokenId).send();
assertEquals(address, spenderAddress);
// Spender performs the data-carrying safe transfer to a third party.
byte[] data = "buffered data".getBytes();
KlayTransactionReceipt.TransactionReceipt transferReceipt = spender_handler.safeTransferFrom(ownerAddress, userAddress, tokenId, data).send();
checkTxStatus(transferReceipt);
transferEventResponses = spender_handler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenId);
// Ownership must now be with the third-party recipient.
address = spender_handler.ownerOf(tokenId).send();
assertEquals(address , userAddress);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-064
@Test
// Verifies that an operator granted via setApprovalForAll() can transferFrom()
// multiple tokens of the owner, then revokes the operator approval at the end.
public void transferFrom_SetApprovedForAll() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 operatorHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String operatorAddress = mTesterTxManger.getDefaultAddress();
String userAddress = mTesterTxManger2.getDefaultAddress();
try {
BigInteger[] tokenIdArr = new BigInteger[] {BigInteger.valueOf(4001), BigInteger.valueOf(4002)};
//mint token to owner address
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[0]).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[0]);
mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[1]).send();
checkTxStatus(mintReceipt);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[1]);
// Grant the operator blanket approval over all of the owner's tokens.
KlayTransactionReceipt.TransactionReceipt approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, true).send();
checkTxStatus(approvedReceipt);
List<KIP17.ApprovalForAllEventResponse> approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, true);
boolean isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertTrue(isApproved);
// Operator moves the first token; verify the event and the new owner.
KlayTransactionReceipt.TransactionReceipt transferReceipt = operatorHandler.transferFrom(ownerAddress, userAddress, tokenIdArr[0]).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[0]);
String address = ownerHandler.ownerOf(tokenIdArr[0]).send();
assertEquals(address, userAddress);
// Operator moves the second token as well, proving the approval is not per-token.
transferReceipt = operatorHandler.transferFrom(ownerAddress, userAddress, tokenIdArr[1]).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[1]);
address = ownerHandler.ownerOf(tokenIdArr[1]).send();
assertEquals(address, userAddress);
// Revoke the operator approval so later tests start from a clean state.
approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, false).send();
checkTxStatus(approvedReceipt);
approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, false);
isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertFalse(isApproved);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-065
@Test
// Same flow as transferFrom_SetApprovedForAll, but the operator uses the
// three-argument safeTransferFrom(from, to, tokenId) overload.
public void safeTransferFrom_SetApprovedForAll() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 operatorHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String operatorAddress = mTesterTxManger.getDefaultAddress();
String userAddress = mTesterTxManger2.getDefaultAddress();
try {
BigInteger[] tokenIdArr = new BigInteger[] {BigInteger.valueOf(4003), BigInteger.valueOf(4004)};
//mint token to owner address
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[0]).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[0]);
mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[1]).send();
checkTxStatus(mintReceipt);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[1]);
// Grant the operator blanket approval over all of the owner's tokens.
KlayTransactionReceipt.TransactionReceipt approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, true).send();
checkTxStatus(approvedReceipt);
List<KIP17.ApprovalForAllEventResponse> approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, true);
boolean isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertTrue(isApproved);
// Operator safe-transfers the first token; verify event and new owner.
KlayTransactionReceipt.TransactionReceipt transferReceipt = operatorHandler.safeTransferFrom(ownerAddress, userAddress, tokenIdArr[0]).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[0]);
String address = ownerHandler.ownerOf(tokenIdArr[0]).send();
assertEquals(address, userAddress);
// Operator safe-transfers the second token as well.
transferReceipt = operatorHandler.safeTransferFrom(ownerAddress, userAddress, tokenIdArr[1]).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[1]);
address = ownerHandler.ownerOf(tokenIdArr[1]).send();
assertEquals(address, userAddress);
// Revoke the operator approval so later tests start from a clean state.
approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, false).send();
checkTxStatus(approvedReceipt);
approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, false);
isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertFalse(isApproved);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
//KCT-066
@Test
// Same flow again, but the operator uses the data-carrying
// safeTransferFrom(from, to, tokenId, data) overload for both transfers.
public void safeTransferFromWithData_SetApprovedForAll() {
KIP17 ownerHandler = KIP17.load(mContractAddress, mCaver, mDeployerTxManager, new DefaultGasProvider());
KIP17 operatorHandler = KIP17.load(mContractAddress, mCaver, mTesterTxManger, new DefaultGasProvider());
String ownerAddress = mDeployerTxManager.getDefaultAddress();
String operatorAddress = mTesterTxManger.getDefaultAddress();
String userAddress = mTesterTxManger2.getDefaultAddress();
try {
BigInteger[] tokenIdArr = new BigInteger[] {BigInteger.valueOf(4005), BigInteger.valueOf(4006)};
//mint token to owner address
KlayTransactionReceipt.TransactionReceipt mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[0]).send();
checkTxStatus(mintReceipt);
List<KIP17.TransferEventResponse> transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[0]);
mintReceipt = ownerHandler.mint(ownerAddress, tokenIdArr[1]).send();
checkTxStatus(mintReceipt);
transferEventResponses = ownerHandler.getTransferEvents(mintReceipt);
checkTransferEventValue(transferEventResponses.get(0), sZeroAddr, ownerAddress, tokenIdArr[1]);
// Grant the operator blanket approval over all of the owner's tokens.
KlayTransactionReceipt.TransactionReceipt approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, true).send();
checkTxStatus(approvedReceipt);
List<KIP17.ApprovalForAllEventResponse> approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, true);
boolean isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertTrue(isApproved);
// Payload passed along with each safe transfer.
byte[] data = "buffered data".getBytes();
KlayTransactionReceipt.TransactionReceipt transferReceipt = operatorHandler.safeTransferFrom(ownerAddress, userAddress, tokenIdArr[0], data).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[0]);
String address = ownerHandler.ownerOf(tokenIdArr[0]).send();
assertEquals(address, userAddress);
// Second token moved the same way, with the same payload.
transferReceipt = operatorHandler.safeTransferFrom(ownerAddress, userAddress, tokenIdArr[1], data).send();
checkTxStatus(transferReceipt);
transferEventResponses = operatorHandler.getTransferEvents(transferReceipt);
checkTransferEventValue(transferEventResponses.get(0), ownerAddress, userAddress, tokenIdArr[1]);
address = ownerHandler.ownerOf(tokenIdArr[1]).send();
assertEquals(address, userAddress);
// Revoke the operator approval so later tests start from a clean state.
approvedReceipt = ownerHandler.setApprovalForAll(operatorAddress, false).send();
checkTxStatus(approvedReceipt);
approvalForAllEventResponses = ownerHandler.getApprovalForAllEvents(approvedReceipt);
checkApprovalForAllEventValue(approvalForAllEventResponses.get(0), ownerAddress, operatorAddress, false);
isApproved = ownerHandler.isApprovedForAll(ownerAddress, operatorAddress).send();
assertFalse(isApproved);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
}
|
package com.github.dozermapper.extra.converters;
import org.apache.commons.beanutils.converters.BooleanConverter;
import com.github.dozermapper.core.DozerConverter;
public final class BooleanStringConverter extends DozerConverter<Boolean, String> {

    // Delegate used for the Boolean -> String direction only.
    // NOTE(review): convertFrom() intentionally mirrors the original behaviour and
    // parses with Boolean.valueOf(), so only "true" (any case) maps to TRUE; the
    // BooleanConverter's extra spellings ("yes", "y", "on", "1") are NOT accepted
    // on the way in — confirm this asymmetry is intended.
    private final BooleanConverter converter = new BooleanConverter();

    public BooleanStringConverter() {
        super(Boolean.class, String.class);
    }

    /** Parses the string into a Boolean; anything other than "true" yields FALSE. */
    @Override
    public Boolean convertFrom(String source, Boolean destination) {
        return Boolean.valueOf(source);
    }

    /** Renders the Boolean as its "true"/"false" string via the shared converter. */
    @Override
    public String convertTo(Boolean source, String destination) {
        return converter.convert(String.class, source);
    }
}
|
package org.boon.slumberdb.integration;
import java.io.IOException;
/**
* Created by Richard on 9/3/14.
*/
public class EvilDS1 {

    /** Port the standalone data-store server listens on. */
    private static final int PORT = 10_500;

    static {
        // Point the data store at its dedicated config file before any class that
        // reads this property is loaded.
        System.setProperty("org.boon.slumberdb.DataStoreConfig", "/opt/org/slumberdb/datastore1.json");
    }

    public static void main(String... args) throws IOException {
        Utils.runDataStoreServer(PORT);
        // Block on stdin to keep the server process alive until a key is pressed.
        System.in.read();
    }
}
|
package com.mtw.movie_poc_screen.components;
import android.content.Context;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
/**
* Created by Aspire-V5 on 12/6/2017.
*/
/**
 * RecyclerView that shows a configurable "empty" view whenever its adapter has
 * no items, toggling visibility automatically on adapter data changes.
 */
public class SmartRecyclerView extends RecyclerView {
    // View shown instead of the list whenever the attached adapter is empty.
    private View mEmptyView;

    // Re-evaluates the empty state on every adapter change so the empty view
    // stays in sync without callers having to do anything.
    private final AdapterDataObserver dataObserver = new AdapterDataObserver() {
        @Override
        public void onChanged() {
            super.onChanged();
            checkIfEmpty();
        }

        @Override
        public void onItemRangeInserted(int positionStart, int itemCount) {
            super.onItemRangeInserted(positionStart, itemCount);
            checkIfEmpty();
        }

        @Override
        public void onItemRangeRemoved(int positionStart, int itemCount) {
            super.onItemRangeRemoved(positionStart, itemCount);
            checkIfEmpty();
        }
    };

    public SmartRecyclerView(Context context) {
        super(context);
    }

    public SmartRecyclerView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }

    public SmartRecyclerView(Context context, @Nullable AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    public void setAdapter(Adapter adapter) {
        // Detach the observer from any previous adapter to avoid leaks and
        // duplicate callbacks when adapters are swapped.
        Adapter oldAdapter = getAdapter();
        if (oldAdapter != null) {
            oldAdapter.unregisterAdapterDataObserver(dataObserver);
        }
        super.setAdapter(adapter);
        if (adapter != null) {
            adapter.registerAdapterDataObserver(dataObserver);
        }
        checkIfEmpty();
    }

    /**
     * Sets the view to display while the adapter is empty and applies the
     * current state immediately (previously the empty view only updated on the
     * next adapter change).
     */
    public void setEmptyView(View emptyView) {
        mEmptyView = emptyView;
        checkIfEmpty();
    }

    /**
     * Shows the empty view and hides the list when the adapter has no items,
     * and vice versa. A null adapter (e.g. setAdapter(null) or no adapter yet)
     * counts as empty — the original dereferenced getAdapter() unconditionally
     * and threw a NullPointerException in that case.
     */
    private void checkIfEmpty() {
        Adapter adapter = getAdapter();
        boolean isEmpty = adapter == null || adapter.getItemCount() == 0;
        if (mEmptyView != null) {
            mEmptyView.setVisibility(isEmpty ? View.VISIBLE : View.INVISIBLE);
            setVisibility(isEmpty ? View.INVISIBLE : View.VISIBLE);
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.util;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeoutException;
import lombok.Cleanup;
import org.assertj.core.util.Lists;
import org.awaitility.Awaitility;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
 * Unit tests for {@code FutureUtil}: timeout creation/handling, completion
 * exception unwrapping and {@code waitForAny} semantics.
 */
public class FutureUtilTest {

    @Test
    public void testCreateTimeoutException() {
        TimeoutException timeoutException = FutureUtil.createTimeoutException("hello world", getClass(), "test(...)");
        assertNotNull(timeoutException);
        assertEquals(timeoutException.getMessage(), "hello world");
        StringWriter stringWriter = new StringWriter();
        timeoutException.printStackTrace(new PrintWriter(stringWriter, true));
        // The low-overhead timeout exception carries a single synthetic frame
        // built from the supplied class/method instead of a real stack walk.
        assertEquals(stringWriter.toString(),
                "org.apache.pulsar.common.util.FutureUtil$LowOverheadTimeoutException: "
                        + "hello world" + System.lineSeparator()
                        + "\tat org.apache.pulsar.common.util.FutureUtilTest.test(...)(Unknown Source)" + System.lineSeparator());
    }

    @Test
    public void testTimeoutHandling() {
        CompletableFuture<Void> future = new CompletableFuture<>();
        @Cleanup("shutdownNow")
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        Exception e = new Exception();
        try {
            // 1 ms timeout on a never-completed future must fail with our exception.
            FutureUtil.addTimeoutHandling(future, Duration.ofMillis(1), executor, () -> e);
            future.get();
            fail("Should have failed.");
        } catch (InterruptedException interruptedException) {
            fail("Shouldn't occur");
        } catch (ExecutionException executionException) {
            assertEquals(executionException.getCause(), e);
        }
    }

    @Test
    public void testTimeoutHandlingNoTimeout() throws ExecutionException, InterruptedException {
        CompletableFuture<Void> future = new CompletableFuture<>();
        @Cleanup("shutdownNow")
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        FutureUtil.addTimeoutHandling(future, Duration.ofMillis(100), executor, () -> new Exception());
        // Completing before the deadline must win over the timeout task.
        future.complete(null);
        future.get();
    }

    @Test
    public void testCreatingFutureWithTimeoutHandling() {
        @Cleanup("shutdownNow")
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        Exception e = new Exception();
        try {
            CompletableFuture<Void> future = FutureUtil.createFutureWithTimeout(Duration.ofMillis(1), executor,
                    () -> e);
            future.get();
            fail("Should have failed.");
        } catch (InterruptedException interruptedException) {
            fail("Shouldn't occur");
        } catch (ExecutionException executionException) {
            assertEquals(executionException.getCause(), e);
        }
    }

    @Test
    public void testGetOriginalException() {
        CompletableFuture<Void> future = CompletableFuture.completedFuture(null);
        CompletableFuture<Void> exceptionFuture = future.thenAccept(__ -> {
            throw new IllegalStateException("Illegal state");
        });
        assertTrue(exceptionFuture.isCompletedExceptionally());
        try {
            exceptionFuture.get();
        } catch (InterruptedException | ExecutionException e) {
            // unwrapCompletionException must surface the original cause.
            Throwable originalException = FutureUtil.unwrapCompletionException(e);
            assertTrue(originalException instanceof IllegalStateException);
        }
        CompletableFuture<Object> exceptionFuture2 = new CompletableFuture<>();
        exceptionFuture2.completeExceptionally(new IllegalStateException("Completed exception"));
        final List<Throwable> future2Exception = Lists.newArrayList();
        exceptionFuture2.exceptionally(ex -> {
            future2Exception.add(FutureUtil.unwrapCompletionException(ex));
            return null;
        });
        Awaitility.await()
                .untilAsserted(() -> {
                    assertEquals(future2Exception.size(), 1);
                    assertTrue(future2Exception.get(0) instanceof IllegalStateException);
                });
        final List<Throwable> future3Exception = Lists.newArrayList();
        CompletableFuture.completedFuture(null)
                .thenAccept(__ -> {
                    throw new IllegalStateException("Throw illegal exception");
                })
                .exceptionally(ex -> {
                    future3Exception.add(FutureUtil.unwrapCompletionException(ex));
                    return null;
                });
        Awaitility.await()
                .untilAsserted(() -> {
                    assertEquals(future3Exception.size(), 1);
                    assertTrue(future3Exception.get(0) instanceof IllegalStateException);
                });
    }

    @Test
    public void testWaitForAny() {
        CompletableFuture<String> f1 = new CompletableFuture<>();
        CompletableFuture<String> f2 = new CompletableFuture<>();
        CompletableFuture<String> f3 = new CompletableFuture<>();
        CompletableFuture<String> f4 = new CompletableFuture<>();
        f1.complete("1");
        f2.complete("2");
        f3.complete("3");
        f4.complete("4");
        CompletableFuture<Optional<Object>> ret = FutureUtil.waitForAny(Lists.newArrayList(f1, f2, f3, f4), p -> p.equals("3"));
        assertEquals(ret.join().get(), "3");
        // test not matched predicate result
        CompletableFuture<String> f5 = new CompletableFuture<>();
        CompletableFuture<String> f6 = new CompletableFuture<>();
        f5.complete("5");
        f6.complete("6");
        ret = FutureUtil.waitForAny(Lists.newArrayList(f5, f6), p -> p.equals("3"));
        assertFalse(ret.join().isPresent());
        // test one complete, others are cancelled.
        CompletableFuture<String> f55 = new CompletableFuture<>();
        CompletableFuture<String> f66 = new CompletableFuture<>();
        f55.complete("55");
        ret = FutureUtil.waitForAny(Lists.newArrayList(f55, f66), p -> p.equals("55"));
        assertTrue(ret.join().isPresent());
        assertTrue(f66.isCancelled());
        // test with exception
        CompletableFuture<String> f7 = new CompletableFuture<>();
        CompletableFuture<String> f8 = new CompletableFuture<>();
        // Bug fix: the first completion targeted f8 twice (copy-paste error);
        // the "f7 exception" message shows f7 was the intended target.
        f7.completeExceptionally(new RuntimeException("f7 exception"));
        f8.completeExceptionally(new RuntimeException("f8 exception"));
        ret = FutureUtil.waitForAny(Lists.newArrayList(f7, f8), p -> p.equals("3"));
        try {
            ret.join();
            fail("Should have failed");
        } catch (CompletionException ex) {
            assertTrue(ex.getCause() instanceof RuntimeException);
        }
    }
}
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* BPS Bildungsportal Sachsen GmbH, http://www.bps-system.de
* <p>
*/
package de.bps.course.nodes;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.olat.core.id.Identity;
import org.olat.core.id.OLATResourceable;
import org.olat.course.condition.additionalconditions.AdditionalConditionAnswerContainer;
import org.olat.course.condition.additionalconditions.PasswordStore;
import org.olat.properties.Property;
import org.olat.properties.PropertyManager;
/**
*
* Description:<br>
* This class managed the answer container for a condition.
*
* <P>
* Initial Date: 17.09.2010 <br>
*
* @author bja
*/
public class CourseNodePasswordManagerImpl implements CourseNodePasswordManager {
private final Map<Long, AdditionalConditionAnswerContainer> cache = new ConcurrentHashMap<Long, AdditionalConditionAnswerContainer>();
private static CourseNodePasswordManagerImpl INSTANCE;
static {
INSTANCE = new CourseNodePasswordManagerImpl();
}
/**
* @return singleton instance
*/
public static CourseNodePasswordManagerImpl getInstance() {
return INSTANCE;
}
private CourseNodePasswordManagerImpl() {
// no public constructor
}
/**
* @see de.bps.course.nodes.CourseNodePasswordManager#getAnswerContainer(org.olat.core.id.Identity)
*/
@Override
public AdditionalConditionAnswerContainer getAnswerContainer(Identity identity) {
AdditionalConditionAnswerContainer acac = new AdditionalConditionAnswerContainer();
if(identity == null) {
//do nothing
} else if (cache.containsKey(identity.getKey())) {
acac = cache.get(identity.getKey());
} else {
PropertyManager pm = PropertyManager.getInstance();
List<Property> properties = pm.listProperties(identity, null, AdditionalConditionAnswerContainer.RESOURCE_NAME, null, null, AdditionalConditionAnswerContainer.RESOURCE_NAME);
if(properties == null) return null;
for (Object object : properties) {
Property property = (Property) object;
PasswordStore store = new PasswordStore();
store.setPassword(property.getStringValue());
store.setCourseId(property.getLongValue());
store.setNodeIdent(property.getResourceTypeId());
acac.insertAnswer(Long.toString(property.getResourceTypeId()), Long.toString(property.getLongValue()), store);
}
cache.put(identity.getKey(), acac);
}
return acac;
}
/**
* persist answer container to database
*
* @param identity
* @param answers
*/
private void persistAnswerContainer(Identity identity, AdditionalConditionAnswerContainer answers) {
if (!answers.isContainerEmpty()) {
boolean updateInDatabase = false;
PropertyManager pm = PropertyManager.getInstance();
Map<String, Object> container = answers.getContainer();
for (String key : container.keySet()) {
PasswordStore store = (PasswordStore) container.get(key);
List<Property> properties = pm.listProperties(identity, null, AdditionalConditionAnswerContainer.RESOURCE_NAME,
Long.valueOf(store.getNodeIdent()), null, AdditionalConditionAnswerContainer.RESOURCE_NAME);
if (properties != null && properties.size() > 0) {
// it exists properties with this key and from this identity
boolean pwdFounded = false;
for (Property prop : properties) {
if (store.getCourseId().equals(prop.getLongValue())) {
if(!store.getPassword().equals(prop.getStringValue())) {
// same course id and same node id
// not same pwd --> update
prop.setStringValue(store.getPassword());
pm.updateProperty(prop);
updateInDatabase = true;
}
// same store
pwdFounded = true;
break;
}
}
if (!pwdFounded) {
Property p = pm.createUserPropertyInstance(identity, null, AdditionalConditionAnswerContainer.RESOURCE_NAME, null, store.getCourseId(),
store.getPassword(), null);
p.setResourceTypeName(AdditionalConditionAnswerContainer.RESOURCE_NAME);
p.setResourceTypeId(Long.valueOf(store.getNodeIdent()));
pm.saveProperty(p);
updateInDatabase = true;
}
} else {
// it exists nothing properties with this key and from this identity
Property p = pm.createUserPropertyInstance(identity, null, AdditionalConditionAnswerContainer.RESOURCE_NAME, null, store.getCourseId(),
store.getPassword(), null);
p.setResourceTypeName(AdditionalConditionAnswerContainer.RESOURCE_NAME);
p.setResourceTypeId(Long.valueOf(store.getNodeIdent()));
pm.saveProperty(p);
updateInDatabase = true;
}
}
if (updateInDatabase) cache.put(identity.getKey(), answers);
}
}
/**
* @see de.bps.course.nodes.CourseNodePasswordManager#updatePwd(org.olat.core.id.Identity, java.lang.String, java.lang.String, java.lang.String)
*/
@Override
public void updatePwd(Identity identity, String nodeIdentifier, String courseId, String value) {
AdditionalConditionAnswerContainer answers = getAnswerContainer(identity);
if (answers == null) {
answers = new AdditionalConditionAnswerContainer();
}
PasswordStore pwdStore = (PasswordStore)answers.getAnswers(nodeIdentifier, courseId);
if(pwdStore != null) {
pwdStore.setPassword(value);
} else {
PasswordStore store = new PasswordStore();
store.setPassword(value);
store.setNodeIdent(Long.valueOf(nodeIdentifier));
store.setCourseId(Long.valueOf(courseId));
answers.insertAnswer(nodeIdentifier, courseId, store);
}
persistAnswerContainer(identity, answers);
}
/**
* @see de.bps.course.nodes.CourseNodePasswordManager#deleteAllPasswordsFor(java.lang.Long)
*/
public void deleteAllPasswordsFor(OLATResourceable ores) {
PropertyManager pm = PropertyManager.getInstance();
List<Property> properties = pm.listProperties(null, null, AdditionalConditionAnswerContainer.RESOURCE_NAME,
null, null, AdditionalConditionAnswerContainer.RESOURCE_NAME, ores.getResourceableId(), null);
for (Property p : properties) {
Long nodeId = p.getResourceTypeId();
Long courseId = p.getLongValue();
removeAnswers(nodeId, courseId);
pm.deleteProperty(p);
}
}
private void removeAnswers(Long nodeId, Long courseId) {
for (Long key : cache.keySet()) {
AdditionalConditionAnswerContainer acac = cache.get(key);
if (acac.containsAnswer(Long.toString(nodeId), Long.toString(courseId))) {
acac.removeAnswer(Long.toString(nodeId), Long.toString(courseId));
}
}
}
}
|
package com.ofss.fcubs.service.fcubsrtservice;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for YesNoType.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="YesNoType">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="Y"/>
* <enumeration value="N"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "YesNoType")
@XmlEnum
public enum YesNoType {

    Y,
    N;

    /**
     * Returns the XML lexical representation of this constant ("Y" or "N").
     *
     * @return the constant's name
     */
    public String value() {
        return this.name();
    }

    /**
     * Resolves the constant matching the given XML value.
     *
     * @param v the lexical value, "Y" or "N"
     * @return the matching constant
     * @throws IllegalArgumentException if {@code v} matches no constant
     */
    public static YesNoType fromValue(String v) {
        return YesNoType.valueOf(v);
    }
}
|
package com.jldt.phantom.mgb.model;
import java.util.ArrayList;
import java.util.List;
/**
 * MyBatis-Generator style "Example" class used to build dynamic WHERE clauses
 * for the ums_integration_consume_setting table. Criteria objects collected in
 * {@code oredCriteria} are combined with OR; the conditions inside a single
 * Criteria are combined with AND.
 */
public class UmsIntegrationConsumeSettingExample {
    // ORDER BY fragment appended verbatim to the generated SQL (may be null).
    protected String orderByClause;
    // When true, SELECT DISTINCT is generated.
    protected boolean distinct;
    // OR-combined groups of AND-combined conditions.
    protected List<Criteria> oredCriteria;
    public UmsIntegrationConsumeSettingExample() {
        oredCriteria = new ArrayList<>();
    }
    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }
    public String getOrderByClause() {
        return orderByClause;
    }
    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }
    public boolean isDistinct() {
        return distinct;
    }
    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }
    // Adds an externally built criteria group, OR-combined with the others.
    public void or(Criteria criteria) {
        oredCriteria.add(criteria);
    }
    // Creates, registers and returns a new OR-combined criteria group.
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }
    // Creates a criteria group; it is only registered if it is the first one
    // (MyBatis-Generator convention — later groups must be added via or()).
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        if (oredCriteria.size() == 0) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }
    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }
    // Resets the example so the instance can be reused for another query.
    public void clear() {
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }
    /**
     * Base class holding one AND-combined list of conditions. The fluent
     * andXxx* methods each append a Criterion for one column and return
     * {@code this} (cast to Criteria) for chaining.
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;
        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<>();
        }
        // A group with no conditions is ignored by the generated SQL mapper.
        public boolean isValid() {
            return criteria.size() > 0;
        }
        public List<Criterion> getAllCriteria() {
            return criteria;
        }
        public List<Criterion> getCriteria() {
            return criteria;
        }
        // Adds a no-value condition such as "col is null".
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }
        // Adds a single-value (or IN-list) condition; 'property' is only used
        // in the error message.
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }
        // Adds a BETWEEN condition with two bound values.
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }
        // ----- conditions for column: id -----
        public Criteria andIdIsNull() {
            addCriterion("id is null");
            return (Criteria) this;
        }
        public Criteria andIdIsNotNull() {
            addCriterion("id is not null");
            return (Criteria) this;
        }
        public Criteria andIdEqualTo(Long value) {
            addCriterion("id =", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdNotEqualTo(Long value) {
            addCriterion("id <>", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdGreaterThan(Long value) {
            addCriterion("id >", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdGreaterThanOrEqualTo(Long value) {
            addCriterion("id >=", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdLessThan(Long value) {
            addCriterion("id <", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdLessThanOrEqualTo(Long value) {
            addCriterion("id <=", value, "id");
            return (Criteria) this;
        }
        public Criteria andIdIn(List<Long> values) {
            addCriterion("id in", values, "id");
            return (Criteria) this;
        }
        public Criteria andIdNotIn(List<Long> values) {
            addCriterion("id not in", values, "id");
            return (Criteria) this;
        }
        public Criteria andIdBetween(Long value1, Long value2) {
            addCriterion("id between", value1, value2, "id");
            return (Criteria) this;
        }
        public Criteria andIdNotBetween(Long value1, Long value2) {
            addCriterion("id not between", value1, value2, "id");
            return (Criteria) this;
        }
        // ----- conditions for column: deduction_per_amount -----
        public Criteria andDeductionPerAmountIsNull() {
            addCriterion("deduction_per_amount is null");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountIsNotNull() {
            addCriterion("deduction_per_amount is not null");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountEqualTo(Integer value) {
            addCriterion("deduction_per_amount =", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountNotEqualTo(Integer value) {
            addCriterion("deduction_per_amount <>", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountGreaterThan(Integer value) {
            addCriterion("deduction_per_amount >", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountGreaterThanOrEqualTo(Integer value) {
            addCriterion("deduction_per_amount >=", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountLessThan(Integer value) {
            addCriterion("deduction_per_amount <", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountLessThanOrEqualTo(Integer value) {
            addCriterion("deduction_per_amount <=", value, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountIn(List<Integer> values) {
            addCriterion("deduction_per_amount in", values, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountNotIn(List<Integer> values) {
            addCriterion("deduction_per_amount not in", values, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountBetween(Integer value1, Integer value2) {
            addCriterion("deduction_per_amount between", value1, value2, "deductionPerAmount");
            return (Criteria) this;
        }
        public Criteria andDeductionPerAmountNotBetween(Integer value1, Integer value2) {
            addCriterion("deduction_per_amount not between", value1, value2, "deductionPerAmount");
            return (Criteria) this;
        }
        // ----- conditions for column: max_percent_per_order -----
        public Criteria andMaxPercentPerOrderIsNull() {
            addCriterion("max_percent_per_order is null");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderIsNotNull() {
            addCriterion("max_percent_per_order is not null");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderEqualTo(Integer value) {
            addCriterion("max_percent_per_order =", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderNotEqualTo(Integer value) {
            addCriterion("max_percent_per_order <>", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderGreaterThan(Integer value) {
            addCriterion("max_percent_per_order >", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderGreaterThanOrEqualTo(Integer value) {
            addCriterion("max_percent_per_order >=", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderLessThan(Integer value) {
            addCriterion("max_percent_per_order <", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderLessThanOrEqualTo(Integer value) {
            addCriterion("max_percent_per_order <=", value, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderIn(List<Integer> values) {
            addCriterion("max_percent_per_order in", values, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderNotIn(List<Integer> values) {
            addCriterion("max_percent_per_order not in", values, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderBetween(Integer value1, Integer value2) {
            addCriterion("max_percent_per_order between", value1, value2, "maxPercentPerOrder");
            return (Criteria) this;
        }
        public Criteria andMaxPercentPerOrderNotBetween(Integer value1, Integer value2) {
            addCriterion("max_percent_per_order not between", value1, value2, "maxPercentPerOrder");
            return (Criteria) this;
        }
        // ----- conditions for column: use_unit -----
        public Criteria andUseUnitIsNull() {
            addCriterion("use_unit is null");
            return (Criteria) this;
        }
        public Criteria andUseUnitIsNotNull() {
            addCriterion("use_unit is not null");
            return (Criteria) this;
        }
        public Criteria andUseUnitEqualTo(Integer value) {
            addCriterion("use_unit =", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitNotEqualTo(Integer value) {
            addCriterion("use_unit <>", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitGreaterThan(Integer value) {
            addCriterion("use_unit >", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitGreaterThanOrEqualTo(Integer value) {
            addCriterion("use_unit >=", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitLessThan(Integer value) {
            addCriterion("use_unit <", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitLessThanOrEqualTo(Integer value) {
            addCriterion("use_unit <=", value, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitIn(List<Integer> values) {
            addCriterion("use_unit in", values, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitNotIn(List<Integer> values) {
            addCriterion("use_unit not in", values, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitBetween(Integer value1, Integer value2) {
            addCriterion("use_unit between", value1, value2, "useUnit");
            return (Criteria) this;
        }
        public Criteria andUseUnitNotBetween(Integer value1, Integer value2) {
            addCriterion("use_unit not between", value1, value2, "useUnit");
            return (Criteria) this;
        }
        // ----- conditions for column: coupon_status -----
        public Criteria andCouponStatusIsNull() {
            addCriterion("coupon_status is null");
            return (Criteria) this;
        }
        public Criteria andCouponStatusIsNotNull() {
            addCriterion("coupon_status is not null");
            return (Criteria) this;
        }
        public Criteria andCouponStatusEqualTo(Integer value) {
            addCriterion("coupon_status =", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusNotEqualTo(Integer value) {
            addCriterion("coupon_status <>", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusGreaterThan(Integer value) {
            addCriterion("coupon_status >", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusGreaterThanOrEqualTo(Integer value) {
            addCriterion("coupon_status >=", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusLessThan(Integer value) {
            addCriterion("coupon_status <", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusLessThanOrEqualTo(Integer value) {
            addCriterion("coupon_status <=", value, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusIn(List<Integer> values) {
            addCriterion("coupon_status in", values, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusNotIn(List<Integer> values) {
            addCriterion("coupon_status not in", values, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusBetween(Integer value1, Integer value2) {
            addCriterion("coupon_status between", value1, value2, "couponStatus");
            return (Criteria) this;
        }
        public Criteria andCouponStatusNotBetween(Integer value1, Integer value2) {
            addCriterion("coupon_status not between", value1, value2, "couponStatus");
            return (Criteria) this;
        }
    }
    /**
     * Concrete criteria group; exists so the generated fluent API can return
     * this public type while the shared logic lives in GeneratedCriteria.
     */
    public static class Criteria extends GeneratedCriteria {
        protected Criteria() {
            super();
        }
    }
    /**
     * One rendered condition: the SQL fragment plus zero, one or two bound
     * values. The boolean flags tell the SQL mapper which template to use
     * (no-value, single value, IN list, or BETWEEN).
     */
    public static class Criterion {
        private String condition;
        private Object value;
        private Object secondValue;
        private boolean noValue;
        private boolean singleValue;
        private boolean betweenValue;
        private boolean listValue;
        private String typeHandler;
        public String getCondition() {
            return condition;
        }
        public Object getValue() {
            return value;
        }
        public Object getSecondValue() {
            return secondValue;
        }
        public boolean isNoValue() {
            return noValue;
        }
        public boolean isSingleValue() {
            return singleValue;
        }
        public boolean isBetweenValue() {
            return betweenValue;
        }
        public boolean isListValue() {
            return listValue;
        }
        public String getTypeHandler() {
            return typeHandler;
        }
        // No-value condition, e.g. "col is null".
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }
        // Single-value or IN-list condition; a List value flips listValue.
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }
        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }
        // BETWEEN condition with two bound values.
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }
        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
|
/*
* Copyright © 2009 HotPads (admin@hotpads.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.web.filter;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
@Singleton
public class StaticFileFilterConfig{
	/**
	 * Decides whether the static-file filter should be bypassed for the given
	 * request. This base implementation never skips; subclasses override it to
	 * exempt particular requests (hence the unused parameter here).
	 *
	 * @param request the incoming request, consulted only by overrides
	 * @return false — the filter always applies in the default configuration
	 */
	public boolean skip(/* used by subclasses */ @SuppressWarnings("unused") HttpServletRequest request){
		return false;
	}
}
|
package com.fr.swift.cloud.beans.factory.classreading;
import com.fr.swift.cloud.beans.factory.classreading.basic.reader.IntReader;
import com.fr.swift.cloud.beans.factory.classreading.basic.reader.LongReader;
import com.fr.swift.cloud.beans.factory.classreading.constant.ConstantClass;
import com.fr.swift.cloud.beans.factory.classreading.constant.ConstantNameAndType;
import com.fr.swift.cloud.beans.factory.classreading.constant.ConstantUtf8;
import com.fr.swift.cloud.log.SwiftLoggers;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* This class created on 2018/12/3
*
* @author Lucifer
* @description
* @since Advanced FineBI 5.0
*/
/**
 * Minimal class-file reader that extracts the class name and the runtime-visible
 * annotation type names from a compiled {@code .class} file.
 */
public class ClassReader {
    // Name of the attribute (JVM spec §4.7.16) whose constant-pool entries we scan.
    // Made final: this is a constant and must not be reassignable.
    private static final String RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations";

    /**
     * Reads the class file at the given path.
     *
     * @param classPath filesystem path to a .class file
     * @return the parsed annotations, or null when the file cannot be read
     */
    public static ClassAnnotations read(String classPath) {
        // Bug fix: the FileInputStream was never closed (resource leak);
        // try-with-resources guarantees closure. read(InputStream) fully
        // consumes the stream before returning, so closing afterwards is safe.
        try (FileInputStream inputStream = new FileInputStream(new File(classPath))) {
            return read(inputStream);
        } catch (IOException e) {
            // Broadened from FileNotFoundException to also cover close() failures;
            // callers still receive null on any read error, as before.
            SwiftLoggers.getLogger().error(e);
            return null;
        }
    }

    /**
     * Parses the class-file structure from the stream. The caller owns the
     * stream and is responsible for closing it.
     *
     * @param inputStream an open stream positioned at the start of a class file
     * @return class name plus runtime-visible annotation class names
     */
    public static ClassAnnotations read(InputStream inputStream) {
        ClassFile classFile = new ClassFile();
        // Fixed class-file preamble: magic, minor/major version, pool size.
        classFile.setMagic(LongReader.read(inputStream));
        classFile.setMinorVersion(IntReader.read(inputStream));
        classFile.setMajorVersion(IntReader.read(inputStream));
        classFile.setConstantPoolCount(IntReader.read(inputStream));
        ConstantPool constantPool = new ConstantPool(classFile.getConstantPoolCount());
        constantPool.read(inputStream);
        // Locate the UTF-8 entry naming the RuntimeVisibleAnnotations attribute;
        // annotation descriptors follow it in this pool layout.
        int annotationsStartIndex = 0;
        for (int i = 0; i < constantPool.getConstantPoolCount(); i++) {
            ConstantInfo constantInfo = constantPool.getCpInfo()[i];
            if (constantInfo instanceof ConstantUtf8) {
                String value = ((ConstantUtf8) constantInfo).getValue();
                if (value.equals(RUNTIME_VISIBLE_ANNOTATIONS)) {
                    annotationsStartIndex = i + 1;
                    break;
                }
            }
        }
        List<String> annotations = new ArrayList<String>();
        if (annotationsStartIndex != 0) {
            for (int i = annotationsStartIndex; i < constantPool.getConstantPoolCount(); i++) {
                ConstantInfo constantInfo = constantPool.getCpInfo()[i];
                if (constantInfo instanceof ConstantUtf8) {
                    try {
                        annotations.add(Type.getType(((ConstantUtf8) constantInfo).getValue()).getClassName());
                    } catch (Exception ignore) {
                        // Not every UTF-8 entry is a type descriptor; entries that
                        // fail to parse are deliberately skipped.
                    }
                }
                // A NameAndType entry marks the end of the annotation descriptors.
                if (constantInfo instanceof ConstantNameAndType) {
                    break;
                }
            }
        }
        classFile.setAccessFlag(IntReader.read(inputStream));
        // Resolve this_class -> ConstantClass -> UTF-8 name.
        int classIndex = IntReader.read(inputStream);
        ConstantClass clazz = (ConstantClass) constantPool.getCpInfo()[classIndex];
        ConstantUtf8 className = (ConstantUtf8) constantPool.getCpInfo()[clazz.getNameIndex()];
        classFile.setClassName(className.getValue());
        // Convert internal form (com/foo/Bar) to a binary name (com.foo.Bar).
        String returnClassName = className.getValue().replaceAll("/", ".");
        return new ClassAnnotations(returnClassName, annotations);
    }
}
|
/*
* Copyright 2013 by The Appdapter Project (www.appdapter.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.appdapter.core.remote.sparql;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.appdapter.core.log.BasicDebugger;
import org.appdapter.core.log.Debuggable;
import org.slf4j.Logger;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Resource;
/**
 * Thin client for a remote SPARQL endpoint: SELECT queries go through Jena/ARQ's
 * sparqlService facility, SPARQL-Update goes through an HTTP POST.
 */
public class SparqlEndpointClient extends BasicDebugger {

	WebDataClient myWebDataClient = new WebDataClient();
	String endpointURI;

	public SparqlEndpointClient(String endpointString) {
		endpointURI = endpointString;
	}

	public String buildSparqlPrefixHeader(String repoServiceURL) {
		return Debuggable.notImplemented("buildSparqlPrefixHeader", repoServiceURL);
	}

	public List<Resource> getContextIDs() {
		return Debuggable.notImplemented("getContextIDs", this);
	}

	/** @return this client itself; kept for API symmetry with connection-based clients. */
	public SparqlEndpointClient getConnection() {
		return this;
	}

	/** Runs a SPARQL-Update statement against the configured endpoint (debug logging on). */
	public void executeUpdate(String updateString) {
		execRemoteSparqlUpdate(endpointURI, updateString, true);
	}

	/** Runs a SELECT query against the configured endpoint; debugFlag is currently unused. */
	public ResultSet execQuery(String queryText, boolean debugFlag) {
		return execRemoteSparqlSelect(endpointURI, queryText);
	}

	/**
	 * SPARQL query runs through the sparqlService facility of Jena/ARQ, which handles
	 * the HTTP client duties (using its own class named HttpQuery
	 * http://grepcode.com/file/repo1.maven.org/maven2/com.hp.hpl.jena/arq/2.8.7/com/hp/hpl/jena/sparql/engine/http/HttpQuery.java
	 * and returns us a regular jena.query.ResultSet, which we can iterate rowwise, dump as XML, etc.
	 *
	 * TODO: Need to review the proper "close" semantics for these result sets —
	 * the QueryExecution is intentionally not closed here because closing it
	 * would invalidate the returned ResultSet.
	 *
	 * @param svcUrl endpoint URL to query
	 * @param queryText SPARQL SELECT text
	 * @return the result set from the remote execution
	 */
	public ResultSet execRemoteSparqlSelect(String svcUrl, String queryText) {
		Logger log = getLogger();
		log.info("QueryUrl=[{}] QueryText={}", svcUrl, queryText);
		// Parse locally first so syntax errors surface before any network call.
		Query parsedQuery = QueryFactory.create(queryText);
		QueryExecution qExc = QueryExecutionFactory.sparqlService(svcUrl, parsedQuery);
		return qExc.execSelect();
	}

	/**
	 * SPARQL-Update POST runs through the Apache Commons HttpClient library.
	 * Failures are logged (best-effort) rather than propagated to the caller.
	 *
	 * @param svcUrl endpoint URL to post to
	 * @param updateText SPARQL-Update text, sent as the "request" form parameter
	 * @param debugFlag passed through to the HTTP client for verbose logging
	 */
	public void execRemoteSparqlUpdate(String svcUrl, String updateText, boolean debugFlag) {
		List<NameValuePair> nvps = new ArrayList<NameValuePair>();
		nvps.add(new BasicNameValuePair("request", updateText));
		try {
			myWebDataClient.execPost(svcUrl, nvps, debugFlag);
		} catch (Throwable t) {
			// For some reason the 2-args form isn't printing stack traces from OutOf-Permgen exceptions.
			getLogger().error("Caught Exception: {} \n********************* Bonus Direct Stack Trace to STDERR", t);
			t.printStackTrace();
		}
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.