text
stringlengths
7
1.01M
/* * Copyright 2008 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openehealth.ipf.platform.camel.flow.process; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.openehealth.ipf.commons.flow.FlowException; import org.openehealth.ipf.commons.flow.FlowManager; import org.openehealth.ipf.commons.flow.ManagedMessage; import org.openehealth.ipf.platform.camel.flow.PlatformMessage; /** * A processor that triggers a * {@link FlowManager#invalidateFlow(ManagedMessage)} operation. * * @author Martin Krasser */ public class FlowErrorProcessor extends FlowProcessor { private static final Logger LOG = LoggerFactory.getLogger(FlowErrorProcessor.class); @Override public String toString() { return "FlowErrorProcessor[" + getProcessor() + "]"; } /** * Delegates to {@link FlowManager#invalidateFlow(ManagedMessage)} * * @param message * managed message. */ @Override protected void processMessage(PlatformMessage message) { try { flowManager.invalidateFlow(message); } catch (FlowException e) { // thrown if flow id is unknown to flow manager LOG.warn("invalidate flow operation failed", e); } catch (Exception e) { // keep processing exchange (only log error) LOG.error(e.getMessage(), e); } } }
/* * Copyright 2015-2017 Vladimir Bukhtoyarov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package io.github.bucket4j.grid.jcache; import io.github.bucket4j.Bucket; import io.github.bucket4j.BucketConfiguration; import io.github.bucket4j.grid.ProxyManager; import io.github.bucket4j.grid.GridBucket; import io.github.bucket4j.grid.GridBucketState; import io.github.bucket4j.grid.GridProxy; import io.github.bucket4j.util.LazySupplier; import javax.cache.Cache; import java.io.Serializable; import java.util.function.Supplier; /** * JCache specific implementation of {@link ProxyManager} * * @param <K> type of key for buckets */ public class JCacheProxyManager<K extends Serializable> implements ProxyManager<K> { private final GridProxy<K> gridProxy; JCacheProxyManager(Cache<K, GridBucketState> cache) { this.gridProxy = new JCacheProxy<>(cache); } @Override public Bucket getProxy(K key, Supplier<BucketConfiguration> supplier) { return GridBucket.createLazyBucket(key, new LazySupplier<>(supplier), gridProxy); } }
/*
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
 regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
 "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
*/

package net.azurewebsites.ktprame;

import android.os.Bundle;

import org.apache.cordova.*;

/**
 * Entry activity of this Cordova (hybrid web) application.
 * All it does is load the configured start page into the Cordova web view.
 */
public class MainActivity extends CordovaActivity {

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // launchUrl is inherited from CordovaActivity and
        // Set by <content src="index.html" /> in config.xml
        loadUrl(launchUrl);
    }
}
package creos.simsg.api.grid.uncertainty.configuration; import creos.simsg.api.model.Configuration; import creos.simsg.api.model.Fuse; import creos.simsg.api.model.State; import creos.simsg.api.uncertainty.Confidence; import creos.simgsg.api.utils.Pair; import java.util.Iterator; import java.util.Map; import java.util.Objects; public class UConfiguration implements Iterable<Pair<Fuse, State>> { private final Configuration configuration; private final Confidence confidence; public UConfiguration(Fuse[] fuses, State[] states, double confidence) { this.confidence = new Confidence(confidence); this.configuration = new Configuration(fuses, states); } public double getConfidence() { return confidence.getProbability(); } public Iterator<Pair<Fuse, State>> getFuseStates() { return this.configuration.getConfiguration() .entrySet() .stream() .map((Map.Entry<Fuse, State> entry) -> new Pair<Fuse,State>(entry.getKey(), entry.getValue())) .iterator(); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; return Objects.equals(configuration, ((UConfiguration)o).configuration); } @Override public Iterator<Pair<Fuse, State>> iterator() { return this.configuration.getConfiguration() .entrySet() .stream() .map((Map.Entry<Fuse, State> entry) -> new Pair<Fuse,State>(entry.getKey(), entry.getValue())) .iterator(); } }
package org.hl7.fhir.r4.model.codesystems;

/*
  Copyright (c) 2011+, HL7, Inc.
  All rights reserved.

  Redistribution and use in source and binary forms, with or without modification,
  are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this
     list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice,
     this list of conditions and the following disclaimer in the documentation
     and/or other materials provided with the distribution.
   * Neither the name of HL7 nor the names of its contributors may be used to
     endorse or promote products derived from this software without specific
     prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
  INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.
*/

// Generated on Wed, Nov 21, 2018 11:18-0500 for FHIR v3.6.0
// NOTE(review): generated code — regenerate from the FHIR tooling rather than hand-editing.

import org.hl7.fhir.exceptions.FHIRException;

/**
 * Code system {@code http://terminology.hl7.org/CodeSystem/substance-category}:
 * broad categorization of a FHIR Substance.
 */
public enum SubstanceCategory {
  /**
   * A substance that causes an allergic reaction.
   */
  ALLERGEN,
  /**
   * A substance that is produced by or extracted from a biological source.
   */
  BIOLOGICAL,
  /**
   * A substance that comes directly from a human or an animal (e.g. blood, urine, feces, tears, etc.).
   */
  BODY,
  /**
   * Any organic or inorganic substance of a particular molecular identity, including -- (i) any combination of such substances occurring in whole or in part as a result of a chemical reaction or occurring in nature and (ii) any element or uncombined radical (http://www.epa.gov/opptintr/import-export/pubs/importguide.pdf).
   */
  CHEMICAL,
  /**
   * A food, dietary ingredient, or dietary supplement for human or animal.
   */
  FOOD,
  /**
   * A substance intended for use in the diagnosis, cure, mitigation, treatment, or prevention of disease in man or other animals (Federal Food Drug and Cosmetic Act).
   */
  DRUG,
  /**
   * A finished product which is not normally ingested, absorbed or injected (e.g. steel, iron, wood, plastic and paper).
   */
  MATERIAL,
  /**
   * added to help the parsers
   */
  NULL;

  /**
   * Maps a code string to its enum constant.
   * Returns null for a null/empty code; NULL is parser-internal and has no
   * code string here, so any unmapped code (including "null") throws.
   *
   * @throws FHIRException if the code is not recognized
   */
  public static SubstanceCategory fromCode(String codeString) throws FHIRException {
    if (codeString == null || "".equals(codeString))
      return null;
    if ("allergen".equals(codeString))
      return ALLERGEN;
    if ("biological".equals(codeString))
      return BIOLOGICAL;
    if ("body".equals(codeString))
      return BODY;
    if ("chemical".equals(codeString))
      return CHEMICAL;
    if ("food".equals(codeString))
      return FOOD;
    if ("drug".equals(codeString))
      return DRUG;
    if ("material".equals(codeString))
      return MATERIAL;
    throw new FHIRException("Unknown SubstanceCategory code '"+codeString+"'");
  }

  /** Inverse of {@link #fromCode}; NULL falls through to "?". */
  public String toCode() {
    switch (this) {
    case ALLERGEN: return "allergen";
    case BIOLOGICAL: return "biological";
    case BODY: return "body";
    case CHEMICAL: return "chemical";
    case FOOD: return "food";
    case DRUG: return "drug";
    case MATERIAL: return "material";
    default: return "?";
    }
  }

  /** Canonical URL of this code system. */
  public String getSystem() {
    return "http://terminology.hl7.org/CodeSystem/substance-category";
  }

  /** Human-readable definition of this code, per the FHIR specification. */
  public String getDefinition() {
    switch (this) {
    case ALLERGEN: return "A substance that causes an allergic reaction.";
    case BIOLOGICAL: return "A substance that is produced by or extracted from a biological source.";
    case BODY: return "A substance that comes directly from a human or an animal (e.g. blood, urine, feces, tears, etc.).";
    case CHEMICAL: return "Any organic or inorganic substance of a particular molecular identity, including -- (i) any combination of such substances occurring in whole or in part as a result of a chemical reaction or occurring in nature and (ii) any element or uncombined radical (http://www.epa.gov/opptintr/import-export/pubs/importguide.pdf).";
    case FOOD: return "A food, dietary ingredient, or dietary supplement for human or animal.";
    case DRUG: return "A substance intended for use in the diagnosis, cure, mitigation, treatment, or prevention of disease in man or other animals (Federal Food Drug and Cosmetic Act).";
    case MATERIAL: return "A finished product which is not normally ingested, absorbed or injected (e.g. steel, iron, wood, plastic and paper).";
    default: return "?";
    }
  }

  /** Short display label for this code. */
  public String getDisplay() {
    switch (this) {
    case ALLERGEN: return "Allergen";
    case BIOLOGICAL: return "Biological Substance";
    case BODY: return "Body Substance";
    case CHEMICAL: return "Chemical";
    case FOOD: return "Dietary Substance";
    case DRUG: return "Drug or Medicament";
    case MATERIAL: return "Material";
    default: return "?";
    }
  }
}
/* The MIT License (MIT) Copyright (c) 2021 Pierre Lindenbaum Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.github.lindenb.jvarkit.util.vcf.predictions; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import com.github.lindenb.jvarkit.lang.StringUtils; import com.github.lindenb.jvarkit.util.vcf.predictions.AnnPredictionParser.AnnPrediction; import com.github.lindenb.jvarkit.util.vcf.predictions.SnpEffPredictionParser.SnpEffPrediction; import com.github.lindenb.jvarkit.util.vcf.predictions.VepPredictionParser.VepPrediction; import htsjdk.variant.variantcontext.VariantContext; import htsjdk.variant.vcf.VCFHeader; public class GeneExtractorFactory { public interface KeyAndGene extends Comparable<KeyAndGene> { /** get the name of the entity (gene-id, transcript-id, etc... )*/ public String getKey(); /** name of the gene may be null , not used for comparaison */ public String getGene(); /** name of the extractor */ public String getMethod(); /** first key is method, second is gene */ @Override public default int compareTo(final KeyAndGene o) { int i = this.getMethod().compareTo(o.getMethod()); if(i!=0) return i; return this.getKey().compareTo(o.getKey()); } } public static class KeyAndGeneImpl implements KeyAndGene { final String key; final String gene; final String method; public KeyAndGeneImpl(final String key,final String gene,final String method) { this.key = key; this.gene = StringUtils.isBlank(gene)?".":gene; this.method = method; } @Override public String getKey() { return key; } @Override public String getGene() { return gene; } @Override public String getMethod() { return method; } @Override public int hashCode() { return this.key.hashCode()*31+this.method.hashCode(); } @Override public boolean equals(Object obj) { if(obj==this) return true; if(obj==null || !(obj instanceof KeyAndGene)) return 
false; return this.compareTo(KeyAndGene.class.cast(obj))==0; } @Override public String toString() { return this.getKey(); } } public interface GeneExtractor extends Function<VariantContext, Map<KeyAndGene,Set<String>>> { /** get name of the tag in the INFO column */ public String getInfoTag(); /** get name for this extraction */ public String getName(); } private abstract class AbstractGeneExtractorImpl implements GeneExtractor { private String extractorName; AbstractGeneExtractorImpl(final String extractorName) { this.extractorName = extractorName; } @Override public String getName() { return this.extractorName; } @Override public int hashCode() { return this.extractorName.hashCode(); } @Override public boolean equals(Object obj) { if(obj==this) return true; if(obj==null || !(obj instanceof GeneExtractor)) return false; return this.extractorName.equals(GeneExtractor.class.cast(obj).getName()); } @Override public String toString() { return this.getName(); } } private class VepGeneExtractor extends AbstractGeneExtractorImpl { private VepPredictionParser parser = null; private final Function<VepPrediction, String> pred2gene; VepGeneExtractor(final VepPredictionParser parser,final String name,final Function<VepPrediction, String> pred2gene) { super(name); this.parser= parser; this.pred2gene = pred2gene; } @Override public String getInfoTag() { return this.parser.getTag(); } @Override public Map<KeyAndGene,Set<String>> apply(final VariantContext ctx ) { final Map<KeyAndGene,Set<String>> gene2values = new HashMap<>(); for(final VepPrediction pred:this.parser.getPredictions(ctx)){ final String geneName=this.pred2gene.apply(pred); if(StringUtils.isBlank(geneName)) continue; final KeyAndGene keyAndGene=new KeyAndGeneImpl(geneName,pred.getGeneName(),this.getName()); Set<String> values = gene2values.get(keyAndGene); if(values==null) { values = new LinkedHashSet<>(); gene2values.put(keyAndGene,values); } values.add(pred.getOriginalAttributeAsString()); } return gene2values; } 
} /** bcftools csq extractor */ private class BcftoolsCsqExtractor extends AbstractGeneExtractorImpl { private final BcfToolsPredictionParser parser; private final Function<BcfToolsPredictionParser.BcfToolsPrediction, String> pred2gene; BcftoolsCsqExtractor(final BcfToolsPredictionParser parser,final String name,final Function<BcfToolsPredictionParser.BcfToolsPrediction, String> pred2gene) { super(name); this.parser= parser; this.pred2gene = pred2gene; } @Override public String getInfoTag() { return parser.getTag(); } @Override public Map<KeyAndGene,Set<String>> apply(final VariantContext ctx) { final Map<KeyAndGene,Set<String>> gene2values = new HashMap<>(); for(final BcfToolsPredictionParser.BcfToolsPrediction pred:this.parser.getPredictions(ctx)){ if(pred.isIntergenicRegion()) continue; final String geneName=this.pred2gene.apply(pred); if(StringUtils.isBlank(geneName)) continue; final KeyAndGene keyAndGene = new KeyAndGeneImpl(geneName, pred.getGeneName(),this.getName()); Set<String> values = gene2values.get(keyAndGene); if(values==null) { values = new LinkedHashSet<>(); gene2values.put(keyAndGene,values); } values.add(pred.getOriginalAttributeAsString()); } return gene2values; } } /** SNPEFF/ANN extractor */ private class AnnGeneExtractor extends AbstractGeneExtractorImpl { private final AnnPredictionParser parser; private final Function<AnnPrediction, String> pred2gene; AnnGeneExtractor(final AnnPredictionParser parser,final String name,final Function<AnnPrediction, String> pred2gene) { super(name); this.parser= parser; this.pred2gene = pred2gene; } @Override public String getInfoTag() { return parser.getTag(); } @Override public Map<KeyAndGene,Set<String>> apply(final VariantContext ctx) { final Map<KeyAndGene,Set<String>> gene2values = new HashMap<>(); for(final AnnPrediction pred:this.parser.getPredictions(ctx)){ if(pred.isIntergenicRegion()) continue; final String geneName=this.pred2gene.apply(pred); if(StringUtils.isBlank(geneName)) continue; final 
KeyAndGene keyAndGene = new KeyAndGeneImpl(geneName, pred.getGeneName(),this.getName()); Set<String> values = gene2values.get(keyAndGene); if(values==null) { values = new LinkedHashSet<>(); gene2values.put(keyAndGene,values); } values.add(pred.getOriginalAttributeAsString()); } return gene2values; } } private class SnpEffGeneExtractor extends AbstractGeneExtractorImpl { private final SnpEffPredictionParser parser; private final Function<SnpEffPrediction, String> pred2gene; SnpEffGeneExtractor(final SnpEffPredictionParser parser,final String name,final Function<SnpEffPrediction, String> pred2gene) { super(name); this.parser= parser; this.pred2gene = pred2gene; } @Override public String getInfoTag() { return parser.getTag(); } @Override public Map<KeyAndGene,Set<String>> apply(final VariantContext ctx) { final Map<KeyAndGene,Set<String>> gene2values = new HashMap<>(); for(final SnpEffPrediction pred:this.parser.getPredictions(ctx)){ //if(pred.isIntergenicRegion()) continue; final String geneName=this.pred2gene.apply(pred); if(StringUtils.isBlank(geneName)) continue; final KeyAndGene keyAndGene = new KeyAndGeneImpl(geneName, pred.getGeneName(),this.getName()); Set<String> values = gene2values.get(keyAndGene); if(values==null) { values = new LinkedHashSet<>(); gene2values.put(keyAndGene,values); } values.add(pred.getOriginalAttributeAsString()); } return gene2values; } } private class SmooveExtractor extends AbstractGeneExtractorImpl { private final SmooveGenesParser parser; SmooveExtractor(final String name,final SmooveGenesParser smooveGenesParser) { super(name); this.parser = smooveGenesParser; } @Override public Map<KeyAndGene, Set<String>> apply(final VariantContext vc) { final Map<KeyAndGene,Set<String>> gene2values = new HashMap<>(); for(final SmooveGenesParser.Prediction pred:this.parser.parse(vc)){ //if(pred.isIntergenicRegion()) continue; final String geneName= pred.getGeneName(); if(StringUtils.isBlank(geneName)) continue; final KeyAndGene keyAndGene = new 
KeyAndGeneImpl(geneName, geneName,this.getName()); Set<String> values = gene2values.get(keyAndGene); if(values==null) { values = new LinkedHashSet<>(); gene2values.put(keyAndGene,values); } values.add(pred.getOriginalAttributeAsString()); } return gene2values; } @Override public String getInfoTag() { return this.parser.getTag(); } } private final List<GeneExtractor> extractors = new ArrayList<>(); /* WARNING keep that order: see constuctor */ private static List<String> AVAILABLE_EXTRACTORS_NAMES = Collections.unmodifiableList(Arrays.asList( "ANN/GeneId","ANN/FeatureId","ANN/GeneName",// 0 & 1 & 2 "VEP/GeneId","VEP/Ensp","VEP/Feature",// 3 & 4 & 5 "EFF/Gene","EFF/Transcript",// 6 & 7 "BCSQ/gene","BCSQ/transcript",//8 & 9 "SMOOVE" //10 )) ; public static final String OPT_DESC = "Gene Extractors Name. Space/semicolon/Comma separated"; public GeneExtractorFactory(final VCFHeader header) { /* WARNING keep that order: see AVAILABLE_EXTRACTORS_NAMES */ final AnnPredictionParser annparser =new AnnPredictionParserFactory().header(header).get(); extractors.add( new AnnGeneExtractor(annparser,AVAILABLE_EXTRACTORS_NAMES.get(0), P->P.getGeneId())); extractors.add( new AnnGeneExtractor(annparser,AVAILABLE_EXTRACTORS_NAMES.get(1), P->P.getFeatureId())); extractors.add( new AnnGeneExtractor(annparser,AVAILABLE_EXTRACTORS_NAMES.get(2), P->P.getGeneName())); final VepPredictionParser vepparser = new VepPredictionParserFactory().header(header).get(); extractors.add( new VepGeneExtractor(vepparser,AVAILABLE_EXTRACTORS_NAMES.get(3), P->P.getEnsemblGene())); extractors.add( new VepGeneExtractor(vepparser,AVAILABLE_EXTRACTORS_NAMES.get(4), P->P.getENSP())); extractors.add( new VepGeneExtractor(vepparser,AVAILABLE_EXTRACTORS_NAMES.get(5), P->P.getFeature())); final SnpEffPredictionParser effparser = new SnpEffPredictionParser(header); extractors.add( new SnpEffGeneExtractor(effparser,AVAILABLE_EXTRACTORS_NAMES.get(6), P->P.getGeneName())); extractors.add( new 
SnpEffGeneExtractor(effparser,AVAILABLE_EXTRACTORS_NAMES.get(7), P->P.getEnsemblTranscript())); final BcfToolsPredictionParser csqParser = new BcfToolsPredictionParser(header); extractors.add( new BcftoolsCsqExtractor(csqParser,AVAILABLE_EXTRACTORS_NAMES.get(8), P->P.getGeneName())); extractors.add( new BcftoolsCsqExtractor(csqParser,AVAILABLE_EXTRACTORS_NAMES.get(9), P->P.getTranscript())); final SmooveGenesParser smooveGenesParser = new SmooveGenesParser(header); extractors.add( new SmooveExtractor(AVAILABLE_EXTRACTORS_NAMES.get(10), smooveGenesParser)); } /** return a list of all the available extractors' names */ public static List<String> getExtractorNames() { return AVAILABLE_EXTRACTORS_NAMES; } /** get all available extractors */ public List<GeneExtractor> getAllExtractors() { return Collections.unmodifiableList(this.extractors); } public List<GeneExtractor> parse(final String arg) { if(StringUtils.isBlank(arg)) return Collections.emptyList(); final List<GeneExtractor> L = new ArrayList<>(); for(final String s:arg.split("[\\s,;]+")) { if(StringUtils.isBlank(s)) continue; final Optional<GeneExtractor> ex = this.getAllExtractors(). stream(). filter(E->E.getName().equals(s)). findFirst(); if(!ex.isPresent()) { throw new IllegalArgumentException("Cannot find gene extractor \""+s+"\" in \""+arg+"\". Available are: " + this.getAllExtractors().stream().map(E->E.getName()).collect(Collectors.joining(" "))); } L.add(ex.get()); } return L; } }
package com.spring.microservice.web; import com.spring.microservice.domain.User; import com.spring.microservice.service.UserService; import com.spring.microservice.web.dto.LoginRequest; import com.spring.microservice.web.dto.LoginResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.HttpServerErrorException; import javax.validation.Valid; import java.util.List; /** * User controller. */ @RestController @RequestMapping("/users") @Validated public class UserController { private UserService userService; private static final String LOGIN_FAILED_MSG = "Login Failed"; @Autowired public UserController(UserService userService) { this.userService = userService; } @PostMapping("/signin") public LoginResponse login(@RequestBody @Valid LoginRequest loginRequest) { return new LoginResponse(userService.signin(loginRequest.getUsername(), loginRequest.getPassword()).orElseThrow(() -> new HttpServerErrorException(HttpStatus.FORBIDDEN, LOGIN_FAILED_MSG))); } @PostMapping("/signup") @PreAuthorize("hasRole('ROLE_ADMIN')") public User signup(@RequestBody @Valid LoginRequest loginRequest) { return userService.signup(loginRequest.getUsername(), loginRequest.getPassword(), loginRequest.getFirstName(), loginRequest.getLastName()).orElseThrow(() -> new RuntimeException("User already exists")); } @GetMapping @PreAuthorize("hasRole('ROLE_ADMIN')") public List<User> getAllUsers() { return userService.getAll(); } }
package zserio.emit.cpp; import zserio.ast.Constant; import zserio.emit.common.ExpressionFormatter; import zserio.emit.common.ZserioEmitException; import zserio.emit.cpp.symbols.CppNativeSymbol; import zserio.emit.cpp.types.CppNativeType; public class ConstEmitterTemplateData extends CppTemplateData { public ConstEmitterTemplateData(TemplateDataContext context, Constant constant) throws ZserioEmitException { super(context); final CppNativeMapper cppNativeMapper = context.getCppNativeMapper(); final ExpressionFormatter cppExpressionFormatter = context.getExpressionFormatter( new HeaderIncludeCollectorAdapter(this)); final CppNativeSymbol constantNativeSymbol = cppNativeMapper.getCppSymbol(constant); packageData = new PackageTemplateData(constantNativeSymbol.getPackageName()); name = constantNativeSymbol.getName(); CppNativeType nativeTargetType = cppNativeMapper.getCppType(constant.getTypeReference()); addHeaderIncludesForType(nativeTargetType); cppTypeName = nativeTargetType.getFullName(); value = cppExpressionFormatter.formatGetter(constant.getValueExpression()); } public PackageTemplateData getPackage() { return packageData; } public String getName() { return name; } public String getCppTypeName() { return cppTypeName; } public String getValue() { return value; } private final PackageTemplateData packageData; private final String name; private final String cppTypeName; private final String value; }
package mobi.tarantino.stub.auto.model.auth; import mobi.tarantino.stub.auto.model.auth.pojo.ErrorMobiApiAnswer; import retrofit2.Response; public class HttpApiException extends RuntimeException { private final int code; private final String message; private final transient Response<?> response; private final ErrorMobiApiAnswer apiAnswer; public HttpApiException(Response<?> response, ErrorMobiApiAnswer apiAnswer) { super(response != null ? "HTTP " + response.code() + " " + response.message() : null); if (response != null) { this.code = response.code(); this.message = response.message(); this.response = response; } else { this.code = apiAnswer != null ? getCode(apiAnswer) : 0; this.message = apiAnswer != null ? apiAnswer.getError() : null; this.response = null; } this.apiAnswer = apiAnswer; } private static int getCode(ErrorMobiApiAnswer apiAnswer) { try { return Integer.valueOf(apiAnswer.getErrorCode()); } catch (NumberFormatException e) { e.printStackTrace(); return 0; } } public static HttpApiException unauthorized() { return new HttpApiException(null, new ErrorMobiApiAnswer("unauthorized", 401)); } /** * HTTP status code. */ public int code() { return code; } /** * HTTP status message. */ public String message() { return message; } /** * The full HTTP response. This may be null if the exception was serialized. */ public Response<?> response() { return response; } public ErrorMobiApiAnswer getApiAnswer() { return apiAnswer; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.servlet.jsp.tagext; import java.io.IOException; import java.io.Reader; import java.io.Writer; import javax.servlet.jsp.JspWriter; /** * An encapsulation of the evaluation of the body of an action so it is * available to a tag handler. BodyContent is a subclass of JspWriter. * <p> * Note that the content of BodyContent is the result of evaluation, so it will * not contain actions and the like, but the result of their invocation. * <p> * BodyContent has methods to convert its contents into a String, to read its * contents, and to clear the contents. * <p> * The buffer size of a BodyContent object is unbounded. A BodyContent object * cannot be in autoFlush mode. It is not possible to invoke flush on a * BodyContent object, as there is no backing stream. * <p> * Instances of BodyContent are created by invoking the pushBody and popBody * methods of the PageContext class. A BodyContent is enclosed within another * JspWriter (maybe another BodyContent object) following the structure of their * associated actions. * <p> * A BodyContent is made available to a BodyTag through a setBodyContent() call. * The tag handler can use the object until after the call to doEndTag(). 
*/ public abstract class BodyContent extends JspWriter { /** * Protected constructor. Unbounded buffer, no autoflushing. * * @param e * the enclosing JspWriter */ protected BodyContent(JspWriter e) { super(UNBOUNDED_BUFFER, false); this.enclosingWriter = e; } /** * Redefined flush() so it is not legal. * <p> * It is not valid to flush a BodyContent because there is no backing stream * behind it. * * @throws IOException * always thrown */ @Override public void flush() throws IOException { throw new IOException("Illegal to flush within a custom tag"); } /** * Clear the body without throwing any exceptions. */ public void clearBody() { try { this.clear(); } catch (IOException ex) { // TODO -- clean this one up. throw new Error("internal error!;"); } } /** * Return the value of this BodyContent as a Reader. * * @return the value of this BodyContent as a Reader */ public abstract Reader getReader(); /** * Return the value of the BodyContent as a String. * * @return the value of the BodyContent as a String */ public abstract String getString(); /** * Write the contents of this BodyContent into a Writer. Subclasses may * optimize common invocation patterns. * * @param out * The writer into which to place the contents of this body * evaluation * @throws IOException * if an I/O error occurred while writing the contents of this * BodyContent to the given Writer */ public abstract void writeOut(Writer out) throws IOException; /** * Get the enclosing JspWriter. * * @return the enclosing JspWriter passed at construction time */ public JspWriter getEnclosingWriter() { return enclosingWriter; } // private fields private final JspWriter enclosingWriter; }
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package MyLibs;

/**
 * Mutable JavaBean describing a real-estate property: its block/lot location,
 * cost, and area. Fields are unvalidated; Double fields may be null until set.
 *
 * @author Carlo Miguel Legaspi
 */
public class Property {
    // block number of the property's location
    private int block;
    // lot number within the block
    private int lot;
    // price of the property (nullable until set)
    private Double cost;
    // area of the property; field name suggests square kilometers — TODO confirm unit
    private Double sqkm;

    public int getBlock() {
        return block;
    }

    public void setBlock(int block) {
        this.block = block;
    }

    public int getLot() {
        return lot;
    }

    public void setLot(int lot) {
        this.lot = lot;
    }

    public Double getCost() {
        return cost;
    }

    public void setCost(Double cost) {
        this.cost = cost;
    }

    public Double getSqkm() {
        return sqkm;
    }

    public void setSqkm(Double sqkm) {
        this.sqkm = sqkm;
    }
}
package net.neoremind.fountain.support; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.neoremind.fountain.rowbaselog.event.GtidEvent; /** * GtidEvent事件回调器 * * @author zhangxu */ public class GtIdEventCallback implements EventCallback<GtidEvent> { private static final Logger logger = LoggerFactory.getLogger(GtIdEventCallback.class); @Override public void handle(GtidEvent event, TrxContext trxContext) { if (logger.isDebugEnabled()) { logger.debug(String.format("Got GtIdEvent with gtid=%s", event.getGtId())); } trxContext.setSid(event.getSid()); trxContext.setCurrGtId(event.getGtId()); } }
package Interview1;

import org.testng.annotations.Test;

/**
 * Small collection of interview-style exercises: method overloading,
 * palindrome checking and string reversal.
 */
public class Test8 {

    /**
     * Method overloading concept: same method name, different parameter lists.
     *
     * @param name person's name
     * @param age  person's age as text
     */
    public void method1(String name, String age){
        System.out.println("method1 called :::: name is >>>"+name+"and age is >>>"+age);
    }

    /**
     * Overload of {@link #method1(String, String)} taking the age as an int
     * (and with the parameters in the opposite order).
     *
     * @param age  person's age
     * @param name person's name
     */
    public void method1(int age, String name){
        System.out.println("method2 called :::: name is >>>"+name+"and age is >>>"+age);
    }

    @Test
    public void methodOverloading(){
        method1("Venkatesh", "29");
        method1(27, "Venkatesh");
    }

    /**
     * Two-pointer palindrome check.
     *
     * @param s candidate string (case-sensitive)
     * @return true if s reads the same forwards and backwards
     */
    public boolean isPalindrome(String s){
        char[] ch = s.toCharArray();
        int left = 0;
        int right = s.length() - 1;
        while (left < right) {
            if (ch[left] != ch[right]) {
                return false;
            }
            left++;
            right--;
        }
        return true;
    }

    @Test
    public void palindromeOrNot(){
        System.out.println(isPalindrome("madom"));
    }

    /**
     * Reverse a string and print it. Uses StringBuilder#reverse() instead of
     * the original character-by-character String concatenation in a loop,
     * which allocated a new String on every iteration (O(n^2)).
     */
    @Test
    public void reverseAString(){
        String s = "venkatesh";
        String reversed = new StringBuilder(s).reverse().toString();
        System.out.println(reversed);
    }

    /*
     * Topics covered:
     * - method overloading / method overriding
     * - madam: palindrome or not
     * - venkatesh: reverse/sort
     */
}
package org.ballerinalang.net.transport.connectionpool; import org.ballerinalang.net.transport.contract.Constants; import org.ballerinalang.net.transport.contract.HttpClientConnector; import org.ballerinalang.net.transport.contract.HttpWsConnectorFactory; import org.ballerinalang.net.transport.contract.config.SenderConfiguration; import org.ballerinalang.net.transport.contract.exceptions.ServerConnectorException; import org.ballerinalang.net.transport.contractimpl.DefaultHttpWsConnectorFactory; import org.ballerinalang.net.transport.message.HttpMessageDataStreamer; import org.ballerinalang.net.transport.util.DefaultHttpConnectorListener; import org.ballerinalang.net.transport.util.TestUtil; import org.ballerinalang.net.transport.util.server.HttpServer; import org.ballerinalang.net.transport.util.server.initializers.SendChannelIDServerInitializer; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.NoSuchElementException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; /** * Tests the timeout for waiting for idle connection in connection pool. 
 */
public class ConnectionPoolWaitingTimeoutTestCase {

    private HttpServer httpServer;
    private HttpClientConnector httpClientConnector;
    private HttpWsConnectorFactory connectorFactory;

    // Pool allows at most 2 concurrent connections; a request waiting for a
    // free connection may block for at most 1000 ms before the pool gives up.
    private static final int MAX_ACTIVE_CONNECTIONS = 2;
    private static final int MAX_WAIT_TIME_FOR_CONNECTION_POOL = 1000;

    @BeforeClass
    public void setup() {
        // The test server delays each response (5000 ms initializer argument),
        // keeping pooled connections busy long enough for an extra request to
        // exhaust its waiting time.
        httpServer = TestUtil.startHTTPServer(TestUtil.HTTP_SERVER_PORT, new SendChannelIDServerInitializer(5000));
        connectorFactory = new DefaultHttpWsConnectorFactory();
        SenderConfiguration senderConfiguration = new SenderConfiguration();
        senderConfiguration.getPoolConfiguration().setMaxActivePerPool(MAX_ACTIVE_CONNECTIONS);
        senderConfiguration.getPoolConfiguration().setMaxWaitTime(MAX_WAIT_TIME_FOR_CONNECTION_POOL);
        httpClientConnector = connectorFactory.createHttpClientConnector(new HashMap<>(), senderConfiguration);
    }

    /**
     * Fires one more concurrent request than the pool can serve and verifies
     * that exactly one of them fails with the pool's max-wait-time error while
     * the rest are served over at most MAX_ACTIVE_CONNECTIONS channels.
     */
    @Test
    public void testWaitingForConnectionTimeout() {
        try {
            // One request more than the pool can serve concurrently.
            int noOfRequests = 3;
            CountDownLatch[] countDownLatches = new CountDownLatch[noOfRequests];
            for (int i = 0; i < noOfRequests; i++) {
                countDownLatches[i] = new CountDownLatch(1);
            }
            DefaultHttpConnectorListener[] responseListeners = new DefaultHttpConnectorListener[noOfRequests];
            for (int i = 0; i < countDownLatches.length; i++) {
                responseListeners[i] = TestUtil.sendRequestAsync(countDownLatches[i], httpClientConnector);
            }
            // Wait for the responses
            for (CountDownLatch countDownLatch : countDownLatches) {
                countDownLatch.await(10, TimeUnit.SECONDS);
            }
            // Check the responses.
            // At most one listener may carry an error (the request that timed
            // out waiting on the pool); the others carry a response whose body
            // is the id of the channel that served it.
            Throwable throwable = null;
            HashSet<String> channelIds = new HashSet<>();
            for (DefaultHttpConnectorListener responseListener : responseListeners) {
                if (responseListener.getHttpErrorMessage() != null) {
                    if (throwable != null) {
                        Assert.fail("Cannot have more than one error");
                    }
                    throwable = responseListener.getHttpErrorMessage();
                } else {
                    // SendChannelIDServerInitializer echoes the channel id in
                    // the response body; collect distinct ids to count how many
                    // connections were actually used.
                    String channelId = new BufferedReader(new InputStreamReader(
                            new HttpMessageDataStreamer(responseListener.getHttpResponseMessage()).getInputStream()))
                            .lines().collect(Collectors.joining("\n"));
                    channelIds.add(channelId);
                }
            }
            Assert.assertTrue(channelIds.size() <= MAX_ACTIVE_CONNECTIONS);
            Assert.assertTrue(throwable instanceof NoSuchElementException);
            Assert.assertEquals(throwable.getMessage(), Constants.MAXIMUM_WAIT_TIME_EXCEED);
        } catch (Exception e) {
            TestUtil.handleException("IOException occurred while running testMaxActiveConnectionsPerPool", e);
        }
    }

    @AfterClass
    public void cleanUp() throws ServerConnectorException, InterruptedException {
        TestUtil.cleanUp(new ArrayList<>(), httpServer);
        connectorFactory.shutdown();
    }
}
package com.demo.streamx;

import java.util.stream.Stream;

/**
 * Demonstrates Stream.iterate(): iterate() starts with a seed (first argument)
 * and feeds each element to the function (second argument); the result is
 * added to the stream and becomes the input of the next call. Here it is used
 * to generate the Fibonacci sequence.
 */
public class Fibonacci {

    /**
     * Returns an infinite stream of Fibonacci numbers: 0, 1, 1, 2, 3, 5, ...
     *
     * <p>The iteration state is the pair (current, next) carried inside the
     * stream itself. The original version kept the state in a mutable instance
     * field, so a second call to numbers() on the same instance continued from
     * the mutated state and produced a wrong sequence; keeping the state in
     * the seed makes the method safe to call repeatedly.
     *
     * @return infinite stream of Fibonacci numbers starting at 0
     */
    Stream<Integer> numbers() {
        return Stream.iterate(new int[] {0, 1}, p -> new int[] {p[1], p[0] + p[1]})
                     .map(p -> p[0]);
    }

    public static void main(String[] args) {
        new Fibonacci().numbers()
                //.skip(20) // skip the first 20
                .limit(10)  // then take 10
                .forEach(System.out::println);
    }
}
package com.geekq.guns.core.support;

import com.geekq.guns.core.support.exception.ToolBoxException;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.Locale;

/**
 * Date/time utility class: formatting, parsing, offsetting and comparing dates.
 *
 * <p>NOTE(review): several methods return/accept {@code DateTime} and use
 * {@code StrKit} — project-local classes not visible here.
 *
 * @author xiaoleilu
 */
public class DateTimeKit {

    /** One millisecond */
    public final static long MS = 1;
    /** Milliseconds per second */
    public final static long SECOND_MS = MS * 1000;
    /** Milliseconds per minute */
    public final static long MINUTE_MS = SECOND_MS * 60;
    /** Milliseconds per hour */
    public final static long HOUR_MS = MINUTE_MS * 60;
    /** Milliseconds per day */
    public final static long DAY_MS = HOUR_MS * 24;

    /** Standard date pattern */
    public final static String NORM_DATE_PATTERN = "yyyy-MM-dd";
    /** Standard time pattern */
    public final static String NORM_TIME_PATTERN = "HH:mm:ss";
    /** Standard date-time pattern, minute precision */
    public final static String NORM_DATETIME_MINUTE_PATTERN = "yyyy-MM-dd HH:mm";
    /** Standard date-time pattern, second precision */
    public final static String NORM_DATETIME_PATTERN = "yyyy-MM-dd HH:mm:ss";
    /** Standard date-time pattern, millisecond precision */
    public final static String NORM_DATETIME_MS_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS";
    /** Date-time pattern used in HTTP headers */
    public final static String HTTP_DATETIME_PATTERN = "EEE, dd MMM yyyy HH:mm:ss z";

    // SimpleDateFormat is not thread-safe, so each shared formatter is held in
    // a ThreadLocal, giving every thread its own instance.

    /** Formatter for the standard date (no time) pattern */
    private static ThreadLocal<SimpleDateFormat> NORM_DATE_FORMAT = new ThreadLocal<SimpleDateFormat>(){
        synchronized protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat(NORM_DATE_PATTERN);
        };
    };

    /** Formatter for the standard time pattern */
    private static ThreadLocal<SimpleDateFormat> NORM_TIME_FORMAT = new ThreadLocal<SimpleDateFormat>(){
        synchronized protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat(NORM_TIME_PATTERN);
        };
    };

    /** Formatter for the standard date-time pattern */
    private static ThreadLocal<SimpleDateFormat> NORM_DATETIME_FORMAT = new ThreadLocal<SimpleDateFormat>(){
        synchronized protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat(NORM_DATETIME_PATTERN);
        };
    };

    /** Formatter for the HTTP header date-time pattern (always US locale) */
    private static ThreadLocal<SimpleDateFormat> HTTP_DATETIME_FORMAT = new ThreadLocal<SimpleDateFormat>(){
        synchronized protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat(HTTP_DATETIME_PATTERN, Locale.US);
        };
    };

    /**
     * Current time, formatted as yyyy-MM-dd HH:mm:ss.
     *
     * @return the current time in standard string form
     */
    public static String now() {
        return formatDateTime(new DateTime());
    }

    /**
     * Current time as a long.
     *
     * @param isNano whether to use high-precision (nanosecond) time
     * @return the time value
     */
    public static long current(boolean isNano) {
        return isNano ? System.nanoTime() : System.currentTimeMillis();
    }

    /**
     * Current date, formatted as yyyy-MM-dd.
     *
     * @return the current date in standard string form
     */
    public static String today() {
        return formatDate(new DateTime());
    }

    /**
     * @return the current month (1-based)
     */
    public static int thisMonth() {
        return month(date());
    }

    /**
     * @return the current year
     */
    public static int thisYear() {
        return year(date());
    }

    /**
     * @return the current time as a DateTime
     */
    public static DateTime date() {
        return new DateTime();
    }

    /**
     * Convert a long timestamp to a DateTime.
     *
     * @param date epoch timestamp in milliseconds
     * @return the time object
     */
    public static DateTime date(long date) {
        return new DateTime(date);
    }

    /**
     * Convert to a Calendar object.
     *
     * @param date the date object
     * @return a Calendar set to the given date
     */
    public static Calendar toCalendar(Date date) {
        final Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        return cal;
    }

    /**
     * Get the month, counting from 1 (January = 1).
     *
     * @param date the date
     * @return the month
     */
    public static int month(Date date) {
        return toCalendar(date).get(Calendar.MONTH) + 1;
    }

    /**
     * Get the year.
     *
     * @param date the date
     * @return the year
     */
    public static int year(Date date) {
        return toCalendar(date).get(Calendar.YEAR);
    }

    /**
     * Get the quarter ("season") of the year.
     *
     * @param date the date
     * @return which quarter (1-4)
     */
    public static int season(Date date) {
        return toCalendar(date).get(Calendar.MONTH) / 3 + 1;
    }

    /**
     * Get the year-and-quarter string for the given date.<br>
     * Format: e.g. [20131] means the first quarter of 2013.
     *
     * @param date the date
     * @return year-and-quarter string, e.g. 20132
     */
    public static String yearAndSeason(Date date) {
        return yearAndSeason(toCalendar(date));
    }

    /**
     * Get all year-and-quarter strings within the given date range.
     *
     * @param startDate start date (inclusive)
     * @param endDate end date (inclusive)
     * @return set of year-and-quarter strings, elements like 20132
     */
    public static LinkedHashSet<String> yearAndSeasons(Date startDate, Date endDate) {
        final LinkedHashSet<String> seasons = new LinkedHashSet<String>();
        if (startDate == null || endDate == null) {
            return seasons;
        }
        final Calendar cal = Calendar.getInstance();
        cal.setTime(startDate);
        while (true) {
            // If the start time has passed the end time, clamp it to the end
            // time; the loop terminates after processing that final quarter.
            if (startDate.after(endDate)) {
                startDate = endDate;
            }
            seasons.add(yearAndSeason(cal));
            if (startDate.equals(endDate)) {
                break;
            }
            cal.add(Calendar.MONTH, 3);
            startDate = cal.getTime();
        }
        return seasons;
    }

    // ------------------------------------ Format start ----------------------------------------------

    /**
     * Format a date with a specific pattern.
     *
     * @param date the date to format
     * @param format the pattern
     * @return the formatted string
     */
    public static String format(Date date, String format) {
        // A fresh SimpleDateFormat per call: slower than the cached
        // ThreadLocal formatters, but inherently thread-safe.
        return new SimpleDateFormat(format).format(date);
    }

    /**
     * Format as yyyy-MM-dd HH:mm:ss.
     *
     * @param date the date to format
     * @return the formatted string, or null if date is null
     */
    public static String formatDateTime(Date date) {
        if(null == date){
            return null;
        }
        return NORM_DATETIME_FORMAT.get().format(date);
    }

    /**
     * Format as yyyy-MM-dd.
     *
     * @param date the date to format
     * @return the formatted string, or null if date is null
     */
    public static String formatDate(Date date) {
        if(null == date){
            return null;
        }
        return NORM_DATE_FORMAT.get().format(date);
    }

    /**
     * Format as the standard HTTP header date format.
     *
     * @param date the date to format
     * @return the HTTP standard date string, or null if date is null
     */
    public static String formatHttpDate(Date date) {
        if(null == date){
            return null;
        }
        return HTTP_DATETIME_FORMAT.get().format(date);
    }
    // ------------------------------------ Format end ----------------------------------------------

    // ------------------------------------ Parse start ----------------------------------------------

    /**
     * Build a DateTime object from a string.
     *
     * @param dateStr the date string
     * @param simpleDateFormat the formatter to use
     * @return the DateTime object
     */
    public static DateTime parse(String dateStr, SimpleDateFormat simpleDateFormat) {
        try {
            return new DateTime(simpleDateFormat.parse(dateStr));
        } catch (Exception e) {
            throw new ToolBoxException(StrKit.format("Parse [{}] with format [{}] error!", dateStr, simpleDateFormat.toPattern()), e);
        }
    }

    /**
     * Convert a date string in a specific format to a DateTime.
     *
     * @param dateString the date string
     * @param format the pattern, e.g. yyyy-MM-dd
     * @return the date object
     */
    public static DateTime parse(String dateString, String format) {
        return parse(dateString, new SimpleDateFormat(format));
    }

    /**
     * Parse format yyyy-MM-dd HH:mm:ss.
     *
     * @param dateString the date-time string in standard form
     * @return the date object
     */
    public static DateTime parseDateTime(String dateString) {
        return parse(dateString, NORM_DATETIME_FORMAT.get());
    }

    /**
     * Parse format yyyy-MM-dd.
     *
     * @param dateString the date string in standard form
     * @return the date object
     */
    public static DateTime parseDate(String dateString) {
        return parse(dateString, NORM_DATE_FORMAT.get());
    }

    /**
     * Parse format HH:mm:ss.
     *
     * @param timeString the time string in standard form
     * @return the date object
     */
    public static DateTime parseTime(String timeString) {
        return parse(timeString, NORM_TIME_FORMAT.get());
    }

    /**
     * Parse a date string, picking the pattern by string length. Supported:<br>
     * 1. yyyy-MM-dd HH:mm:ss<br>
     * 2. yyyy-MM-dd<br>
     * 3. HH:mm:ss<br>
     * 4. yyyy-MM-dd HH:mm 5. yyyy-MM-dd HH:mm:ss.SSS
     *
     * @param dateStr the date string
     * @return the date, or null if dateStr is null
     */
    public static DateTime parse(String dateStr) {
        if (null == dateStr) {
            return null;
        }
        dateStr = dateStr.trim();
        int length = dateStr.length();
        try {
            if (length == NORM_DATETIME_PATTERN.length()) {
                return parseDateTime(dateStr);
            } else if (length == NORM_DATE_PATTERN.length()) {
                return parseDate(dateStr);
            } else if (length == NORM_TIME_PATTERN.length()) {
                return parseTime(dateStr);
            } else if (length == NORM_DATETIME_MINUTE_PATTERN.length()) {
                return parse(dateStr, NORM_DATETIME_MINUTE_PATTERN);
            } else if (length >= NORM_DATETIME_MS_PATTERN.length() - 2) {
                return parse(dateStr, NORM_DATETIME_MS_PATTERN);
            }
        } catch (Exception e) {
            // NOTE(review): the caught exception is not chained as the cause,
            // so the original parse failure detail is lost here.
            throw new ToolBoxException(StrKit.format("Parse [{}] with format normal error!", dateStr));
        }
        // No pattern matched the string's length.
        throw new ToolBoxException(StrKit.format(" [{}] format is not fit for date pattern!", dateStr));
    }
    // ------------------------------------ Parse end ----------------------------------------------

    // ------------------------------------ Offset start ----------------------------------------------

    /**
     * Get the start-of-day instant for the given date.
     *
     * @param date the date
     * @return start of that day (00:00:00.000)
     */
    public static DateTime getBeginTimeOfDay(Date date) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        return new DateTime(calendar.getTime());
    }

    /**
     * Get the end-of-day instant for the given date.
     *
     * @param date the date
     * @return end of that day (23:59:59.999)
     */
    public static DateTime getEndTimeOfDay(Date date) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        calendar.set(Calendar.HOUR_OF_DAY, 23);
        calendar.set(Calendar.MINUTE, 59);
        calendar.set(Calendar.SECOND, 59);
        calendar.set(Calendar.MILLISECOND, 999);
        return new DateTime(calendar.getTime());
    }

    /**
     * Yesterday.
     *
     * @return this time yesterday
     */
    public static DateTime yesterday() {
        return offsiteDay(new DateTime(), -1);
    }

    /**
     * Last week.
     *
     * @return this time one week ago
     */
    public static DateTime lastWeek() {
        return offsiteWeek(new DateTime(), -1);
    }

    /**
     * Last month.
     * NOTE(review): method name is a typo for "lastMonth"; kept as-is because
     * renaming would break existing callers.
     *
     * @return this time one month ago
     */
    public static DateTime lastMouth() {
        return offsiteMonth(new DateTime(), -1);
    }

    /**
     * Offset by days.
     *
     * @param date the date
     * @param offsite days to offset: positive toward the future, negative toward the past
     * @return the offset date
     */
    public static DateTime offsiteDay(Date date, int offsite) {
        return offsiteDate(date, Calendar.DAY_OF_YEAR, offsite);
    }

    /**
     * Offset by weeks.
     *
     * @param date the date
     * @param offsite weeks to offset: positive toward the future, negative toward the past
     * @return the offset date
     */
    public static DateTime offsiteWeek(Date date, int offsite) {
        return offsiteDate(date, Calendar.WEEK_OF_YEAR, offsite);
    }

    /**
     * Offset by months.
     *
     * @param date the date
     * @param offsite months to offset: positive toward the future, negative toward the past
     * @return the offset date
     */
    public static DateTime offsiteMonth(Date date, int offsite) {
        return offsiteDate(date, Calendar.MONTH, offsite);
    }

    /**
     * Offset the given date by the given amount of the given field.
     *
     * @param date the base date
     * @param calendarField the granularity to offset (hour, day, month, ...) — a Calendar constant
     * @param offsite the amount: positive offsets forward, negative backward
     * @return the offset date
     */
    public static DateTime offsiteDate(Date date, int calendarField, int offsite) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        cal.add(calendarField, offsite);
        return new DateTime(cal.getTime());
    }
    // ------------------------------------ Offset end ----------------------------------------------

    /**
     * Difference between two dates.<br/>
     * Returns minuend - subtrahend, in units of diffField.
     *
     * @param subtrahend the date subtracted
     * @param minuend the date subtracted from
     * @param diffField the unit of the difference in milliseconds (e.g. DAY_MS, HOUR_MS)
     * @return the difference
     */
    public static long diff(Date subtrahend, Date minuend, long diffField) {
        long diff = minuend.getTime() - subtrahend.getTime();
        return diff / diffField;
    }

    /**
     * Elapsed time, typically used to time a code section, in nanoseconds.
     *
     * @param preTime the previously recorded time
     * @return elapsed time in nanoseconds
     */
    public static long spendNt(long preTime) {
        return System.nanoTime() - preTime;
    }

    /**
     * Elapsed time, typically used to time a code section, in milliseconds.
     *
     * @param preTime the previously recorded time
     * @return elapsed time in milliseconds
     */
    public static long spendMs(long preTime) {
        return System.currentTimeMillis() - preTime;
    }

    /**
     * Format as yyMMddHHmm and convert the result to an int.
     *
     * @param date the date
     * @return int form of the yyMMddHHmm string
     */
    public static int toIntSecond(Date date) {
        return Integer.parseInt(format(date, "yyMMddHHmm"));
    }

    /**
     * Count the weeks within the given time range.
     *
     * @param start start time
     * @param end end time
     * @return number of weeks
     */
    public static int weekCount(Date start, Date end) {
        final Calendar startCalendar = Calendar.getInstance();
        startCalendar.setTime(start);
        final Calendar endCalendar = Calendar.getInstance();
        endCalendar.setTime(end);

        final int startWeekofYear = startCalendar.get(Calendar.WEEK_OF_YEAR);
        final int endWeekofYear = endCalendar.get(Calendar.WEEK_OF_YEAR);

        int count = endWeekofYear - startWeekofYear + 1;

        // If the range does not start on a Sunday, the first partial week is
        // not counted.
        if (Calendar.SUNDAY != startCalendar.get(Calendar.DAY_OF_WEEK)) {
            count--;
        }

        return count;
    }

    /**
     * Timer.<br>
     * Measures the time spent by a code section, to millisecond precision.
     *
     * @return a started Timer
     */
    public static Timer timer() {
        return new Timer();
    }

    /**
     * Convert a birthday to a legal age as of today.
     *
     * @param birthDay the birthday, as a standard date string
     * @return the age
     * @throws Exception
     */
    public static int ageOfNow(String birthDay) {
        return ageOfNow(parse(birthDay));
    }

    /**
     * Convert a birthday to a legal age as of today.
     *
     * @param birthDay the birthday
     * @return the age
     * @throws Exception
     */
    public static int ageOfNow(Date birthDay) {
        return age(birthDay,date());
    }

    /**
     * Compute the age relative to dateToCompare; commonly used to compute the
     * age a person born on birthDay had at some given date.
     *
     * @param birthDay the birthday
     * @param dateToCompare the date to compare against
     * @return the age
     * @throws Exception
     */
    public static int age(Date birthDay, Date dateToCompare) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(dateToCompare);

        if (cal.before(birthDay)) {
            throw new IllegalArgumentException(StrKit.format("Birthday is after date {}!", formatDate(dateToCompare)));
        }

        int year = cal.get(Calendar.YEAR);
        int month = cal.get(Calendar.MONTH);
        int dayOfMonth = cal.get(Calendar.DAY_OF_MONTH);

        cal.setTime(birthDay);
        int age = year - cal.get(Calendar.YEAR);

        int monthBirth = cal.get(Calendar.MONTH);
        if (month == monthBirth) {
            int dayOfMonthBirth = cal.get(Calendar.DAY_OF_MONTH);
            if (dayOfMonth < dayOfMonthBirth) {
                // Same month but before the birthday itself: one year less.
                age--;
            }
        } else if (month < monthBirth){
            // Birthday month not yet reached this year: one year less.
            age--;
        }

        return age;
    }

    /**
     * Timer.<br>
     * Measures the time spent by a code section, to millisecond (or
     * nanosecond) precision.
     *
     * @author Looly
     *
     */
    public static class Timer {
        private long time;      // last recorded start/restart instant
        private boolean isNano; // true to measure in nanoseconds, false for milliseconds

        public Timer() {
            this(false);
        }

        public Timer(boolean isNano) {
            this.isNano = isNano;
            start();
        }

        /**
         * @return starts timing and returns the current time
         */
        public long start() {
            time = current(isNano);
            return time;
        }

        /**
         * @return restarts timing and returns the duration from the previous start until now
         */
        public long durationRestart() {
            long now = current(isNano);
            long d = now - time;
            time = now;
            return d;
        }

        /**
         * @return the duration from start until now
         */
        public long duration() {
            return current(isNano) - time;
        }
    }

    // ------------------------------------------------------------------------ Private method start

    /**
     * Get the year-and-quarter string for the given calendar.<br>
     * Format: e.g. [20131] means the first quarter of 2013.
     *
     * @param cal the calendar
     */
    private static String yearAndSeason(Calendar cal) {
        return new StringBuilder().append(cal.get(Calendar.YEAR)).append(cal.get(Calendar.MONTH) / 3 + 1).toString();
    }
    // ------------------------------------------------------------------------ Private method end
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.expressions.translator;

import com.facebook.presto.spi.relation.CallExpression;
import com.facebook.presto.spi.relation.ConstantExpression;
import com.facebook.presto.spi.relation.LambdaDefinitionExpression;
import com.facebook.presto.spi.relation.SpecialFormExpression;
import com.facebook.presto.spi.relation.VariableReferenceExpression;

import static com.facebook.presto.expressions.translator.TranslatedExpression.untranslated;

/**
 * Extension point for translating Presto row expressions into a target
 * representation {@code T}. Each {@code translate*} method handles one
 * expression kind; every default implementation simply marks the expression
 * as untranslated, so subclasses override only the kinds they support.
 *
 * @param <T> target representation produced by the translation
 * @param <C> caller-supplied context threaded through the traversal
 */
public class RowExpressionTranslator<T, C>
{
    /** Default: leaves constants untranslated; override to map literals. */
    public TranslatedExpression<T> translateConstant(ConstantExpression literal, C context, RowExpressionTreeTranslator<T, C> rowExpressionTreeTranslator)
    {
        return untranslated(literal);
    }

    /** Default: leaves variable references untranslated; override to map columns/variables. */
    public TranslatedExpression<T> translateVariable(VariableReferenceExpression reference, C context, RowExpressionTreeTranslator<T, C> rowExpressionTreeTranslator)
    {
        return untranslated(reference);
    }

    /** Default: leaves lambda definitions untranslated; override to map lambdas. */
    public TranslatedExpression<T> translateLambda(LambdaDefinitionExpression reference, C context, RowExpressionTreeTranslator<T, C> rowExpressionTreeTranslator)
    {
        return untranslated(reference);
    }

    /** Default: leaves function calls untranslated; override to map supported functions. */
    public TranslatedExpression<T> translateCall(CallExpression call, C context, RowExpressionTreeTranslator<T, C> rowExpressionTreeTranslator)
    {
        return untranslated(call);
    }

    /** Default: leaves special forms (AND/OR/IF/...) untranslated; override to map them. */
    public TranslatedExpression<T> translateSpecialForm(SpecialFormExpression specialForm, C context, RowExpressionTreeTranslator<T, C> rowExpressionTreeTranslator)
    {
        return untranslated(specialForm);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.referencing.operation.projection; import java.util.Random; import java.math.BigDecimal; import org.opengis.util.FactoryException; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.apache.sis.internal.referencing.Formulas; import org.apache.sis.internal.referencing.provider.LambertConformal1SP; import org.apache.sis.internal.referencing.provider.LambertConformal2SP; import org.apache.sis.internal.referencing.provider.LambertConformalWest; import org.apache.sis.internal.referencing.provider.LambertConformalBelgium; import org.apache.sis.internal.referencing.provider.LambertConformalMichigan; import org.apache.sis.referencing.operation.transform.CoordinateDomain; import org.apache.sis.parameter.Parameters; import org.apache.sis.internal.util.DoubleDouble; import org.apache.sis.test.DependsOnMethod; import org.apache.sis.test.DependsOn; import org.apache.sis.test.TestUtilities; import org.junit.Test; import static java.lang.StrictMath.*; import static java.lang.Double.*; import static org.apache.sis.test.Assert.*; // Branch-specific imports import static org.junit.Assume.assumeTrue; import static 
org.apache.sis.test.Assert.PENDING_NEXT_GEOAPI_RELEASE; /** * Tests the {@link LambertConicConformal} class. We test using various values of the latitude of origin. * We do not test with various values of standard parallels, because it is just an other way to set * the value of the <var>n</var> field in {@code LambertConicConformal}. As long as we make this value varying, * the latitude of origin is the simplest approach. * * @author Martin Desruisseaux (Geomatys) * @author Rémi Maréchal (Geomatys) * @since 0.6 * @version 0.7 * @module */ @DependsOn(ConformalProjectionTest.class) public final strictfp class LambertConicConformalTest extends MapProjectionTestCase { /** * Verifies the value of the constant used in <cite>"Lambert Conic Conformal (2SP Belgium)"</cite> projection. * * @see #testLambertConicConformalBelgium() */ @Test public void verifyBelgeConstant() { final DoubleDouble BELGE_A = (DoubleDouble) LambertConicConformal.belgeA(); BigDecimal a = new BigDecimal(BELGE_A.value); a = a.add (new BigDecimal(BELGE_A.error)); a = a.multiply(new BigDecimal("57.29577951308232087679815481410517")); // Conversion from radians to degrees. a = a.multiply(new BigDecimal(60 * 60)); // Conversion from degrees to seconds. a = a.add (new BigDecimal("29.2985")); // The standard value. assertTrue(abs(a.doubleValue()) < 1E-31); } /** * Creates a new instance of {@link LambertConicConformal}. See the class javadoc for an explanation * about why we ask only for the latitude of origin and not the standard parallels. * * @param ellipse {@code false} for a sphere, or {@code true} for WGS84 ellipsoid. * @param latitudeOfOrigin The latitude of origin, in decimal degrees. 
*/ private void createNormalizedProjection(final boolean ellipse, final double latitudeOfOrigin) { final LambertConformal1SP method = new LambertConformal1SP(); final Parameters parameters = parameters(method, ellipse); parameters.getOrCreate(LambertConformal1SP.LATITUDE_OF_ORIGIN).setValue(latitudeOfOrigin); transform = new LambertConicConformal(method, parameters); if (!ellipse) { transform = new LambertConicConformal.Spherical((LambertConicConformal) transform); } tolerance = NORMALIZED_TOLERANCE; validate(); } /** * Tests the WKT formatting of {@link NormalizedProjection}. For the Lambert Conformal projection, we expect * the internal {@code n} parameter in addition to the eccentricity. * * <div class="section">Note on accuracy</div> * The value of the eccentricity parameter should be fully accurate because it is calculated using only the * {@link Math#sqrt(double)} function (ignoring basic algebraic operations) which, according javadoc, must * give the result closest to the true mathematical result. But the functions involved in the calculation of * <var>n</var> do not have such strong guarantees. So we use a regular expression in this test for ignoring * the 2 last digits of <var>n</var>. */ @Test public void testNormalizedWKT() { createNormalizedProjection(true, 40); assertWktEqualsRegex("(?m)\\Q" + "PARAM_MT[“Lambert conic conformal (radians domain)”,\n" + " PARAMETER[“eccentricity”, 0.0818191908426215],\n" + " PARAMETER[“n”, 0.64278760968653\\E\\d*\\Q]]\\E"); // 0.6427876096865393 in the original test. } /** * Tests the projection at some special latitudes (0, ±π/2, NaN and others). * * @throws ProjectionException Should never happen. */ @Test public void testSpecialLatitudes() throws ProjectionException { if (transform == null) { // May have been initialized by 'testSphericalCase'. 
createNormalizedProjection(true, 40); // Elliptical case } final double INF = POSITIVE_INFINITY; assertEquals ("Not a number", NaN, transform(NaN), NORMALIZED_TOLERANCE); assertEquals ("Out of range", NaN, transform(+2), NORMALIZED_TOLERANCE); assertEquals ("Out of range", NaN, transform(-2), NORMALIZED_TOLERANCE); assertEquals ("Forward 0°N", 1, transform(0), NORMALIZED_TOLERANCE); assertEquals ("Forward 90°S", 0, transform(-PI/2), NORMALIZED_TOLERANCE); assertEquals ("Forward 90°N", INF, transform(+PI/2), NORMALIZED_TOLERANCE); assertEquals ("Forward (90+ε)°S", 0, transform(-nextUp( PI/2)), NORMALIZED_TOLERANCE); assertEquals ("Forward (90+ε)°N", INF, transform(+nextUp(+PI/2)), NORMALIZED_TOLERANCE); assertEquals ("Forward (90-ε)°S", 0, transform(+nextUp(-PI/2)), 1E-10); assertEquals ("Not a number", NaN, inverseTransform(NaN), NORMALIZED_TOLERANCE); assertEquals ("Inverse 0", -PI/2, inverseTransform( 0), NORMALIZED_TOLERANCE); assertEquals ("Inverse +1", 0, inverseTransform(+1), NORMALIZED_TOLERANCE); assertEquals ("Inverse -1", 0, inverseTransform(-1), NORMALIZED_TOLERANCE); assertEquals ("Inverse +∞", +PI/2, inverseTransform(INF), NORMALIZED_TOLERANCE); assertEquals ("Inverse -∞", +PI/2, inverseTransform(-INF), NORMALIZED_TOLERANCE); // Like the north case, but with sign inversed. 
createNormalizedProjection(((LambertConicConformal) transform).eccentricity != 0, -40); validate(); assertEquals ("Not a number", NaN, transform(NaN), NORMALIZED_TOLERANCE); assertEquals ("Out of range", NaN, transform(+2), NORMALIZED_TOLERANCE); assertEquals ("Out of range", NaN, transform(-2), NORMALIZED_TOLERANCE); assertEquals ("Forward 0°N", 1, transform(0), NORMALIZED_TOLERANCE); assertEquals ("Forward 90°N", INF, transform(+PI/2), NORMALIZED_TOLERANCE); assertEquals ("Forward 90°S", 0, transform(-PI/2), NORMALIZED_TOLERANCE); assertEquals ("Forward (90+ε)°N", INF, transform(+nextUp(+PI/2)), NORMALIZED_TOLERANCE); assertEquals ("Forward (90+ε)°S", 0, transform(-nextUp( PI/2)), NORMALIZED_TOLERANCE); assertEquals ("Forward (90-ε)°S", 0, transform( nextUp(-PI/2)), 1E-10); assertEquals ("Not a number", NaN, inverseTransform(NaN), NORMALIZED_TOLERANCE); assertEquals ("Inverse 0", -PI/2, inverseTransform( 0), NORMALIZED_TOLERANCE); assertEquals ("Inverse +∞", +PI/2, inverseTransform(INF), NORMALIZED_TOLERANCE); assertEquals ("Inverse -∞", +PI/2, inverseTransform(-INF), NORMALIZED_TOLERANCE); } /** * Tests the derivatives at a few points. This method compares the derivatives computed by * the projection with an estimation of derivatives computed by the finite differences method. * * @throws TransformException Should never happen. */ @Test @DependsOnMethod("testSpecialLatitudes") public void testDerivative() throws TransformException { if (transform == null) { // May have been initialized by 'testSphericalCase'. createNormalizedProjection(true, 40); // Elliptical case } final double delta = toRadians(100.0 / 60) / 1852; // Approximatively 100 metres. derivativeDeltas = new double[] {delta, delta}; tolerance = 1E-9; verifyDerivative(toRadians( 0), toRadians( 0)); verifyDerivative(toRadians(15), toRadians(30)); verifyDerivative(toRadians(10), toRadians(60)); } /** * Tests the <cite>"Lambert Conic Conformal (1SP)"</cite> case (EPSG:9801). 
* This test is defined in GeoAPI conformance test suite. * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. * * @see org.opengis.test.referencing.ParameterizedTransformTest#testLambertConicConformal1SP() */ @Test @DependsOnMethod({"testSpecialLatitudes", "testDerivative"}) public void testLambertConicConformal1SP() throws FactoryException, TransformException { createGeoApiTest(new LambertConformal1SP()).testLambertConicConformal1SP(); } /** * Tests the <cite>"Lambert Conic Conformal (2SP)"</cite> case (EPSG:9802). * This test is defined in GeoAPI conformance test suite. * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. * * @see org.opengis.test.referencing.ParameterizedTransformTest#testLambertConicConformal1SP() */ @Test @DependsOnMethod("testLambertConicConformal1SP") public void testLambertConicConformal2SP() throws FactoryException, TransformException { createGeoApiTest(new LambertConformal2SP()).testLambertConicConformal2SP(); } /** * Tests the <cite>"Lambert Conic Conformal (2SP Belgium)"</cite> case (EPSG:9803). * This test is defined in GeoAPI conformance test suite. * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. * * @see org.opengis.test.referencing.ParameterizedTransformTest#testLambertConicConformal1SP() */ @Test @DependsOnMethod({"testLambertConicConformal2SP", "verifyBelgeConstant"}) public void testLambertConicConformalBelgium() throws FactoryException, TransformException { createGeoApiTest(new LambertConformalBelgium()).testLambertConicConformalBelgium(); } /** * Tests the <cite>"Lambert Conic Conformal (2SP Michigan)"</cite> case (EPSG:1051). * This test is defined in GeoAPI conformance test suite. 
* * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. * * @see org.opengis.test.referencing.ParameterizedTransformTest#testLambertConicConformalMichigan() */ @Test @DependsOnMethod("testLambertConicConformal2SP") public void testLambertConicConformalMichigan() throws FactoryException, TransformException { assumeTrue(PENDING_NEXT_GEOAPI_RELEASE); // Test not available in GeoAPI 3.0 } /** * Tests the <cite>"Lambert Conic Conformal (1SP West Orientated)"</cite> case (EPSG:9826)). * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. */ @Test @DependsOnMethod("testLambertConicConformal1SP") public void testLambertConicConformalWestOrientated() throws FactoryException, TransformException { createCompleteProjection(new LambertConformal1SP(), false, 0.5, // Central meridian 40, // Latitude of origin 0, // Standard parallel (none) 0.997, // Scale factor 200, // False easting 100); // False northing final MathTransform reference = transform; createCompleteProjection(new LambertConformalWest(), false, 0.5, // Central meridian 40, // Latitude of origin 0, // Standard parallel (none) 0.997, // Scale factor 200, // False easting 100); // False northing final Random random = TestUtilities.createRandomNumberGenerator(); final double[] sources = new double[20]; for (int i=0; i<sources.length;) { sources[i++] = 20 * random.nextDouble(); // Longitude sources[i++] = 10 * random.nextDouble() + 35; // Latitude } final double[] expected = new double[sources.length]; reference.transform(sources, 0, expected, 0, sources.length/2); /* * At this point, we have the source coordinates and the expected projected coordinates calculated * by the "Lambert Conic Conformal (1SP)" method. 
Now convert those projected coordinates into the * coordinates that we expect from the "Lambert Conic Conformal (1SP West Orientated)". If we had * no false easting, we would just revert the sign of 'x' values. But because of the false easting, * we expect an additional offset of two time that easting. This is because (quoting the EPSG guide): * * the term FE retains its definition, i.e. in the Lambert Conic Conformal (West Orientated) * method it increases the Westing value at the natural origin. * In this method it is effectively false westing (FW). * * So the conversion for this test case should be: W = 400 - E * * However our map projection "kernel" implementation does not reverse the sign of 'x' values, * because this reversal is the job of a separated method (CoordinateSystems.swapAndScaleAxes) * which does is work by examining the axis directions. So we the values that we expect are: * * expected = -W = E - 400 */ for (int i=0; i<sources.length; i += 2) { expected[i] -= 400; } tolerance = Formulas.LINEAR_TOLERANCE; verifyTransform(sources, expected); } /** * Performs the same tests than {@link #testSpecialLatitudes()} and {@link #testDerivative()}, * but using spherical formulas. * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. */ @Test @DependsOnMethod({"testSpecialLatitudes", "testDerivative"}) public void testSphericalCase() throws FactoryException, TransformException { createNormalizedProjection(false, 40); // Spherical case testSpecialLatitudes(); testDerivative(); // Make sure that the above methods did not overwrote the 'transform' field. assertEquals("transform.class", LambertConicConformal.Spherical.class, transform.getClass()); } /** * Verifies the consistency of elliptical formulas with the spherical formulas. * This test compares the results of elliptical formulas with the spherical ones * for some random points. 
* * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. */ @Test @DependsOnMethod("testSphericalCase") public void compareEllipticalWithSpherical() throws FactoryException, TransformException { createCompleteProjection(new LambertConformal1SP(), false, 0.5, // Central meridian 40, // Latitude of origin 0, // Standard parallel (none) 0.997, // Scale factor 200, // False easting 100); // False northing tolerance = Formulas.LINEAR_TOLERANCE; compareEllipticalWithSpherical(CoordinateDomain.GEOGRAPHIC_SAFE, 0); } /** * Verifies that deserialized projections work as expected. This implies that deserialization * recomputed the internal transient fields, especially the series expansion coefficients. * * @throws FactoryException if an error occurred while creating the map projection. * @throws TransformException if an error occurred while projecting a coordinate. */ @Test @DependsOnMethod("testLambertConicConformal1SP") public void testSerialization() throws FactoryException, TransformException { createNormalizedProjection(true, 40); final double[] source = CoordinateDomain.GEOGRAPHIC_RADIANS_NORTH.generateRandomInput(TestUtilities.createRandomNumberGenerator(), 2, 10); final double[] target = new double[source.length]; transform.transform(source, 0, target, 0, 10); transform = assertSerializedEquals(transform); tolerance = Formulas.LINEAR_TOLERANCE; verifyTransform(source, target); } }
package com.open.capacity.answer.entity;

import lombok.Data;

import java.sql.Timestamp;

/**
 * Row of the questions table.
 *
 * <p>Lombok {@code @Data} generates getters/setters, {@code equals}/{@code hashCode}
 * and {@code toString} for all fields.</p>
 */
@Data
public class QuestionsEntity {
    // Primary key.
    private Integer id;
    // NOTE(review): presumably a discriminator for the kind of question — confirm against the table schema.
    private Integer questionType;
    // NOTE(review): looks like the serial/identifier of an associated video — confirm with callers.
    private String videoSerial;
    // NOTE(review): appears to reference a selection/option row — confirm.
    private Integer selectionId;
    // Row creation timestamp.
    private Timestamp createTime;
    // Last-modification timestamp.
    private Timestamp updateTime;
}
package com.baeldung.searching;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Unit tests for {@link WordIndexer}: every occurrence index of a search term
 * inside a fixed Hamlet excerpt must be reported, for both the naive and the
 * upgraded search.
 */
public class WordIndexerUnitTest {

    String theString;
    WordIndexer wordIndexer;

    @BeforeEach
    public void setUp() throws Exception {
        wordIndexer = new WordIndexer();
        // Fixture text. Must stay byte-identical: the expected indices in the
        // tests below are offsets into this exact string.
        theString = "To be, or not to be: that is the question: " +
          "Whether 'tis nobler in the mind to suffer " +
          "The slings and arrows of outrageous fortune, " +
          "Or to take arms against a sea of troubles, " +
          "And by opposing end them? To die: to sleep; " +
          "No more; and by a sleep to say we end " +
          "The heart-ache and the thousand natural shocks " +
          "That flesh is heir to, 'tis a consummation " +
          "Devoutly to be wish'd. To die, to sleep; " +
          "To sleep: perchance to dream: ay, there's the rub: " +
          "For in that sleep of death what dreams may come,";
    }

    @Test
    public void givenWord_whenSearching_thenFindAllIndexedLocations() {
        // "or" occurs five times (including inside "fortune" and "outrageous").
        List<Integer> found = wordIndexer.findWord(theString, "or");
        assertEquals(Arrays.asList(7, 122, 130, 221, 438), found);
    }

    @Test
    public void givenWordWithNoRepeatCharacters_whenImprovedSearching_thenFindAllIndexedLocations() {
        // Same occurrences must be reported by the upgraded implementation.
        List<Integer> found = wordIndexer.findWordUpgrade(theString, "or");
        assertEquals(Arrays.asList(7, 122, 130, 221, 438), found);
    }

    @Test
    public void givenWord_whenSearching_thenFindAtEndOfString() {
        // A match ending exactly at the last character must not be missed.
        List<Integer> found = wordIndexer.findWordUpgrade(theString, "come,");
        assertEquals(Arrays.asList(480), found);
    }
}
package com.bancoexterior.app.util;

import java.io.IOException;
import java.util.List;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFFont;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

import com.bancoexterior.app.convenio.model.Movimiento;

import lombok.extern.slf4j.Slf4j;

/**
 * Builds an XLSX export of currency-exchange movements ({@link Movimiento})
 * and streams it to an {@link HttpServletResponse}.
 *
 * <p>Usage: construct with the rows to export, then call {@link #export}.
 * Instances are single-use and not thread-safe.</p>
 */
@Slf4j
public class ConsultaExcelExporter {

    /** Column titles in output order; indices must match writeDataLines(). */
    private static final String[] HEADERS = {
            "Cod. Operacion", "Fecha Operacion", "Codigo Moneda", "Codigo Ibs",
            "Nro IdCliente", "Cuenta en divisas", "Cuenta en Bolivares",
            "Monto Divisa", "Monto Bs", "Tasa Cliente", "Tasa Operacion",
            "Monto Bs Operacion", "Referencia Debito", "Referencia Credito",
            "Tipo Transaccion", "Estatus"
    };

    private XSSFWorkbook workbook;
    private XSSFSheet sheet;
    private List<Movimiento> listaMovimientos;

    public ConsultaExcelExporter(List<Movimiento> listaMovimientos) {
        // Parameterized logging: the message is only rendered when INFO is enabled.
        log.info("me llamo");
        log.info("listaMovimientos: {}", listaMovimientos);
        this.listaMovimientos = listaMovimientos;
        workbook = new XSSFWorkbook();
    }

    /** Creates the sheet and writes the bold 16pt header row. */
    private void writeHeaderLine() {
        sheet = workbook.createSheet("Users");
        Row row = sheet.createRow(0);

        CellStyle style = workbook.createCellStyle();
        XSSFFont font = workbook.createFont();
        font.setBold(true);
        font.setFontHeight(16);
        style.setFont(font);

        for (int col = 0; col < HEADERS.length; col++) {
            createCell(row, col, HEADERS[col], style);
        }
    }

    /**
     * Writes one typed cell and auto-sizes its column.
     * Falls back to the String representation for any non Integer/Boolean value.
     */
    private void createCell(Row row, int columnCount, Object value, CellStyle style) {
        sheet.autoSizeColumn(columnCount);
        Cell cell = row.createCell(columnCount);
        if (value instanceof Integer) {
            cell.setCellValue((Integer) value);
        } else if (value instanceof Boolean) {
            cell.setCellValue((Boolean) value);
        } else {
            cell.setCellValue((String) value);
        }
        cell.setCellStyle(style);
    }

    /** Maps the numeric estatus code to its display label. */
    private static String estatusLabel(int estatus) {
        switch (estatus) {
            case 0:
                return "Por Aprobar";
            case 1:
                return "Aprobada Automática";
            case 2:
                return "Aprobada Funcional";
            case 3:
                return "Rechazada Automática";
            default:
                // Any other code is reported as a functional rejection,
                // matching the original if/else fallback.
                return "Rechazada Funcional";
        }
    }

    /** Writes one 14pt data row per movement, in the HEADERS column order. */
    private void writeDataLines() {
        int rowCount = 1;

        CellStyle style = workbook.createCellStyle();
        XSSFFont font = workbook.createFont();
        font.setFontHeight(14);
        style.setFont(font);

        for (Movimiento movimiento : listaMovimientos) {
            Row row = sheet.createRow(rowCount++);
            int columnCount = 0;

            createCell(row, columnCount++, movimiento.getCodOperacion(), style);
            createCell(row, columnCount++, movimiento.getFechaOperacion(), style);
            createCell(row, columnCount++, movimiento.getCodMoneda(), style);
            createCell(row, columnCount++, movimiento.getCodigoIbs(), style);
            createCell(row, columnCount++, movimiento.getNroIdCliente(), style);
            createCell(row, columnCount++, movimiento.getCuentaDivisa(), style);
            createCell(row, columnCount++, movimiento.getCuentaNacional(), style);
            // Numeric amounts are exported as text, preserving their exact representation.
            createCell(row, columnCount++, movimiento.getMontoDivisa().toString(), style);
            createCell(row, columnCount++, movimiento.getMontoBsCliente().toString(), style);
            createCell(row, columnCount++, movimiento.getTasaCliente().toString(), style);
            createCell(row, columnCount++, movimiento.getTasaOperacion().toString(), style);
            createCell(row, columnCount++, movimiento.getMontoBsOperacion().toString(), style);
            createCell(row, columnCount++, movimiento.getReferenciaDebito(), style);
            createCell(row, columnCount++, movimiento.getReferenciaCredito(), style);

            log.info("estatus: {}", movimiento.getTipoTransaccion());
            // "C" marks a purchase; anything else is treated as a sale.
            createCell(row, columnCount++,
                    movimiento.getTipoTransaccion().equals("C") ? "Compra" : "Venta", style);

            log.info("estatus: {}", movimiento.getEstatus());
            createCell(row, columnCount++, estatusLabel(movimiento.getEstatus()), style);
        }
    }

    /**
     * Renders the workbook and streams it to the client.
     *
     * @param response servlet response whose output stream receives the XLSX bytes
     * @throws IOException if writing the workbook or the stream fails
     */
    public void export(HttpServletResponse response) throws IOException {
        writeHeaderLine();
        writeDataLines();
        // try-with-resources + finally guarantee both the stream and the
        // workbook are closed even when write() throws (the original leaked
        // both on failure).
        try (ServletOutputStream outputStream = response.getOutputStream()) {
            workbook.write(outputStream);
        } finally {
            workbook.close();
        }
    }
}
package com.jeespring.common.swagger;

import com.google.common.base.Predicates;
import com.jeespring.common.web.AbstractBaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

/**
 * Springfox/Swagger 2 configuration: registers a single Docket that documents
 * every handler method annotated with {@code @ApiOperation}.
 */
@Configuration
@EnableSwagger2
public class Swagger {

    /**
     * Builds the API group ("JeeSpring云接口") shown in the Swagger UI.
     *
     * @return the configured Docket bean
     */
    @Bean("JeeSpring云接口")
    public Docket createJeeSpringRestApi() {
        return new Docket(DocumentationType.SWAGGER_2)
                .groupName("JeeSpring云接口")
                .apiInfo(apiInfo())
                .select()
                .apis(RequestHandlerSelectors.withMethodAnnotation(ApiOperation.class)) // expose only handlers carrying the @ApiOperation annotation
                //.apis(RequestHandlerSelectors.basePackage("com.jeespring.modules")) // alternative: select handlers by package scan
                //.apis(RequestHandlerSelectors.withClassAnnotation(Api.class))
                //.paths(PathSelectors.regex("/rest/.*"))
                //.paths(PathSelectors.any())
                .build();
    }

    /** Static metadata (title, description, contact, version) shown on the Swagger page. */
    private ApiInfo apiInfo() {
        return new ApiInfoBuilder()
                .title("Swagger2构建RESTful APIs")
                .description("更多JeeSpring相关文章")
                .termsOfServiceUrl("http://www.jeespring.com/")
                .contact("contact")
                .version("1.0")
                .build();
    }

    /* Annotation usage examples (kept for reference):
    @ApiOperation(value="创建用户", notes="根据User对象创建用户")
    @ApiImplicitParam(name = "user", value = "用户详细实体user", required = true, dataType = "User")

    @ApiOperation("生成代码")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "moduleName", value = "模块名称", required = true, dataType = "String"),
        @ApiImplicitParam(name = "bizChName", value = "业务名称", required = true, dataType = "String"),
        @ApiImplicitParam(name = "bizEnName", value = "业务英文名称", required = true, dataType = "String"),
        @ApiImplicitParam(name = "path", value = "项目生成类路径", required = true, dataType = "String")
    })
    */
}
package com.jbm.framework.usage.form;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.ObjectUtil;
import com.jbm.util.ObjectUtils;
import io.swagger.annotations.ApiModel;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Request form that carries a single entity plus the dynamic properties
 * inherited from {@link BaseRequestForm}.
 *
 * @program: JBM6
 * @author: wesley.zhang
 * @create: 2020-02-19 21:28
 **/
@Data
@NoArgsConstructor
@ApiModel(value = "实体请求表单")
public class EntityRequsetForm<Entity> extends BaseRequestForm {

    private static final long serialVersionUID = 1L;

    private Entity entity;

    /**
     * Resolves the entity: the explicitly-set field wins; otherwise the value
     * is looked up as a bean property named after the entity's simple class name.
     *
     * @param clazz entity class used to derive the fallback property name
     * @return the entity, or null when neither source provides one
     */
    public Entity tryGet(Class<Entity> clazz) {
        return ObjectUtil.isNotEmpty(entity)
                ? entity
                : BeanUtil.getProperty(this, clazz.getSimpleName());
    }

    /**
     * Resolves a list of entities from the bean property named after the
     * entity's simple class name with an "s" suffix (naive pluralization).
     *
     * @param clazz entity class used to derive the property name
     * @return the list property, or null when absent
     */
    public List<Entity> tryGetList(Class<Entity> clazz) {
        String listProperty = clazz.getSimpleName() + "s";
        return BeanUtil.getProperty(this, listProperty);
    }
}
package cn.ucai.superkache.activity; import android.app.ProgressDialog; import android.content.Intent; import android.os.Bundle; import android.text.TextUtils; import android.view.View; import android.widget.EditText; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import com.easemob.EMError; import com.easemob.chat.EMGroup; import com.easemob.chat.EMGroupManager; import com.easemob.exceptions.EaseMobException; public class PublicGroupsSeachActivity extends BaseActivity{ private RelativeLayout containerLayout; private EditText idET; private TextView nameText; public static EMGroup searchedGroup; @Override protected void onCreate(Bundle arg0) { super.onCreate(arg0); setContentView(cn.ucai.superkache.R.layout.activity_public_groups_search); containerLayout = (RelativeLayout) findViewById(cn.ucai.superkache.R.id.rl_searched_group); idET = (EditText) findViewById(cn.ucai.superkache.R.id.et_search_id); nameText = (TextView) findViewById(cn.ucai.superkache.R.id.name); searchedGroup = null; } /** * 搜索 * @param v */ public void searchGroup(View v){ if(TextUtils.isEmpty(idET.getText())){ return; } final ProgressDialog pd = new ProgressDialog(this); pd.setMessage(getResources().getString(cn.ucai.superkache.R.string.searching)); pd.setCancelable(false); pd.show(); new Thread(new Runnable() { public void run() { try { searchedGroup = EMGroupManager.getInstance().getGroupFromServer(idET.getText().toString()); runOnUiThread(new Runnable() { public void run() { pd.dismiss(); containerLayout.setVisibility(View.VISIBLE); nameText.setText(searchedGroup.getGroupName()); } }); } catch (final EaseMobException e) { e.printStackTrace(); runOnUiThread(new Runnable() { public void run() { pd.dismiss(); searchedGroup = null; containerLayout.setVisibility(View.GONE); if(e.getErrorCode() == EMError.GROUP_NOT_EXIST){ Toast.makeText(getApplicationContext(), getResources().getString(cn.ucai.superkache.R.string.group_not_existed), 0).show(); 
}else{ Toast.makeText(getApplicationContext(), getResources().getString(cn.ucai.superkache.R.string.group_search_failed) + " : " + getString(cn.ucai.superkache.R.string.connect_failuer_toast), 0).show(); } } }); } } }).start(); } /** * 点击搜索到的群组进入群组信息页面 * @param view */ public void enterToDetails(View view){ startActivity(new Intent(this, GroupSimpleDetailActivity.class)); } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2018.03.22 at 01:48:05 PM EET // package com.dnb.services.industry; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlEnumValue; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for SeverityType. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="SeverityType"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;maxLength value="16"/> * &lt;enumeration value="Error"/> * &lt;enumeration value="Fatal"/> * &lt;enumeration value="Information"/> * &lt;enumeration value="Warning"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "SeverityType") @XmlEnum public enum SeverityType { @XmlEnumValue("Error") ERROR("Error"), @XmlEnumValue("Fatal") FATAL("Fatal"), @XmlEnumValue("Information") INFORMATION("Information"), @XmlEnumValue("Warning") WARNING("Warning"); private final String value; SeverityType(String v) { value = v; } public String value() { return value; } public static SeverityType fromValue(String v) { for (SeverityType c: SeverityType.values()) { if (c.value.equals(v)) { return c; } } throw new IllegalArgumentException(v); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.visualizers;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.jmeter.samplers.SampleResult;

/**
 * Provides storage of samples in addition to calculations
 */
public class CachingStatCalculator extends SamplingStatCalculator {

    // Synchronized wrapper: individual calls are thread-safe, but iteration
    // over this list must be synchronized manually by callers (per the
    // Collections.synchronizedList contract).
    private final List<Sample> storedValues = Collections.synchronizedList(new ArrayList<Sample>());

    public CachingStatCalculator(String string) {
        super(string);
    }

    /**
     * Returns the live internal list of recorded samples.
     * Callers must synchronize on the returned list while iterating.
     */
    public List<Sample> getSamples() {
        return storedValues;
    }

    /**
     * Returns the sample at {@code index}, or {@code null} when the index is
     * out of range. The size check and get are done under the list's lock so
     * a concurrent clear() cannot slip between them.
     */
    public Sample getSample(int index) {
        synchronized( storedValues ){
            if (index < storedValues.size()) {
                return storedValues.get(index);
            }
        }
        return null;
    }

    /** Resets the inherited statistics and drops all stored samples. */
    @Override
    public void clear() {
        super.clear();
        storedValues.clear();
    }
    /**
     * Records a sample.
     *
     */
    @Override
    public Sample addSample(SampleResult res) {
        // Delegate the statistics update, then cache the resulting Sample.
        final Sample sample = super.addSample(res);
        storedValues.add(sample);
        return sample;
    }
}
package geeksForgeeks; import java.util.HashSet; import java.util.Set; public class RemoveLoopInLinkedList { public static void removeLoop(Node head){ // code here // remove the loop without losing any nodes Set<Integer> set = new HashSet<>(); Node runNode = head; while(runNode.next != null) { if(set.contains(runNode.next.data)) { runNode.next = null; break; } set.add(runNode.data); runNode = runNode.next; } } public static void main(String... args) { RemoveLoopInLinkedList obj = new RemoveLoopInLinkedList(); Node head = obj.prepareData(); //36 24 25 30 71 3 61 22 10 67 73 22 43 41 78 24 42 2 70 71 66 37 48 38 75 51 27 45 58 15 49 46 39 73 27 61 27 7 35 37 73 59 10 67 51 8 11 12 41 32 34 58 68 33 47 15 36 73 11 13 40 11 10 30 35 36 42 61 74 28 49 19 7 11 5 57 18 47 21 10 78 6 20 18 39 66 64 74 59 74 38 50 4 48 79 38 35 72 50 61 20 19 79 26 29 35 34 78 2 54 39 31 60 58 80 50 44 63 75 54 8 32 23 43 31 53 32 18 76 2 78 15 52 28 72 32 62 58 61 15 31 19 46 42 77 77 43 72 60 37 45 19 21 19 62 51 23 13 20 18 46 17 65 17 44 8 80 58 65 6 removeLoop(head); } public Node prepareData(){ Node head = new Node(1); head.next = new Node(4); head.next.next = new Node(3); head.next.next.next = head.next; return head; } public Node prepareData(int[] arr){ Node head = new Node(1); head.next = new Node(4); head.next.next = new Node(3); head.next.next.next = head.next; return head; } class Node { int data; Node next; public Node(int d) { this.data = d; } } }
package com.weique.overhaul.v2.mvp.contract;

import android.app.Activity;

import com.jess.arms.mvp.IModel;
import com.jess.arms.mvp.IView;
import com.weique.overhaul.v2.mvp.model.entity.BaseBean;
import com.weique.overhaul.v2.mvp.model.entity.ContradictionAddBean;
import com.weique.overhaul.v2.mvp.model.entity.ContradictionRecordBean;
import com.weique.overhaul.v2.mvp.model.entity.UploadFileRsponseBean;

import java.util.List;

import io.reactivex.Observable;

import okhttp3.MultipartBody;

/**
 * ================================================
 * MVP contract for the "add contradiction record" screen: pairs the view
 * callbacks with the model's remote operations.
 * <p>
 * Created by MVPArmsTemplate on 01/03/2020 10:49
 * <a href="mailto:jess.yan.effort@gmail.com">Contact me</a>
 * <a href="https://github.com/JessYanCoding">Follow me</a>
 * <a href="https://github.com/JessYanCoding/MVPArms">Star me</a>
 * <a href="https://github.com/JessYanCoding/MVPArms/wiki">See me</a>
 * <a href="https://github.com/JessYanCoding/MVPArmsTemplate">keep the template up to date</a>
 * ================================================
 */
public interface ContradictionAddContract {

    interface View extends IView {
        /** Called after a contradiction has been submitted; {@code s} is the server response text. */
        void setContradictionData(String s);

        /** Delivers a fetched contradiction record for display. */
        void getContradictionRecord(ContradictionRecordBean contradictionRecordBean);

        /** Result callback for a delete request (server message). */
        void isContradictionRecordDelete(String contradictionRecordBean);

        /** Result callback for an invalidate request (server message). */
        void isInvalidContradictionRecord(String contradictionRecordBean);

        /** Result callback for resetting a staged record (server message). */
        void stagingReset(String contradictionRecordBean);

        /** Activity context needed by the presenter for UI-bound operations. */
        Activity getContext();

        /** Opens the photo picker allowing at most {@code max} selections. */
        void goToPhotoAlbum(int max);

        /** Delivers the upload results for the selected pictures. */
        void updatePicture(List<UploadFileRsponseBean> uploadFileRsponseBeans);
    }

    interface Model extends IModel {
        /** Submits a new contradiction record. */
        Observable<BaseBean<String>> setContradiction(ContradictionAddBean contradictionAddBean);

        /** Fetches a single contradiction record by id. */
        Observable<BaseBean<ContradictionRecordBean>> getContradictionRecord(String id);

        // NOTE(review): method names below break lowerCamelCase, but they are part
        // of the contract implemented elsewhere and cannot be renamed here.
        /** Deletes the record identified by {@code resetId}. */
        Observable<BaseBean<String>> DeleteContradictionRecord(String resetId);

        /** Marks the record identified by {@code resetId} as invalid. */
        Observable<BaseBean<String>> InvalidContradictionRecord(String resetId);

        /** Re-submits a staged (draft) contradiction record. */
        Observable<BaseBean<String>> setResetContradiction(ContradictionAddBean stagingBean);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.opentracing.decorators;

/**
 * Span decorator for the secure "cometds" Camel component.
 *
 * <p>All decoration behavior is inherited from {@link CometdSpanDecorator};
 * only the component name differs.</p>
 */
public class CometdsSpanDecorator extends CometdSpanDecorator {

    @Override
    public String getComponent() {
        return "cometds";
    }

}
package com.java110.things.adapt.attendance;

import com.java110.things.entity.attendance.ClockInDto;
import com.java110.things.entity.attendance.ClockInResultDto;
import com.java110.things.entity.machine.MachineCmdDto;
import com.java110.things.entity.machine.MachineDto;

import java.util.List;

/**
 * @ClassName ICallAttendanceService
 * @Description Callback contract between attendance devices and the platform:
 *              device lookup/registration, command exchange, and clock-in reporting.
 * @Author wuxw
 * @Date 2020/5/27 15:40
 * @Version 1.0
 * add by wuxw 2020/5/27
 **/
public interface ICallAttendanceService {

    /**
     * Looks up device information.
     * @param machineDto query condition — usually only machineCode (the device code) is set
     * @return the device information, or null when nothing matches
     */
    MachineDto getMachine(MachineDto machineDto);

    /**
     * Device registration upload. When a device comes online it should report
     * itself so the local management page and the cloud can auto-register it.
     * @param machineDto the device to register
     */
    void uploadMachine(MachineDto machineDto);

    /**
     * Queries pending commands for a device.
     * @param machineCmdDto device/command filter
     * @return the commands destined for the device
     */
    List<MachineCmdDto> getMachineCmds(MachineCmdDto machineCmdDto) throws Exception;

    /** Persists (saves) a device command. */
    public void saveMachineCmd(MachineCmdDto machineCmdDto) throws Exception;

    /**
     * Inserts a new device command.
     * @param machineCmdDto the command to insert
     * @throws Exception on persistence failure
     */
    void insertMachineCmd(MachineCmdDto machineCmdDto) throws Exception;

    /**
     * Clock-in entry point: records an attendance punch.
     * @param clockInDto the punch data reported by the device
     * @return the processing result for the punch
     */
    ClockInResultDto clockIn(ClockInDto clockInDto) throws Exception;
}
package com.gayelak.gayelakandroid; import android.content.Context; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.util.TypedValue; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.GridView; import android.widget.ImageView; import com.bumptech.glide.Glide; import com.firebase.ui.storage.images.FirebaseImageLoader; import com.google.firebase.storage.FirebaseStorage; import com.google.firebase.storage.StorageReference; import java.util.ArrayList; /** * Created by radibarq on 1/22/18. */ public class SoldImageAdapter extends BaseAdapter { private Context mContext; private LayoutInflater mInflater; private double screenHeight; private double screenWidth; private ArrayList<String> itemsKeys; public SoldImageAdapter(Context c, ArrayList<String> items, double screenHeight, double screenWidth) { this.itemsKeys = items; mInflater = (LayoutInflater) c.getSystemService(Context.LAYOUT_INFLATER_SERVICE); mContext = c; this.screenHeight = screenHeight; this.screenWidth = screenWidth; } @Override public int getCount() { return itemsKeys.size(); } @Override public Object getItem(int position) { return null; } @Override public long getItemId(int position) { return 0; } @Override public View getView(int position, View convertView, ViewGroup parent) { View view; view = mInflater.inflate(R.layout.layout_browsing_image_adapter, parent, false); view.setLayoutParams(new GridView.LayoutParams(GridView.AUTO_FIT, (int) (screenHeight / 2.5))); ImageView imageView = view.findViewById(R.id.imageView); imageView.setBackgroundColor(Color.parseColor("#eef0f5")); StorageReference imageStorageRef = FirebaseStorage.getInstance().getReference().child("Items_Photos").child(itemsKeys.get(position)).child("1.jpeg"); Glide.with(mContext) .using(new FirebaseImageLoader()) 
.load(imageStorageRef).animate(android.R.anim.fade_in) .into(imageView); return view; } }
package com.cy.http;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() {
        // Template sanity check: verifies the local JVM test harness runs at all.
        assertEquals(4, 2 + 2);
    }
}
package com.nextbreakpoint.blueprint.designs.controllers;

import com.nextbreakpoint.blueprint.common.core.*;
import com.nextbreakpoint.blueprint.common.events.TileRenderCompleted;
import com.nextbreakpoint.blueprint.common.events.TileRenderRequested;
import com.nextbreakpoint.blueprint.designs.aggregate.DesignAggregate;
import com.nextbreakpoint.blueprint.designs.common.Render;
import com.nextbreakpoint.blueprint.designs.model.Design;
import lombok.extern.log4j.Log4j2;
import rx.Observable;
import rx.Single;

import java.util.Objects;
import java.util.UUID;

/**
 * Handles {@link TileRenderCompleted} messages: forwards the completed tile to
 * the buffer emitter and, unless the event is stale (its commandId no longer
 * matches the design's), emits follow-up {@link TileRenderRequested} events for
 * the tiles produced by {@link Render#generateTiles}.
 */
@Log4j2
public class TileRenderCompletedController implements Controller<InputMessage, Void> {
    private final Mapper<InputMessage, TileRenderCompleted> inputMapper;
    private final MessageMapper<TileRenderCompleted, OutputMessage> bufferOutputMapper;
    private final MessageMapper<TileRenderRequested, OutputMessage> renderOutputMapper;
    private final MessageEmitter bufferEmitter;
    private final MessageEmitter renderEmitter;
    private final DesignAggregate aggregate;

    public TileRenderCompletedController(
            DesignAggregate aggregate,
            Mapper<InputMessage, TileRenderCompleted> inputMapper,
            MessageMapper<TileRenderCompleted, OutputMessage> bufferOutputMapper,
            MessageMapper<TileRenderRequested, OutputMessage> renderOutputMapper,
            MessageEmitter bufferEmitter,
            MessageEmitter renderEmitter
    ) {
        this.aggregate = Objects.requireNonNull(aggregate);
        this.inputMapper = Objects.requireNonNull(inputMapper);
        this.bufferOutputMapper = Objects.requireNonNull(bufferOutputMapper);
        this.renderOutputMapper = Objects.requireNonNull(renderOutputMapper);
        this.bufferEmitter = Objects.requireNonNull(bufferEmitter);
        this.renderEmitter = Objects.requireNonNull(renderEmitter);
    }

    /**
     * Transforms the raw message into a {@link TileRenderCompleted} event and
     * processes it; completes with {@code null} once all emissions are done.
     */
    @Override
    public Single<Void> onNext(InputMessage message) {
        return Single.just(message)
                .map(inputMapper::transform)
                .flatMapObservable(event -> onUpdateRequested(event, message.getToken()))
                .ignoreElements()
                .toCompletable()
                .toSingleDefault("")
                .map(value -> null);
    }

    /** Loads the design referenced by the event and fans out the resulting messages. */
    private Observable<Void> onUpdateRequested(TileRenderCompleted event, String revision) {
        return findDesign(event.getDesignId()).flatMap(design -> sendEvents(event, design, revision));
    }

    /** Emits the design when found, or completes empty (the event is then dropped). */
    private Observable<Design> findDesign(UUID designId) {
        return aggregate.findDesign(designId)
                .flatMapObservable(result -> result.map(Observable::just).orElseGet(Observable::empty));
    }

    /** Buffer notification first, then any follow-up render requests. */
    private Observable<? extends Void> sendEvents(TileRenderCompleted event, Design design, String revision) {
        return sendTileCompletedEvent(event).concatWith(sendRenderEvents(event, design, revision));
    }

    private Observable<Void> sendTileCompletedEvent(TileRenderCompleted event) {
        return Observable.just(event)
                .map(bufferOutputMapper::transform)
                .flatMapSingle(bufferEmitter::send);
    }

    private Observable<Void> sendRenderEvents(TileRenderCompleted event, Design design, String revision) {
        return createRenderEvents(event, design, revision).flatMapSingle(this::sendRenderEvent);
    }

    /** Routes the request to the level-specific "-requested" topic. */
    private Single<Void> sendRenderEvent(TileRenderRequested event) {
        return renderEmitter.send(renderOutputMapper.transform(event), Render.getTopicName(renderEmitter.getTopicName() + "-requested", event.getLevel()));
    }

    private Observable<TileRenderRequested> createRenderEvents(TileRenderCompleted event, Design design, String revision) {
        return isLateEvent(event, design) ? Observable.empty() : generateRenderEvents(event, design, revision);
    }

    /**
     * An event is "late" when its commandId no longer matches the design's
     * current commandId (a newer command superseded it); such events are dropped.
     */
    private boolean isLateEvent(TileRenderCompleted event, Design design) {
        final boolean value = !event.getCommandId().equals(design.getCommandId());
        if (value) {
            log.debug("Discard late event {}", event);
        }
        return value;
    }

    private Observable<TileRenderRequested> generateRenderEvents(TileRenderCompleted event, Design design, String revision) {
        final TilesBitmap bitmap = TilesBitmap.of(design.getBitmap());
        return Observable.from(Render.generateTiles(createTile(event), design.getLevels(), bitmap))
                .map(tile -> createRenderEvent(design, tile, revision));
    }

    // Renamed from the original typo "creteTile" (private helper, safe to rename).
    private Tile createTile(TileRenderCompleted event) {
        return Tile.builder()
                .withLevel(event.getLevel())
                .withRow(event.getRow())
                .withCol(event.getCol())
                .build();
    }

    private TileRenderRequested createRenderEvent(Design design, Tile tile, String revision) {
        return TileRenderRequested.builder()
                .withDesignId(design.getDesignId())
                .withCommandId(design.getCommandId())
                .withRevision(revision)
                .withData(design.getData())
                .withChecksum(design.getChecksum())
                .withLevel(tile.getLevel())
                .withRow(tile.getRow())
                .withCol(tile.getCol())
                .build();
    }
}
package com.springgithub.springgithub.model.StackOverflow;

/**
 * Data-transfer object for one entry of a StackOverflow (StackExchange API)
 * user response. Field names deliberately keep the API's snake_case spelling
 * so that (de)serialization by property name maps them without extra
 * configuration — presumably via Jackson; do not rename them.
 */
public class Items {

    // Raw badge structure; its shape is not modelled here, hence Object.
    private Object badge_counts;
    private int account_id;
    private boolean is_employee;
    private long last_modified_date;
    private long last_access_date;
    private int reputation_change_year;
    private int reputation_change_quarter;
    private int reputation_change_month;
    private int reputation_change_week;
    private int reputation_change_day;
    private int reputation;
    private long creation_date;
    private String user_type;
    private long user_id;
    private String link;
    private String profile_image;
    private String display_name;

    /** @return the raw badge-counts payload */
    public Object getBadge_counts() {
        return this.badge_counts;
    }

    public void setBadge_counts(Object badge_counts) {
        this.badge_counts = badge_counts;
    }

    /** @return the network-wide account id */
    public int getAccount_id() {
        return this.account_id;
    }

    public void setAccount_id(int account_id) {
        this.account_id = account_id;
    }

    /** @return whether the user is a StackOverflow employee */
    public boolean isIs_employee() {
        return this.is_employee;
    }

    public void setIs_employee(boolean is_employee) {
        this.is_employee = is_employee;
    }

    public long getLast_modified_date() {
        return this.last_modified_date;
    }

    public void setLast_modified_date(long last_modified_date) {
        this.last_modified_date = last_modified_date;
    }

    public long getLast_access_date() {
        return this.last_access_date;
    }

    public void setLast_access_date(long last_access_date) {
        this.last_access_date = last_access_date;
    }

    public int getReputation_change_year() {
        return this.reputation_change_year;
    }

    public void setReputation_change_year(int reputation_change_year) {
        this.reputation_change_year = reputation_change_year;
    }

    public int getReputation_change_quarter() {
        return this.reputation_change_quarter;
    }

    public void setReputation_change_quarter(int reputation_change_quarter) {
        this.reputation_change_quarter = reputation_change_quarter;
    }

    public int getReputation_change_month() {
        return this.reputation_change_month;
    }

    public void setReputation_change_month(int reputation_change_month) {
        this.reputation_change_month = reputation_change_month;
    }

    public int getReputation_change_week() {
        return this.reputation_change_week;
    }

    public void setReputation_change_week(int reputation_change_week) {
        this.reputation_change_week = reputation_change_week;
    }

    public int getReputation_change_day() {
        return this.reputation_change_day;
    }

    public void setReputation_change_day(int reputation_change_day) {
        this.reputation_change_day = reputation_change_day;
    }

    public int getReputation() {
        return this.reputation;
    }

    public void setReputation(int reputation) {
        this.reputation = reputation;
    }

    public long getCreation_date() {
        return this.creation_date;
    }

    public void setCreation_date(long creation_date) {
        this.creation_date = creation_date;
    }

    public String getUser_type() {
        return this.user_type;
    }

    public void setUser_type(String user_type) {
        this.user_type = user_type;
    }

    public long getUser_id() {
        return this.user_id;
    }

    public void setUser_id(long user_id) {
        this.user_id = user_id;
    }

    public String getLink() {
        return this.link;
    }

    public void setLink(String link) {
        this.link = link;
    }

    public String getProfile_image() {
        return this.profile_image;
    }

    public void setProfile_image(String profile_image) {
        this.profile_image = profile_image;
    }

    public String getDisplay_name() {
        return this.display_name;
    }

    public void setDisplay_name(String display_name) {
        this.display_name = display_name;
    }
}
package com.dna.everythingisbad.entity;

import com.dna.everythingisbad.ai.EntityAIPoliceShootGun;
import com.dna.everythingisbad.ai.EntityPoliceBreakDoor;
import com.dna.everythingisbad.init.ModItems;
import com.dna.everythingisbad.init.ModSoundEvents;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.IEntityLivingData;
import net.minecraft.entity.IRangedAttackMob;
import net.minecraft.entity.ai.*;
import net.minecraft.entity.monster.AbstractSkeleton;
import net.minecraft.entity.monster.EntityIronGolem;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntityZombie;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.init.Items;
import net.minecraft.init.SoundEvents;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.DataSerializers;
import net.minecraft.network.datasync.EntityDataManager;
import net.minecraft.pathfinding.PathNavigateGround;
import net.minecraft.util.DamageSource;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.DifficultyInstance;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.Mod;

import javax.annotation.Nullable;
import java.util.Random;

/**
 * A skeleton-based "police officer" mob that shoots bullets (ranged attack),
 * wears a cowboy hat, carries a police gun, and can break doors.
 * Natural spawning is disabled ({@link #getCanSpawnHere()}); instances are
 * expected to be spawned explicitly elsewhere in the mod.
 */
public class EntityPoliceOfficer extends EntitySkeleton implements IRangedAttackMob {

    private boolean isBreakDoorsTaskSet;
    private final EntityPoliceBreakDoor breakDoor = new EntityPoliceBreakDoor(this);
    // NOTE(review): a single static Random shared by all instances; fine for
    // sound selection, but consider this.getRNG() for per-entity randomness.
    static Random random = new Random();

    public EntityPoliceOfficer(World worldIn) {
        super(worldIn);
        // NOTE(review): setCombatTask() is overridable and is called from the
        // constructor (and again from onInitialSpawn), so the shoot-gun AI task
        // may be registered more than once — confirm whether duplicates matter.
        this.setCombatTask();
    }

    /**
     * Picks one of the first five police ambient clips at random.
     * Assumes SOUND_EVENT_POLICE_AMBIENT has at least 5 entries — TODO confirm.
     */
    @Override
    public SoundEvent getAmbientSound() {
        int soundIndex = random.nextInt(5);
        return ModSoundEvents.SOUND_EVENT_POLICE_AMBIENT[soundIndex];
    }

    /**
     * Hurt sound is drawn from the same ambient pool.
     * NOTE(review): reuses SOUND_EVENT_POLICE_AMBIENT rather than a dedicated
     * hurt-sound array — verify this is intentional.
     */
    @Override
    public SoundEvent getHurtSound(DamageSource damageSourceIn) {
        int soundIndex = random.nextInt(5);
        return ModSoundEvents.SOUND_EVENT_POLICE_AMBIENT[soundIndex];
    }

    /** Footstep sound borrowed from the zombie. */
    protected SoundEvent getStepSound() {
        return SoundEvents.ENTITY_ZOMBIE_STEP;
    }

    /**
     * Equips the officer on first spawn: cowboy hat + police gun, with a 0%
     * chance of dropping the gun on death, then (re)installs the combat and
     * door-breaking AI.
     */
    @Nullable
    @Override
    public IEntityLivingData onInitialSpawn(DifficultyInstance difficulty, @Nullable IEntityLivingData livingdata) {
        livingdata = super.onInitialSpawn(difficulty, livingdata);
        // Gives the police their gear
        this.setItemStackToSlot(EntityEquipmentSlot.HEAD, new ItemStack(ModItems.COWBOY_HAT_ITEM));
        this.setItemStackToSlot(EntityEquipmentSlot.MAINHAND, new ItemStack(ModItems.POLICE_GUN_ITEM));
        // Prevents the police from dropping their gun upon death
        this.inventoryHandsDropChances[EntityEquipmentSlot.MAINHAND.getIndex()] = 0;
        this.setCombatTask();
        this.setBreakDoorsAItask(true);
        return livingdata;
    }

    /** Natural world-gen spawning is disabled for this entity. */
    @Override
    public boolean getCanSpawnHere() {
        return false;
    }

    /**
     * Fires a bullet at the target, compensating the vertical aim for distance
     * (same style as the vanilla skeleton's arrow trajectory — presumed) and
     * scaling inaccuracy down on higher difficulties.
     */
    @Override
    public void attackEntityWithRangedAttack(EntityLivingBase target, float distanceFactor) {
        EntityBullet bullet = new EntityBullet(this.world, this);
        double dx = target.posX - this.posX;
        double dy = target.getEntityBoundingBox().minY + (double) (target.height / 3.0F) - bullet.posY;
        double dz = target.posZ - this.posZ;
        double horizontalDist = (double) MathHelper.sqrt(dx * dx + dz * dz);
        bullet.shoot(dx, dy + horizontalDist * 0.20000000298023224D, dz, 1.6F,
                (float) (14 - this.world.getDifficulty().getDifficultyId() * 4));
        this.playSound(SoundEvents.ENTITY_GENERIC_EXPLODE, 1.0F, 1.0F / (this.getRNG().nextFloat() * 0.4F + 0.8F));
        this.world.spawnEntity(bullet);
    }

    /** Adds the gun-shooting AI (priority 4) on top of the skeleton's combat task. */
    @Override
    public void setCombatTask() {
        super.setCombatTask();
        EntityAIPoliceShootGun shootGunTask = new EntityAIPoliceShootGun(this, 1.0D, 20, 15.0F);
        this.tasks.addTask(4, shootGunTask);
    }

    /**
     * Sets or removes EntityAIBreakDoor task
     */
    public void setBreakDoorsAItask(boolean enabled) {
        if (this.isBreakDoorsTaskSet != enabled) {
            this.isBreakDoorsTaskSet = enabled;
            ((PathNavigateGround) this.getNavigator()).setBreakDoors(enabled);
            if (enabled) {
                this.tasks.addTask(3, this.breakDoor);
            } else {
                this.tasks.removeTask(this.breakDoor);
            }
        }
    }

    public boolean isBreakDoorsTaskSet() {
        return this.isBreakDoorsTaskSet;
    }

    @Override
    protected void initEntityAI() {
        this.tasks.addTask(1, new EntityAISwimming(this));
        this.tasks.addTask(2, new EntityAIRestrictSun(this));
        this.tasks.addTask(5, new EntityAIWanderAvoidWater(this, 1.0D));
        this.tasks.addTask(6, new EntityAIWatchClosest(this, EntityPlayer.class, 8.0F));
        this.tasks.addTask(6, new EntityAILookIdle(this));
        this.targetTasks.addTask(1, new EntityAIHurtByTarget(this, false, new Class[0]));
        this.targetTasks.addTask(3, new EntityAINearestAttackableTarget(this, EntityPlayer.class, false));
    }

    /**
     * Persists entity state to NBT.
     * FIX: previously wrote a hard-coded {@code true} for "CanBreakDoors";
     * now writes the actual door-breaking flag so saved state reflects reality.
     */
    public void writeEntityToNBT(NBTTagCompound compound) {
        super.writeEntityToNBT(compound);
        compound.setBoolean("CanBreakDoors", this.isBreakDoorsTaskSet);
    }

    /** No loot table: drops are governed by equipment drop chances only. */
    @Nullable
    @Override
    protected ResourceLocation getLootTable() {
        return null;
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.social.gitlab;

import com.fasterxml.jackson.databind.JsonNode;
import org.keycloak.broker.oidc.OIDCIdentityProvider;
import org.keycloak.broker.oidc.OIDCIdentityProviderConfig;
import org.keycloak.broker.oidc.mappers.AbstractJsonUserAttributeMapper;
import org.keycloak.broker.provider.BrokeredIdentityContext;
import org.keycloak.broker.provider.util.SimpleHttp;
import org.keycloak.broker.social.SocialIdentityProvider;
import org.keycloak.events.EventBuilder;
import org.keycloak.models.KeycloakSession;
import org.keycloak.representations.AccessTokenResponse;
import org.keycloak.representations.IDToken;
import org.keycloak.representations.JsonWebToken;

import javax.ws.rs.core.MultivaluedMap;
import java.io.IOException;

/**
 * GitLab social identity provider built on the generic OIDC provider: it pins
 * the GitLab.com OAuth/userinfo endpoints and prepends the "api" scope when the
 * configured default scope is exactly "openid".
 *
 * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
 */
public class GitLabIdentityProvider extends OIDCIdentityProvider implements SocialIdentityProvider<OIDCIdentityProviderConfig> {

    public static final String AUTH_URL = "https://gitlab.com/oauth/authorize";
    public static final String TOKEN_URL = "https://gitlab.com/oauth/token";
    public static final String USER_INFO = "https://gitlab.com/api/v4/user";
    public static final String API_SCOPE = "api";

    public GitLabIdentityProvider(KeycloakSession session, OIDCIdentityProviderConfig config) {
        super(session, config);
        // Endpoints are fixed to gitlab.com regardless of admin configuration.
        config.setAuthorizationUrl(AUTH_URL);
        config.setTokenUrl(TOKEN_URL);
        config.setUserInfoUrl(USER_INFO);
        // NOTE(review): getDefaultScope() is assumed non-null here — equals()
        // would NPE otherwise; confirm the config always supplies a default.
        String defaultScope = config.getDefaultScope();
        // Only when the scope is exactly "openid" is the GitLab "api" scope
        // prepended; any custom scope list is left untouched.
        if (defaultScope.equals(SCOPE_OPENID)) {
            config.setDefaultScope((API_SCOPE + " " + defaultScope).trim());
        }
    }

    /**
     * Builds the brokered identity from the ID token claims (sub, name,
     * nickname, email). When the "api" scope is configured and any of those
     * claims is missing, the GitLab userinfo endpoint is queried with the
     * access token to fill the gaps; the raw userinfo JSON is also stored for
     * attribute mappers. Username falls back to email, then to the subject id.
     * NOTE(review): presumably overrides a hook of OIDCIdentityProvider —
     * consider adding @Override after confirming the parent signature.
     */
    protected BrokeredIdentityContext extractIdentity(AccessTokenResponse tokenResponse, String accessToken, JsonWebToken idToken) throws IOException {
        String id = idToken.getSubject();
        BrokeredIdentityContext identity = new BrokeredIdentityContext(id);

        String name = (String) idToken.getOtherClaims().get(IDToken.NAME);
        String preferredUsername = (String) idToken.getOtherClaims().get(IDToken.NICKNAME);
        String email = (String) idToken.getOtherClaims().get(IDToken.EMAIL);

        if (getConfig().getDefaultScope().contains(API_SCOPE)) {
            String userInfoUrl = getUserInfoUrl();
            // Only hit the userinfo endpoint when something is actually missing.
            if (userInfoUrl != null && !userInfoUrl.isEmpty() && (id == null || name == null || preferredUsername == null || email == null)) {
                JsonNode userInfo = SimpleHttp.doGet(userInfoUrl, session)
                        .header("Authorization", "Bearer " + accessToken).asJson();

                name = getJsonProperty(userInfo, "name");
                preferredUsername = getJsonProperty(userInfo, "username");
                email = getJsonProperty(userInfo, "email");
                // Keep the raw profile JSON available to configured attribute mappers.
                AbstractJsonUserAttributeMapper.storeUserProfileForMapper(identity, userInfo, getConfig().getAlias());
            }
        }

        // Stash raw token material in the context for downstream processing.
        identity.getContextData().put(FEDERATED_ACCESS_TOKEN_RESPONSE, tokenResponse);
        identity.getContextData().put(VALIDATED_ID_TOKEN, idToken);
        processAccessTokenResponse(identity, tokenResponse);

        identity.setId(id);
        identity.setName(name);
        identity.setEmail(email);
        // Broker ids are namespaced by the provider alias.
        identity.setBrokerUserId(getConfig().getAlias() + "." + id);
        if (tokenResponse.getSessionState() != null) {
            identity.setBrokerSessionId(getConfig().getAlias() + "." + tokenResponse.getSessionState());
        }

        // Username fallback chain: nickname claim -> email -> subject id.
        if (preferredUsername == null) {
            preferredUsername = email;
        }
        if (preferredUsername == null) {
            preferredUsername = id;
        }
        identity.setUsername(preferredUsername);

        return identity;
    }

    /** Token exchange from an external token is not supported for GitLab. */
    @Override
    public BrokeredIdentityContext exchangeExternal(EventBuilder event, MultivaluedMap<String, String> params) {
        return null;
    }
}
package org.tron.common.runtime;

import static org.apache.commons.collections4.CollectionUtils.isEmpty;
import static org.apache.commons.collections4.CollectionUtils.size;
import static org.tron.common.utils.ByteUtil.EMPTY_BYTE_ARRAY;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import lombok.Getter;
import lombok.Setter;
import org.spongycastle.util.encoders.Hex;
import org.tron.common.logsfilter.trigger.ContractTrigger;
import org.tron.common.runtime.vm.DataWord;
import org.tron.common.runtime.vm.LogInfo;
import org.tron.common.utils.ByteArraySet;
import org.tron.core.capsule.TransactionResultCapsule;
import org.tron.protos.Protocol.Transaction.Result.contractResult;

/**
 * Accumulator for everything a single VM run produces: energy ("kandy") usage
 * and refunds, the return buffer, deleted accounts/votes/delegations, touched
 * accounts, logs, internal transactions, triggers, and the outcome
 * (exception / revert / result code). Collections are lazily initialised by
 * their getters.
 */
public class ProgramResult {

  private long kandyUsed = 0;
  private long futureRefund = 0;

  private byte[] hReturn = EMPTY_BYTE_ARRAY;
  private byte[] contractAddress = EMPTY_BYTE_ARRAY;
  private RuntimeException exception;
  private boolean revert;

  private Set<DataWord> deleteAccounts;
  private Set<DataWord> deleteVotes;
  private Set<DataWord> deleteDelegation;
  private ByteArraySet touchedAccounts = new ByteArraySet();
  private List<InternalTransaction> internalTransactions;
  private List<LogInfo> logInfoList;
  private TransactionResultCapsule ret = new TransactionResultCapsule();

  @Setter
  private List<ContractTrigger> triggerList;

  @Setter
  @Getter
  private String runtimeError;

  @Getter
  @Setter
  private contractResult resultCode;

  /*
   * for testing runs ,
   * call/create is not executed
   * but dummy recorded
   */
  private List<CallCreate> callCreateList;

  /** Factory for a fresh result whose return buffer is explicitly empty. */
  public static ProgramResult createEmpty() {
    ProgramResult empty = new ProgramResult();
    empty.setHReturn(EMPTY_BYTE_ARRAY);
    return empty;
  }

  /** Charges {@code kandy} energy against this run. */
  public void spendKandy(long kandy) {
    this.kandyUsed += kandy;
  }

  /** Marks the run as reverted (one-way: there is no way to clear it). */
  public void setRevert() {
    this.revert = true;
  }

  public boolean isRevert() {
    return this.revert;
  }

  /** Gives back {@code kandy} energy previously charged. */
  public void refundKandy(long kandy) {
    this.kandyUsed -= kandy;
  }

  /** @return a defensive copy of the created contract's address */
  public byte[] getContractAddress() {
    return Arrays.copyOf(this.contractAddress, this.contractAddress.length);
  }

  /** Stores a defensive copy of the created contract's address. */
  public void setContractAddress(byte[] contractAddress) {
    this.contractAddress = Arrays.copyOf(contractAddress, contractAddress.length);
  }

  // NOTE: unlike contractAddress, the return buffer is shared, not copied.
  public byte[] getHReturn() {
    return this.hReturn;
  }

  public void setHReturn(byte[] hReturn) {
    this.hReturn = hReturn;
  }

  /**
   * @return the trigger list, or a fresh empty list while none has been set
   *     (the field itself deliberately stays null in that case — callers get a
   *     throwaway list, consistent with the original contract).
   */
  public List<ContractTrigger> getTriggerList() {
    if (triggerList == null) {
      return new LinkedList<>();
    }
    return triggerList;
  }

  public TransactionResultCapsule getRet() {
    return this.ret;
  }

  public void setRet(TransactionResultCapsule ret) {
    this.ret = ret;
  }

  public RuntimeException getException() {
    return this.exception;
  }

  public void setException(RuntimeException exception) {
    this.exception = exception;
  }

  public long getKandyUsed() {
    return this.kandyUsed;
  }

  /** Lazily-initialised set of accounts scheduled for deletion. */
  public Set<DataWord> getDeleteAccounts() {
    Set<DataWord> accounts = deleteAccounts;
    if (accounts == null) {
      accounts = new HashSet<>();
      deleteAccounts = accounts;
    }
    return accounts;
  }

  /** Lazily-initialised set of vote entries scheduled for deletion. */
  public Set<DataWord> getDeleteVotes() {
    Set<DataWord> votes = deleteVotes;
    if (votes == null) {
      votes = new HashSet<>();
      deleteVotes = votes;
    }
    return votes;
  }

  /** Lazily-initialised set of delegation entries scheduled for deletion. */
  public Set<DataWord> getDeleteDelegation() {
    Set<DataWord> delegations = deleteDelegation;
    if (delegations == null) {
      delegations = new HashSet<>();
      deleteDelegation = delegations;
    }
    return delegations;
  }

  public void addDeleteAccount(DataWord address) {
    getDeleteAccounts().add(address);
  }

  public void addDeleteVotes(DataWord address) {
    getDeleteVotes().add(address);
  }

  public void addDeleteDelegation(DataWord address) {
    getDeleteDelegation().add(address);
  }

  public void addDeleteAccounts(Set<DataWord> accounts) {
    if (isEmpty(accounts)) {
      return;
    }
    getDeleteAccounts().addAll(accounts);
  }

  public void addDeleteVotesSet(Set<DataWord> addresses) {
    if (isEmpty(addresses)) {
      return;
    }
    getDeleteVotes().addAll(addresses);
  }

  public void addDeleteDelegationSet(Set<DataWord> addresses) {
    if (isEmpty(addresses)) {
      return;
    }
    getDeleteDelegation().addAll(addresses);
  }

  public void addTouchAccount(byte[] addr) {
    touchedAccounts.add(addr);
  }

  // NOTE: exposes the internal mutable set (original behavior, kept as-is).
  public Set<byte[]> getTouchedAccounts() {
    return this.touchedAccounts;
  }

  public void addTouchAccounts(Set<byte[]> accounts) {
    if (isEmpty(accounts)) {
      return;
    }
    getTouchedAccounts().addAll(accounts);
  }

  /** Lazily-initialised list of emitted log records. */
  public List<LogInfo> getLogInfoList() {
    List<LogInfo> logs = logInfoList;
    if (logs == null) {
      logs = new ArrayList<>();
      logInfoList = logs;
    }
    return logs;
  }

  public void addLogInfo(LogInfo logInfo) {
    getLogInfoList().add(logInfo);
  }

  public void addLogInfos(List<LogInfo> logInfos) {
    if (isEmpty(logInfos)) {
      return;
    }
    getLogInfoList().addAll(logInfos);
  }

  /** Lazily-initialised record of dummy call/create operations (test runs). */
  public List<CallCreate> getCallCreateList() {
    List<CallCreate> calls = callCreateList;
    if (calls == null) {
      calls = new ArrayList<>();
      callCreateList = calls;
    }
    return calls;
  }

  public void addCallCreate(byte[] data, byte[] destination, byte[] kandyLimit, byte[] value) {
    getCallCreateList().add(new CallCreate(data, destination, kandyLimit, value));
  }

  /** Lazily-initialised list of internal transactions spawned by this run. */
  public List<InternalTransaction> getInternalTransactions() {
    List<InternalTransaction> txs = internalTransactions;
    if (txs == null) {
      txs = new ArrayList<>();
      internalTransactions = txs;
    }
    return txs;
  }

  /**
   * Creates, records, and returns a new internal transaction. The index is
   * taken before lazy initialisation; size(null) == 0, so it is correct even
   * for the first entry.
   */
  public InternalTransaction addInternalTransaction(byte[] parentHash, int deep,
      byte[] senderAddress, byte[] transferAddress, long value, byte[] data, String note,
      long nonce, Map<String, Long> token) {
    int index = size(internalTransactions);
    InternalTransaction tx = new InternalTransaction(parentHash, deep, index,
        senderAddress, transferAddress, value, data, note, nonce, token);
    getInternalTransactions().add(tx);
    return tx;
  }

  public void addInternalTransaction(InternalTransaction internalTransaction) {
    getInternalTransactions().add(internalTransaction);
  }

  public void addInternalTransactions(List<InternalTransaction> internalTransactions) {
    getInternalTransactions().addAll(internalTransactions);
  }

  /** Marks every recorded internal transaction as rejected. */
  public void rejectInternalTransactions() {
    for (InternalTransaction tx : getInternalTransactions()) {
      tx.reject();
    }
  }

  public void addFutureRefund(long kandyValue) {
    this.futureRefund += kandyValue;
  }

  public long getFutureRefund() {
    return this.futureRefund;
  }

  public void resetFutureRefund() {
    this.futureRefund = 0;
  }

  /**
   * Clears deletions, logs, and the pending refund. Internal transactions and
   * touched accounts are deliberately left untouched (original contract).
   */
  public void reset() {
    getDeleteAccounts().clear();
    getDeleteVotes().clear();
    getDeleteDelegation().clear();
    getLogInfoList().clear();
    resetFutureRefund();
  }

  /**
   * Folds a child run's result into this one. Internal transactions are always
   * merged; deletions, logs, refunds, and touched accounts only when the child
   * completed without exception and without revert.
   */
  public void merge(ProgramResult another) {
    addInternalTransactions(another.getInternalTransactions());
    boolean completedCleanly = another.getException() == null && !another.isRevert();
    if (completedCleanly) {
      addDeleteAccounts(another.getDeleteAccounts());
      addDeleteVotesSet(another.getDeleteVotes());
      addDeleteDelegationSet(another.getDeleteDelegation());
      addLogInfos(another.getLogInfoList());
      addFutureRefund(another.getFutureRefund());
      addTouchAccounts(another.getTouchedAccounts());
    }
  }
}
/* * Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.graal.python.builtins.modules; import com.oracle.graal.python.PythonLanguage; import com.oracle.graal.python.builtins.PythonBuiltinClassType; import com.oracle.graal.python.builtins.modules.PythonCextBuiltins.CheckFunctionResultNode; import com.oracle.graal.python.builtins.modules.PythonCextBuiltins.TrufflePInt_AsPrimitive; import com.oracle.graal.python.builtins.modules.PythonCextBuiltinsFactory.TrufflePInt_AsPrimitiveFactory; import com.oracle.graal.python.builtins.modules.ExternalFunctionNodesFactory.ExternalFunctionNodeGen; import com.oracle.graal.python.builtins.objects.PNone; import com.oracle.graal.python.builtins.objects.cext.CExtNodes; import com.oracle.graal.python.builtins.objects.cext.CExtNodes.ConvertArgsToSulongNode; import com.oracle.graal.python.builtins.objects.function.PArguments; import com.oracle.graal.python.builtins.objects.function.PKeyword; import com.oracle.graal.python.builtins.objects.function.Signature; import com.oracle.graal.python.nodes.IndirectCallNode; import com.oracle.graal.python.nodes.PRaiseNode; import com.oracle.graal.python.nodes.PRootNode; import com.oracle.graal.python.nodes.argument.ReadIndexedArgumentNode; import com.oracle.graal.python.nodes.argument.ReadVarArgsNode; import com.oracle.graal.python.nodes.argument.ReadVarKeywordsNode; import com.oracle.graal.python.nodes.call.FunctionInvokeNode; import com.oracle.graal.python.nodes.interop.PForeignToPTypeNode; import com.oracle.graal.python.runtime.ExecutionContext.CalleeContext; import com.oracle.graal.python.runtime.ExecutionContext.ForeignCallContext; import com.oracle.graal.python.runtime.PythonContext; import com.oracle.graal.python.runtime.object.PythonObjectFactory; import 
com.oracle.truffle.api.Assumption; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary; import com.oracle.truffle.api.RootCallTarget; import com.oracle.truffle.api.Truffle; import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.CachedContext; import com.oracle.truffle.api.dsl.Specialization; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.interop.ArityException; import com.oracle.truffle.api.interop.InteropLibrary; import com.oracle.truffle.api.interop.UnsupportedMessageException; import com.oracle.truffle.api.interop.UnsupportedTypeException; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.profiles.ConditionProfile; public abstract class ExternalFunctionNodes { abstract static class ExternalFunctionNode extends PRootNode implements IndirectCallNode { private final Signature signature; private final Object callable; private final String name; @Child private CExtNodes.ConvertArgsToSulongNode toSulongNode; @Child private CheckFunctionResultNode checkResultNode = CheckFunctionResultNode.create(); @Child private PForeignToPTypeNode fromForeign = PForeignToPTypeNode.create(); @Child private InteropLibrary lib; @Child private CalleeContext calleeContext = CalleeContext.create(); @CompilationFinal private Assumption nativeCodeDoesntNeedExceptionState = Truffle.getRuntime().createAssumption(); @CompilationFinal private Assumption nativeCodeDoesntNeedMyFrame = Truffle.getRuntime().createAssumption(); @Override public Assumption needNotPassFrameAssumption() { return nativeCodeDoesntNeedMyFrame; } @Override public Assumption needNotPassExceptionAssumption() { return nativeCodeDoesntNeedExceptionState; } @Override public Node copy() { ExternalFunctionNode node = (ExternalFunctionNode) super.copy(); node.nativeCodeDoesntNeedMyFrame = Truffle.getRuntime().createAssumption(); 
node.nativeCodeDoesntNeedExceptionState = Truffle.getRuntime().createAssumption(); return node; } ExternalFunctionNode(PythonLanguage lang, String name, Object callable, Signature signature, CExtNodes.ConvertArgsToSulongNode convertArgsNode) { super(lang); this.name = name; this.callable = callable; this.signature = signature; this.toSulongNode = convertArgsNode != null ? convertArgsNode : CExtNodes.AllToSulongNode.create(); this.lib = InteropLibrary.getFactory().create(callable); } @Specialization Object doIt(VirtualFrame frame, @Cached("createCountingProfile()") ConditionProfile customLocalsProfile, @Cached CExtNodes.AsPythonObjectNode asPythonObjectNode, @CachedContext(PythonLanguage.class) PythonContext ctx, @Cached PRaiseNode raiseNode) { CalleeContext.enter(frame, customLocalsProfile); Object[] frameArgs = PArguments.getVariableArguments(frame); Object[] arguments = new Object[frameArgs.length]; toSulongNode.executeInto(frameArgs, 0, arguments, 0); // If any code requested the caught exception (i.e. used 'sys.exc_info()'), we store // it to the context since we cannot propagate it through the native frames. 
Object state = ForeignCallContext.enter(frame, ctx, this); try { return fromNative(asPythonObjectNode.execute(checkResultNode.execute(name, lib.execute(callable, arguments)))); } catch (UnsupportedTypeException | UnsupportedMessageException e) { CompilerDirectives.transferToInterpreter(); throw raiseNode.raise(PythonBuiltinClassType.TypeError, "Calling native function %s failed: %m", name, e); } catch (ArityException e) { CompilerDirectives.transferToInterpreter(); throw raiseNode.raise(PythonBuiltinClassType.TypeError, "Calling native function %s expected %d arguments but got %d.", name, e.getExpectedArity(), e.getActualArity()); } finally { // special case after calling a C function: transfer caught exception back to frame // to simulate the global state semantics PArguments.setException(frame, ctx.getCaughtException()); ForeignCallContext.exit(frame, ctx, state); calleeContext.exit(frame, this); } } private Object fromNative(Object result) { return fromForeign.executeConvert(result); } public final Object getCallable() { return callable; } @Override public String getName() { return name; } @Override public String toString() { return "<external function root " + name + ">"; } @Override public boolean isCloningAllowed() { return true; } @Override public Signature getSignature() { return signature; } @Override public boolean isPythonInternal() { // everything that is implemented in C is internal return true; } public static ExternalFunctionNode create(PythonLanguage lang, String name, Object callable, Signature signature) { return ExternalFunctionNodeGen.create(lang, name, callable, signature, null); } public static ExternalFunctionNode create(PythonLanguage lang, String name, Object callable, Signature signature, ConvertArgsToSulongNode convertArgsNode) { return ExternalFunctionNodeGen.create(lang, name, callable, signature, convertArgsNode); } } abstract static class MethodDescriptorRoot extends PRootNode { @Child private CalleeContext calleeContext = 
CalleeContext.create(); @Child FunctionInvokeNode invokeNode; @Child ReadIndexedArgumentNode readSelfNode; private final ConditionProfile customLocalsProfile = ConditionProfile.createCountingProfile(); @TruffleBoundary MethodDescriptorRoot(PythonLanguage language, RootCallTarget callTarget) { super(language); this.readSelfNode = ReadIndexedArgumentNode.create(0); assert callTarget.getRootNode() instanceof ExternalFunctionNode; this.invokeNode = FunctionInvokeNode.createBuiltinFunction(callTarget); } @Override public boolean isCloningAllowed() { return true; } @Override public String getName() { return invokeNode.getCurrentRootNode().getName(); } @Override public String toString() { return "<METH root " + invokeNode.getCurrentRootNode().getName() + ">"; } @Override public boolean isPythonInternal() { return true; } final void enterCalleeContext(VirtualFrame frame) { CalleeContext.enter(frame, customLocalsProfile); } final void exitCalleeContext(VirtualFrame frame) { calleeContext.exit(frame, this); } } static class MethKeywordsRoot extends MethodDescriptorRoot { private static final Signature SIGNATURE = new Signature(-1, true, 1, false, new String[]{"self"}, new String[0]); @Child private PythonObjectFactory factory; @Child private ReadVarArgsNode readVarargsNode; @Child private ReadVarKeywordsNode readKwargsNode; MethKeywordsRoot(PythonLanguage language, RootCallTarget callTarget) { super(language, callTarget); this.factory = PythonObjectFactory.create(); this.readVarargsNode = ReadVarArgsNode.create(1, true); this.readKwargsNode = ReadVarKeywordsNode.create(new String[0]); } @Override public Object execute(VirtualFrame frame) { enterCalleeContext(frame); try { Object self = readSelfNode.execute(frame); Object[] args = readVarargsNode.executeObjectArray(frame); PKeyword[] kwargs = readKwargsNode.executePKeyword(frame); Object[] arguments = PArguments.create(); PArguments.setVariableArguments(arguments, self, factory.createTuple(args), factory.createDict(kwargs)); 
// Tail of the preceding wrapper root node (its definition starts before this chunk).
return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node that packs all positional arguments after {@code self} into a tuple
 * and invokes the target with (self, argsTuple).
 */
static class MethVarargsRoot extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(-1, false, 1, false, new String[]{"self"}, new String[0]);
    @Child private PythonObjectFactory factory;
    @Child private ReadVarArgsNode readVarargsNode;

    MethVarargsRoot(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.factory = PythonObjectFactory.create();
        // varargs start at index 1, i.e. after 'self'
        this.readVarargsNode = ReadVarArgsNode.create(1, true);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object[] args = readVarargsNode.executeObjectArray(frame);
            Object[] arguments = PArguments.create();
            // callee receives 'self' plus a single tuple holding the remaining positionals
            PArguments.setVariableArguments(arguments, self, factory.createTuple(args));
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for targets taking no arguments besides {@code self}; the second
 * argument slot is always filled with {@code PNone.NONE}.
 */
static class MethNoargsRoot extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(-1, false, -1, false, new String[]{"self"}, new String[0]);

    MethNoargsRoot(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object[] arguments = PArguments.create();
            // second slot is unused by the callee; None is the placeholder
            PArguments.setVariableArguments(arguments, self, PNone.NONE);
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/** Wrapper root node for targets taking exactly one argument after {@code self}. */
static class MethORoot extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(-1, false, -1, false, new String[]{"self", "arg"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArgNode;

    MethORoot(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        // the single positional argument follows 'self' at index 1
        this.readArgNode = ReadIndexedArgumentNode.create(1);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg = readArgNode.execute(frame);
            Object[] arguments = PArguments.create();
            PArguments.setVariableArguments(arguments, self, arg);
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node invoking the target with (self, argsTuple, nargs, kwargsDict):
 * positionals as a tuple plus their count, and all keywords collected into a dict.
 */
static class MethFastcallWithKeywordsRoot extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(-1, true, 1, false, new String[]{"self"}, new String[0]);
    @Child private PythonObjectFactory factory;
    @Child private ReadVarArgsNode readVarargsNode;
    @Child private ReadVarKeywordsNode readKwargsNode;

    MethFastcallWithKeywordsRoot(PythonLanguage language, RootCallTarget fun) {
        super(language, fun);
        this.factory = PythonObjectFactory.create();
        this.readVarargsNode = ReadVarArgsNode.create(1, true);
        // no declared keyword names: every keyword argument is collected
        this.readKwargsNode = ReadVarKeywordsNode.create(new String[0]);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object[] args = readVarargsNode.executeObjectArray(frame);
            PKeyword[] kwargs = readKwargsNode.executePKeyword(frame);
            Object[] arguments = PArguments.create();
            PArguments.setVariableArguments(arguments, self, factory.createTuple(args), args.length, factory.createDict(kwargs));
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/** Wrapper root node invoking the target with (self, argsTuple, nargs) — no keywords. */
static class MethFastcallRoot extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(-1, false, 1, false, new String[]{"self"}, new String[0]);
    @Child private PythonObjectFactory factory;
    @Child private ReadVarArgsNode readVarargsNode;

    MethFastcallRoot(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.factory = PythonObjectFactory.create();
        this.readVarargsNode = ReadVarArgsNode.create(1, true);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object[] args = readVarargsNode.executeObjectArray(frame);
            Object[] arguments = PArguments.create();
            PArguments.setVariableArguments(arguments, self, factory.createTuple(args), args.length);
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for C function type {@code allocfunc} and {@code ssizeargfunc}.
 */
static class AllocFuncRootNode extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "nitems"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArgNode;
    @Child private TrufflePInt_AsPrimitive asSsizeTNode;

    AllocFuncRootNode(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.readArgNode = ReadIndexedArgumentNode.create(1);
        this.asSsizeTNode = TrufflePInt_AsPrimitiveFactory.create();
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg = readArgNode.execute(frame);
            Object[] arguments = PArguments.create();
            // narrow 'nitems' to a primitive long (8 bytes) before passing it to the native side
            PArguments.setVariableArguments(arguments, self, asSsizeTNode.executeLong(frame, arg, 1, Long.BYTES));
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for a get attribute function (C type {@code getattrfunc}).
 */
static class GetAttrFuncRootNode extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "key"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArgNode;
    @Child private CExtNodes.AsCharPointerNode asCharPointerNode;

    GetAttrFuncRootNode(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.readArgNode = ReadIndexedArgumentNode.create(1);
        this.asCharPointerNode = CExtNodes.AsCharPointerNode.create();
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg = readArgNode.execute(frame);
            Object[] arguments = PArguments.create();
            // the attribute name is converted to a C char pointer for the callee
            // TODO we should use 'CStringWrapper' for 'arg' but it does currently not support
            // PString
            PArguments.setVariableArguments(arguments, self, asCharPointerNode.execute(arg));
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for a set attribute function (C type {@code setattrfunc}).
 */
static class SetAttrFuncRootNode extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "key", "value"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArg1Node;
    @Child private ReadIndexedArgumentNode readArg2Node;
    @Child private CExtNodes.AsCharPointerNode asCharPointerNode;

    SetAttrFuncRootNode(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.readArg1Node = ReadIndexedArgumentNode.create(1);
        this.readArg2Node = ReadIndexedArgumentNode.create(2);
        this.asCharPointerNode = CExtNodes.AsCharPointerNode.create();
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg1 = readArg1Node.execute(frame);
            Object arg2 = readArg2Node.execute(frame);
            Object[] arguments = PArguments.create();
            // attribute name goes through as a C char pointer; the value is passed unchanged
            // TODO we should use 'CStringWrapper' for 'arg1' but it does currently not support
            // PString
            PArguments.setVariableArguments(arguments, self, asCharPointerNode.execute(arg1), arg2);
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for a rich compare function (C type {@code richcmpfunc}).
 */
static class RichCmpFuncRootNode extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "other", "op"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArg1Node;
    @Child private ReadIndexedArgumentNode readArg2Node;
    @Child private TrufflePInt_AsPrimitive asSsizeTNode;

    RichCmpFuncRootNode(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.readArg1Node = ReadIndexedArgumentNode.create(1);
        this.readArg2Node = ReadIndexedArgumentNode.create(2);
        this.asSsizeTNode = TrufflePInt_AsPrimitiveFactory.create();
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg1 = readArg1Node.execute(frame);
            Object arg2 = readArg2Node.execute(frame);
            Object[] arguments = PArguments.create();
            // the comparison op selector is narrowed to a primitive int (4 bytes)
            PArguments.setVariableArguments(arguments, self, arg1, asSsizeTNode.executeInt(frame, arg2, 1, Integer.BYTES));
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for C function type {@code ssizeobjargproc}.
 */
static class SSizeObjArgProcRootNode extends MethodDescriptorRoot {
    private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "i", "value"}, new String[0]);
    @Child private ReadIndexedArgumentNode readArg1Node;
    @Child private ReadIndexedArgumentNode readArg2Node;
    @Child private TrufflePInt_AsPrimitive asSsizeTNode;

    SSizeObjArgProcRootNode(PythonLanguage language, RootCallTarget callTarget) {
        super(language, callTarget);
        this.readArg1Node = ReadIndexedArgumentNode.create(1);
        this.readArg2Node = ReadIndexedArgumentNode.create(2);
        this.asSsizeTNode = TrufflePInt_AsPrimitiveFactory.create();
    }

    @Override
    public Object execute(VirtualFrame frame) {
        enterCalleeContext(frame);
        try {
            Object self = readSelfNode.execute(frame);
            Object arg1 = readArg1Node.execute(frame);
            Object arg2 = readArg2Node.execute(frame);
            Object[] arguments = PArguments.create();
            // the index 'i' is narrowed to a primitive long (8 bytes); 'value' is passed unchanged
            PArguments.setVariableArguments(arguments, self, asSsizeTNode.executeLong(frame, arg1, 1, Long.BYTES), arg2);
            return invokeNode.execute(frame, arguments);
        } finally {
            exitCalleeContext(frame);
        }
    }

    @Override
    public Signature getSignature() {
        return SIGNATURE;
    }
}

/**
 * Wrapper root node for reverse binary operations.
*/ static class MethReverseRootNode extends MethodDescriptorRoot { private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "obj"}, new String[0]); @Child private ReadIndexedArgumentNode readArg0Node; @Child private ReadIndexedArgumentNode readArg1Node; MethReverseRootNode(PythonLanguage language, RootCallTarget callTarget) { super(language, callTarget); this.readArg0Node = ReadIndexedArgumentNode.create(0); this.readArg1Node = ReadIndexedArgumentNode.create(1); } @Override public Object execute(VirtualFrame frame) { enterCalleeContext(frame); try { Object arg0 = readArg0Node.execute(frame); Object arg1 = readArg1Node.execute(frame); Object[] arguments = PArguments.create(); PArguments.setVariableArguments(arguments, arg1, arg0); return invokeNode.execute(frame, arguments); } finally { exitCalleeContext(frame); } } @Override public Signature getSignature() { return SIGNATURE; } } /** * Wrapper root node for native power function (with an optional third argument). */ static class MethPowRootNode extends MethodDescriptorRoot { private static final Signature SIGNATURE = new Signature(false, 0, false, new String[]{"args"}, new String[0]); @Child private ReadVarArgsNode readVarargsNode; private final ConditionProfile profile = ConditionProfile.createBinaryProfile(); MethPowRootNode(PythonLanguage language, RootCallTarget callTarget) { super(language, callTarget); this.readVarargsNode = ReadVarArgsNode.create(1, true); } @Override public Object execute(VirtualFrame frame) { enterCalleeContext(frame); try { Object self = readSelfNode.execute(frame); Object[] varargs = readVarargsNode.executeObjectArray(frame); Object arg0 = varargs[0]; Object arg1 = profile.profile(varargs.length > 1) ? 
varargs[1] : PNone.NONE; Object[] arguments = PArguments.create(); PArguments.setVariableArguments(arguments, self, arg0, arg1); return invokeNode.execute(frame, arguments); } finally { exitCalleeContext(frame); } } void setArguments(Object[] arguments, Object arg0, Object arg1, Object arg2) { PArguments.setVariableArguments(arguments, arg0, arg1, arg2); } @Override public Signature getSignature() { return SIGNATURE; } } /** * Wrapper root node for native reverse power function (with an optional third argument). */ static class MethRPowRootNode extends MethPowRootNode { MethRPowRootNode(PythonLanguage language, RootCallTarget callTarget) { super(language, callTarget); } @Override void setArguments(Object[] arguments, Object arg0, Object arg1, Object arg2) { PArguments.setVariableArguments(arguments, arg1, arg0, arg2); } } /** * Wrapper root node for native power function (with an optional third argument). */ static class MethRichcmpOpRootNode extends MethodDescriptorRoot { private static final Signature SIGNATURE = new Signature(false, -1, false, new String[]{"self", "other"}, new String[0]); @Child private ReadIndexedArgumentNode readArgNode; private final int op; MethRichcmpOpRootNode(PythonLanguage language, RootCallTarget callTarget, int op) { super(language, callTarget); this.readArgNode = ReadIndexedArgumentNode.create(1); this.op = op; } @Override public Object execute(VirtualFrame frame) { enterCalleeContext(frame); try { Object self = readSelfNode.execute(frame); Object arg = readArgNode.execute(frame); Object[] arguments = PArguments.create(); PArguments.setVariableArguments(arguments, self, arg, op); return invokeNode.execute(frame, arguments); } finally { exitCalleeContext(frame); } } @Override public Signature getSignature() { return SIGNATURE; } } }
package com.javarush.task.task13.task1324;

import java.awt.Color;

/* One method in the class */
public class Solution {

    public static void main(String[] args) throws Exception {
    }

    /** Contract for animals: each animal exposes a color and an age. */
    public interface Animal {
        Color getColor();

        Integer getAge();
    }

    /** Partial {@link Animal} implementation that contributes only a name. */
    public static abstract class Fox implements Animal {
        public String getName() {
            return "Fox";
        }
    }
}
/* * Copyright 2011 - 2013 NTB University of Applied Sciences in Technology * Buchs, Switzerland, http://www.ntb.ch/inf * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ch.ntb.inf.deep.cg; import ch.ntb.inf.deep.classItems.*; import ch.ntb.inf.deep.classItems.Class; import ch.ntb.inf.deep.config.Configuration; import ch.ntb.inf.deep.host.ErrorReporter; import ch.ntb.inf.deep.host.StdStreams; import ch.ntb.inf.deep.linker.Linker32; import ch.ntb.inf.deep.ssa.*; import ch.ntb.inf.deep.strings.HString; public abstract class CodeGen implements SSAInstructionOpcs, SSAInstructionMnemonics, SSAValueType, ICjvmInstructionOpcs, ICclassFileConsts, ICdescAndTypeConsts { protected static final boolean dbg = false; protected static final int maxNofParam = 32; protected static final int defaultNofInstr = 32; protected static final int defaultNofFixup = 8; protected static int objectSize, stringSize; protected static StdConstant int2floatConst1 = null; // 2^52+2^31, for int -> float conversions protected static StdConstant int2floatConst2 = null; // 2^32, for long -> float conversions protected static StdConstant int2floatConst3 = null; // 2^52, for long -> float conversions public static int idGET1, idGET2, idGET4, idGET8; public static int idPUT1, idPUT2, idPUT4, idPUT8; public static int idBIT, idASM, idHALT, idADR_OF_METHOD, idREF; public static int idENABLE_FLOATS; public static int idGETGPR, idGETFPR, idGETSPR; public static int idPUTGPR, idPUTFPR, idPUTSPR; public static int 
idDoubleToBits, idBitsToDouble; public static int idFloatToBits, idBitsToFloat; protected static Method stringNewstringMethod; protected static Method heapNewstringMethod; protected static Method strInitC; protected static Method strInitCII; protected static Method strAllocC; protected static Method strAllocCII; // nof parameter for a method, set by SSA, includes "this", long and doubles count as 2 parameters protected static int nofParam; // nofParamGPR + nofParamFPR = nofParam, set by last exit set of last node protected static int nofParamGPR, nofParamFPR; // maximum nof registers used by this method public static int nofNonVolGPR, nofNonVolFPR, nofVolGPR, nofVolFPR; // gives required stack space for parameters of this method if not enough registers protected static int recParamSlotsOnStack; // gives required stack space for parameters of any call in this method if not enough registers public static int callParamSlotsOnStack; // type of parameter, set by SSA, includes "this", long and doubles count as 2 parameters protected static int[] paramType = new int[maxNofParam]; // register type of parameter, long and doubles count as 2 parameters public static boolean[] paramHasNonVolReg = new boolean[maxNofParam]; // register of parameter, long and doubles count as 2 parameters public static int[] paramRegNr = new int[maxNofParam]; // last instruction where parameters is used public static int[] paramRegEnd = new int[maxNofParam]; // information about into which registers parameters of this method go protected static int nofMoveGPR, nofMoveFPR; protected static int[] moveGPRsrc = new int[maxNofParam]; protected static int[] moveGPRdst = new int[maxNofParam]; protected static int[] moveFPRsrc = new int[maxNofParam]; protected static int[] moveFPRdst = new int[maxNofParam]; public CodeGen() {} protected void init(Method m) { SSA ssa = m.ssa; nofParamGPR = 0; nofParamFPR = 0; nofNonVolGPR = 0; nofNonVolFPR = 0; nofVolGPR = 0; nofVolFPR = 0; nofMoveGPR = 0; nofMoveFPR = 0; 
recParamSlotsOnStack = 0; callParamSlotsOnStack = 0; if (dbg) StdStreams.vrb.println("generate code for " + m.owner.name + "." + m.name); for (int i = 0; i < maxNofParam; i++) { paramType[i] = tVoid; paramRegNr[i] = -1; paramRegEnd[i] = -1; } // make local copy int maxStackSlots = m.maxStackSlots; int i = maxStackSlots; while ((i < ssa.isParam.length) && ssa.isParam[i]) { int type = ssa.paramType[i] & ~(1<<ssaTaFitIntoInt); paramType[i - maxStackSlots] = type; paramHasNonVolReg[i - maxStackSlots] = false; if (type == tLong || type == tDouble) i++; i++; } nofParam = i - maxStackSlots; if (nofParam > maxNofParam) {ErrorReporter.reporter.error(601); return;} if (dbg) StdStreams.vrb.println("nofParam = " + nofParam + ", double / long count as 2"); } public abstract void translateMethod(Method m); public abstract void doFixups(Code32 code); public abstract void generateCompSpecSubroutines(); protected static int getInt(byte[] bytes, int index){ return (((bytes[index]<<8) | (bytes[index+1]&0xFF))<<8 | (bytes[index+2]&0xFF))<<8 | (bytes[index+3]&0xFF); } public void init() { Class cls = (Class)RefType.refTypeList.getItemByName("ch/ntb/inf/deep/unsafe/US"); if (cls == null) {ErrorReporter.reporter.error(630); return;} Method m = Configuration.getOS().getSystemMethodByName(cls, "PUT1"); if (m != null) idPUT1 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUT2"); if (m != null) idPUT2 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUT4"); if (m != null) idPUT4 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUT8"); if (m != null) idPUT8 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GET1"); if (m != null) idGET1 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, 
"GET2"); if (m != null) idGET2 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GET4"); if (m != null) idGET4 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GET8"); if (m != null) idGET8 = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "BIT"); if (m != null) idBIT = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "ASM"); if (m != null) idASM = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GETGPR"); if (m != null) idGETGPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GETFPR"); if (m != null) idGETFPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "GETSPR"); if (m != null) idGETSPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUTGPR"); if (m != null) idPUTGPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUTFPR"); if (m != null) idPUTFPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "PUTSPR"); if (m != null) idPUTSPR = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "ADR_OF_METHOD"); if (m != null) idADR_OF_METHOD = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "HALT"); if (m != null) idHALT = m.id; else {ErrorReporter.reporter.error(631); return;} m = Configuration.getOS().getSystemMethodByName(cls, "ENABLE_FLOATS"); if (m != null) idENABLE_FLOATS = m.id; else {ErrorReporter.reporter.error(631); return;} m = 
Configuration.getOS().getSystemMethodByName(cls, "REF"); if (m != null) idREF = m.id; else {ErrorReporter.reporter.error(631); return;} cls = (Class)RefType.refTypeList.getItemByName("ch/ntb/inf/deep/lowLevel/LL"); if (cls == null) {ErrorReporter.reporter.error(632); return;} m = Configuration.getOS().getSystemMethodByName(cls, "doubleToBits"); if(m != null) idDoubleToBits = m.id; else {ErrorReporter.reporter.error(633); return;} m = Configuration.getOS().getSystemMethodByName(cls, "bitsToDouble"); if(m != null) idBitsToDouble = m.id; else {ErrorReporter.reporter.error(633); return;} m = Configuration.getOS().getSystemMethodByName(cls, "floatToBits"); if(m != null) idFloatToBits = m.id; else {ErrorReporter.reporter.error(633); return;} m = Configuration.getOS().getSystemMethodByName(cls, "bitsToFloat"); if(m != null) idBitsToFloat = m.id; else {ErrorReporter.reporter.error(633); return;} objectSize = Type.wktObject.objectSize; stringSize = Type.wktString.objectSize; int2floatConst1 = new StdConstant(HString.getRegisteredHString("int2floatConst1"), (double)(0x10000000000000L + 0x80000000L)); int2floatConst2 = new StdConstant(HString.getRegisteredHString("int2floatConst2"), (double)0x100000000L); int2floatConst3 = new StdConstant(HString.getRegisteredHString("int2floatConst3"), (double)0x10000000000000L); Linker32.globalConstantTable = null; Linker32.addGlobalConstant(int2floatConst1); Linker32.addGlobalConstant(int2floatConst2); Linker32.addGlobalConstant(int2floatConst3); Method.createCompSpecSubroutine("handleException"); final Class stringClass = (Class)Type.wktString; final Class heapClass = Configuration.getOS().heapClass; if ((stringClass != null) && (stringClass.methods != null)) { // check if string class is loaded at all stringNewstringMethod = (Method)stringClass.methods.getItemByName("newstring"); if(heapClass != null) { heapNewstringMethod = (Method)heapClass.methods.getItemByName("newstring"); } if(dbg) { if (stringNewstringMethod != null) 
StdStreams.vrb.println("stringNewstringMethod = " + stringNewstringMethod.name + stringNewstringMethod.methDescriptor); else StdStreams.vrb.println("stringNewstringMethod: not found"); if (heapNewstringMethod != null) StdStreams.vrb.println("heapNewstringMethod = " + heapNewstringMethod.name + heapNewstringMethod.methDescriptor); else StdStreams.vrb.println("heapNewstringMethod: not found"); } m = (Method)stringClass.methods; while (m != null) { if (m.name.equals(HString.getRegisteredHString("<init>"))) { if (m.methDescriptor.equals(HString.getRegisteredHString("([C)V"))) strInitC = m; else if (m.methDescriptor.equals(HString.getRegisteredHString("([CII)V"))) strInitCII = m; } m = (Method)m.next; } if(dbg) { if (strInitC != null) StdStreams.vrb.println("stringInitC = " + strInitC.name + strInitC.methDescriptor); else StdStreams.vrb.println("stringInitC: not found"); if (strInitCII != null) StdStreams.vrb.println("stringInitCII = " + strInitCII.name + strInitCII.methDescriptor); else StdStreams.vrb.println("stringInitCII: not found"); } m = (Method)stringClass.methods; while (m != null) { if (m.name.equals(HString.getRegisteredHString("allocateString"))) { if (m.methDescriptor.equals(HString.getRegisteredHString("(I[C)Ljava/lang/String;"))) strAllocC = m; else if (m.methDescriptor.equals(HString.getRegisteredHString("(I[CII)Ljava/lang/String;"))) strAllocCII = m; } m = (Method)m.next; } if(dbg) { if (strAllocC != null) StdStreams.vrb.println("allocateStringC = " + strAllocC.name + strAllocC.methDescriptor); else StdStreams.vrb.println("allocateStringC: not found"); if (strAllocCII != null) StdStreams.vrb.println("allocateStringCII = " + strAllocCII.name + strAllocCII.methDescriptor); else StdStreams.vrb.println("allocateStringCII: not found"); } } } }
/*
 * Copyright 2016 higherfrequencytrading.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.network.cluster;

import net.openhft.chronicle.core.annotation.UsedViaReflection;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.core.threads.EventLoop;
import net.openhft.chronicle.core.util.ThrowingFunction;
import net.openhft.chronicle.network.*;
import net.openhft.chronicle.network.cluster.handlers.UberHandler;
import net.openhft.chronicle.network.cluster.handlers.UberHandler.Factory;
import net.openhft.chronicle.network.connection.WireOutPublisher;
import net.openhft.chronicle.wire.*;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * Shared configuration and factory holder for one cluster member. Instances are read from
 * wire configuration ({@link Demarshallable}); as a {@code Consumer<HostDetails>} the
 * context wires up the connection machinery (strategy, manager, connector, notifier) for
 * each remote host it is handed via {@link #accept(HostDetails)}.
 *
 * The factory fields are {@code transient}: they are re-registered programmatically or via
 * {@link #wireParser()} rather than serialized as plain state.
 */
public abstract class ClusterContext implements Demarshallable, Marshallable, Consumer<HostDetails> {

    private transient Factory handlerFactory;
    private transient Function<WireType, WireOutPublisher> wireOutPublisherFactory;
    private transient Function<ClusterContext, NetworkContext> networkContextFactory;
    private transient Function<ClusterContext, WriteMarshallable> heartbeatFactory;
    private transient Function<ClusterContext, NetworkStatsListener> networkStatsListenerFactory;
    private transient Supplier<ConnectionManager> connectionEventHandler;
    private transient EventLoop eventLoop;

    private long heartbeatTimeoutMs = 40_000;   // connection considered dead after this silence
    private long heartbeatIntervalMs = 20_000;  // how often heartbeats are emitted
    private ConnectionStrategy connectionStrategy;
    private WireType wireType;
    private String clusterName;
    private byte localIdentifier;
    private ServerThreadingStrategy serverThreadingStrategy;
    private long retryInterval = 1_000L;
    private String procPrefix;

    /** Deserializing constructor, invoked reflectively by the wire layer. */
    @UsedViaReflection
    protected ClusterContext(@NotNull WireIn wire) throws IORuntimeException {
        readMarshallable(wire);
    }

    protected ClusterContext() {
        defaults();
    }

    public String procPrefix() {
        return procPrefix;
    }

    public void procPrefix(String procPrefix) {
        this.procPrefix = procPrefix;
    }

    /**
     * Resets to defaults, then replays every field event in the wire through
     * {@link #wireParser()} until the input is exhausted.
     */
    @Override
    public void readMarshallable(@NotNull WireIn wire) throws IORuntimeException {
        defaults();
        while (wire.bytes().readRemaining() > 0) {
            wire.consumePadding();
            if (wire.bytes().readRemaining() > 0)
                wireParser().parseOne(wire);
        }
    }

    public Function<ClusterContext, NetworkStatsListener> networkStatsListenerFactory() {
        return networkStatsListenerFactory;
    }

    @NotNull
    public ClusterContext networkStatsListenerFactory(Function<ClusterContext, NetworkStatsListener> networkStatsListenerFactory) {
        this.networkStatsListenerFactory = networkStatsListenerFactory;
        return this;
    }

    public long heartbeatIntervalMs() {
        return heartbeatIntervalMs;
    }

    /** Supplies the per-connection TCP event handler; implemented by concrete cluster types. */
    @NotNull
    public abstract ThrowingFunction<NetworkContext, TcpEventHandler, IOException> tcpEventHandlerFactory();

    /**
     * Builds the parser that maps configuration field names to the corresponding setters.
     * Unknown fields are consumed by the no-op default handler.
     */
    @NotNull
    protected WireParser wireParser() {
        @NotNull VanillaWireParser parser = new VanillaWireParser((s, v) -> {
        }, WireParser.SKIP_READABLE_BYTES);
        parser.register(() -> "wireType", (s, v) -> v.text(this, (o, x) -> this.wireType(WireType.valueOf(x))));
        parser.register(() -> "handlerFactory", (s, v) -> this.handlerFactory(v.typedMarshallable()));
        parser.register(() -> "heartbeatTimeoutMs", (s, v) -> this.heartbeatTimeoutMs(v.int64()));
        parser.register(() -> "heartbeatIntervalMs", (s, v) -> this.heartbeatIntervalMs(v.int64()));
        parser.register(() -> "wireOutPublisherFactory", (s, v) -> this.wireOutPublisherFactory(v.typedMarshallable()));
        parser.register(() -> "networkContextFactory", (s, v) -> this.networkContextFactory(v.typedMarshallable()));
        parser.register(() -> "connectionStrategy", (s, v) -> this.connectionStrategy(v.typedMarshallable()));
        parser.register(() -> "connectionEventHandler", (s, v) -> this.connectionEventHandler(v.typedMarshallable()));
        parser.register(() -> "heartbeatFactory", (s, v) -> this.heartbeatFactory(v.typedMarshallable()));
        parser.register(() -> "networkStatsListenerFactory", (s, v) -> this.networkStatsListenerFactory(v.typedMarshallable()));
        parser.register(() -> "serverThreadingStrategy", (s, v) -> this.serverThreadingStrategy(v.asEnum(ServerThreadingStrategy.class)));
        return parser;
    }

    public void serverThreadingStrategy(ServerThreadingStrategy serverThreadingStrategy) {
        this.serverThreadingStrategy = serverThreadingStrategy;
    }

    public ServerThreadingStrategy serverThreadingStrategy() {
        return serverThreadingStrategy;
    }

    private UberHandler.Factory handlerFactory() {
        return handlerFactory;
    }

    public void handlerFactory(UberHandler.Factory handlerFactory) {
        this.handlerFactory = handlerFactory;
    }

    public void clusterName(String clusterName) {
        this.clusterName = clusterName;
    }

    public EventLoop eventLoop() {
        return eventLoop;
    }

    @NotNull
    public ClusterContext eventLoop(EventLoop eventLoop) {
        this.eventLoop = eventLoop;
        return this;
    }

    // hook for subclasses; called before deserialization and by the no-arg constructor
    public void defaults() {
    }

    @NotNull
    public ClusterContext localIdentifier(byte localIdentifier) {
        this.localIdentifier = localIdentifier;
        return this;
    }

    @NotNull
    public ClusterContext wireType(WireType wireType) {
        this.wireType = wireType;
        return this;
    }

    @NotNull
    public ClusterContext heartbeatFactory(Function<ClusterContext, WriteMarshallable> heartbeatFactor) {
        this.heartbeatFactory = heartbeatFactor;
        return this;
    }

    @NotNull
    public ClusterContext heartbeatIntervalMs(long heartbeatIntervalMs) {
        this.heartbeatIntervalMs = heartbeatIntervalMs;
        return this;
    }

    @NotNull
    public ClusterContext heartbeatTimeoutMs(long heartbeatTimeoutMs) {
        this.heartbeatTimeoutMs = heartbeatTimeoutMs;
        return this;
    }

    @NotNull
    public ClusterContext wireOutPublisherFactory(Function<WireType, WireOutPublisher> wireOutPublisherFactory) {
        this.wireOutPublisherFactory = wireOutPublisherFactory;
        return this;
    }

    @NotNull
    public ClusterContext networkContextFactory(Function<ClusterContext, NetworkContext> networkContextFactory) {
        this.networkContextFactory = networkContextFactory;
        return this;
    }

    public WireType wireType() {
        return wireType;
    }

    public Function<WireType, WireOutPublisher> wireOutPublisherFactory() {
        return wireOutPublisherFactory;
    }

    public long heartbeatTimeoutMs() {
        return heartbeatTimeoutMs;
    }

    public String clusterName() {
        return clusterName;
    }

    public byte localIdentifier() {
        return localIdentifier;
    }

    public Function<ClusterContext, NetworkContext> networkContextFactory() {
        return networkContextFactory;
    }

    @NotNull
    public ClusterContext connectionStrategy(ConnectionStrategy connectionStrategy) {
        this.connectionStrategy = connectionStrategy;
        return this;
    }

    private ConnectionStrategy connectionStrategy() {
        return this.connectionStrategy;
    }

    private Supplier<ConnectionManager> connectionEventHandler() {
        return connectionEventHandler;
    }

    @NotNull
    public ClusterContext connectionEventHandler(Supplier<ConnectionManager> connectionEventHandler) {
        this.connectionEventHandler = connectionEventHandler;
        return this;
    }

    private Function<ClusterContext, WriteMarshallable> heartbeatFactory() {
        return heartbeatFactory;
    }

    /**
     * Wires the connection machinery for one remote host and initiates the connection.
     * The local host (same id as {@link #localIdentifier}) is skipped.
     */
    @Override
    public void accept(@NotNull HostDetails hd) {
        if (this.localIdentifier == hd.hostId())
            return;  // never connect to ourselves
        final ConnectionStrategy connectionStrategy = this.connectionStrategy();
        hd.connectionStrategy(connectionStrategy);
        final ConnectionManager connectionManager = this.connectionEventHandler().get();
        hd.connectionManager(connectionManager);
        @NotNull final HostConnector hostConnector = new HostConnector(this, new RemoteConnector(this.tcpEventHandlerFactory()), hd);
        hd.hostConnector(hostConnector);
        @NotNull ClusterNotifier clusterNotifier = new ClusterNotifier(connectionManager, hostConnector, bootstraps(hd));
        hd.clusterNotifier(clusterNotifier);
        hd.terminationEventHandler(clusterNotifier);
        clusterNotifier.connect();
    }

    /**
     * The messages sent when a connection to {@code hd} is (re)established: the uber
     * handler followed by the heartbeat message.
     */
    @NotNull
    private List<WriteMarshallable> bootstraps(HostDetails hd) {
        final UberHandler.Factory handler = this.handlerFactory();
        final Function<ClusterContext, WriteMarshallable> heartbeat = this.heartbeatFactory();
        @NotNull ArrayList<WriteMarshallable> result = new ArrayList<>();
        result.add(handler.apply(this, hd));
        result.add(heartbeat.apply(this));
        return result;
    }

    public long retryInterval() {
        return retryInterval;
    }

    public ClusterContext retryInterval(final long retryInterval) {
        this.retryInterval = retryInterval;
        return this;
    }
}
/* * MIT License * * Copyright (c) 2021 Brandon Li * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ /*............................................................................................ . Copyright © 2021 Brandon Li . . . . Permission is hereby granted, free of charge, to any person obtaining a copy of this . . software and associated documentation files (the “Software”), to deal in the Software . . without restriction, including without limitation the rights to use, copy, modify, merge, . . publish, distribute, sublicense, and/or sell copies of the Software, and to permit . . persons to whom the Software is furnished to do so, subject to the following conditions: . . . . The above copyright notice and this permission notice shall be included in all copies . . or substantial portions of the Software. . . . . THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, . . EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF . . 
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND                                .
.   NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS              .
.   BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN               .
.   ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN                .
.   CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE                 .
.   SOFTWARE.                                                                        .
............................................................................................*/

package io.github.pulsebeat02.ezmediacore.nms;

import java.nio.ByteBuffer;
import java.util.UUID;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.jetbrains.annotations.NotNull;

/**
 * Version-specific NMS bridge for sending video frames to clients via packets.
 * Implementations translate the abstract operations below into the packet classes of a
 * concrete server version.
 */
public interface PacketHandler {

  /** Displays a debug marker of the given color at (x, y, z) for {@code time} to the viewers. */
  void displayDebugMarker(
      final UUID[] viewers,
      final int x,
      final int y,
      final int z,
      final int color,
      final int time);

  /**
   * Displays the given RGB frame buffer on map ids starting at {@code map}, laid out as a
   * {@code mapWidth} x {@code mapHeight} grid, with the given x/y offsets into the frame.
   *
   * @param videoWidth width in pixels of one row of the source frame in {@code rgb}
   */
  void displayMaps(
      final UUID[] viewers,
      final int map,
      final int mapWidth,
      final int mapHeight,
      final ByteBuffer rgb,
      final int videoWidth,
      final int xOffset,
      final int yOffset);

  /** Same as the 8-argument overload without explicit x/y offsets into the frame. */
  void displayMaps(
      final UUID[] viewers,
      final int map,
      final int mapWidth,
      final int mapHeight,
      final ByteBuffer rgb,
      final int videoWidth);

  /** Displays frame {@code data} on the given entities, {@code width} values per row. */
  void displayEntities(
      final UUID[] viewers, final Entity[] entities, final int[] data, final int width);

  /** Installs this handler's packet interceptors into the player's connection. */
  void injectPlayer(@NotNull final Player player);

  /** Removes this handler's packet interceptors from the player's connection. */
  void uninjectPlayer(@NotNull final Player player);

  boolean isMapRegistered(final int id);

  void unregisterMap(final int id);

  void registerMap(final int id);

  /** Called for each outbound packet to a viewer; may replace or suppress the packet. */
  Object onPacketInterceptOut(final Player viewer, final Object packet);

  /** Called for each inbound packet from a viewer; may replace or suppress the packet. */
  Object onPacketInterceptIn(final Player viewer, final Object packet);
}
package com.ruoyi.system.service.impl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.ruoyi.common.annotation.DataScope;
import com.ruoyi.common.constant.UserConstants;
import com.ruoyi.common.core.text.Convert;
import com.ruoyi.common.exception.BusinessException;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.spring.SpringUtils;
import com.ruoyi.system.domain.SysRole;
import com.ruoyi.system.domain.SysRoleDept;
import com.ruoyi.system.domain.SysRoleMenu;
import com.ruoyi.system.domain.SysUserRole;
import com.ruoyi.system.mapper.SysRoleDeptMapper;
import com.ruoyi.system.mapper.SysRoleMapper;
import com.ruoyi.system.mapper.SysRoleMenuMapper;
import com.ruoyi.system.mapper.SysUserRoleMapper;
import com.ruoyi.system.service.ISysRoleService;

/**
 * Role business-layer implementation.
 *
 * @author ruoyi
 */
@Service
public class SysRoleServiceImpl implements ISysRoleService {

    @Autowired
    private SysRoleMapper roleMapper;

    @Autowired
    private SysRoleMenuMapper roleMenuMapper;

    @Autowired
    private SysUserRoleMapper userRoleMapper;

    @Autowired
    private SysRoleDeptMapper roleDeptMapper;

    /**
     * Queries roles matching the given criteria, with pagination and data scoping.
     *
     * @param role query criteria
     * @return matching roles
     */
    @Override
    @DataScope(deptAlias = "d")
    public List<SysRole> selectRoleList(SysRole role) {
        return roleMapper.selectRoleList(role);
    }

    /**
     * Returns the set of role keys granted to a user.
     *
     * <p>A single role's key column may contain several comma-separated keys;
     * each is added individually.
     *
     * @param userId user ID
     * @return set of role keys (empty if the user has none)
     */
    @Override
    public Set<String> selectRoleKeys(Long userId) {
        List<SysRole> perms = roleMapper.selectRolesByUserId(userId);
        Set<String> permsSet = new HashSet<>();
        for (SysRole perm : perms) {
            if (StringUtils.isNotNull(perm)) {
                permsSet.addAll(Arrays.asList(perm.getRoleKey().trim().split(",")));
            }
        }
        return permsSet;
    }

    /**
     * Returns all roles, flagging the ones the user already holds.
     *
     * @param userId user ID
     * @return all roles; {@code flag} is true on roles assigned to the user
     */
    @Override
    public List<SysRole> selectRolesByUserId(Long userId) {
        List<SysRole> userRoles = roleMapper.selectRolesByUserId(userId);
        List<SysRole> roles = selectRoleAll();
        for (SysRole role : roles) {
            for (SysRole userRole : userRoles) {
                if (role.getRoleId().longValue() == userRole.getRoleId().longValue()) {
                    role.setFlag(true);
                    break;
                }
            }
        }
        return roles;
    }

    /**
     * Returns every role.
     *
     * <p>Goes through the AOP proxy so that the {@code @DataScope} advice on
     * {@link #selectRoleList(SysRole)} is still applied on this self-invocation.
     *
     * @return all roles
     */
    @Override
    public List<SysRole> selectRoleAll() {
        return SpringUtils.getAopProxy(this).selectRoleList(new SysRole());
    }

    /**
     * Loads a single role by ID.
     *
     * @param roleId role ID
     * @return the role, or null if not found
     */
    @Override
    public SysRole selectRoleById(Long roleId) {
        return roleMapper.selectRoleById(roleId);
    }

    /**
     * Deletes a role by ID.
     *
     * @param roleId role ID
     * @return true if a row was deleted
     */
    @Override
    public boolean deleteRoleById(Long roleId) {
        // Direct boolean conversion instead of the redundant "? true : false".
        return roleMapper.deleteRoleById(roleId) > 0;
    }

    /**
     * Deletes roles in batch, refusing to delete the super-admin role or any
     * role still assigned to users.
     *
     * @param ids comma-separated role IDs
     * @return number of deleted rows
     * @throws BusinessException if a role is still assigned to users
     */
    @Override
    public int deleteRoleByIds(String ids) throws BusinessException {
        Long[] roleIds = Convert.toLongArray(ids);
        for (Long roleId : roleIds) {
            checkRoleAllowed(new SysRole(roleId));
            SysRole role = selectRoleById(roleId);
            if (countUserRoleByRoleId(roleId) > 0) {
                // Message text is user-facing and intentionally left unchanged.
                throw new BusinessException(String.format("%1$s已分配,不能删除", role.getRoleName()));
            }
        }
        return roleMapper.deleteRoleByIds(roleIds);
    }

    /**
     * Inserts a role together with its role-menu associations.
     *
     * @param role role to insert (menu IDs taken from {@code role.getMenuIds()})
     * @return number of role-menu rows inserted
     */
    @Override
    @Transactional
    public int insertRole(SysRole role) {
        // Insert the role row first so the generated role ID is available.
        roleMapper.insertRole(role);
        return insertRoleMenu(role);
    }

    /**
     * Updates a role and replaces its role-menu associations.
     *
     * @param role role to update
     * @return number of role-menu rows inserted
     */
    @Override
    @Transactional
    public int updateRole(SysRole role) {
        // Update the role row.
        roleMapper.updateRole(role);
        // Replace menu associations: delete old links, then re-insert.
        roleMenuMapper.deleteRoleMenuByRoleId(role.getRoleId());
        return insertRoleMenu(role);
    }

    /**
     * Updates a role's data-scope settings and replaces its role-dept associations.
     *
     * @param role role carrying the new data scope and dept IDs
     * @return number of role-dept rows inserted
     */
    @Override
    @Transactional
    public int authDataScope(SysRole role) {
        // Update the role row.
        roleMapper.updateRole(role);
        // Replace department associations (data scope): delete old links, then re-insert.
        roleDeptMapper.deleteRoleDeptByRoleId(role.getRoleId());
        return insertRoleDept(role);
    }

    /**
     * Inserts role-menu association rows for the role's selected menu IDs.
     *
     * @param role role whose {@code menuIds} should be linked
     * @return rows inserted, or 1 if there was nothing to insert
     */
    public int insertRoleMenu(SysRole role) {
        int rows = 1;
        List<SysRoleMenu> list = new ArrayList<>();
        for (Long menuId : role.getMenuIds()) {
            SysRoleMenu rm = new SysRoleMenu();
            rm.setRoleId(role.getRoleId());
            rm.setMenuId(menuId);
            list.add(rm);
        }
        if (!list.isEmpty()) {
            rows = roleMenuMapper.batchRoleMenu(list);
        }
        return rows;
    }

    /**
     * Inserts role-dept association rows (data scope) for the role's selected dept IDs.
     *
     * @param role role whose {@code deptIds} should be linked
     * @return rows inserted, or 1 if there was nothing to insert
     */
    public int insertRoleDept(SysRole role) {
        int rows = 1;
        List<SysRoleDept> list = new ArrayList<>();
        for (Long deptId : role.getDeptIds()) {
            SysRoleDept rd = new SysRoleDept();
            rd.setRoleId(role.getRoleId());
            rd.setDeptId(deptId);
            list.add(rd);
        }
        if (!list.isEmpty()) {
            rows = roleDeptMapper.batchRoleDept(list);
        }
        return rows;
    }

    /**
     * Checks whether the role name is unique (ignoring the role itself on update).
     *
     * @param role role carrying the candidate name
     * @return {@link UserConstants#ROLE_NAME_UNIQUE} or {@link UserConstants#ROLE_NAME_NOT_UNIQUE}
     */
    @Override
    public String checkRoleNameUnique(SysRole role) {
        // -1 can never match a real role ID, so inserts compare against nothing.
        Long roleId = StringUtils.isNull(role.getRoleId()) ? -1L : role.getRoleId();
        SysRole info = roleMapper.checkRoleNameUnique(role.getRoleName());
        if (StringUtils.isNotNull(info) && info.getRoleId().longValue() != roleId.longValue()) {
            return UserConstants.ROLE_NAME_NOT_UNIQUE;
        }
        return UserConstants.ROLE_NAME_UNIQUE;
    }

    /**
     * Checks whether the role key is unique (ignoring the role itself on update).
     *
     * @param role role carrying the candidate key
     * @return {@link UserConstants#ROLE_KEY_UNIQUE} or {@link UserConstants#ROLE_KEY_NOT_UNIQUE}
     */
    @Override
    public String checkRoleKeyUnique(SysRole role) {
        Long roleId = StringUtils.isNull(role.getRoleId()) ? -1L : role.getRoleId();
        SysRole info = roleMapper.checkRoleKeyUnique(role.getRoleKey());
        if (StringUtils.isNotNull(info) && info.getRoleId().longValue() != roleId.longValue()) {
            return UserConstants.ROLE_KEY_NOT_UNIQUE;
        }
        return UserConstants.ROLE_KEY_UNIQUE;
    }

    /**
     * Rejects any operation targeting the built-in super-admin role.
     *
     * @param role role to check
     * @throws BusinessException if the role is the super admin
     */
    @Override
    public void checkRoleAllowed(SysRole role) {
        if (StringUtils.isNotNull(role.getRoleId()) && role.isAdmin()) {
            throw new BusinessException("不允许操作超级管理员角色");
        }
    }

    /**
     * Counts how many users are assigned to a role.
     *
     * @param roleId role ID
     * @return number of user-role rows
     */
    @Override
    public int countUserRoleByRoleId(Long roleId) {
        return userRoleMapper.countUserRoleByRoleId(roleId);
    }

    /**
     * Changes a role's status (enabled/disabled).
     *
     * @param role role carrying the new status
     * @return rows updated
     */
    @Override
    public int changeStatus(SysRole role) {
        return roleMapper.updateRole(role);
    }

    /**
     * Revokes a single user's role assignment.
     *
     * @param userRole the user-role link to remove
     * @return rows deleted
     */
    @Override
    public int deleteAuthUser(SysUserRole userRole) {
        return userRoleMapper.deleteUserRoleInfo(userRole);
    }

    /**
     * Revokes a role from a batch of users.
     *
     * @param roleId role ID
     * @param userIds comma-separated user IDs to revoke
     * @return rows deleted
     */
    @Override
    public int deleteAuthUsers(Long roleId, String userIds) {
        return userRoleMapper.deleteUserRoleInfos(roleId, Convert.toLongArray(userIds));
    }

    /**
     * Grants a role to a batch of users.
     *
     * @param roleId role ID
     * @param userIds comma-separated user IDs to authorize
     * @return rows inserted
     */
    @Override
    public int insertAuthUsers(Long roleId, String userIds) {
        Long[] users = Convert.toLongArray(userIds);
        List<SysUserRole> list = new ArrayList<>();
        for (Long userId : users) {
            SysUserRole ur = new SysUserRole();
            ur.setUserId(userId);
            ur.setRoleId(roleId);
            list.add(ur);
        }
        return userRoleMapper.batchUserRole(list);
    }
}
/*
 * Copyright 2002-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.format;

import org.springframework.core.convert.converter.ConverterRegistry;

import java.lang.annotation.Annotation;

/**
 * A registry of field formatting logic.
 *
 * <p>Extends {@link ConverterRegistry}, so a single registry can hold both
 * general converters and field-specific formatters.
 *
 * @author Keith Donald
 * @author Juergen Hoeller
 * @since 3.0
 */
public interface FormatterRegistry extends ConverterRegistry {

    /**
     * Adds a Formatter to format fields of a specific type.
     * The field type is implied by the parameterized Formatter instance.
     *
     * @param formatter the formatter to add
     * @see #addFormatterForFieldType(Class, Formatter)
     * @since 3.1
     */
    void addFormatter(Formatter<?> formatter);

    /**
     * Adds a Formatter to format fields of the given type.
     * <p>On print, if the Formatter's type T is declared and {@code fieldType} is not assignable to T,
     * a coercion to T will be attempted before delegating to {@code formatter} to print a field value.
     * On parse, if the parsed object returned by {@code formatter} is not assignable to the runtime field type,
     * a coercion to the field type will be attempted before returning the parsed field value.
     *
     * @param fieldType the field type to format
     * @param formatter the formatter to add
     */
    void addFormatterForFieldType(Class<?> fieldType, Formatter<?> formatter);

    /**
     * Adds a Printer/Parser pair to format fields of a specific type.
     * The formatter will delegate to the specified {@code printer} for printing
     * and the specified {@code parser} for parsing.
     * <p>On print, if the Printer's type T is declared and {@code fieldType} is not assignable to T,
     * a coercion to T will be attempted before delegating to {@code printer} to print a field value.
     * On parse, if the object returned by the Parser is not assignable to the runtime field type,
     * a coercion to the field type will be attempted before returning the parsed field value.
     *
     * @param fieldType the field type to format
     * @param printer the printing part of the formatter
     * @param parser the parsing part of the formatter
     */
    void addFormatterForFieldType(Class<?> fieldType, Printer<?> printer, Parser<?> parser);

    /**
     * Adds a Formatter to format fields annotated with a specific format annotation.
     *
     * @param annotationFormatterFactory the annotation formatter factory to add
     */
    void addFormatterForFieldAnnotation(AnnotationFormatterFactory<? extends Annotation> annotationFormatterFactory);
}
package me.alex.hackathon.pages; import java.io.IOException; import me.alex.hackathon.database.Database; import me.alex.hackathon.database.Post; public class Vote extends Page { @Override public String getURL() { return "/vote"; } @Override public String generateResponse(String body) { long postId = Long.valueOf(body.split(",")[0]); long newState = Long.valueOf(body.split(",")[1]); for (Post post : Database.getAllPosts()) { if (post.id == postId) { long currState = post.voteState; if (currState == 0 && newState == 1) { post.numUpvotes++; } else if (currState == 0 && newState == 2) { post.numDownvotes++; } else if (currState == 1 && newState == 2) { post.numUpvotes--; post.numDownvotes++; } else if (currState == 2 && newState == 1) { post.numDownvotes--; post.numUpvotes++; } else if (currState == 1 && newState == 0) { post.numUpvotes--; } else if (currState == 2 && newState == 0) { post.numDownvotes--; } post.voteState = newState; try { Database.saveAllPosts(); } catch (IOException e) { e.printStackTrace(); } return "{\"voted\": true}"; } } return "{\"error\": \"Post does not exist\"}"; } }
package org.innovateuk.ifs.assessment.workflow.actions;

import org.innovateuk.ifs.assessment.domain.Assessment;
import org.innovateuk.ifs.assessment.domain.AssessorFormInputResponse;
import org.innovateuk.ifs.assessment.domain.AverageAssessorScore;
import org.innovateuk.ifs.assessment.repository.AssessorFormInputResponseRepository;
import org.innovateuk.ifs.assessment.repository.AverageAssessorScoreRepository;
import org.innovateuk.ifs.assessment.resource.AssessmentEvent;
import org.innovateuk.ifs.assessment.resource.AssessmentState;
import org.innovateuk.ifs.assessment.workflow.configuration.AssessmentWorkflow;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.statemachine.StateContext;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Optional;

import static org.innovateuk.ifs.form.resource.FormInputType.ASSESSOR_SCORE;

/**
 * The {@code SubmitAction} is used by the assessor. It handles the submit event
 * for an application after assessment and during funding decision notifications.
 * For more info see {@link AssessmentWorkflow}
 */
@Component
public class SubmitAction extends BaseAssessmentAction {

    @Autowired
    private AssessorFormInputResponseRepository assessorFormInputResponseRepository;

    @Autowired
    private AverageAssessorScoreRepository averageAssessorScoreRepository;

    /**
     * Recomputes the application's average assessor score from all assessor
     * score responses and upserts the {@link AverageAssessorScore} record.
     */
    @Override
    protected void doExecute(Assessment assessment, StateContext<AssessmentState, AssessmentEvent> context) {
        List<AssessorFormInputResponse> responses =
                assessorFormInputResponseRepository.findByAssessmentTargetId(assessment.getTarget().getId());
        BigDecimal percentage = getAveragePercentage(responses);

        Optional<AverageAssessorScore> averageAssessorScore =
                averageAssessorScoreRepository.findByApplicationId(assessment.getTarget().getId());
        if (averageAssessorScore.isPresent()) {
            // Update the existing record in place (managed entity).
            averageAssessorScore.get().setScore(percentage);
        } else {
            averageAssessorScoreRepository.save(new AverageAssessorScore(assessment.getTarget(), percentage));
        }
    }

    /**
     * Averages the assessor-score responses as a percentage of each question's
     * maximum score, rounded half-up to one decimal place.
     *
     * @param responses all form input responses for the assessment target
     * @return average percentage, or 0.0 if there are no score responses
     */
    private BigDecimal getAveragePercentage(List<AssessorFormInputResponse> responses) {
        return BigDecimal.valueOf(responses.stream()
                .filter(input -> input.getFormInput().getType() == ASSESSOR_SCORE)
                .filter(response -> response.getValue() != null)
                .mapToDouble(value -> (Double.parseDouble(value.getValue())
                        / value.getFormInput().getQuestion().getAssessorMaximumScore()) * 100.0)
                .average()
                // Deprecated BigDecimal.ROUND_HALF_UP replaced with RoundingMode.HALF_UP.
                .orElse(0.0)).setScale(1, RoundingMode.HALF_UP);
    }
}
package com.baeldung.web.controller.handlermapping; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.AbstractController; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; public class BeanNameHandlerMappingController extends AbstractController { @Override protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) throws Exception { ModelAndView model = new ModelAndView("bean-name-handler-mapping"); return model; } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.search.basic;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Priority;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;

import static org.elasticsearch.client.Requests.clusterHealthRequest;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

/**
 * Verifies that searching a geo_point field with a match query fails on every
 * shard, both with one node and after the index is relocated to a second node.
 */
public class TransportSearchFailuresIT extends ESIntegTestCase {

    @Override
    protected int maximumNumberOfReplicas() {
        return 1;
    }

    public void testFailedSearchWithWrongQuery() throws Exception {
        logger.info("Start Testing failed search with wrong query");
        // "foo" is a geo_point; a match query against it must fail on all shards.
        assertAcked(prepareCreate("test", 1).setMapping("foo", "type=geo_point"));
        NumShards test = getNumShards("test");

        for (int i = 0; i < 100; i++) {
            index(client(), Integer.toString(i), "test", i);
        }
        RefreshResponse refreshResponse = client().admin().indices().refresh(refreshRequest("test")).actionGet();
        assertThat(refreshResponse.getTotalShards(), equalTo(test.totalNumShards));
        assertThat(refreshResponse.getSuccessfulShards(), equalTo(test.numPrimaries));
        assertThat(refreshResponse.getFailedShards(), equalTo(0));

        // Phase 1: single node — only primaries are active.
        assertWrongQuerySearchFails(test);

        // Phase 2: bring up a second node so replicas allocate, then retry.
        allowNodes("test", 2);
        assertThat(
            client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes(">=2").get().isTimedOut(),
            equalTo(false)
        );

        logger.info("Running Cluster Health");
        ClusterHealthResponse clusterHealth = client().admin()
            .cluster()
            .health(
                clusterHealthRequest("test").waitForYellowStatus()
                    .waitForNoRelocatingShards(true)
                    .waitForEvents(Priority.LANGUID)
                    .waitForActiveShards(test.totalNumShards)
            )
            .actionGet();
        logger.info("Done Cluster Health, status {}", clusterHealth.getStatus());
        assertThat(clusterHealth.isTimedOut(), equalTo(false));
        assertThat(clusterHealth.getStatus(), anyOf(equalTo(ClusterHealthStatus.YELLOW), equalTo(ClusterHealthStatus.GREEN)));
        assertThat(clusterHealth.getActiveShards(), equalTo(test.totalNumShards));

        refreshResponse = client().admin().indices().refresh(refreshRequest("test")).actionGet();
        assertThat(refreshResponse.getTotalShards(), equalTo(test.totalNumShards));
        assertThat(refreshResponse.getSuccessfulShards(), equalTo(test.totalNumShards));
        assertThat(refreshResponse.getFailedShards(), equalTo(0));

        assertWrongQuerySearchFails(test);

        logger.info("Done Testing failed search");
    }

    /**
     * Runs the invalid match-on-geo_point search several times and asserts each
     * attempt fails with a {@link SearchPhaseExecutionException}. Extracted from
     * the two previously duplicated loops in the test method.
     */
    private void assertWrongQuerySearchFails(NumShards test) {
        for (int i = 0; i < 5; i++) {
            try {
                SearchResponse searchResponse = client().search(
                    searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))
                ).actionGet();
                // If the search somehow returns, it must report total failure...
                assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries));
                assertThat(searchResponse.getSuccessfulShards(), equalTo(0));
                assertThat(searchResponse.getFailedShards(), equalTo(test.numPrimaries));
                // ...but a fully-failed search should have thrown instead.
                fail("search should fail");
            } catch (ElasticsearchException e) {
                assertThat(e.unwrapCause(), instanceOf(SearchPhaseExecutionException.class));
                // all is well
            }
        }
    }

    /** Indexes a single test document with the given id and name/age payload. */
    private void index(Client client, String id, String nameValue, int age) throws IOException {
        client.index(Requests.indexRequest("test").id(id).source(source(id, nameValue, age))).actionGet();
    }

    /** Builds the test document source; "multi" repeats the name (age + 1) times. */
    private XContentBuilder source(String id, String nameValue, int age) throws IOException {
        StringBuilder multi = new StringBuilder().append(nameValue);
        for (int i = 0; i < age; i++) {
            multi.append(" ").append(nameValue);
        }
        return jsonBuilder().startObject()
            .field("id", id)
            .field("name", nameValue + id)
            .field("age", age)
            .field("multi", multi.toString())
            .endObject();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.maoo.indexer.client;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Immutable value object describing an Alfresco change-tracking response:
 * the last seen transaction and ACL changeset ids, the store coordinates,
 * and the documents returned for this window.
 */
public class AlfrescoResponse {

    private final long lastTransactionId;
    private final long lastAclChangesetId;
    private final String storeId;
    private final String storeProtocol;
    private final Iterable<Map<String, Object>> documents;

    /**
     * Creates a fully populated response.
     *
     * @param lastTransactionId  id of the last transaction covered
     * @param lastAclChangesetId id of the last ACL changeset covered
     * @param storeId            Alfresco store identifier
     * @param storeProtocol      Alfresco store protocol
     * @param documents          documents returned for this window
     */
    public AlfrescoResponse(long lastTransactionId,
                            long lastAclChangesetId,
                            String storeId,
                            String storeProtocol,
                            Iterable<Map<String, Object>> documents) {
        this.lastTransactionId = lastTransactionId;
        this.lastAclChangesetId = lastAclChangesetId;
        this.storeId = storeId;
        this.storeProtocol = storeProtocol;
        this.documents = documents;
    }

    /**
     * Creates an empty response carrying only the change-tracking ids:
     * blank store coordinates and no documents.
     */
    public AlfrescoResponse(long lastTransactionId, long lastAclChangesetId) {
        this(lastTransactionId, lastAclChangesetId, "", "",
                Collections.<Map<String, Object>>emptyList());
    }

    public long getLastTransactionId() {
        return lastTransactionId;
    }

    public long getLastAclChangesetId() {
        return lastAclChangesetId;
    }

    public String getStoreId() {
        return storeId;
    }

    public String getStoreProtocol() {
        return storeProtocol;
    }

    public Iterable<Map<String, Object>> getDocuments() {
        return documents;
    }

    /**
     * Materializes {@link #getDocuments()} into a fresh mutable list.
     */
    public List<Map<String, Object>> getDocumentList() {
        final List<Map<String, Object>> copy = new ArrayList<Map<String, Object>>();
        for (final Map<String, Object> document : documents) {
            copy.add(document);
        }
        return copy;
    }
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package scjp.collections; /** * * Adrian Lapierre <adrian@softproject.com.pl> */ public class TestInteger { public static void main(String[] args) { Integer i1 = 127; Integer i2 = 127; System.out.println(i1 == i2); } }
package com.sun.corba.se.spi.activation; /** * com/sun/corba/se/spi/activation/ServerAlreadyInstalledHolder.java . * Generated by the IDL-to-Java compiler (portable), version "3.2" * from c:/ade/jenkins/workspace/8-2-build-windows-amd64-cygwin/jdk8u241/331/corba/src/share/classes/com/sun/corba/se/spi/activation/activation.idl * Wednesday, December 11, 2019 10:23:05 AM UTC */ public final class ServerAlreadyInstalledHolder implements org.omg.CORBA.portable.Streamable { public com.sun.corba.se.spi.activation.ServerAlreadyInstalled value = null; public ServerAlreadyInstalledHolder () { } public ServerAlreadyInstalledHolder (com.sun.corba.se.spi.activation.ServerAlreadyInstalled initialValue) { value = initialValue; } public void _read (org.omg.CORBA.portable.InputStream i) { value = com.sun.corba.se.spi.activation.ServerAlreadyInstalledHelper.read (i); } public void _write (org.omg.CORBA.portable.OutputStream o) { com.sun.corba.se.spi.activation.ServerAlreadyInstalledHelper.write (o, value); } public org.omg.CORBA.TypeCode _type () { return com.sun.corba.se.spi.activation.ServerAlreadyInstalledHelper.type (); } }
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.jdbc;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.sql.DataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.boot.context.properties.source.InvalidConfigurationPropertyValueException;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.jdbc.DataSourceInitializationMode;
import org.springframework.boot.jdbc.EmbeddedDatabaseConnection;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.jdbc.config.SortedResourcesFactoryBean;
import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.util.StringUtils;

/**
 * Initialize a {@link DataSource} based on a matching {@link DataSourceProperties}
 * config. Runs DDL ("schema") and DML ("data") scripts located either from the
 * explicit {@code spring.datasource.schema}/{@code spring.datasource.data}
 * properties or from conventional classpath locations.
 *
 * @author Dave Syer
 * @author Phillip Webb
 * @author Eddú Meléndez
 * @author Stephane Nicoll
 * @author Kazuki Shimizu
 */
class DataSourceInitializer {

	private static final Log logger = LogFactory.getLog(DataSourceInitializer.class);

	private final DataSource dataSource;

	private final DataSourceProperties properties;

	private final ResourceLoader resourceLoader;

	/**
	 * Create a new instance with the {@link DataSource} to initialize and its matching
	 * {@link DataSourceProperties configuration}.
	 * @param dataSource the datasource to initialize
	 * @param properties the matching configuration
	 * @param resourceLoader the resource loader to use (can be null)
	 */
	DataSourceInitializer(DataSource dataSource, DataSourceProperties properties,
			ResourceLoader resourceLoader) {
		this.dataSource = dataSource;
		this.properties = properties;
		// Fall back to a default loader so script lookup always works.
		this.resourceLoader = (resourceLoader != null) ? resourceLoader
				: new DefaultResourceLoader();
	}

	/**
	 * Create a new instance with the {@link DataSource} to initialize and its matching
	 * {@link DataSourceProperties configuration}.
	 * @param dataSource the datasource to initialize
	 * @param properties the matching configuration
	 */
	DataSourceInitializer(DataSource dataSource, DataSourceProperties properties) {
		this(dataSource, properties, null);
	}

	public DataSource getDataSource() {
		return this.dataSource;
	}

	/**
	 * Create the schema if necessary.
	 * @return {@code true} if the schema was created
	 * @see DataSourceProperties#getSchema()
	 */
	public boolean createSchema() {
		List<Resource> scripts = getScripts("spring.datasource.schema",
				this.properties.getSchema(), "schema");
		if (!scripts.isEmpty()) {
			if (!isEnabled()) {
				logger.debug("Initialization disabled (not running DDL scripts)");
				return false;
			}
			// Schema scripts may run under dedicated credentials if configured.
			String username = this.properties.getSchemaUsername();
			String password = this.properties.getSchemaPassword();
			runScripts(scripts, username, password);
		}
		return !scripts.isEmpty();
	}

	/**
	 * Initialize the schema if necessary by running the "data" (DML) scripts.
	 * @see DataSourceProperties#getData()
	 */
	public void initSchema() {
		List<Resource> scripts = getScripts("spring.datasource.data",
				this.properties.getData(), "data");
		if (!scripts.isEmpty()) {
			if (!isEnabled()) {
				logger.debug("Initialization disabled (not running data scripts)");
				return;
			}
			// Data scripts may run under dedicated credentials if configured.
			String username = this.properties.getDataUsername();
			String password = this.properties.getDataPassword();
			runScripts(scripts, username, password);
		}
	}

	// Initialization runs unless the mode is NEVER, or EMBEDDED with a
	// non-embedded datasource.
	private boolean isEnabled() {
		DataSourceInitializationMode mode = this.properties.getInitializationMode();
		if (mode == DataSourceInitializationMode.NEVER) {
			return false;
		}
		if (mode == DataSourceInitializationMode.EMBEDDED && !isEmbedded()) {
			return false;
		}
		return true;
	}

	// Best-effort detection; treat "can't tell" as "not embedded".
	private boolean isEmbedded() {
		try {
			return EmbeddedDatabaseConnection.isEmbedded(this.dataSource);
		}
		catch (Exception ex) {
			logger.debug("Could not determine if datasource is embedded", ex);
			return false;
		}
	}

	// Resolves script locations: explicit property values are validated strictly;
	// otherwise fall back to "<fallback>-<platform>.sql" then "<fallback>.sql",
	// where missing fallback files are simply skipped.
	private List<Resource> getScripts(String propertyName, List<String> resources,
			String fallback) {
		if (resources != null) {
			return getResources(propertyName, resources, true);
		}
		String platform = this.properties.getPlatform();
		List<String> fallbackResources = new ArrayList<>();
		fallbackResources.add("classpath*:" + fallback + "-" + platform + ".sql");
		fallbackResources.add("classpath*:" + fallback + ".sql");
		return getResources(propertyName, fallbackResources, false);
	}

	// Expands each location pattern; when validate is true, a configured location
	// that resolves to a non-existent resource is a configuration error.
	private List<Resource> getResources(String propertyName, List<String> locations,
			boolean validate) {
		List<Resource> resources = new ArrayList<>();
		for (String location : locations) {
			for (Resource resource : doGetResources(location)) {
				if (resource.exists()) {
					resources.add(resource);
				}
				else if (validate) {
					throw new InvalidConfigurationPropertyValueException(propertyName,
							resource, "The specified resource does not exist.");
				}
			}
		}
		return resources;
	}

	// Uses SortedResourcesFactoryBean so wildcard matches come back in a
	// deterministic order.
	private Resource[] doGetResources(String location) {
		try {
			SortedResourcesFactoryBean factory = new SortedResourcesFactoryBean(
					this.resourceLoader, Collections.singletonList(location));
			factory.afterPropertiesSet();
			return factory.getObject();
		}
		catch (Exception ex) {
			throw new IllegalStateException(
					"Unable to load resources from " + location, ex);
		}
	}

	// Executes the scripts; when script-specific credentials are supplied, a
	// separate DataSource is built so the scripts run under those credentials.
	private void runScripts(List<Resource> resources, String username, String password) {
		if (resources.isEmpty()) {
			return;
		}
		ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
		populator.setContinueOnError(this.properties.isContinueOnError());
		populator.setSeparator(this.properties.getSeparator());
		if (this.properties.getSqlScriptEncoding() != null) {
			populator.setSqlScriptEncoding(this.properties.getSqlScriptEncoding().name());
		}
		for (Resource resource : resources) {
			populator.addScript(resource);
		}
		DataSource dataSource = this.dataSource;
		if (StringUtils.hasText(username) && StringUtils.hasText(password)) {
			dataSource = DataSourceBuilder.create(this.properties.getClassLoader())
					.driverClassName(this.properties.determineDriverClassName())
					.url(this.properties.determineUrl()).username(username)
					.password(password).build();
		}
		DatabasePopulatorUtils.execute(populator, dataSource);
	}

}
package tk.martijn_heil.nincore.api.events; import tk.martijn_heil.nincore.api.ArmorType; import org.bukkit.Material; import org.bukkit.entity.Player; import org.bukkit.event.Cancellable; import org.bukkit.event.HandlerList; import org.bukkit.event.player.PlayerEvent; import org.bukkit.inventory.ItemStack; /** * @author Borlea * https://github.com/borlea/ * http://codingforcookies.com/ * @since Jul 30, 2015 */ public final class ArmorEquipEvent extends PlayerEvent implements Cancellable { private static final HandlerList handlers = new HandlerList(); private boolean cancel = false; private final EquipMethod equipType; private final ArmorType type; private ItemStack oldArmorPiece, newArmorPiece; /** * Constructor for the ArmorEquipEvent. * * @param player The player who put on / removed the armor. * @param type The ArmorType of the armor added * @param oldArmorPiece The ItemStack of the armor removed. * @param newArmorPiece The ItemStack of the armor added. */ public ArmorEquipEvent(final Player player, final EquipMethod equipType, final ArmorType type, final ItemStack oldArmorPiece, final ItemStack newArmorPiece) { super(player); this.equipType = equipType; this.type = type; this.oldArmorPiece = oldArmorPiece; this.newArmorPiece = newArmorPiece; } /** * Gets a list of handlers handling this event. * * @return A list of handlers handling this event. */ public final static HandlerList getHandlerList() { return handlers; } /** * Gets a list of handlers handling this event. * * @return A list of handlers handling this event. */ @Override public final HandlerList getHandlers() { return handlers; } /** * Sets if this event should be cancelled. * * @param cancel If this event should be cancelled. */ public final void setCancelled(final boolean cancel) { this.cancel = cancel; } /** * Gets if this event is cancelled. 
* * @return If this event is cancelled */ public final boolean isCancelled() { return cancel; } public final ArmorType getType() { return type; } /** * Returns the last equipped armor piece, could be a piece of armor, {@link Material#AIR}, or null. */ public final ItemStack getOldArmorPiece() { return oldArmorPiece; } public final void setOldArmorPiece(final ItemStack oldArmorPiece) { this.oldArmorPiece = oldArmorPiece; } /** * Returns the newly equipped armor, could be a piece of armor, {@link Material#AIR}, or null. */ public final ItemStack getNewArmorPiece() { return newArmorPiece; } public final void setNewArmorPiece(final ItemStack newArmorPiece) { this.newArmorPiece = newArmorPiece; } /** * Gets the method used to either equip or unequip an armor piece. */ public EquipMethod getMethod() { return equipType; } public enum EquipMethod { /** * When you shift click an armor piece to equip or unequip */ SHIFT_CLICK, /** * When you drag and drop the item to equip or unequip */ DRAG, /** * When you right click an armor piece in the hotbar without the inventory open to equip. */ HOTBAR, /** * When you press the hotbar slot number while hovering over the armor slot to equip or unequip */ HOTBAR_SWAP, /** * When in range of a dispenser that shoots an armor piece to equip. */ DISPENSER, /** * When an armor piece breaks to unequip */ BROKE, /** * When you die causing all armor to unequip */ DEATH } }
package org.dhis2.usescases.about;

import org.hisp.dhis.android.core.user.UserCredentials;

/**
 * MVP contract for the "About" screen.
 * <p>
 * QUADRAM. Created by ppajuelo on 05/07/2018.
 */
public class AboutContracts {

    /** View side of the contract: renders data pushed by the presenter. */
    public interface AboutView {

        /** Displays the logged-in user's credentials. */
        void renderUserCredentials(UserCredentials userCredentialsModel);

        /** Displays the server URL the app is connected to. */
        void renderServerUrl(String serverUrl);
    }

    /** Presenter side of the contract: drives the view. */
    public interface AboutPresenter {

        /** Binds the presenter to its view and triggers initial loading. */
        void init(AboutView aboutFragment);

        /** Notifies the presenter that the view is pausing. */
        void onPause();
    }
}
package org.bian.dto; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import javax.validation.Valid; /** * BQDemographicsUpdateInputModelDemographicsInstanceRecord */ public class BQDemographicsUpdateInputModelDemographicsInstanceRecord { private String socioEconomicClassification = null; private String ethnicityReligion = null; private String employment = null; private String employmentHistory = null; private String educationHistory = null; private String servicingConstraints = null; /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: A bank defined classification of the socio-economic rating and prospect * @return socioEconomicClassification **/ public String getSocioEconomicClassification() { return socioEconomicClassification; } public void setSocioEconomicClassification(String socioEconomicClassification) { this.socioEconomicClassification = socioEconomicClassification; } /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: Where needed to ensure the bank's handling of the relationship follows legal and regulatory requirements * @return ethnicityReligion **/ public String getEthnicityReligion() { return ethnicityReligion; } public void setEthnicityReligion(String ethnicityReligion) { this.ethnicityReligion = ethnicityReligion; } /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: Reference to the customer's employer (company name) * @return employment **/ public String getEmployment() { return employment; } public void setEmployment(String employment) { this.employment = employment; } /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: Details of 
the customer's employment history (e.g. employer, dates, job position, salary) * @return employmentHistory **/ public String getEmploymentHistory() { return employmentHistory; } public void setEmploymentHistory(String employmentHistory) { this.employmentHistory = employmentHistory; } /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: Customer's education (e.g. academic institutions attended, duration, qualifications and certifications) * @return educationHistory **/ public String getEducationHistory() { return educationHistory; } public void setEducationHistory(String educationHistory) { this.educationHistory = educationHistory; } /** * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: Description of any known constraints on servicing the customer (sight, hearing, religious considerations) * @return servicingConstraints **/ public String getServicingConstraints() { return servicingConstraints; } public void setServicingConstraints(String servicingConstraints) { this.servicingConstraints = servicingConstraints; } }
package com.karogath.enhancedvanilla.item; import net.minecraftforge.registries.ObjectHolder; import net.minecraft.item.ItemStack; import net.minecraft.item.ItemGroup; import net.minecraft.item.Item; import net.minecraft.block.BlockState; import com.karogath.enhancedvanilla.EnhancedvanillaModElements; @EnhancedvanillaModElements.ModElement.Tag public class GlowstoneShardItem extends EnhancedvanillaModElements.ModElement { @ObjectHolder("enhancedvanilla:glowstoneshard") public static final Item block = null; public GlowstoneShardItem(EnhancedvanillaModElements instance) { super(instance, 55); } @Override public void initElements() { elements.items.add(() -> new ItemCustom()); } public static class ItemCustom extends Item { public ItemCustom() { super(new Item.Properties().group(ItemGroup.MISC).maxStackSize(64)); setRegistryName("glowstoneshard"); } @Override public int getItemEnchantability() { return 0; } @Override public int getUseDuration(ItemStack itemstack) { return 0; } @Override public float getDestroySpeed(ItemStack par1ItemStack, BlockState par2Block) { return 1F; } } }
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.lib.io; import com.streamsets.pipeline.api.impl.Utils; import com.streamsets.pipeline.config.PostProcessingOptions; import com.streamsets.pipeline.lib.util.ThreadUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * The <code>MultiFileReader</code> is a Reader that allows to read multiple files in a 'tail -f' mode while * keeping track of the current offsets and detecting if the files has been renamed. * <p/> * It builds on top of the {@link SingleLineLiveFileReader} adding support for reading data from multiple files in different * directories. * <p/> * Directories are read in round-robin fashion to avoid starvation. * <p/> * The usage pattern is: * <p/> * <pre> * offsetMap = .... * reader.setOffsets(offsetMap); * chunk = reader.next(timeoutInMillis); * if (chunk != null) { * .... * } * offsetMap = reader.getOffsets(); * </pre> * <p/> * The offsetMap must be kept/persisted by the caller to ensure current offsets are not lost. 
 */
public class MultiFileReader implements Closeable {
  private static final Logger LOG = LoggerFactory.getLogger(MultiFileReader.class);

  // Maximum time (ms) a single yield sleep may last inside next() when a full
  // pass over all directories produced no data. Overridable via system property.
  private final static long MAX_YIELD_TIME = Integer.parseInt(System.getProperty("MultiFileReader.yield.ms", "500"));

  // Supplies FileContexts (one per tracked file) in round-robin order.
  private final FileContextProvider fileContextProvider;
  // Start/end file events accumulated since the last setOffsets() call.
  private final List<FileEvent> events;
  // Set false by close(); guards every public operation.
  private boolean open;
  private boolean inPreviewMode;

  /**
   * Creates a <code>MultiFileReader</code> that will scan/read multiple directories for data.
   *
   * @param fileInfos a list with the information for each directory to scan/read; must be non-empty.
   * @param charset the data charset (for all files)
   * @param maxLineLength the maximum line length (for all files); must be &gt; 1.
   * @param postProcessing what to do with fully-read files (NONE/DELETE/ARCHIVE).
   * @param archiveDir archive target; required (non-empty) only when postProcessing is ARCHIVE.
   * @param globbing when true, file paths may contain wildcards and late directory creation is supported.
   * @param scanIntervalSecs how often the glob provider rescans for new files (globbing only).
   * @param allowForLateDirectoryCreation whether directories may appear after startup (globbing only).
   * @param inPreviewMode whether the reader runs in pipeline preview mode.
   * @throws IOException thrown if there was an IO error while creating the reader.
   */
  public MultiFileReader(
      List<MultiFileInfo> fileInfos,
      Charset charset,
      int maxLineLength,
      PostProcessingOptions postProcessing,
      String archiveDir,
      boolean globbing,
      int scanIntervalSecs,
      boolean allowForLateDirectoryCreation,
      boolean inPreviewMode
  ) throws IOException {
    Utils.checkNotNull(fileInfos, "fileInfos");
    Utils.checkArgument(!fileInfos.isEmpty(), "fileInfos cannot be empty");
    Utils.checkNotNull(charset, "charset");
    Utils.checkArgument(maxLineLength > 1, "maxLineLength must be greater than one");
    Utils.checkNotNull(postProcessing, "postProcessing");
    Utils.checkArgument(
        postProcessing != PostProcessingOptions.ARCHIVE || (archiveDir != null && !archiveDir.isEmpty()),
        "archiveDir cannot be empty if postProcessing is ARCHIVE");
    // archiveDir is only meaningful for ARCHIVE post-processing; null it out otherwise.
    archiveDir = (postProcessing == PostProcessingOptions.ARCHIVE) ? archiveDir : null;
    this.inPreviewMode = inPreviewMode;
    events = new ArrayList<>(fileInfos.size() * 2);
    // Publisher simply appends to the events list; consumed via getEvents().
    FileEventPublisher eventPublisher = new FileEventPublisher() {
      @Override
      public void publish(FileEvent event) {
        events.add(event);
      }
    };
    // We assume ExactFileContextProvider has fileInfo which are exact and present.
    // We are using GlobFileInfo during FileTailSource which will allow for late
    // directory creation / supports wild cards.
    fileContextProvider = (globbing)?
        new GlobFileContextProvider(
            allowForLateDirectoryCreation,
            fileInfos,
            scanIntervalSecs,
            charset,
            maxLineLength,
            postProcessing,
            archiveDir,
            eventPublisher,
            inPreviewMode
        ) :
        new ExactFileContextProvider(
            fileInfos,
            charset,
            maxLineLength,
            postProcessing,
            archiveDir,
            eventPublisher,
            inPreviewMode
        );
    open = true;
  }

  /**
   * Sets the file offsets to use for the next read. To work correctly, the last return offsets should be used or
   * an empty <code>Map</code> if there is none.
   * <p/>
   * If a reader is already live, the corresponding set offset is ignored as we cache all the contextual information
   * of live readers.
   *
   * @param offsets directory offsets.
   * @throws IOException thrown if there was an IO error while preparing file offsets.
   */
  public void setOffsets(Map<String, String> offsets) throws IOException {
    Utils.checkState(open, "Not open");
    fileContextProvider.setOffsets(offsets);
    // we reset the events on every setOffsets().
    events.clear();
  }

  /**
   * Purge invalid file entries.
   */
  public void purge() {
    fileContextProvider.purge();
  }

  /**
   * Returns the current file offsets. The returned offsets should be set before the next read.
   *
   * @return the current file offsets.
   * @throws IOException thrown if there was an IO error while preparing file offsets.
   */
  public Map<String, String> getOffsets() throws IOException {
    Utils.checkState(open, "Not open");
    return fileContextProvider.getOffsets();
  }

  /**
   * Returns all file events (start and end) since the last {@link #setOffsets(java.util.Map)} call.
   *
   * @return all files events.
   */
  public List<FileEvent> getEvents() {
    return events;
  }

  // true if the elapsed time since startTime exceeds maxWaitTimeMillis
  private boolean isTimeout(long startTime, long maxWaitTimeMillis) {
    return (System.currentTimeMillis() - startTime) > maxWaitTimeMillis;
  }

  // remaining time till timeout, return zero if already in timeout
  private long getRemainingWaitTime(long startTime, long maxWaitTimeMillis) {
    long remaining = maxWaitTimeMillis - (System.currentTimeMillis() - startTime);
    return (remaining > 0) ? remaining : 0;
  }

  /**
   * Reads the next {@link LiveFileChunk} from the directories waiting the specified time for one.
   * <p/>
   * Files are polled round-robin; after a full fruitless pass over all directories
   * the thread sleeps (up to {@code MAX_YIELD_TIME} per nap) until either data
   * arrives, {@code waitMillis} elapses, or the sleep is interrupted.
   *
   * @param waitMillis number of milliseconds to block waiting for a chunk; negative values are treated as zero.
   * @return the next chunk, or <code>null</code> if there is no next chunk and the waiting time passed.
   */
  public LiveFileChunk next(long waitMillis) {
    Utils.checkState(open, "Not open");
    waitMillis = (waitMillis > 0) ? waitMillis : 0;
    long startTime = System.currentTimeMillis();
    LiveFileChunk chunk = null;
    boolean exit = false;
    fileContextProvider.startNewLoop();
    while (!exit) {
      if (!fileContextProvider.didFullLoop()) {
        FileContext fileContext = fileContextProvider.next();
        try {
          LiveFileReader reader = fileContext.getReader();
          if (reader != null) {
            if (reader.hasNext()) {
              // non-blocking read: we already know data is available
              chunk = reader.next(0);
              if (LOG.isTraceEnabled()) {
                LOG.trace("next(): directory '{}', file '{}', offset '{}' got data '{}'",
                    fileContext.getMultiFileInfo().getFileFullPath(), reader.getLiveFile(), reader.getOffset(),
                    chunk != null);
              }
            } else {
              if (LOG.isTraceEnabled()) {
                LOG.trace("next(): directory '{}', file '{}', offset '{}' EOF reached",
                    fileContext.getMultiFileInfo().getFileFullPath(), reader.getLiveFile(), reader.getOffset());
              }
            }
            fileContext.releaseReader(false);
          } else {
            if (LOG.isTraceEnabled()) {
              LOG.trace("next(): directory '{}', no reader available",
                  fileContext.getMultiFileInfo().getFileFullPath());
            }
          }
        } catch (IOException ex) {
          LOG.error("Error while reading file: {}", ex.toString(), ex);
          try {
            // release in error mode so the provider can discard/recreate the reader
            fileContext.releaseReader(true);
          } catch (IOException ex1) {
            LOG.warn("Error while releasing reader in error: {}", ex1.toString(), ex1);
          }
        }
      }
      // check exit conditions (we have a chunk, or we timed-out waitMillis)
      exit = chunk != null;
      if (!exit) {
        // if we looped thru all dir contexts in this call we yield CPU
        if (fileContextProvider.didFullLoop()) {
          exit = isTimeout(startTime, waitMillis);
          if (!exit && LOG.isTraceEnabled()) {
            LOG.trace("next(): looped through all directories, yielding CPU");
          }
          // ThreadUtil.sleep returning false (interrupted) also forces exit
          exit = exit || !ThreadUtil.sleep(Math.min(getRemainingWaitTime(startTime, waitMillis), MAX_YIELD_TIME));
          fileContextProvider.startNewLoop();
        }
      }
    }
    return chunk;
  }

  /**
   * Determines the offset lag for each active file being read.
   *
   * @param offsetMap the current Offset for file keys.
   * @return map of fileKey to offset.
   */
  public Map<String, Long> getOffsetsLag(Map<String, String> offsetMap) throws IOException{
    return fileContextProvider.getOffsetsLag(offsetMap);
  }

  /**
   * Determines the number of files yet to be processed.
   *
   * @return map of file key (One per directory where files are located) to the number of files
   */
  public Map<String, Long> getPendingFiles() throws IOException{
    return fileContextProvider.getPendingFiles();
  }

  /**
   * Closes all open readers. Idempotent: subsequent calls are no-ops.
   */
  @Override
  public void close() throws IOException {
    if (open) {
      open = false;
      fileContextProvider.close();
    }
  }
}
package site.linkway.core.entity.po;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Persistence object for a single comment, including the author's display
 * data and the number of replies. Accessors are generated by Lombok
 * ({@code @Data}).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class Comment {
    String commentId;   // comment id
    String content;     // comment text body
    String userId;      // author's user id
    String userName;    // author's nickname
    String avatarURL;   // author's avatar URL
    String time;        // when the comment was posted
    int rate;           // rating (note: child comments have no rate, but the field is still returned to the frontend)
    long childCount;    // number of child comments (replies) under this comment
}
package com.edu.lesson7; public class Sender { private String name; public Sender(String name) { this.name = name; } @Override public String toString() { return "Sender{" + "name='" + name + '\'' + '}'; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
import java.util.HashMap ;

/**
 * CanonicalTreeNode extends TreeNode to canonicalize the nodes.
 * We use a HashMap so we can return the canonicalized node.
 */
public class CanonicalTreeNode extends TreeNode {
   /**
    * We need to provide a hashCode() and an equals() method to be
    * able to hash these objects.
    * <p>
    * Leaves (level 0) hash by their population; interior nodes hash by the
    * identities of their four children. Children are themselves canonical,
    * so identity is sufficient to distinguish distinct subtrees.
    */
   public int hashCode() {
      if (level == 0)
         return (int)population ;
      return System.identityHashCode(nw) + 11 * System.identityHashCode(ne) +
             101 * System.identityHashCode(sw) + 1007 * System.identityHashCode(se) ;
   }
   public boolean equals(Object o) {
      // Fix: honor the Object.equals() contract. The previous version cast
      // unconditionally, so comparing against null or any non-TreeNode threw
      // instead of returning false. instanceof also covers the null case.
      if (!(o instanceof TreeNode))
         return false ;
      TreeNode t = (TreeNode)o ;
      if (level != t.level)
         return false ;
      if (level == 0)
         return alive == t.alive ;
      // Interior nodes compare children by reference; valid because all
      // children have already been canonicalized via intern().
      return nw == t.nw && ne == t.ne && sw == t.sw && se == t.se ;
   }
   /**
    * Given a node, return the canonical one if it exists, or make it
    * the canonical one.
    */
   TreeNode intern() {
      TreeNode canon = (TreeNode)hashMap.get(this) ;
      if (canon != null)
         return canon ;
      hashMap.put(this, this) ;
      return this ;
   }
   /**
    * Our canonicalization hashset (maps each node to itself so lookups can
    * return the canonical instance).
    */
   static HashMap hashMap = new HashMap() ;
   /**
    * Provide constructors. The rest of the code manages the factory
    * interface mechanism used by TreeNode. We use intern() in all
    * three create() functions to guarantee that all new nodes are
    * canonicalized.
    */
   CanonicalTreeNode(boolean alive) {
      super(alive) ;
   }
   CanonicalTreeNode(TreeNode nw, TreeNode ne, TreeNode sw, TreeNode se) {
      super(nw, ne, sw, se) ;
   }
   /**
    * We override the three create functions.
    */
   TreeNode create(boolean living) {
      return new CanonicalTreeNode(living).intern() ;
   }
   TreeNode create(TreeNode nw, TreeNode ne, TreeNode sw, TreeNode se) {
      return new CanonicalTreeNode(nw, ne, sw, se).intern() ;
   }
   static TreeNode create() {
      return new CanonicalTreeNode(false).emptyTree(3) ;
   }
}
package com.github.ddth.queue.qnd.rocksdb; import java.io.File; import org.apache.commons.io.FileUtils; import com.github.ddth.commons.rocksdb.RocksDbWrapper; import com.github.ddth.queue.utils.QueueUtils; public class QndRocksDbMultiThreads { public static void main(String[] args) throws Exception { File storageDir = new File("/tmp/rocksdb"); FileUtils.deleteQuietly(storageDir); storageDir.mkdirs(); final int NUM_THREADS = 4; final int NUM_ITEMS_PER_THREAD = 100000; final Thread[] THREADS = new Thread[NUM_THREADS]; final String[] CF_LIST = new String[NUM_THREADS]; for (int i = 0; i < NUM_THREADS; i++) { CF_LIST[i] = "CF" + i; } try (RocksDbWrapper rocskDb = RocksDbWrapper.openReadWrite(storageDir, CF_LIST)) { long t1 = System.currentTimeMillis(); for (int i = 0; i < NUM_THREADS; i++) { final int tid = i; Thread t = new Thread() { public void run() { final String cfName = "CF" + tid; for (int i = 0; i < NUM_ITEMS_PER_THREAD; i++) { String key = QueueUtils.IDGEN.generateId128Hex(); String value = String.valueOf(i + 1); rocskDb.put(cfName, key, value); } } }; t.start(); THREADS[i] = t; } for (int i = 0; i < NUM_THREADS; i++) { THREADS[i].join(); } long t2 = System.currentTimeMillis(); long d = t2 - t1; long total = (long) NUM_THREADS * (long) NUM_ITEMS_PER_THREAD; System.out.println( "Wrote " + total + " in " + d + " ms (" + (total * 1000.0 / d) + "items/sec"); } try (RocksDbWrapper rocskDbReadonly = RocksDbWrapper.openReadOnly(storageDir)) { System.out.println(rocskDbReadonly.getProperty("default", "rocksdb.estimate-num-keys")); System.out.println(rocskDbReadonly.getEstimateNumKeys("CF0")); System.out.println(rocskDbReadonly.getEstimateNumKeys("CF1")); System.out.println(rocskDbReadonly.getEstimateNumKeys("CF2")); System.out.println(rocskDbReadonly.getEstimateNumKeys("CF3")); System.out.println(rocskDbReadonly.getEstimateNumKeys("CF4")); } } }
package org.alljoyn.ioe.controlpanelbrowser;

/******************************************************************************
 * Copyright (c) 2013-2014, AllSeen Alliance. All rights reserved.
 *
 *    Permission to use, copy, modify, and/or distribute this software for any
 *    purpose with or without fee is hereby granted, provided that the above
 *    copyright notice and this permission notice appear in all copies.
 *
 *    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 *    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 *    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 *    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 *    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 *    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 *    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 ******************************************************************************/

import java.util.Collection;
import java.util.Locale;

import org.alljoyn.ioe.controlpaneladapter.ControlPanelAdapter;
import org.alljoyn.ioe.controlpaneladapter.ControlPanelExceptionHandler;
import org.alljoyn.ioe.controlpanelservice.ControlPanelCollection;
import org.alljoyn.ioe.controlpanelservice.ControlPanelException;
import org.alljoyn.ioe.controlpanelservice.ControlPanelService;
import org.alljoyn.ioe.controlpanelservice.ControllableDevice;
import org.alljoyn.ioe.controlpanelservice.DeviceEventsListener;
import org.alljoyn.ioe.controlpanelservice.Unit;
import org.alljoyn.ioe.controlpanelservice.ui.AlertDialogWidget;
import org.alljoyn.ioe.controlpanelservice.ui.ContainerWidget;
import org.alljoyn.ioe.controlpanelservice.ui.ControlPanelEventsListener;
import org.alljoyn.ioe.controlpanelservice.ui.DeviceControlPanel;
import org.alljoyn.ioe.controlpanelservice.ui.UIElement;
import org.alljoyn.ioe.controlpanelservice.ui.UIElementType;

import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;

/**
 * A fragment representing a single Appliance detail screen. This fragment is
 * either contained in a {@link DeviceListActivity} in two-pane mode (on
 * tablets) or a {@link DeviceDetailActivity} on handsets.
 * <p>
 * The inner {@link DeviceController} drives an AllJoyn session with the
 * device, lets the user pick a unit and a control-panel collection via
 * spinners, and renders the selected panel with {@link ControlPanelAdapter}.
 */
public class DeviceDetailFragment extends Fragment {

    /**
     * For logging
     */
    private final static String TAG = "cpappApplianceDetailFragment";

    /**
     * The fragment argument representing the item ID that this fragment
     * represents.
     */
    public static final String ARG_ITEM_ID = "item_id";

    /**
     * The device context (id, bus name, bus objects) this fragment is presenting.
     */
    private DeviceList.DeviceContext deviceContext;

    /**
     * The device controller this fragment is presenting.
     */
    private DeviceController deviceController;

    // Inflated in onCreateView(); null-checked before any UI update because
    // callbacks can arrive before/after the view exists.
    private View rootView;

    /**
     * Mandatory empty constructor for the fragment manager to instantiate the
     * fragment (e.g. upon screen orientation changes).
     */
    public DeviceDetailFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments().containsKey(ARG_ITEM_ID)) {
            // Load the device context passed by the hosting activity.
            deviceContext = getArguments().getParcelable(ARG_ITEM_ID);
            // get the controllable device and start a session with it
            try {
                ControllableDevice controllableDevice = ControlPanelService.getInstance().getControllableDevice(deviceContext.getDeviceId(), deviceContext.getBusName());
                if (controllableDevice != null) {
                    // Register every advertised control-panel bus object.
                    for (String objPath: deviceContext.getBusObjects()) {
                        controllableDevice.addControlPanel(objPath, deviceContext.getInterfaces(objPath));
                    }
                    deviceController = new DeviceController(controllableDevice);
                    deviceController.start();
                }
            } catch (ControlPanelException e1) {
                // Session setup failed; the screen stays empty.
                e1.printStackTrace();
            }
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        rootView = inflater.inflate(R.layout.fragment_device_detail, container, false);
        return rootView;
    }

    @Override
    public void onDestroy() {
        // Tear down the AllJoyn session before the fragment goes away.
        if (deviceController != null) {
            deviceController.stop();
        }
        super.onDestroy();
    }

    /**
     * Owns the AllJoyn session with one device: receives device/session events,
     * drives panel selection, and forwards UI-model changes to the adapter.
     * All UI mutations are posted to the UI thread via runOnUiThread().
     */
    class DeviceController implements DeviceEventsListener, ControlPanelExceptionHandler, ControlPanelEventsListener {

        final ControllableDevice device;
        private DeviceControlPanel deviceControlPanel;
        private ControlPanelAdapter controlPanelAdapter;
        // NOTE(review): this field is never assigned anywhere in this class —
        // onControlPanelSelected() declares a LOCAL variable of the same name,
        // so notificationActionDismiss() can never dismiss the dialog it shows.
        // Looks like a bug; confirm intended behavior.
        private AlertDialog alertDialog;

        DeviceController(ControllableDevice controllableDevice) {
            this.device = controllableDevice;
        }

        /** Starts the AllJoyn session; callbacks arrive on this listener. */
        public void start() throws ControlPanelException {
            try {
                Log.d(TAG, "Starting the session with the device");
                if (device != null)
                    device.startSession(this);
            } catch (Exception e) {
                // Best-effort: log and keep the fragment alive.
                e.printStackTrace();
            }
        }

        /** Releases the active control panel and ends the session. */
        public void stop() {
            try {
                Log.d(TAG, "Releasing the device control panel");
                if (deviceControlPanel != null) {
                    deviceControlPanel.release();
                }
                Log.d(TAG, "Stopping the session with the device");
                if (device != null) {
                    device.endSession();
                }
            } catch (Exception e) {
                // Best-effort cleanup: log and continue.
                e.printStackTrace();
            }
        }

        @Override
        public void sessionLost(final ControllableDevice device) {
            // Only react to events for the device this controller owns.
            if (this.device.getDeviceId().equalsIgnoreCase(device.getDeviceId())) {
                getActivity().runOnUiThread(new Runnable(){
                    @Override
                    public void run() {
                        String text = "Received SESSION_LOST for device: '" + device.getDeviceId() + "'";
                        Log.d(TAG, text);
                        Toast.makeText(getActivity(), text, Toast.LENGTH_LONG).show();
                    }});
            }
        }

        @Override
        public void sessionEstablished(final ControllableDevice device, java.util.Collection<ControlPanelCollection> controlPanelContainer) {
            Log.d(TAG, "Received sessionEstablished for device: '" + device.getDeviceId() + "'");
            // Populate the unit spinner on the UI thread.
            getActivity().runOnUiThread(new Runnable(){
                @Override
                public void run() {
                    selectControlPanel(device);
                }
            });
        }

        /**
         * Renders the currently selected control panel: a CONTAINER root is
         * inflated into the layout; an ALERT_DIALOG root is shown as a dialog.
         */
        private void onControlPanelSelected() {
            try {
                UIElement rootContainerElement = deviceControlPanel.getRootElement(this);
                if ( rootContainerElement == null ) {
                    Log.e(TAG, "RootContainerElement wasn't created!!! Can't continue");
                    return;
                }
                controlPanelAdapter = new ControlPanelAdapter(getActivity(), this);
                UIElementType elementType = rootContainerElement.getElementType();
                Log.d(TAG, "Found root container of type: '" + elementType + "'");
                if ( elementType == UIElementType.CONTAINER ) {
                    ContainerWidget container = ((ContainerWidget)rootContainerElement);
                    /* create an android view for the abstract container */
                    final View adapterView = controlPanelAdapter.createContainerView(container);
                    getActivity().runOnUiThread(new Runnable(){
                        @Override
                        public void run() {
                            if (rootView != null) {
                                LinearLayout body = (LinearLayout) rootView.findViewById(R.id.control_panel);
                                body.removeAllViews();
                                body.addView(adapterView);
                            }
                        }});
                }
                else if ( elementType == UIElementType.ALERT_DIALOG ) {
                    AlertDialogWidget alertDialogWidget = ((AlertDialogWidget)rootContainerElement);
                    // NOTE(review): local shadows the 'alertDialog' field (see field note).
                    AlertDialog alertDialog = controlPanelAdapter.createAlertDialog(alertDialogWidget);
                    alertDialog.setCancelable(false);
                    alertDialog.setCanceledOnTouchOutside(false);
                    alertDialog.setOnDismissListener(new AlertDialog.OnDismissListener() {
                        @Override
                        public void onDismiss(DialogInterface arg0) {
                            String text = "Dialog dismissed.";
                            Toast.makeText(getActivity(), text, Toast.LENGTH_LONG).show();
                            Log.d(TAG, text);
                        }
                    });
                    alertDialog.show();
                }
            }//try
            catch(ControlPanelException cpe) {
                // Show the failure message in place of the panel.
                String errMsg = "Failed to access remote methods of control panel, Error: '" + cpe.getMessage() + "'";
                Log.e(TAG, errMsg);
                final TextView returnView = new TextView(getActivity());
                returnView.setText(errMsg);
                getActivity().runOnUiThread(new Runnable(){
                    @Override
                    public void run() {
                        if (rootView != null) {
                            LinearLayout body = (LinearLayout) rootView.findViewById(R.id.control_panel);
                            body.removeAllViews();
                            body.addView(returnView);
                        }
                    }});
                return;
            }
        }

        /**
         * Fills the unit spinner with the device's units. A single unit is
         * auto-selected and the spinner disabled; multiple units get a
         * selection listener.
         */
        private void selectControlPanel(ControllableDevice device) {
            if (rootView != null) {
                Spinner unitSelector = (Spinner) rootView.findViewById(R.id.unit_selector);
                Collection<Unit> unitCollection = device.getUnitCollection();
                if (unitCollection.size() == 0) {
                    Log.w(TAG, "No units found");
                    unitSelector.setEnabled(false);
                } else {
                    final ArrayAdapter<LabelValuePair> adapter = new ArrayAdapter<LabelValuePair>(getActivity(), android.R.layout.simple_spinner_item);
                    for (Unit unit: unitCollection) {
                        adapter.add(new LabelValuePair(unit.getUnitId(), unit));
                    }
                    unitSelector.setAdapter(adapter);
                    if (unitCollection.size() == 1) {
                        unitSelector.setEnabled(false);
                        onUnitSelection(unitCollection.iterator().next());
                    } else {
                        // register a selection listener
                        OnItemSelectedListener listener = new OnItemSelectedListener() {
                            // Sentinel so the initial (position 0) callback still triggers a selection.
                            int currentSelection = 1000;
                            public void onItemSelected(AdapterView<?> parent, View view, final int pos, long id) {
                                if (pos == currentSelection) {
                                    Log.d(TAG, String.format("Selected position %d already selected. No action required", pos));
                                } else {
                                    currentSelection = pos;
                                    LabelValuePair item = adapter.getItem(pos);
                                    Unit selectedUnit = (Unit) item.value;
                                    onUnitSelection(selectedUnit);
                                }
                            }
                            public void onNothingSelected(AdapterView<?> parent) {
                                // Another interface callback
                            }
                        };
                        unitSelector.setOnItemSelectedListener(listener);
                    }
                }
            }
        }

        /** Handles a unit choice: continue with its control-panel collections. */
        private void onUnitSelection(Unit selectedUnit) {
            Log.d(TAG, String.format("Unit selected: '%s'", selectedUnit.getUnitId()));
            Collection<ControlPanelCollection> controlPanelContainer = selectedUnit.getControlPanelCollection();
            selectControlPanelCollection(controlPanelContainer);
        }

        /**
         * Fills the collection spinner with the unit's control-panel
         * collections; mirrors selectControlPanel()'s single/multiple handling.
         */
        private void selectControlPanelCollection(
                Collection<ControlPanelCollection> controlPanelContainer) {
            if (rootView != null) {
                Spinner cpCollectionSelector = (Spinner) rootView.findViewById(R.id.cp_collection_selector);
                if (controlPanelContainer.size() == 0) {
                    Log.w(TAG, "No control panel collections found");
                    cpCollectionSelector.setEnabled(false);
                } else {
                    final ArrayAdapter<LabelValuePair> adapter = new ArrayAdapter<LabelValuePair>(getActivity(), android.R.layout.simple_spinner_item);
                    for (ControlPanelCollection cpCollection: controlPanelContainer) {
                        adapter.add(new LabelValuePair(cpCollection.getName(), cpCollection));
                    }
                    cpCollectionSelector.setAdapter(adapter);
                    if (controlPanelContainer.size() == 1) {
                        cpCollectionSelector.setEnabled(false);
                        onControlPanelCollectionSelection(controlPanelContainer.iterator().next());
                    } else {
                        // register a selection listener
                        OnItemSelectedListener listener = new OnItemSelectedListener() {
                            // Sentinel so the initial (position 0) callback still triggers a selection.
                            int currentSelection = 1000;
                            public void onItemSelected(AdapterView<?> parent, View view, final int pos, long id) {
                                if (pos == currentSelection) {
                                    Log.d(TAG, String.format("Selected position %d already selected. No action required", pos));
                                } else {
                                    currentSelection = pos;
                                    LabelValuePair item = adapter.getItem(pos);
                                    ControlPanelCollection cpCollection = (ControlPanelCollection) item.value;
                                    onControlPanelCollectionSelection(cpCollection);
                                }
                            }
                            public void onNothingSelected(AdapterView<?> parent) {
                                // Another interface callback
                            }
                        };
                        cpCollectionSelector.setOnItemSelectedListener(listener);
                    }
                }
            }
        }

        /**
         * Picks the control panel whose language best matches the phone's
         * locale (exact RFC 5646 tag, ISO 639 code, or same-language prefix),
         * falling back to the first panel. Releases the previously selected
         * panel before rendering the new one.
         */
        private void onControlPanelCollectionSelection(ControlPanelCollection controlPanelCollection) {
            Collection<DeviceControlPanel> controlPanels = controlPanelCollection.getControlPanels();
            String language_IETF_RFC5646_java = Locale.getDefault().toString(); //"en_US", "es_SP"
            String language_IETF_RFC5646 = language_IETF_RFC5646_java.replace('_', '-');
            String languageISO639 = Locale.getDefault().getLanguage(); //"en", "es"
            DeviceControlPanel previousControlPanel = deviceControlPanel;
            boolean found = false;
            for(DeviceControlPanel controlPanel : controlPanels) {
                String cpLanugage = controlPanel.getLanguage();
                Log.d(TAG, String.format("Control Panel language: %s", cpLanugage));
                if (cpLanugage.equalsIgnoreCase(language_IETF_RFC5646)
                        || cpLanugage.equalsIgnoreCase(languageISO639)
                        // phone language=de_DE (de), cp language=de_AT
                        || cpLanugage.startsWith(languageISO639)) {
                    deviceControlPanel = controlPanel;
                    found = true;
                    Log.d(TAG, String.format("Found a control panel that matches phone languages: RFC5646=%s, ISO639=%s, Given language was: %s", language_IETF_RFC5646, languageISO639, cpLanugage));
                    break;
                }
            }
            if (!found && !controlPanels.isEmpty()) {
                Log.w(TAG, String.format("Could not find a control panel that matches phone languages: RFC5646=%s, ISO639=%s", language_IETF_RFC5646, languageISO639));
                deviceControlPanel = controlPanels.iterator().next();
                Log.d(TAG, String.format("Defaulting to the control panel of language: %s", deviceControlPanel.getLanguage()));
            }
            Log.d(TAG, "Releasing the previous device control panel");
            if (previousControlPanel != null) {
                previousControlPanel.release();
            }
            onControlPanelSelected();
        }

        // DeviceEventsListener variant (no @Override in original source).
        public void metadataChanged(ControllableDevice device, final UIElement uielement) {
            UIElementType elementType = uielement.getElementType();
            Log.d(TAG, "METADATA_CHANGED : Received metadata changed signal, device: '" + device.getDeviceId() + "', ObjPath: '" + uielement.getObjectPath() + "', element type: '" + elementType + "'");
            FragmentActivity activity = getActivity();
            if (activity != null) {
                activity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        controlPanelAdapter.onMetaDataChange(uielement);
                    }
                });
            }
        }

        @Override
        public void errorOccurred(ControllableDevice device, final String reason) {
            final String text = "Error: '" + reason + "'";
            Log.e(TAG, text);
            // Only surface errors for the device this controller owns.
            if (this.device.getDeviceId().equalsIgnoreCase(device.getDeviceId())) {
                final FragmentActivity activity = getActivity();
                if (activity != null) {
                    activity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            Toast.makeText(activity, text, Toast.LENGTH_LONG).show();
                        }
                    });
                }
            }
        }

        @Override
        public void handleControlPanelException(ControlPanelException e) {
            String text = getString(R.string.action_failed);
            Log.e(TAG, text + ", error in calling remote object: '" + e.getMessage() + "'");
            Toast.makeText(getActivity(), text, Toast.LENGTH_SHORT).show();
        }

        @Override
        public void errorOccurred(DeviceControlPanel deviceControlPanel, String reason) {
            // Delegate to the device-level handler.
            errorOccurred(deviceControlPanel.getDevice(), reason);
        }

        @Override
        public void metadataChanged(DeviceControlPanel deviceControlPanel, final UIElement uielement) {
            Log.d(TAG, "Received metadataChanged signal, device: '" + deviceControlPanel.getDevice().getDeviceId() + "', ObjPath: '" + uielement.getObjectPath() + "'");
            if (device.getDeviceId().equalsIgnoreCase(deviceControlPanel.getDevice().getDeviceId())) {
                UIElementType elementType = uielement.getElementType();
                Log.d(TAG, "Received metadataChanged : Received metadata changed signal, device: '" + device.getDeviceId() + "', ObjPath: '" + uielement.getObjectPath() + "', element type: '" + elementType + "'");
                FragmentActivity activity = getActivity();
                if (activity != null) {
                    activity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            controlPanelAdapter.onMetaDataChange(uielement);
                        }
                    });
                }
            }
        }

        @Override
        public void valueChanged(DeviceControlPanel deviceControlPanel, final UIElement uielement, final Object newValue) {
            Log.d(TAG, "Received valueChanged signal, device: '" + deviceControlPanel.getDevice().getDeviceId() + "', ObjPath: '" + uielement.getObjectPath() + "', NewValue: '" + newValue + "'");
            if (device.getDeviceId().equalsIgnoreCase(deviceControlPanel.getDevice().getDeviceId())) {
                if (controlPanelAdapter != null) {
                    final FragmentActivity activity = getActivity();
                    if (activity != null) {
                        activity.runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                controlPanelAdapter.onValueChange(uielement, newValue);
                                String text = "Received value changed signal, ObjPath: '" + uielement.getObjectPath() + "', NewValue: '" + newValue + "'";
                                Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
                            }
                        });
                    }
                }
            }
        }

        @Override
        public void notificationActionDismiss(DeviceControlPanel deviceControlPanel) {
            Log.d(TAG,"Received notificationActionDismiss");
            // See the field note: 'alertDialog' is never assigned, so this is a no-op in practice.
            if (alertDialog != null && alertDialog.isShowing()) {
                Log.d(TAG,"Dismissing the dialog");
                alertDialog.dismiss();
            }
        }
    };

    /**
     * A wrapper class for hosting a {label,value} pair inside an ArrayAdapter.
     * So that the label is displayed, while practically the real value is used.
     */
    class LabelValuePair {

        final String label;
        final Object value;

        public LabelValuePair(String label, Object value) {
            super();
            this.value = value;
            this.label = label;
        }

        @Override
        // This does the trick of displaying the label and not the value in the Adapter
        public String toString() {
            return label;
        }
    }
}
package com.github.qaware.adcl.information;

import org.jetbrains.annotations.NotNull;
import org.neo4j.ogm.annotation.RelationshipEntity;

/**
 * Represents an edge on the graph describing a dependency from any node to a class node.
 * Persisted by Neo4j-OGM as a relationship of type {@code "ClassDependency"}.
 */
@RelationshipEntity("ClassDependency")
public final class ClassDependency extends RelationshipInformation<ClassInformation<?>> {
    /**
     * Needed for neo4j initialization.
     * Neo4j-OGM instantiates entities reflectively through this no-arg constructor,
     * which is why it must exist even though application code never calls it.
     */
    @SuppressWarnings("unused")
    private ClassDependency() {
        super();
    }

    /**
     * Creates a new class dependency edge from {@code from} to the class node {@code to}.
     *
     * @param from the node which has the dependency (the dependent)
     * @param to   the class node being depended upon
     * @see Information#addClassDependency(ClassInformation, VersionInformation)
     * @see Information#getClassDependencies(VersionInformation)
     * @see Information#getAllClassDependencies(VersionInformation, boolean)
     * @see Information#getAllClassDependenciesAggregated(VersionInformation, boolean)
     */
    ClassDependency(@NotNull Information<?> from, @NotNull ClassInformation<?> to) {
        super(from, to);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.om.request.file; import java.io.IOException; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Map; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import org.apache.hadoop.ozone.om.helpers.OmBucketInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup; import org.apache.hadoop.ozone.om.helpers.OzoneAclUtil; import org.apache.hadoop.ozone.om.helpers.OzoneFSUtils; import org.apache.hadoop.ozone.om.ratis.utils.OzoneManagerDoubleBufferHelper; import org.apache.hadoop.ozone.security.acl.IAccessAuthorizer; import org.apache.hadoop.ozone.security.acl.OzoneObj; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.audit.AuditLogger; import org.apache.hadoop.ozone.audit.OMAction; import org.apache.hadoop.ozone.om.OMMetadataManager; import org.apache.hadoop.ozone.om.OMMetrics; import org.apache.hadoop.ozone.om.OzoneManager; import 
org.apache.hadoop.ozone.om.exceptions.OMException; import org.apache.hadoop.ozone.om.request.key.OMKeyRequest; import org.apache.hadoop.ozone.om.response.OMClientResponse; import org.apache.hadoop.ozone.om.response.file.OMDirectoryCreateResponse; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos .CreateDirectoryRequest; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos .CreateDirectoryResponse; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos .KeyArgs; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos .OMRequest; import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos .OMResponse; import org.apache.hadoop.util.Time; import org.apache.hadoop.hdds.utils.db.cache.CacheKey; import org.apache.hadoop.hdds.utils.db.cache.CacheValue; import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.FILE_ALREADY_EXISTS; import static org.apache.hadoop.ozone.om.lock.OzoneManagerLock.Resource.BUCKET_LOCK; import static org.apache.hadoop.ozone.om.request.file.OMFileRequest.OMDirectoryResult.DIRECTORY_EXISTS_IN_GIVENPATH; import static org.apache.hadoop.ozone.om.request.file.OMFileRequest.OMDirectoryResult.FILE_EXISTS_IN_GIVENPATH; import static org.apache.hadoop.ozone.om.request.file.OMFileRequest.OMDirectoryResult.NONE; import static org.apache.hadoop.ozone.om.request.file.OMFileRequest.OMDirectoryResult.FILE_EXISTS; /** * Handle create directory request. 
 */
public class OMDirectoryCreateRequest extends OMKeyRequest {

  private static final Logger LOG =
      LoggerFactory.getLogger(OMDirectoryCreateRequest.class);

  public OMDirectoryCreateRequest(OMRequest omRequest) {
    super(omRequest);
  }

  /**
   * Rewrites the incoming request so the modification time is fixed on the
   * leader before the request is replicated; every replica then applies the
   * same timestamp instead of computing its own.
   */
  @Override
  public OMRequest preExecute(OzoneManager ozoneManager) {
    CreateDirectoryRequest createDirectoryRequest =
        getOmRequest().getCreateDirectoryRequest();
    Preconditions.checkNotNull(createDirectoryRequest);

    KeyArgs.Builder newKeyArgs = createDirectoryRequest.getKeyArgs()
        .toBuilder().setModificationTime(Time.now());

    CreateDirectoryRequest.Builder newCreateDirectoryRequest =
        createDirectoryRequest.toBuilder().setKeyArgs(newKeyArgs);

    return getOmRequest().toBuilder().setCreateDirectoryRequest(
        newCreateDirectoryRequest).setUserInfo(getUserInfo()).build();
  }

  /**
   * Applies the CreateDirectory request to the OM metadata cache under the
   * bucket write lock and queues the response in the double buffer for the
   * eventual DB flush. Updates metrics and emits an audit log entry whether
   * the operation succeeds or fails.
   */
  @Override
  public OMClientResponse validateAndUpdateCache(OzoneManager ozoneManager,
      long transactionLogIndex,
      OzoneManagerDoubleBufferHelper ozoneManagerDoubleBufferHelper) {

    KeyArgs keyArgs = getOmRequest().getCreateDirectoryRequest().getKeyArgs();

    String volumeName = keyArgs.getVolumeName();
    String bucketName = keyArgs.getBucketName();
    String keyName = keyArgs.getKeyName();

    OMResponse.Builder omResponse =
        OzoneManagerProtocolProtos.OMResponse.newBuilder().setCmdType(
            OzoneManagerProtocolProtos.Type.CreateDirectory).setStatus(
            OzoneManagerProtocolProtos.Status.OK);

    OMMetrics omMetrics = ozoneManager.getMetrics();
    omMetrics.incNumCreateDirectory();

    AuditLogger auditLogger = ozoneManager.getAuditLogger();
    OzoneManagerProtocolProtos.UserInfo userInfo = getOmRequest().getUserInfo();

    Map<String, String> auditMap = buildKeyArgsAuditMap(keyArgs);

    OMMetadataManager omMetadataManager = ozoneManager.getMetadataManager();
    boolean acquiredLock = false;
    IOException exception = null;
    OMClientResponse omClientResponse = null;

    try {
      // check Acl
      checkKeyAcls(ozoneManager, volumeName, bucketName, keyName,
          IAccessAuthorizer.ACLType.CREATE, OzoneObj.ResourceType.KEY);

      // Check if this is the root of the filesystem.
      // Root always exists, so creating it is a no-op; note this early return
      // skips the double-buffer add, metrics and audit below.
      if (keyName.length() == 0) {
        return new OMDirectoryCreateResponse(null,
            omResponse.setCreateDirectoryResponse(
                CreateDirectoryResponse.newBuilder()).build());
      }

      // acquire lock
      acquiredLock = omMetadataManager.getLock().acquireWriteLock(BUCKET_LOCK,
          volumeName, bucketName);

      validateBucketAndVolume(omMetadataManager, volumeName, bucketName);

      // Need to check if any files exist in the given path, if they exist we
      // cannot create a directory with the given key.
      OMFileRequest.OMDirectoryResult omDirectoryResult =
          OMFileRequest.verifyFilesInPath(omMetadataManager, volumeName,
              bucketName, keyName, Paths.get(keyName));

      OmBucketInfo omBucketInfo = omMetadataManager.getBucketTable().get(
          omMetadataManager.getBucketKey(volumeName, bucketName));

      OmKeyInfo dirKeyInfo = null;
      if (omDirectoryResult == FILE_EXISTS ||
          omDirectoryResult == FILE_EXISTS_IN_GIVENPATH) {
        // A regular file occupies (part of) the requested path.
        throw new OMException("Unable to create directory: " +keyName
            + " in volume/bucket: " + volumeName + "/" + bucketName,
            FILE_ALREADY_EXISTS);
      } else if (omDirectoryResult == DIRECTORY_EXISTS_IN_GIVENPATH ||
          omDirectoryResult == NONE) {
        // Path is free (or only partially covered by existing directories):
        // build the key info and publish it to the key table cache; the
        // double buffer flushes it to the DB later.
        dirKeyInfo = createDirectoryKeyInfo(ozoneManager, omBucketInfo,
            volumeName, bucketName, keyName, keyArgs, transactionLogIndex);

        omMetadataManager.getKeyTable().addCacheEntry(
            new CacheKey<>(omMetadataManager.getOzoneKey(volumeName,
                bucketName, dirKeyInfo.getKeyName())),
            new CacheValue<>(Optional.of(dirKeyInfo), transactionLogIndex));
      }
      // if directory already exists do nothing or do we need to throw
      // exception? Current KeyManagerImpl code does just return, following
      // similar approach.

      omResponse.setCreateDirectoryResponse(
          CreateDirectoryResponse.newBuilder());
      omClientResponse = new OMDirectoryCreateResponse(dirKeyInfo,
          omResponse.build());
    } catch (IOException ex) {
      exception = ex;
      omClientResponse = new OMDirectoryCreateResponse(null,
          createErrorOMResponse(omResponse, exception));
    } finally {
      // Queue the (success or error) response for the DB flush, then release
      // the bucket lock. The add must happen while the response exists even
      // on the error path so the transaction index is accounted for.
      if (omClientResponse != null) {
        omClientResponse.setFlushFuture(
            ozoneManagerDoubleBufferHelper.add(omClientResponse,
                transactionLogIndex));
      }
      if (acquiredLock) {
        omMetadataManager.getLock().releaseWriteLock(BUCKET_LOCK, volumeName,
            bucketName);
      }
    }

    // Audit outside the lock; 'exception' is null on success.
    auditLog(auditLogger, buildAuditMessage(OMAction.CREATE_DIRECTORY,
        auditMap, exception, userInfo));

    if (exception == null) {
      LOG.debug("Directory is successfully created for Key: {} in " +
          "volume/bucket:{}/{}", keyName, volumeName, bucketName);
      omMetrics.incNumKeys();
      return omClientResponse;
    } else {
      LOG.error("CreateDirectory failed for Key: {} in volume/bucket:{}/{}",
          keyName, volumeName, bucketName, exception);
      omMetrics.incNumCreateDirectoryFails();
      return omClientResponse;
    }
  }

  /**
   * Builds the OmKeyInfo representing the directory marker key: the key name
   * gains a trailing slash, the data size is zero and there are no block
   * locations. Creation time is deliberately set to the request's
   * modification time stamped in preExecute. Note 'Optional' here is Guava's
   * (see imports), hence {@code orNull()} below.
   */
  private OmKeyInfo createDirectoryKeyInfo(OzoneManager ozoneManager,
      OmBucketInfo omBucketInfo, String volumeName, String bucketName,
      String keyName, KeyArgs keyArgs, long transactionLogIndex)
      throws IOException {
    Optional<FileEncryptionInfo> encryptionInfo =
        getFileEncryptionInfo(ozoneManager, omBucketInfo);
    String dirName = OzoneFSUtils.addTrailingSlashIfNeeded(keyName);

    return new OmKeyInfo.Builder()
        .setVolumeName(volumeName)
        .setBucketName(bucketName)
        .setKeyName(dirName)
        .setOmKeyLocationInfos(Collections.singletonList(
            new OmKeyLocationInfoGroup(0, new ArrayList<>())))
        .setCreationTime(keyArgs.getModificationTime())
        .setModificationTime(keyArgs.getModificationTime())
        .setDataSize(0)
        .setReplicationType(HddsProtos.ReplicationType.RATIS)
        .setReplicationFactor(HddsProtos.ReplicationFactor.ONE)
        .setFileEncryptionInfo(encryptionInfo.orNull())
        .setAcls(OzoneAclUtil.fromProtobuf(keyArgs.getAclsList()))
        .setObjectID(transactionLogIndex)
        .setUpdateID(transactionLogIndex)
        .build();
  }
}
/*
 * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amplifyframework.datastore.storage.sqlite;

import androidx.annotation.NonNull;

/**
 * The set of storage classes SQLite uses to persist values, each paired
 * with its textual name as it appears in SQLite DDL.
 */
public enum SQLiteDataType {
    /**
     * A NULL value.
     */
    NULL("NULL"),

    /**
     * A signed integer, occupying 1, 2, 3, 4, 6, or 8 bytes depending on
     * the magnitude of the stored value.
     */
    INTEGER("INTEGER"),

    /**
     * A floating point value, stored as an 8-byte ("double precision")
     * IEEE floating point number.
     */
    REAL("REAL"),

    /**
     * A text string, stored using the database encoding
     * (UTF-8, UTF-16BE or UTF-16LE).
     */
    TEXT("TEXT"),

    /**
     * A blob of data, stored exactly as it was input.
     */
    BLOB("BLOB");

    // Textual form of this storage class, e.g. "INTEGER".
    private final String sqliteDataType;

    SQLiteDataType(@NonNull String sqliteDataType) {
        this.sqliteDataType = sqliteDataType;
    }

    /**
     * Return the string that represents the value of the enumeration constant.
     * @return the string that represents the value of the enumeration constant.
     */
    public String getSqliteDataType() {
        return this.sqliteDataType;
    }

    /**
     * Looks up the enumeration constant whose string form matches the given
     * field type name.
     * @param sqliteDataType the string representation of the field type.
     * @return the matching enumeration constant.
     * @throws IllegalArgumentException if no constant matches.
     */
    public static SQLiteDataType from(@NonNull String sqliteDataType) {
        final SQLiteDataType[] candidates = SQLiteDataType.values();
        for (int index = 0; index < candidates.length; index++) {
            // Keep the argument as the receiver of equals(), matching the
            // original behavior for any (contract-violating) null argument.
            if (sqliteDataType.equals(candidates[index].getSqliteDataType())) {
                return candidates[index];
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + sqliteDataType + " value.");
    }
}
/* * Copyright 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.logging.log4j; import java.io.File; import org.apache.commons.logging.impl.Log4JLogger; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.springframework.boot.logging.AbstractLoggingSystemTests; import org.springframework.boot.logging.LogLevel; import org.springframework.boot.test.OutputCapture; import org.springframework.util.StringUtils; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** * Tests for {@link Log4JLoggingSystem}. 
 *
 * @author Phillip Webb
 */
public class Log4JLoggingSystemTests extends AbstractLoggingSystemTests {

	@Rule
	public OutputCapture output = new OutputCapture();

	private final Log4JLoggingSystem loggingSystem = new Log4JLoggingSystem(getClass()
			.getClassLoader());

	private Log4JLogger logger;

	@Before
	public void setup() {
		// Logger named after this test class so configured patterns apply to it.
		this.logger = new Log4JLogger(getClass().getName());
	}

	// Default initialization (no config location, no log file): console output
	// only, and nothing logged before initialize() should appear.
	@Test
	public void noFile() throws Exception {
		this.loggingSystem.beforeInitialize();
		this.logger.info("Hidden");
		this.loggingSystem.initialize(null, null);
		this.logger.info("Hello world");
		String output = this.output.toString().trim();
		assertTrue("Wrong output:\n" + output, output.contains("Hello world"));
		assertFalse("Output not hidden:\n" + output, output.contains("Hidden"));
		assertFalse(new File(tmpDir() + "/spring.log").exists());
	}

	// Same as noFile(), but passing a log file should additionally create it.
	@Test
	public void withFile() throws Exception {
		this.loggingSystem.beforeInitialize();
		this.logger.info("Hidden");
		this.loggingSystem.initialize(null, tmpDir() + "/spring.log");
		this.logger.info("Hello world");
		String output = this.output.toString().trim();
		assertTrue("Wrong output:\n" + output, output.contains("Hello world"));
		assertFalse("Output not hidden:\n" + output, output.contains("Hidden"));
		assertTrue(new File(tmpDir() + "/spring.log").exists());
	}

	// A custom properties file on the classpath should be honored; the
	// nondefault config echoes the log file path but must not create tmp.log.
	@Test
	public void testNonDefaultConfigLocation() throws Exception {
		this.loggingSystem.beforeInitialize();
		this.loggingSystem.initialize("classpath:log4j-nondefault.properties",
				tmpDir() + "/spring.log");
		this.logger.info("Hello world");
		String output = this.output.toString().trim();
		assertTrue("Wrong output:\n" + output, output.contains("Hello world"));
		assertTrue("Wrong output:\n" + output, output.contains(tmpDir() + "/spring.log"));
		assertFalse(new File(tmpDir() + "/tmp.log").exists());
	}

	// A config location that does not exist must fail fast.
	@Test(expected = IllegalStateException.class)
	public void testNonexistentConfigLocation() throws Exception {
		this.loggingSystem.beforeInitialize();
		this.loggingSystem.initialize("classpath:log4j-nonexistent.xml", null);
	}

	// DEBUG is disabled by default; enabling it via setLogLevel should make
	// exactly one of the two debug statements visible.
	@Test
	public void setLevel() throws Exception {
		this.loggingSystem.beforeInitialize();
		this.loggingSystem.initialize(null, null);
		this.logger.debug("Hello");
		this.loggingSystem.setLogLevel("org.springframework.boot", LogLevel.DEBUG);
		this.logger.debug("Hello");
		assertThat(StringUtils.countOccurrencesOf(this.output.toString(), "Hello"),
				equalTo(1));
	}

	// java.util.logging output should be routed into the Log4J setup.
	@Test
	@Ignore("Fails on Bamboo")
	public void loggingThatUsesJulIsCaptured() {
		this.loggingSystem.beforeInitialize();
		this.loggingSystem.initialize(null, null);
		java.util.logging.Logger julLogger = java.util.logging.Logger
				.getLogger(getClass().getName());
		julLogger.severe("Hello world");
		String output = this.output.toString().trim();
		assertTrue("Wrong output:\n" + output, output.contains("Hello world"));
	}

}
package net.openid.conformance.condition.client; import com.google.common.base.Strings; import net.openid.conformance.condition.AbstractCondition; import net.openid.conformance.condition.PostEnvironment; import net.openid.conformance.condition.PreEnvironment; import net.openid.conformance.testmodule.Environment; public class SetProtectedResourceUrlToSingleResourceEndpoint extends AbstractCondition { @Override @PreEnvironment(required = "resource") @PostEnvironment(strings = "protected_resource_url") public Environment evaluate(Environment env) { String resourceUrl = env.getString("resource", "resourceUrl"); if(Strings.isNullOrEmpty(resourceUrl)){ throw error("Missing Resource URL"); } env.putString("protected_resource_url", resourceUrl); logSuccess("Set protected resource URL", args("protected_resource_url", resourceUrl)); return env; } }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.impala.planner; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.ListIterator; import org.apache.iceberg.DataFile; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.expressions.Expression.Operation; import org.apache.iceberg.expressions.UnboundPredicate; import org.apache.impala.analysis.Analyzer; import org.apache.impala.analysis.BinaryPredicate; import org.apache.impala.analysis.BoolLiteral; import org.apache.impala.analysis.DateLiteral; import org.apache.impala.analysis.Expr; import org.apache.impala.analysis.LiteralExpr; import org.apache.impala.analysis.MultiAggregateInfo; import org.apache.impala.analysis.NumericLiteral; import org.apache.impala.analysis.SlotRef; import org.apache.impala.analysis.StringLiteral; import org.apache.impala.analysis.TableRef; import org.apache.impala.analysis.TupleDescriptor; import org.apache.impala.catalog.FeCatalogUtils; import org.apache.impala.catalog.FeFsPartition; import org.apache.impala.catalog.FeFsTable; import org.apache.impala.catalog.FeIcebergTable; import org.apache.impala.catalog.TableLoadingException; import 
org.apache.impala.catalog.Type;
import org.apache.impala.catalog.HdfsPartition.FileDescriptor;
import org.apache.impala.common.ImpalaException;
import org.apache.impala.common.ImpalaRuntimeException;
import org.apache.impala.util.IcebergUtil;

import com.google.common.base.Preconditions;

import org.apache.impala.util.KuduUtil;

/**
 * Scan of a single iceberg table
 */
public class IcebergScanNode extends HdfsScanNode {

  private final FeIcebergTable icebergTable_;

  // Exprs in icebergConjuncts_ converted to UnboundPredicate.
  private final List<UnboundPredicate> icebergPredicates_ = new ArrayList<>();

  public IcebergScanNode(PlanNodeId id, TupleDescriptor desc, List<Expr> conjuncts,
      TableRef hdfsTblRef, FeFsTable feFsTable, MultiAggregateInfo aggInfo) {
    super(id, desc, conjuncts, getIcebergPartition(feFsTable), hdfsTblRef, aggInfo,
        null, false);
    icebergTable_ = (FeIcebergTable) desc_.getTable();
    // Hdfs table transformed from iceberg table only has one partition
    Preconditions.checkState(partitions_.size() == 1);
  }

  /**
   * Get partition info from FeFsTable, we treat iceberg table as an
   * unpartitioned hdfs table
   */
  private static List<? extends FeFsPartition> getIcebergPartition(FeFsTable feFsTable) {
    Collection<? extends FeFsPartition> partitions =
        FeCatalogUtils.loadAllPartitions(feFsTable);
    return new ArrayList<>(partitions);
  }

  @Override
  public void init(Analyzer analyzer) throws ImpalaException {
    extractIcebergConjuncts(analyzer);
    super.init(analyzer);
  }

  /**
   * We need prune hdfs partition FileDescriptor by iceberg predicates.
   *
   * Asks Iceberg for the data files that can match icebergPredicates_ and maps
   * each one back to its cached FileDescriptor.
   *
   * @return the FileDescriptors of all surviving data files
   * @throws ImpalaRuntimeException if the Iceberg metadata cannot be loaded, or
   *     if a returned data file has no entry in the descriptor cache
   */
  public List<FileDescriptor> getFileDescriptorByIcebergPredicates()
      throws ImpalaRuntimeException {
    List<DataFile> dataFileList;
    try {
      dataFileList = IcebergUtil.getIcebergDataFiles(icebergTable_, icebergPredicates_);
    } catch (TableLoadingException e) {
      throw new ImpalaRuntimeException(String.format(
          "Failed to load data files for Iceberg table: %s",
          icebergTable_.getFullName()), e);
    }

    List<FileDescriptor> fileDescList = new ArrayList<>();
    for (DataFile dataFile : dataFileList) {
      FileDescriptor fileDesc = icebergTable_.getPathHashToFileDescMap()
          .get(IcebergUtil.getDataFilePathHash(dataFile));
      //Todo: how to deal with iceberg metadata update, we need to invalidate manually now
      // Fix: check for a missing cache entry BEFORE adding to the result list;
      // previously a null descriptor was appended right before throwing.
      if (fileDesc == null) {
        throw new ImpalaRuntimeException("Cannot find file in cache: " + dataFile.path()
            + " with snapshot id: " + String.valueOf(icebergTable_.snapshotId()));
      }
      fileDescList.add(fileDesc);
    }
    return fileDescList;
  }

  /**
   * Extracts predicates from conjuncts_ that can be pushed down to Iceberg.
   *
   * Since Iceberg will filter data files by metadata instead of scan data files,
   * we pushdown all predicates to Iceberg to get the minimum data files to scan.
   * Here are three cases for predicate pushdown:
   * 1.The column is not part of any Iceberg partition expression
   * 2.The column is part of all partition keys without any transformation (i.e. IDENTITY)
   * 3.The column is part of all partition keys with transformation (i.e. MONTH/DAY/HOUR)
   * We can use case 1 and 3 to filter data files, but also need to evaluate it in the
   * scan, for case 2 we don't need to evaluate it in the scan. So we evaluate all
   * predicates in the scan to keep consistency. More details about Iceberg scanning,
   * please refer: https://iceberg.apache.org/spec/#scan-planning
   */
  private void extractIcebergConjuncts(Analyzer analyzer) throws ImpalaException {
    ListIterator<Expr> it = conjuncts_.listIterator();
    while (it.hasNext()) {
      tryConvertBinaryIcebergPredicate(analyzer, it.next());
    }
  }

  /**
   * Transform impala binary predicate to iceberg predicate.
   *
   * Only predicates of the shape {@code SlotRef <op> Literal} on a supported
   * type are convertible; anything else is left for regular scan evaluation.
   *
   * @return true if the predicate was converted and recorded in
   *     icebergPredicates_, false otherwise
   */
  private boolean tryConvertBinaryIcebergPredicate(Analyzer analyzer, Expr expr)
      throws ImpalaException {
    if (!(expr instanceof BinaryPredicate)) return false;
    BinaryPredicate predicate = (BinaryPredicate) expr;

    Operation op = getIcebergOperator(predicate.getOp());
    if (op == null) return false;

    if (!(predicate.getChild(0) instanceof SlotRef)) return false;
    SlotRef ref = (SlotRef) predicate.getChild(0);

    if (!(predicate.getChild(1) instanceof LiteralExpr)) return false;
    LiteralExpr literal = (LiteralExpr) predicate.getChild(1);

    // If predicate contains map/struct, this column would be null
    if (ref.getDesc().getColumn() == null) return false;
    String colName = ref.getDesc().getColumn().getName();

    UnboundPredicate unboundPredicate = null;
    switch (literal.getType().getPrimitiveType()) {
      case BOOLEAN: {
        unboundPredicate = Expressions.predicate(op, colName,
            ((BoolLiteral) literal).getValue());
        break;
      }
      case TINYINT:
      case SMALLINT:
      case INT: {
        unboundPredicate = Expressions.predicate(op, colName,
            ((NumericLiteral) literal).getIntValue());
        break;
      }
      case BIGINT: {
        unboundPredicate = Expressions.predicate(op, colName,
            ((NumericLiteral) literal).getLongValue());
        break;
      }
      case FLOAT: {
        unboundPredicate = Expressions.predicate(op, colName,
            (float)((NumericLiteral) literal).getDoubleValue());
        break;
      }
      case DOUBLE: {
        unboundPredicate = Expressions.predicate(op, colName,
            ((NumericLiteral) literal).getDoubleValue());
        break;
      }
      case STRING:
      case DATETIME:
      case CHAR: {
        unboundPredicate = Expressions.predicate(op, colName,
            ((StringLiteral) literal).getUnescapedValue());
        break;
      }
      case TIMESTAMP: {
        long unixMicros = KuduUtil.timestampToUnixTimeMicros(analyzer, literal);
        if (unixMicros >= 0) {
          // Iceberg's partition transformations have a bug for times before the epoch.
          // See https://github.com/apache/iceberg/pull/1981
          // TODO: IMPALA-10433 remove the workarounds once we use an Iceberg version
          // that contains the fix.
          unboundPredicate = Expressions.predicate(op, colName, unixMicros);
        }
        break;
      }
      case DATE: {
        int daysSinceEpoch = ((DateLiteral) literal).getValue();
        if (daysSinceEpoch >= 0) {
          // See comment at TIMESTAMP.
          unboundPredicate = Expressions.predicate(op, colName, daysSinceEpoch);
        }
        break;
      }
      case DECIMAL: {
        Type colType = ref.getDesc().getColumn().getType();
        int scale = colType.getDecimalDigits();
        BigDecimal literalValue = ((NumericLiteral) literal).getValue();
        if (literalValue.scale() <= scale) {
          // Iceberg DecimalLiteral needs to have the exact same scale.
          if (literalValue.scale() < scale) literalValue = literalValue.setScale(scale);
          unboundPredicate = Expressions.predicate(op, colName, literalValue);
        }
        break;
      }
      default: break;
    }

    if (unboundPredicate == null) return false;
    icebergPredicates_.add(unboundPredicate);
    return true;
  }

  /**
   * Returns Iceberg operator by BinaryPredicate operator, or null if the operation
   * is not supported by Iceberg.
   */
  private Operation getIcebergOperator(BinaryPredicate.Operator op) {
    switch (op) {
      case EQ: return Operation.EQ;
      case NE: return Operation.NOT_EQ;
      case LE: return Operation.LT_EQ;
      case GE: return Operation.GT_EQ;
      case LT: return Operation.LT;
      case GT: return Operation.GT;
      default: return null;
    }
  }
}
package com.caizilong.netesase.splash.bean;

import java.io.Serializable;
import java.util.List;

/**
 * Serializable data bean for the splash-screen advertisement payload.
 * Field names deliberately match the server JSON keys (snake_case) so
 * reflection-based mappers can bind them — do not rename.
 *
 * @author 小码哥Android学院(520it.com)
 * @time 2016/10/12 10:31
 */
public class Ads implements Serializable {

    // Delay before the next ad request -- unit not specified here; TODO confirm
    // against the API documentation.
    int next_req;
    // Advertisement entries carried by this payload.
    List<AdsDetail> ads;

    /** @return the delay value before the next ad request */
    public int getNext_req() {
        return next_req;
    }

    /** @param next_req the delay value before the next ad request */
    public void setNext_req(int next_req) {
        this.next_req = next_req;
    }

    /** @return the advertisement entries */
    public List<AdsDetail> getAds() {
        return ads;
    }

    /** @param ads the advertisement entries */
    public void setAds(List<AdsDetail> ads) {
        this.ads = ads;
    }

    @Override
    public String toString() {
        // Produces exactly: Ads{next_req=<n>, ads=<list>}
        StringBuilder builder = new StringBuilder("Ads{");
        builder.append("next_req=").append(next_req);
        builder.append(", ads=").append(ads);
        builder.append('}');
        return builder.toString();
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.codedeploy.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;

import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.codedeploy.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListTagsForResourceRequest Marshaller
 */
// NOTE: generated by the AWS Java SDK code generator; manual edits will be
// overwritten on regeneration.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListTagsForResourceRequestProtocolMarshaller implements Marshaller<Request<ListTagsForResourceRequest>, ListTagsForResourceRequest> {

    // Static wire binding for this operation: AWS-JSON protocol, POST to "/",
    // operation id "CodeDeploy_20141006.ListTagsForResource", payload marshalled
    // from member fields rather than an explicit payload member.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("CodeDeploy_20141006.ListTagsForResource").serviceName("AmazonCodeDeploy").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public ListTagsForResourceRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshals the given model object into an HTTP {@code Request} using the
     * operation binding above. Any failure (including from the delegate member
     * marshaller) is wrapped in an {@code SdkClientException}.
     */
    public Request<ListTagsForResourceRequest> marshall(ListTagsForResourceRequest listTagsForResourceRequest) {

        if (listTagsForResourceRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            final ProtocolRequestMarshaller<ListTagsForResourceRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    listTagsForResourceRequest);

            protocolMarshaller.startMarshalling();
            // Member-level marshalling is delegated to the generated singleton.
            ListTagsForResourceRequestMarshaller.getInstance().marshall(listTagsForResourceRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
package org.apache.archiva;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.archiva.web.api.RuntimeInfoService;
import org.apache.archiva.web.model.ApplicationRuntimeInfo;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.cxf.jaxrs.client.JAXRSClientFactory;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.apache.archiva.redback.rest.services.AbstractRestServicesTest;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.util.Collections;

import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.runner.RunWith;

/**
 * Integration test for the RuntimeInfoService REST endpoint: boots the test
 * server against a clean JCR directory and checks the reported runtime info.
 *
 * @author Olivier Lamy
 */
@RunWith( ArchivaBlockJUnit4ClassRunner.class )
public class RuntimeInfoServiceTest
    extends AbstractRestServicesTest
{
    /**
     * Wipes any previous JCR repository under appserver.base before the
     * server starts, so each run begins from a clean state.
     */
    @Override
    @Before
    public void startServer()
        throws Exception
    {
        File appServerBase = new File( System.getProperty( "appserver.base" ) );

        File jcrDirectory = new File( appServerBase, "jcr" );

        if ( jcrDirectory.exists() )
        {
            FileUtils.deleteDirectory( jcrDirectory );
        }

        super.startServer();
    }

    @Override
    protected String getSpringConfigLocation()
    {
        return "classpath*:META-INF/spring-context.xml,classpath:/spring-context-with-jcr.xml";
    }

    // Context path under which the REST services are exposed.
    protected String getRestServicesPath()
    {
        return "restServices";
    }

    // Base URL: overridable via -Darchiva.baseRestUrl, otherwise localhost on
    // the port chosen by the test harness.
    protected String getBaseUrl()
    {
        String baseUrlSysProps = System.getProperty( "archiva.baseRestUrl" );
        return StringUtils.isBlank( baseUrlSysProps ) ? "http://localhost:" + port : baseUrlSysProps;
    }

    /**
     * Calls getApplicationRuntimeInfo over HTTP and verifies the version
     * matches the build's expectedVersion property and the logging flags have
     * their default values.
     */
    @Test
    public void runtimeInfoService()
        throws Exception
    {
        RuntimeInfoService service =
            JAXRSClientFactory.create( getBaseUrl() + "/" + getRestServicesPath() + "/archivaUiServices/",
                                       RuntimeInfoService.class,
                                       Collections.singletonList( new JacksonJaxbJsonProvider() ) );

        ApplicationRuntimeInfo applicationRuntimeInfo = service.getApplicationRuntimeInfo( "en" );

        assertEquals( System.getProperty( "expectedVersion" ), applicationRuntimeInfo.getVersion() );
        assertFalse( applicationRuntimeInfo.isJavascriptLog() );
        assertTrue( applicationRuntimeInfo.isLogMissingI18n() );
    }
}
/*
 * ###
 * PHR_NodeJSWebService
 * %%
 * Copyright (C) 1999 - 2012 Photon Infotech Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ###
 */
package com.photon.phresco.uiconstants;

import java.lang.reflect.Field;

/**
 * Loads Node.js UI-constant values from XML. Every public String field of this
 * class is initialised with a lookup key and, in the constructor, replaced via
 * reflection with the value {@link ReadXMLFile#getValue(String)} returns for
 * that key.
 *
 * Cleaned up from decompiler output: {@code Object.getClass()} is final, so
 * {@code super.getClass()} was redundant, as were the manual array copy and
 * index loop — behavior is unchanged.
 */
public class PhrescoNodejsUiConstants {

	private ReadXMLFile readXml;

	// Lookup key; overwritten with the resolved value by the constructor.
	public String ELEMENT = "stringCapturehelloworldUrl";

	/**
	 * Reads the Node.js data from XML and resolves all public String fields.
	 *
	 * @throws RuntimeException wrapping any failure (XML read or reflection),
	 *         as in the original code — callers rely on this unchecked wrap.
	 */
	public PhrescoNodejsUiConstants() {
		try {
			readXml = new ReadXMLFile();
			readXml.NodejsData();
			System.out.println("*********Reading Data from NodejsData**********");
			// Replace each public String field's key with its XML value.
			for (Field field : getClass().getFields()) {
				Object initialValue = field.get(this);
				if (initialValue instanceof String) {
					field.set(this, readXml.getValue((String) initialValue));
				}
			}
		} catch (Exception localException) {
			throw new RuntimeException("Loading " + getClass().getSimpleName()
					+ " failed", localException);
		}
	}
}
package scene;

import gameobj.Actor;
import utils.CommandSolver;
import utils.Global;

import java.awt.*;

/**
 * Primary gameplay scene. Owns a single {@link Actor} spawned at the centre
 * of the screen and forwards the paint/update lifecycle calls to it.
 */
public class MainScene extends Scene {

    private Actor mainActor;

    /** Creates the actor at the centre of the screen when the scene starts. */
    @Override
    public void sceneBegin() {
        int centerX = Global.SCREEN_X / 2;
        int centerY = Global.SCREEN_Y / 2;
        mainActor = new Actor(centerX, centerY, 6);
    }

    /** Nothing to release when the scene ends. */
    @Override
    public void sceneEnd() {
    }

    /** Delegates rendering to the actor. */
    @Override
    public void paint(Graphics g) {
        mainActor.paint(g);
    }

    /** Delegates per-frame state updates to the actor. */
    @Override
    public void update() {
        mainActor.update();
    }

    /** This scene does not handle mouse input. */
    @Override
    public CommandSolver.MouseCommandListener mouseListener() {
        return null;
    }

    /** This scene does not handle keyboard input. */
    @Override
    public CommandSolver.KeyListener keyListener() {
        return null;
    }
}
package planets_extended;

import planets_extended.windows.Menu;
import planets_extended.windows.Game;
import planets_extended.windows.Window;

import java.util.ArrayList;

import javafx.animation.AnimationTimer;
import javafx.application.Application;
import javafx.stage.Stage;

import static planets_extended.windows.Game.root;

import planets_extended.utils.DebugUtils;
import planets_extended.windows.Load;
import planets_extended.windows.ResultScreen;

/**
 * The main class for the application.
 * Handles screens (menu, game, load, result) and the periodic timers that
 * drive rendering, game ticks and AI turns.
 *
 * @author Adri and Dysta
 */
public class Planets extends Application {

    /**
     * Can only be changed in the source code, enables more logs.
     */
    public final static boolean DEBUG = false;

    /**
     * The width used for the game window.
     */
    private final static double WIDTH = 1280;

    /**
     * The height used for the game window.
     */
    private final static double HEIGHT = 1000;

    /**
     * The preferred width for the menu-style windows (menu, load, result).
     */
    private final static double MENU_WIDTH = 600;

    /**
     * The preferred height for the menu-style windows (menu, load, result).
     */
    private final static double MENU_HEIGHT = 400;

    /**
     * How many frames should be processed every second.
     */
    private final static int REFRESHRATE = 60;

    /**
     * How many game ticks should be processed every second.
     */
    private final static int TICKRATE = 60;

    /**
     * How many times per second are AIs able to "think".
     */
    private final static int AI_THINKRATE = 2;

    /**
     * Timestamp (ms) of the last game tick, used for respecting TICKRATE.
     */
    private static long last_tick;

    /**
     * Timestamp (ms) of the last processed frame, used for respecting REFRESHRATE.
     */
    private static long last_frame;

    /**
     * Timestamp (ms) of the last AI turn, used for respecting AI_THINKRATE.
     * NOTE(review): never seeded in start(), so it defaults to 0 and the first
     * AI pass runs on the very first timer callback — confirm this is intended.
     */
    private static long last_think;

    /**
     * Stores recent tick timestamps to compute the effective tickrate, used in logs.
     */
    private static ArrayList<Long> ticks;

    /**
     * The main Stage, containing displayed elements.
     */
    public static Stage stage;

    /**
     * The Menu window : Offers to start a new game, see the Load menu, and quit.
     */
    public static Menu menu;

    /**
     * The Game window.
     */
    public static Game game;

    /**
     * The Load window : Lists the savegames to load.
     */
    public static Load load;

    /**
     * The ResultScreen window : Printed when the game has ended.
     */
    public static ResultScreen resultScreen;

    /**
     * Starts the application.
     *
     * @param args Default console arguments passed to Application.launch
     */
    public static void main(String[] args) {
        launch(args);
    }

    /**
     * Creates the initial application state (menu and load windows), loads the
     * global assets, seeds the tick/frame timestamps, and shows the main menu.
     *
     * @param primaryStage This argument is provided by the Application superclass
     */
    @Override
    public void start(Stage primaryStage) {
        stage = primaryStage;
        menu = new Menu();
        load = new Load(menu);

        try {
            ResourcesManager.initGlobalAssets(MENU_WIDTH, MENU_HEIGHT);
        } catch (Exception e) {
            // Asset load failure is logged but not fatal here.
            System.err.println("Failed to load ResourcesManager GlobalAssets: " + e);
        }

        Planets.last_tick = System.currentTimeMillis();
        Planets.last_frame = System.currentTimeMillis();

        startMenu();
    }

    /**
     * Shows the Main Menu and starts a timer whose goal is to handle Window
     * switching: every callback reads the window selected by the menu, resets
     * the selection to STANDBY, and performs the corresponding transition.
     */
    public static void startMenu() {
        menu.setStage(stage, "Main Menu");
        menu.init(MENU_WIDTH, MENU_HEIGHT);

        AnimationTimer menu_anim = new AnimationTimer() {
            @Override
            public void handle(long arg0) {
                int selectedMenu;
                selectedMenu = menu.getSelectedWindow();
                // Reset immediately so each selection is handled exactly once.
                menu.setSelectedWindow(Window.STANDBY);
                switch (selectedMenu) {
                    case Window.MAIN_MENU:
                        // Returning to the menu discards any running game.
                        if (game != null) {
                            game.clear();
                        }
                        menu.setStage(stage, "Main Menu");
                        menu.init(MENU_WIDTH, MENU_HEIGHT);
                        menu.stage.centerOnScreen();
                        menu.show();
                        break;
                    case Window.GAME:
                        int nbPlayers = menu.getNbPlayers();
                        int nbPlanets = menu.getNbPlanets();
                        // Only start when every player can own at least one planet.
                        if (nbPlayers >= 1 && nbPlanets >= nbPlayers) {
                            menu.clear();
                            Planets.startGame(menu.getNbPlayers(), menu.getNbPlanets());
                            Game.setFreeze(false);
                        }
                        break;
                    case Window.LOAD:
                        // NOTE(review): unlike the other cases, no show() is called
                        // here — presumably Load displays itself; confirm.
                        load = new Load(menu);
                        load.setStage(stage, "Load save");
                        load.init(MENU_WIDTH, MENU_HEIGHT);
                        load.stage.centerOnScreen();
                        break;
                    case Window.LOADING:
                        try {
                            ResourcesManager.initGameAssets(WIDTH, HEIGHT);
                        } catch (Exception e) {
                            // NOTE(review): message says "MenuAssets" but the call
                            // loads GameAssets — likely a copy/paste slip.
                            System.err.println("Failed to load ResourcesManager MenuAssets: " + e);
                        }
                        break;
                    case Window.RESULT_SCREEN:
                        if (game != null) {
                            game.clear();
                        }
                        resultScreen = new ResultScreen();
                        resultScreen.setStage(stage, "Game Over !");
                        resultScreen.init(MENU_WIDTH, MENU_HEIGHT);
                        resultScreen.stage.centerOnScreen();
                        resultScreen.show();
                        break;
                    case Window.QUIT:
                        System.exit(0);
                        break;
                }
            }
        };
        menu_anim.start();
    }

    /**
     * Creates a random game and starts the global Game handler: a timer that
     * rate-limits game ticks (TICKRATE), AI turns (AI_THINKRATE) and UI
     * refreshes (REFRESHRATE) against wall-clock time.
     *
     * @param nbPlayers The maximum amount of players to assign a planet to
     * @param nbPlanets The maximum amount of planets to generate
     */
    public static void startGame(int nbPlayers, int nbPlanets) {
        // NOTE(review): error handling around asset loading was disabled;
        // an exception here will propagate out of the timer callback.
        //try {
        ResourcesManager.initGameAssets(WIDTH, HEIGHT);
        //} catch (Exception e) {
        //    System.err.println("Failed to load ResourcesManager GameAssets: " + e);
        //}

        // Create Game and start it
        game = new Game(nbPlayers, nbPlanets);
        game.setStage(stage, "Planets");
        game.init(WIDTH, HEIGHT);

        // Ring buffer of the last 60 tick timestamps, used to compute the
        // effective tickrate in debug logs (60 matches one second at TICKRATE).
        Planets.ticks = new ArrayList<>();
        for (int i = 60; i > 0; i--) {
            Planets.ticks.add((long) 0);
        }

        AnimationTimer game_anim = new AnimationTimer() {
            @Override
            public void handle(long arg0) {
                long now = System.currentTimeMillis();
                // Game tick, capped at TICKRATE per second.
                if (now - Planets.last_tick >= 1000 / Planets.TICKRATE) {
                    Planets.last_tick = now;
                    game.handle(arg0);
                    Planets.ticks.remove(0);
                    Planets.ticks.add(now);
                }
                // AI turn, capped at AI_THINKRATE per second.
                if (now - Planets.last_think >= 1000 / Planets.AI_THINKRATE) {
                    Planets.last_think = now;
                    game.handleAI();
                }
                // UI refresh, capped at REFRESHRATE per second.
                if (now - Planets.last_frame >= 1000 / Planets.REFRESHRATE) {
                    Planets.last_frame = now;
                    if (Planets.DEBUG) {
                        System.out.println("-------------- Tick n°" + Game.ticks + " (tickrate: " + DebugUtils.tickRate(Planets.ticks) + ") --------------");
                        System.out.println("Nodes : " + DebugUtils.getAllNodes(root).size());
                    }
                    game.updateUI();
                }
            }
        };
        game_anim.start();
    }
}
package com.sunil.expandablerecyclerview;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    /** Sanity check that integer addition works on the host JVM. */
    @Test
    public void addition_isCorrect() throws Exception {
        int expected = 4;
        int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}
package hudson.remoting;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.TeeOutputStream;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static org.junit.Assert.*;

/**
 * Runs a channel in a separate JVM by launching a new JVM.
 *
 * The child runs {@code hudson.remoting.Launcher} with the parent's classpath;
 * the child's stdin/stdout carry the channel traffic and its stderr is copied
 * to the parent's stdout.
 */
public class ForkRunner implements ChannelRunner {
    // Child JVM; created in start(), reaped in stop().
    private Process proc;
    // Executor backing the Channel; shut down in stop().
    private ExecutorService executor;
    // Pumps the child's stderr to our stdout; joined in stop().
    private Copier copier;

    /**
     * Builds the argument list for the child JVM (classpath + Launcher main
     * class), without the leading "java" executable.
     */
    protected List<String> buildCommandLine() {
        String cp = getClasspath();
        System.out.println(cp);

        List<String> r = new ArrayList<String>();
        r.add("-cp");
        r.add(cp);
        r.add(Launcher.class.getName());
        return r;
    }

    /**
     * Forks the child JVM and wires its stdin/stdout into a new Channel.
     * Assumes "java" is resolvable on the PATH.
     */
    public Channel start() throws Exception {
        System.out.println("forking a new process");
        // proc = Runtime.getRuntime().exec("java -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000 hudson.remoting.Launcher");
        List<String> cmds = buildCommandLine();
        cmds.add(0,"java");
        proc = Runtime.getRuntime().exec(cmds.toArray(new String[0]));

        copier = new Copier("copier",proc.getErrorStream(),System.out);
        copier.start();

        executor = Executors.newCachedThreadPool();
        OutputStream out = proc.getOutputStream();
        if (RECORD_OUTPUT) {
            // Debug aid: tee everything we send to the child into a temp file.
            // NOTE(review): the FileOutputStream is never explicitly closed; it
            // lives as long as the channel's output stream — confirm acceptable
            // for a test-only debug path.
            File f = File.createTempFile("remoting",".log");
            System.out.println("Recording to "+f);
            out = new TeeOutputStream(out,new FileOutputStream(f));
        }
        return new Channel("north", executor, proc.getInputStream(), out);
    }

    /**
     * Closes the channel, waits for it (up to 10s), drains stderr, reaps the
     * child process, and asserts it exited cleanly. Order matters: the channel
     * must be closed before the executor is shut down and the process reaped.
     */
    public void stop(Channel channel) throws Exception {
        channel.close();
        channel.join(10*1000);

//            System.out.println("north completed");

        executor.shutdown();

        copier.join();
        int r = proc.waitFor();
//            System.out.println("south completed");

        assertEquals("exit code should have been 0", 0, r);
    }

    /** Name of this runner, used to label test variants. */
    public String getName() {
        return "fork";
    }

    /**
     * Reconstructs the current classpath from the class loader's URLs.
     * NOTE(review): the URLClassLoader cast only holds on JVMs where the app
     * class loader is a URLClassLoader (pre-Java 9, or a Maven surefire
     * isolated loader) — confirm for the target build environment.
     */
    public String getClasspath() {
        // this assumes we run in Maven
        StringBuilder buf = new StringBuilder();
        URLClassLoader ucl = (URLClassLoader)getClass().getClassLoader();
        for (URL url : ucl.getURLs()) {
            if (buf.length()>0) buf.append(File.pathSeparatorChar);
            buf.append(FileUtils.toFile(url)); // assume all of them are file URLs
        }
        return buf.toString();
    }

    /**
     * Record the communication to the remote node. Used during debugging.
     */
    private static boolean RECORD_OUTPUT = false;
}
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.openwire.v5; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import org.apache.activemq.openwire.*; import org.apache.activemq.command.*; /** * Marshalling code for Open Wire Format for ActiveMQDestinationMarshaller * * * NOTE!: This file is auto generated - do not modify! * if you need to make a change, please see the modify the groovy scripts in the * under src/gram/script and then use maven openwire:generate to regenerate * this file. 
 *
 */
public abstract class ActiveMQDestinationMarshaller extends BaseDataStreamMarshaller {

    /**
     * Un-marshal an object instance from the data input stream
     *
     * @param wireFormat the wire format in use for this stream
     * @param o the object to un-marshal
     * @param dataIn the data input stream to build the object from
     * @param bs the boolean stream carrying the flags written during marshalling
     * @throws IOException
     */
    public void tightUnmarshal(OpenWireFormat wireFormat, Object o, DataInput dataIn, BooleanStream bs) throws IOException {
        super.tightUnmarshal(wireFormat, o, dataIn, bs);

        ActiveMQDestination info = (ActiveMQDestination)o;
        // The only field marshalled at this level is the destination's physical name.
        info.setPhysicalName(tightUnmarshalString(dataIn, bs));
    }

    /**
     * Write the booleans that this object uses to a BooleanStream
     *
     * @param wireFormat the wire format in use for this stream
     * @param o the instance whose flags are recorded
     * @param bs the boolean stream receiving the flags
     * @return the marshalled size contribution of this object, in bytes
     * @throws IOException
     */
    public int tightMarshal1(OpenWireFormat wireFormat, Object o, BooleanStream bs) throws IOException {
        ActiveMQDestination info = (ActiveMQDestination)o;

        int rc = super.tightMarshal1(wireFormat, o, bs);
        rc += tightMarshalString1(info.getPhysicalName(), bs);

        return rc + 0;
    }

    /**
     * Write a object instance to data output stream
     *
     * @param wireFormat the wire format in use for this stream
     * @param o the instance to be marshaled
     * @param dataOut the output stream
     * @param bs the boolean stream written during tightMarshal1
     * @throws IOException thrown if an error occurs
     */
    public void tightMarshal2(OpenWireFormat wireFormat, Object o, DataOutput dataOut, BooleanStream bs) throws IOException {
        super.tightMarshal2(wireFormat, o, dataOut, bs);

        ActiveMQDestination info = (ActiveMQDestination)o;
        tightMarshalString2(info.getPhysicalName(), dataOut, bs);
    }

    /**
     * Un-marshal an object instance from the data input stream
     * (loose encoding: no separate boolean stream).
     *
     * @param wireFormat the wire format in use for this stream
     * @param o the object to un-marshal
     * @param dataIn the data input stream to build the object from
     * @throws IOException
     */
    public void looseUnmarshal(OpenWireFormat wireFormat, Object o, DataInput dataIn) throws IOException {
        super.looseUnmarshal(wireFormat, o, dataIn);

        ActiveMQDestination info = (ActiveMQDestination)o;
        info.setPhysicalName(looseUnmarshalString(dataIn));
    }

    /**
     * Write a object instance to the data output stream using loose encoding.
     *
     * @param wireFormat the wire format in use for this stream
     * @param o the instance to be marshaled
     * @param dataOut the output stream
     * @throws IOException
     */
    public void looseMarshal(OpenWireFormat wireFormat, Object o, DataOutput dataOut) throws IOException {
        ActiveMQDestination info = (ActiveMQDestination)o;

        super.looseMarshal(wireFormat, o, dataOut);
        looseMarshalString(info.getPhysicalName(), dataOut);
    }
}
/* DungeonDiver7: A Dungeon-Diving RPG Copyright (C) 2021-present Eric Ahnell Any questions should be directed to the author via email at: products@puttysoftware.com */ package com.puttysoftware.dungeondiver7.dungeon.objects; import com.puttysoftware.dungeondiver7.dungeon.abstractobjects.AbstractMover; import com.puttysoftware.dungeondiver7.utilities.Direction; import com.puttysoftware.dungeondiver7.utilities.TypeConstants; public class PartyMover extends AbstractMover { // Constructors public PartyMover() { super(); this.setDirection(Direction.NORTH); this.setFrameNumber(1); this.type.set(TypeConstants.TYPE_MOVER); } @Override public final int getStringBaseID() { return 37; } }
package co.adeshina.c19ta.data_api.service;

import co.adeshina.c19ta.common.dto.TweetAggregate;
import co.adeshina.c19ta.data_api.dto.DataPacket;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code DataPacketServiceImpl}, driven by a mocked Kafka
 * consumer that returns three fixed {@link TweetAggregate} records.
 */
public class DataPacketServiceImplTest {

    // Unchecked cast is unavoidable when mocking a generic type with Mockito.
    KafkaConsumerService<TweetAggregate> mockKafkaConsumer =
            (KafkaConsumerService<TweetAggregate>) mock(KafkaConsumerService.class);

    /** Stub the consumer: every poll yields the fixture data; commits are no-ops. */
    @BeforeEach
    public void setup() {
        when(mockKafkaConsumer.poll()).thenReturn(dataHelper());
        doNothing().when(mockKafkaConsumer).commitOffsets();
    }

    @Test
    public void shouldBuildCorrectDataPacket() {
        DataPacketService dataPacketService = new DataPacketServiceImpl(mockKafkaConsumer);

        // NOTE(review): the result of this first buildPacket() call is discarded
        // and the method is called again below — possibly intentional (exercising
        // a second poll after commit), possibly leftover code; confirm.
        dataPacketService.buildPacket();
        Optional<DataPacket> dataPacketOptional = dataPacketService.buildPacket();

        assertTrue(dataPacketOptional.isPresent());
        DataPacket packet = dataPacketOptional.get();

        // Case-insensitive match selects the "Coronavirus" aggregate.
        Optional<DataPacket.Data> coronavirusOptional = packet.getData()
                .stream()
                .filter(data -> data.getTerm().equalsIgnoreCase("coronavirus"))
                .findFirst();

        assertTrue(coronavirusOptional.isPresent());
        DataPacket.Data coronavirusData = coronavirusOptional.get();
        double percentageThousandFollowers = coronavirusData.getPercentageThousandFollowers();
        double percentageLessThanThousandFollowers = coronavirusData.getPercentageLessThanThousandFollowers();

        // Fixture totals: >=1000 followers -> 12 + 9 + 5 = 26; Coronavirus share 9/26 ~= 34.6%.
        //                 <1000 followers  ->  8 + 11 + 6 = 25; Coronavirus share 11/25 = 44.0%.
        assertEquals(44.0, percentageLessThanThousandFollowers);
        assertEquals(34.6, percentageThousandFollowers);
    }

    /** Fixed set of three aggregates used as the consumer's poll result. */
    private List<TweetAggregate> dataHelper() {
        List<TweetAggregate> result = new ArrayList<>();
        result.add(construct("COVID-19", 12, 8));
        result.add(construct("Coronavirus", 9, 11));
        result.add(construct("Chinese Virus", 5, 6));
        return result;
    }

    /**
     * Builds one aggregate for {@code term} with tweet counts split by the
     * author's follower bracket.
     *
     * @param term the tracked search term
     * @param tweetCountThousandFollowers count from accounts with >= 1000 followers
     * @param countLessThanThousandFollowers count from accounts with < 1000 followers
     */
    private TweetAggregate construct(String term,
                                     int tweetCountThousandFollowers,
                                     int countLessThanThousandFollowers) {
        TweetAggregate tweetAggregate = new TweetAggregate();
        tweetAggregate.setTerm(term);

        Map<TweetAggregate.AccountType, Integer> countMap = new HashMap<>();
        countMap.put(TweetAggregate.AccountType.ONE_THOUSAND_FOLLOWERS, tweetCountThousandFollowers);
        countMap.put(TweetAggregate.AccountType.LESS_THAN_ONE_THOUSAND_FOLLOWERS, countLessThanThousandFollowers);
        tweetAggregate.setCountByAccountType(countMap);
        return tweetAggregate;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.deployers.impl; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.activemq.artemis.ArtemisConstants; import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration; import org.apache.activemq.artemis.api.core.BroadcastEndpointFactory; import org.apache.activemq.artemis.api.core.BroadcastGroupConfiguration; import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration; import org.apache.activemq.artemis.api.core.JGroupsFileBroadcastEndpointFactory; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.UDPBroadcastEndpointFactory; import org.apache.activemq.artemis.api.core.client.ActiveMQClient; import org.apache.activemq.artemis.core.config.BridgeConfiguration; import 
org.apache.activemq.artemis.core.config.ClusterConnectionConfiguration; import org.apache.activemq.artemis.core.config.Configuration; import org.apache.activemq.artemis.core.config.ConnectorServiceConfiguration; import org.apache.activemq.artemis.core.config.CoreQueueConfiguration; import org.apache.activemq.artemis.core.config.DivertConfiguration; import org.apache.activemq.artemis.core.config.ScaleDownConfiguration; import org.apache.activemq.artemis.core.config.ha.ColocatedPolicyConfiguration; import org.apache.activemq.artemis.core.config.ha.LiveOnlyPolicyConfiguration; import org.apache.activemq.artemis.core.config.ha.ReplicaPolicyConfiguration; import org.apache.activemq.artemis.core.config.ha.ReplicatedPolicyConfiguration; import org.apache.activemq.artemis.core.config.ha.SharedStoreMasterPolicyConfiguration; import org.apache.activemq.artemis.core.config.ha.SharedStoreSlavePolicyConfiguration; import org.apache.activemq.artemis.core.config.impl.ConfigurationImpl; import org.apache.activemq.artemis.core.config.impl.Validators; import org.apache.activemq.artemis.core.config.storage.DatabaseStorageConfiguration; import org.apache.activemq.artemis.core.config.storage.FileStorageConfiguration; import org.apache.activemq.artemis.core.io.aio.AIOSequentialFileFactory; import org.apache.activemq.artemis.core.security.Role; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.JournalType; import org.apache.activemq.artemis.core.server.SecuritySettingPlugin; import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType; import org.apache.activemq.artemis.core.server.group.impl.GroupingHandlerConfiguration; import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.core.settings.impl.ResourceLimitSettings; import 
org.apache.activemq.artemis.core.settings.impl.SlowConsumerPolicy; import org.apache.activemq.artemis.uri.AcceptorTransportConfigurationParser; import org.apache.activemq.artemis.uri.ConnectorTransportConfigurationParser; import org.apache.activemq.artemis.utils.ClassloadingUtil; import org.apache.activemq.artemis.utils.DefaultSensitiveStringCodec; import org.apache.activemq.artemis.utils.PasswordMaskingUtil; import org.apache.activemq.artemis.utils.SensitiveDataCodec; import org.apache.activemq.artemis.utils.XMLConfigurationUtil; import org.apache.activemq.artemis.utils.XMLUtil; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; /** * Parses an XML document according to the {@literal artemis-configuration.xsd} schema. */ public final class FileConfigurationParser extends XMLConfigurationUtil { // Security Parsing public static final String SECURITY_ELEMENT_NAME = "security-setting"; public static final String SECURITY_PLUGIN_ELEMENT_NAME = "security-setting-plugin"; private static final String PERMISSION_ELEMENT_NAME = "permission"; private static final String SETTING_ELEMENT_NAME = "setting"; private static final String TYPE_ATTR_NAME = "type"; private static final String ROLES_ATTR_NAME = "roles"; private static final String NAME_ATTR_NAME = "name"; private static final String VALUE_ATTR_NAME = "value"; static final String CREATEDURABLEQUEUE_NAME = "createDurableQueue"; private static final String DELETEDURABLEQUEUE_NAME = "deleteDurableQueue"; private static final String CREATE_NON_DURABLE_QUEUE_NAME = "createNonDurableQueue"; private static final String DELETE_NON_DURABLE_QUEUE_NAME = "deleteNonDurableQueue"; // HORNETQ-309 we keep supporting these attribute names for compatibility private static final String CREATETEMPQUEUE_NAME = "createTempQueue"; private static final String DELETETEMPQUEUE_NAME = "deleteTempQueue"; private static final String SEND_NAME = "send"; private static final String 
CONSUME_NAME = "consume"; private static final String MANAGE_NAME = "manage"; private static final String BROWSE_NAME = "browse"; // Address parsing private static final String DEAD_LETTER_ADDRESS_NODE_NAME = "dead-letter-address"; private static final String EXPIRY_ADDRESS_NODE_NAME = "expiry-address"; private static final String EXPIRY_DELAY_NODE_NAME = "expiry-delay"; private static final String REDELIVERY_DELAY_NODE_NAME = "redelivery-delay"; private static final String REDELIVERY_DELAY_MULTIPLIER_NODE_NAME = "redelivery-delay-multiplier"; private static final String MAX_REDELIVERY_DELAY_NODE_NAME = "max-redelivery-delay"; private static final String MAX_DELIVERY_ATTEMPTS = "max-delivery-attempts"; private static final String MAX_SIZE_BYTES_NODE_NAME = "max-size-bytes"; private static final String ADDRESS_FULL_MESSAGE_POLICY_NODE_NAME = "address-full-policy"; private static final String PAGE_SIZE_BYTES_NODE_NAME = "page-size-bytes"; private static final String PAGE_MAX_CACHE_SIZE_NODE_NAME = "page-max-cache-size"; private static final String MESSAGE_COUNTER_HISTORY_DAY_LIMIT_NODE_NAME = "message-counter-history-day-limit"; private static final String LVQ_NODE_NAME = "last-value-queue"; private static final String REDISTRIBUTION_DELAY_NODE_NAME = "redistribution-delay"; private static final String SEND_TO_DLA_ON_NO_ROUTE = "send-to-dla-on-no-route"; private static final String SLOW_CONSUMER_THRESHOLD_NODE_NAME = "slow-consumer-threshold"; private static final String SLOW_CONSUMER_CHECK_PERIOD_NODE_NAME = "slow-consumer-check-period"; private static final String SLOW_CONSUMER_POLICY_NODE_NAME = "slow-consumer-policy"; private static final String AUTO_CREATE_JMS_QUEUES = "auto-create-jms-queues"; private static final String AUTO_DELETE_JMS_QUEUES = "auto-delete-jms-queues"; private static final String AUTO_CREATE_JMS_TOPICS = "auto-create-jms-topics"; private static final String AUTO_DELETE_JMS_TOPICS = "auto-delete-jms-topics"; private static final String 
MANAGEMENT_BROWSE_PAGE_SIZE = "management-browse-page-size"; private static final String MAX_CONNECTIONS_NODE_NAME = "max-connections"; private static final String MAX_QUEUES_NODE_NAME = "max-queues"; private static final String GLOBAL_MAX_SIZE = "global-max-size"; private static final String MAX_DISK_USAGE = "max-disk-usage"; private static final String DISK_SCAN_PERIOD = "disk-scan-period"; // Attributes ---------------------------------------------------- private boolean validateAIO = false; /** * @return the validateAIO */ public boolean isValidateAIO() { return validateAIO; } /** * @param validateAIO the validateAIO to set */ public void setValidateAIO(final boolean validateAIO) { this.validateAIO = validateAIO; } public Configuration parseMainConfig(final InputStream input) throws Exception { Reader reader = new InputStreamReader(input); String xml = XMLUtil.readerToString(reader); xml = XMLUtil.replaceSystemProps(xml); Element e = XMLUtil.stringToElement(xml); Configuration config = new ConfigurationImpl(); parseMainConfig(e, config); return config; } public void parseMainConfig(final Element e, final Configuration config) throws Exception { config.setName(getString(e, "name", config.getName(), Validators.NO_CHECK)); NodeList haPolicyNodes = e.getElementsByTagName("ha-policy"); if (haPolicyNodes.getLength() > 0) { parseHAPolicyConfiguration((Element) haPolicyNodes.item(0), config); } //if we aren already set then set to default if (config.getHAPolicyConfiguration() == null) { config.setHAPolicyConfiguration(new LiveOnlyPolicyConfiguration()); } NodeList storeTypeNodes = e.getElementsByTagName("store"); if (storeTypeNodes.getLength() > 0) { parseStoreConfiguration((Element) storeTypeNodes.item(0), config); } config.setResolveProtocols(getBoolean(e, "resolve-protocols", config.isResolveProtocols())); config.setPersistenceEnabled(getBoolean(e, "persistence-enabled", config.isPersistenceEnabled())); config.setPersistDeliveryCountBeforeDelivery(getBoolean(e, 
"persist-delivery-count-before-delivery", config.isPersistDeliveryCountBeforeDelivery())); config.setScheduledThreadPoolMaxSize(getInteger(e, "scheduled-thread-pool-max-size", config.getScheduledThreadPoolMaxSize(), Validators.GT_ZERO)); config.setThreadPoolMaxSize(getInteger(e, "thread-pool-max-size", config.getThreadPoolMaxSize(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setSecurityEnabled(getBoolean(e, "security-enabled", config.isSecurityEnabled())); config.setGracefulShutdownEnabled(getBoolean(e, "graceful-shutdown-enabled", config.isGracefulShutdownEnabled())); config.setGracefulShutdownTimeout(getLong(e, "graceful-shutdown-timeout", config.getGracefulShutdownTimeout(), Validators.MINUS_ONE_OR_GE_ZERO)); config.setJMXManagementEnabled(getBoolean(e, "jmx-management-enabled", config.isJMXManagementEnabled())); config.setJMXDomain(getString(e, "jmx-domain", config.getJMXDomain(), Validators.NOT_NULL_OR_EMPTY)); config.setJMXUseBrokerName(getBoolean(e, "jmx-use-broker-name", config.isJMXUseBrokerName())); config.setSecurityInvalidationInterval(getLong(e, "security-invalidation-interval", config.getSecurityInvalidationInterval(), Validators.GT_ZERO)); config.setConnectionTTLOverride(getLong(e, "connection-ttl-override", config.getConnectionTTLOverride(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setEnabledAsyncConnectionExecution(getBoolean(e, "async-connection-execution-enabled", config.isAsyncConnectionExecutionEnabled())); config.setTransactionTimeout(getLong(e, "transaction-timeout", config.getTransactionTimeout(), Validators.GT_ZERO)); config.setTransactionTimeoutScanPeriod(getLong(e, "transaction-timeout-scan-period", config.getTransactionTimeoutScanPeriod(), Validators.GT_ZERO)); config.setMessageExpiryScanPeriod(getLong(e, "message-expiry-scan-period", config.getMessageExpiryScanPeriod(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setMessageExpiryThreadPriority(getInteger(e, "message-expiry-thread-priority", config.getMessageExpiryThreadPriority(), 
Validators.THREAD_PRIORITY_RANGE)); config.setIDCacheSize(getInteger(e, "id-cache-size", config.getIDCacheSize(), Validators.GT_ZERO)); config.setPersistIDCache(getBoolean(e, "persist-id-cache", config.isPersistIDCache())); config.setManagementAddress(new SimpleString(getString(e, "management-address", config.getManagementAddress().toString(), Validators.NOT_NULL_OR_EMPTY))); config.setManagementNotificationAddress(new SimpleString(getString(e, "management-notification-address", config.getManagementNotificationAddress().toString(), Validators.NOT_NULL_OR_EMPTY))); config.setMaskPassword(getBoolean(e, "mask-password", false)); config.setPasswordCodec(getString(e, "password-codec", DefaultSensitiveStringCodec.class.getName(), Validators.NOT_NULL_OR_EMPTY)); config.setPopulateValidatedUser(getBoolean(e, "populate-validated-user", config.isPopulateValidatedUser())); config.setConnectionTtlCheckInterval(getLong(e, "connection-ttl-check-interval", config.getConnectionTtlCheckInterval(), Validators.GT_ZERO)); config.setConfigurationFileRefreshPeriod(getLong(e, "configuration-file-refresh-period", config.getConfigurationFileRefreshPeriod(), Validators.GT_ZERO)); config.setGlobalMaxSize(getLong(e, GLOBAL_MAX_SIZE, config.getGlobalMaxSize(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setMaxDiskUsage(getInteger(e, MAX_DISK_USAGE, config.getMaxDiskUsage(), Validators.PERCENTAGE)); config.setDiskScanPeriod(getInteger(e, DISK_SCAN_PERIOD, config.getDiskScanPeriod(), Validators.MINUS_ONE_OR_GT_ZERO)); // parsing cluster password String passwordText = getString(e, "cluster-password", null, Validators.NO_CHECK); final boolean maskText = config.isMaskPassword(); if (passwordText != null) { if (maskText) { SensitiveDataCodec<String> codec = PasswordMaskingUtil.getCodec(config.getPasswordCodec()); config.setClusterPassword(codec.decode(passwordText)); } else { config.setClusterPassword(passwordText); } } config.setClusterUser(getString(e, "cluster-user", config.getClusterUser(), 
Validators.NO_CHECK)); NodeList interceptorNodes = e.getElementsByTagName("remoting-interceptors"); ArrayList<String> incomingInterceptorList = new ArrayList<>(); if (interceptorNodes.getLength() > 0) { NodeList interceptors = interceptorNodes.item(0).getChildNodes(); for (int i = 0; i < interceptors.getLength(); i++) { if ("class-name".equalsIgnoreCase(interceptors.item(i).getNodeName())) { String clazz = getTrimmedTextContent(interceptors.item(i)); incomingInterceptorList.add(clazz); } } } NodeList incomingInterceptorNodes = e.getElementsByTagName("remoting-incoming-interceptors"); if (incomingInterceptorNodes.getLength() > 0) { NodeList interceptors = incomingInterceptorNodes.item(0).getChildNodes(); for (int i = 0; i < interceptors.getLength(); i++) { if ("class-name".equalsIgnoreCase(interceptors.item(i).getNodeName())) { String clazz = getTrimmedTextContent(interceptors.item(i)); incomingInterceptorList.add(clazz); } } } config.setIncomingInterceptorClassNames(incomingInterceptorList); NodeList outgoingInterceptorNodes = e.getElementsByTagName("remoting-outgoing-interceptors"); ArrayList<String> outgoingInterceptorList = new ArrayList<>(); if (outgoingInterceptorNodes.getLength() > 0) { NodeList interceptors = outgoingInterceptorNodes.item(0).getChildNodes(); for (int i = 0; i < interceptors.getLength(); i++) { if ("class-name".equalsIgnoreCase(interceptors.item(i).getNodeName())) { String clazz = interceptors.item(i).getTextContent(); outgoingInterceptorList.add(clazz); } } } config.setOutgoingInterceptorClassNames(outgoingInterceptorList); NodeList connectorNodes = e.getElementsByTagName("connector"); for (int i = 0; i < connectorNodes.getLength(); i++) { Element connectorNode = (Element) connectorNodes.item(i); TransportConfiguration connectorConfig = parseConnectorTransportConfiguration(connectorNode, config); if (connectorConfig.getName() == null) { ActiveMQServerLogger.LOGGER.connectorWithNoName(); continue; } if 
(config.getConnectorConfigurations().containsKey(connectorConfig.getName())) { ActiveMQServerLogger.LOGGER.connectorAlreadyDeployed(connectorConfig.getName()); continue; } config.getConnectorConfigurations().put(connectorConfig.getName(), connectorConfig); } NodeList acceptorNodes = e.getElementsByTagName("acceptor"); for (int i = 0; i < acceptorNodes.getLength(); i++) { Element acceptorNode = (Element) acceptorNodes.item(i); TransportConfiguration acceptorConfig = parseAcceptorTransportConfiguration(acceptorNode, config); config.getAcceptorConfigurations().add(acceptorConfig); } NodeList bgNodes = e.getElementsByTagName("broadcast-group"); for (int i = 0; i < bgNodes.getLength(); i++) { Element bgNode = (Element) bgNodes.item(i); parseBroadcastGroupConfiguration(bgNode, config); } NodeList dgNodes = e.getElementsByTagName("discovery-group"); for (int i = 0; i < dgNodes.getLength(); i++) { Element dgNode = (Element) dgNodes.item(i); parseDiscoveryGroupConfiguration(dgNode, config); } NodeList brNodes = e.getElementsByTagName("bridge"); for (int i = 0; i < brNodes.getLength(); i++) { Element mfNode = (Element) brNodes.item(i); parseBridgeConfiguration(mfNode, config); } NodeList gaNodes = e.getElementsByTagName("grouping-handler"); for (int i = 0; i < gaNodes.getLength(); i++) { Element gaNode = (Element) gaNodes.item(i); parseGroupingHandlerConfiguration(gaNode, config); } NodeList ccNodes = e.getElementsByTagName("cluster-connection"); for (int i = 0; i < ccNodes.getLength(); i++) { Element ccNode = (Element) ccNodes.item(i); parseClusterConnectionConfiguration(ccNode, config); } NodeList ccNodesURI = e.getElementsByTagName("cluster-connection-uri"); for (int i = 0; i < ccNodesURI.getLength(); i++) { Element ccNode = (Element) ccNodesURI.item(i); parseClusterConnectionConfigurationURI(ccNode, config); } NodeList dvNodes = e.getElementsByTagName("divert"); for (int i = 0; i < dvNodes.getLength(); i++) { Element dvNode = (Element) dvNodes.item(i); 
parseDivertConfiguration(dvNode, config); } // Persistence config config.setLargeMessagesDirectory(getString(e, "large-messages-directory", config.getLargeMessagesDirectory(), Validators.NOT_NULL_OR_EMPTY)); config.setBindingsDirectory(getString(e, "bindings-directory", config.getBindingsDirectory(), Validators.NOT_NULL_OR_EMPTY)); config.setCreateBindingsDir(getBoolean(e, "create-bindings-dir", config.isCreateBindingsDir())); config.setJournalDirectory(getString(e, "journal-directory", config.getJournalDirectory(), Validators.NOT_NULL_OR_EMPTY)); config.setPageMaxConcurrentIO(getInteger(e, "page-max-concurrent-io", config.getPageMaxConcurrentIO(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setPagingDirectory(getString(e, "paging-directory", config.getPagingDirectory(), Validators.NOT_NULL_OR_EMPTY)); config.setCreateJournalDir(getBoolean(e, "create-journal-dir", config.isCreateJournalDir())); String s = getString(e, "journal-type", config.getJournalType().toString(), Validators.JOURNAL_TYPE); if (s.equals(JournalType.NIO.toString())) { config.setJournalType(JournalType.NIO); } else if (s.equals(JournalType.ASYNCIO.toString())) { // https://jira.jboss.org/jira/browse/HORNETQ-295 // We do the check here to see if AIO is supported so we can use the correct defaults and/or use // correct settings in xml // If we fall back later on these settings can be ignored boolean supportsAIO = AIOSequentialFileFactory.isSupported(); if (supportsAIO) { config.setJournalType(JournalType.ASYNCIO); } else { if (validateAIO) { ActiveMQServerLogger.LOGGER.AIONotFound(); } config.setJournalType(JournalType.NIO); } } config.setJournalSyncTransactional(getBoolean(e, "journal-sync-transactional", config.isJournalSyncTransactional())); config.setJournalSyncNonTransactional(getBoolean(e, "journal-sync-non-transactional", config.isJournalSyncNonTransactional())); config.setJournalFileSize(getInteger(e, "journal-file-size", config.getJournalFileSize(), Validators.GT_ZERO)); int 
journalBufferTimeout = getInteger(e, "journal-buffer-timeout", config.getJournalType() == JournalType.ASYNCIO ? ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_AIO : ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_NIO, Validators.GT_ZERO); int journalBufferSize = getInteger(e, "journal-buffer-size", config.getJournalType() == JournalType.ASYNCIO ? ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_AIO : ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_NIO, Validators.GT_ZERO); int journalMaxIO = getInteger(e, "journal-max-io", config.getJournalType() == JournalType.ASYNCIO ? ActiveMQDefaultConfiguration.getDefaultJournalMaxIoAio() : ActiveMQDefaultConfiguration.getDefaultJournalMaxIoNio(), Validators.GT_ZERO); if (config.getJournalType() == JournalType.ASYNCIO) { config.setJournalBufferTimeout_AIO(journalBufferTimeout); config.setJournalBufferSize_AIO(journalBufferSize); config.setJournalMaxIO_AIO(journalMaxIO); } else { config.setJournalBufferTimeout_NIO(journalBufferTimeout); config.setJournalBufferSize_NIO(journalBufferSize); config.setJournalMaxIO_NIO(journalMaxIO); } config.setJournalMinFiles(getInteger(e, "journal-min-files", config.getJournalMinFiles(), Validators.GT_ZERO)); config.setJournalPoolFiles(getInteger(e, "journal-pool-files", config.getJournalPoolFiles(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setJournalCompactMinFiles(getInteger(e, "journal-compact-min-files", config.getJournalCompactMinFiles(), Validators.GE_ZERO)); config.setJournalCompactPercentage(getInteger(e, "journal-compact-percentage", config.getJournalCompactPercentage(), Validators.PERCENTAGE)); config.setLogJournalWriteRate(getBoolean(e, "log-journal-write-rate", ActiveMQDefaultConfiguration.isDefaultJournalLogWriteRate())); config.setJournalPerfBlastPages(getInteger(e, "perf-blast-pages", ActiveMQDefaultConfiguration.getDefaultJournalPerfBlastPages(), Validators.MINUS_ONE_OR_GT_ZERO)); config.setRunSyncSpeedTest(getBoolean(e, "run-sync-speed-test", config.isRunSyncSpeedTest())); 
config.setWildcardRoutingEnabled(getBoolean(e, "wild-card-routing-enabled", config.isWildcardRoutingEnabled()));

      // Message-counter / diagnostic settings.
      config.setMessageCounterEnabled(getBoolean(e, "message-counter-enabled", config.isMessageCounterEnabled()));

      config.setMessageCounterSamplePeriod(getLong(e, "message-counter-sample-period", config.getMessageCounterSamplePeriod(), Validators.GT_ZERO));

      config.setMessageCounterMaxDayHistory(getInteger(e, "message-counter-max-day-history", config.getMessageCounterMaxDayHistory(), Validators.GT_ZERO));

      config.setServerDumpInterval(getLong(e, "server-dump-interval", config.getServerDumpInterval(), Validators.MINUS_ONE_OR_GT_ZERO)); // in milliseconds

      config.setMemoryWarningThreshold(getInteger(e, "memory-warning-threshold", config.getMemoryWarningThreshold(), Validators.PERCENTAGE));

      config.setMemoryMeasureInterval(getLong(e, "memory-measure-interval", config.getMemoryMeasureInterval(), Validators.MINUS_ONE_OR_GT_ZERO)); // in milliseconds

      // Delegate the remaining sections to dedicated sub-parsers.
      parseAddressSettings(e, config);

      parseResourceLimits(e, config);

      parseQueues(e, config);

      parseSecurity(e, config);

      NodeList connectorServiceConfigs = e.getElementsByTagName("connector-service");

      ArrayList<ConnectorServiceConfiguration> configs = new ArrayList<>();

      for (int i = 0; i < connectorServiceConfigs.getLength(); i++) {
         Element node = (Element) connectorServiceConfigs.item(i);

         configs.add((parseConnectorService(node)));
      }

      config.setConnectorServiceConfigurations(configs);
   }

   /**
    * Parses the {@code security-settings} section: per-address-match role
    * permissions plus any security-setting-plugin declarations.
    *
    * @param e the root configuration element
    * @param config the configuration being populated
    */
   private void parseSecurity(final Element e, final Configuration config) {
      NodeList elements = e.getElementsByTagName("security-settings");

      if (elements.getLength() != 0) {
         Element node = (Element) elements.item(0);

         NodeList list = node.getElementsByTagName(SECURITY_ELEMENT_NAME);

         for (int i = 0; i < list.getLength(); i++) {
            Pair<String, Set<Role>> securityItem = parseSecurityRoles(list.item(i));

            config.putSecurityRoles(securityItem.getA(), securityItem.getB());
         }

         list = node.getElementsByTagName(SECURITY_PLUGIN_ELEMENT_NAME);

         for (int i = 0; i < list.getLength(); i++) {
            Pair<SecuritySettingPlugin, Map<String, String>> securityItem = parseSecuritySettingPlugins(list.item(i));

            // init(...) applies the parsed settings and returns the configured plugin
            config.addSecuritySettingPlugin(securityItem.getA().init(securityItem.getB()));
         }
      }
   }

   /**
    * Parses the {@code queues} section into core queue configurations.
    *
    * @param e the root configuration element
    * @param config the configuration being populated
    */
   private void parseQueues(final Element e, final Configuration config) {
      NodeList elements = e.getElementsByTagName("queues");

      if (elements.getLength() != 0) {
         Element node = (Element) elements.item(0);

         NodeList list = node.getElementsByTagName("queue");

         for (int i = 0; i < list.getLength(); i++) {
            CoreQueueConfiguration queueConfig = parseQueueConfiguration(list.item(i));

            config.getQueueConfigurations().add(queueConfig);
         }
      }
   }

   /**
    * Parses the {@code address-settings} section; each entry is stored keyed
    * by its address match expression.
    *
    * @param e the root configuration element
    * @param config the configuration being populated
    */
   private void parseAddressSettings(final Element e, final Configuration config) {
      NodeList elements = e.getElementsByTagName("address-settings");

      if (elements.getLength() != 0) {
         Element node = (Element) elements.item(0);

         NodeList list = node.getElementsByTagName("address-setting");

         for (int i = 0; i < list.getLength(); i++) {
            Pair<String, AddressSettings> addressSettings = parseAddressSettings(list.item(i));

            config.getAddressesSettings().put(addressSettings.getA(), addressSettings.getB());
         }
      }
   }

   /**
    * Parses the {@code resource-limit-settings} section.
    *
    * @param e the root configuration element
    * @param config the configuration being populated
    */
   private void parseResourceLimits(final Element e, final Configuration config) {
      NodeList elements = e.getElementsByTagName("resource-limit-settings");

      if (elements.getLength() != 0) {
         Element node = (Element) elements.item(0);

         NodeList list = node.getElementsByTagName("resource-limit-setting");

         for (int i = 0; i < list.getLength(); i++) {
            config.addResourceLimitSettings(parseResourceLimitSettings(list.item(i)));
         }
      }
   }

   /**
    * Parses a single security-setting element (match attribute plus nested
    * permission entries) into the set of roles for that address match.
    *
    * @param node the security-setting element
    * @return a pair of the match expression and the parsed roles
    */
   protected Pair<String, Set<Role>> parseSecurityRoles(final Node node) {
      final String match = node.getAttributes().getNamedItem("match").getNodeValue();

      Set<Role> securityRoles = new HashSet<>();

      Pair<String, Set<Role>> securityMatch = new Pair<>(match, securityRoles);
ArrayList<String> send = new ArrayList<>();
      ArrayList<String> consume = new ArrayList<>();
      ArrayList<String> createDurableQueue = new ArrayList<>();
      ArrayList<String> deleteDurableQueue = new ArrayList<>();
      ArrayList<String> createNonDurableQueue = new ArrayList<>();
      ArrayList<String> deleteNonDurableQueue = new ArrayList<>();
      ArrayList<String> manageRoles = new ArrayList<>();
      ArrayList<String> browseRoles = new ArrayList<>();
      // every role mentioned by any permission entry, in first-seen order
      ArrayList<String> allRoles = new ArrayList<>();
      NodeList children = node.getChildNodes();
      for (int i = 0; i < children.getLength(); i++) {
         Node child = children.item(i);
         final String name = child.getNodeName();
         if (PERMISSION_ELEMENT_NAME.equalsIgnoreCase(name)) {
            final String type = getAttributeValue(child, TYPE_ATTR_NAME);
            final String roleString = getAttributeValue(child, ROLES_ATTR_NAME);
            // the roles attribute is a comma-separated list; entries are trimmed below
            String[] roles = roleString.split(",");
            for (String role : roles) {
               if (SEND_NAME.equals(type)) {
                  send.add(role.trim());
               } else if (CONSUME_NAME.equals(type)) {
                  consume.add(role.trim());
               } else if (CREATEDURABLEQUEUE_NAME.equals(type)) {
                  createDurableQueue.add(role.trim());
               } else if (DELETEDURABLEQUEUE_NAME.equals(type)) {
                  deleteDurableQueue.add(role.trim());
               } else if (CREATE_NON_DURABLE_QUEUE_NAME.equals(type)) {
                  createNonDurableQueue.add(role.trim());
               } else if (DELETE_NON_DURABLE_QUEUE_NAME.equals(type)) {
                  deleteNonDurableQueue.add(role.trim());
               } else if (CREATETEMPQUEUE_NAME.equals(type)) {
                  // legacy temp-queue permission maps onto non-durable-queue
                  createNonDurableQueue.add(role.trim());
               } else if (DELETETEMPQUEUE_NAME.equals(type)) {
                  deleteNonDurableQueue.add(role.trim());
               } else if (MANAGE_NAME.equals(type)) {
                  manageRoles.add(role.trim());
               } else if (BROWSE_NAME.equals(type)) {
                  browseRoles.add(role.trim());
               } else {
                  // unknown permission type: logged, not fatal
                  ActiveMQServerLogger.LOGGER.rolePermissionConfigurationError(type);
               }
               if (!allRoles.contains(role.trim())) {
                  allRoles.add(role.trim());
               }
            }
         }
      }
      // one Role per distinct name, with a flag per permission list it appears in
      for (String role : allRoles) {
         securityRoles.add(new Role(role, send.contains(role), consume.contains(role), createDurableQueue.contains(role), deleteDurableQueue.contains(role), createNonDurableQueue.contains(role), deleteNonDurableQueue.contains(role), manageRoles.contains(role), browseRoles.contains(role)));
      }

      return securityMatch;
   }

   /**
    * Parses one security-setting-plugin element: instantiates the declared
    * class reflectively and collects its nested setting name/value pairs.
    */
   private Pair<SecuritySettingPlugin, Map<String, String>> parseSecuritySettingPlugins(Node item) {
      final String clazz = item.getAttributes().getNamedItem("class-name").getNodeValue();
      final Map<String, String> settings = new HashMap<>();
      NodeList children = item.getChildNodes();
      for (int j = 0; j < children.getLength(); j++) {
         Node child = children.item(j);
         final String nodeName = child.getNodeName();
         if (SETTING_ELEMENT_NAME.equalsIgnoreCase(nodeName)) {
            final String settingName = getAttributeValue(child, NAME_ATTR_NAME);
            final String settingValue = getAttributeValue(child, VALUE_ATTR_NAME);
            settings.put(settingName, settingValue);
         }
      }

      // instantiate inside a privileged block (security-manager support)
      SecuritySettingPlugin securitySettingPlugin = AccessController.doPrivileged(new PrivilegedAction<SecuritySettingPlugin>() {
         @Override
         public SecuritySettingPlugin run() {
            return (SecuritySettingPlugin) ClassloadingUtil.newInstanceFromClassLoader(clazz);
         }
      });

      return new Pair<>(securitySettingPlugin, settings);
   }

   /**
    * Parses one address-setting element (match attribute plus setting children).
    *
    * @param node the address-setting element
    * @return a pair of the match expression and the populated settings
    */
   protected Pair<String, AddressSettings> parseAddressSettings(final Node node) {
      String match = getAttributeValue(node, "match");

      NodeList children = node.getChildNodes();

      AddressSettings addressSettings = new AddressSettings();

      Pair<String, AddressSettings> setting = new Pair<>(match, addressSettings);

      for (int i = 0; i < children.getLength(); i++) {
         final Node child = children.item(i);
         final String name = child.getNodeName();
         if (DEAD_LETTER_ADDRESS_NODE_NAME.equalsIgnoreCase(name)) {
            SimpleString queueName = new SimpleString(getTrimmedTextContent(child));
            addressSettings.setDeadLetterAddress(queueName);
         } else if (EXPIRY_ADDRESS_NODE_NAME.equalsIgnoreCase(name)) {
            SimpleString queueName = new SimpleString(getTrimmedTextContent(child));
            addressSettings.setExpiryAddress(queueName);
         } else if
(EXPIRY_DELAY_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setExpiryDelay(XMLUtil.parseLong(child));
         } else if (REDELIVERY_DELAY_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setRedeliveryDelay(XMLUtil.parseLong(child));
         } else if (REDELIVERY_DELAY_MULTIPLIER_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setRedeliveryMultiplier(XMLUtil.parseDouble(child));
         } else if (MAX_REDELIVERY_DELAY_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setMaxRedeliveryDelay(XMLUtil.parseLong(child));
         } else if (MAX_SIZE_BYTES_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setMaxSizeBytes(XMLUtil.parseLong(child));
         } else if (PAGE_SIZE_BYTES_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setPageSizeBytes(XMLUtil.parseLong(child));
         } else if (PAGE_MAX_CACHE_SIZE_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setPageCacheMaxSize(XMLUtil.parseInt(child));
         } else if (MESSAGE_COUNTER_HISTORY_DAY_LIMIT_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setMessageCounterHistoryDayLimit(XMLUtil.parseInt(child));
         } else if (ADDRESS_FULL_MESSAGE_POLICY_NODE_NAME.equalsIgnoreCase(name)) {
            String value = getTrimmedTextContent(child);
            // validate before Enum.valueOf so a bad value yields a config error,
            // not a raw IllegalArgumentException
            Validators.ADDRESS_FULL_MESSAGE_POLICY_TYPE.validate(ADDRESS_FULL_MESSAGE_POLICY_NODE_NAME, value);
            AddressFullMessagePolicy policy = Enum.valueOf(AddressFullMessagePolicy.class, value);
            addressSettings.setAddressFullMessagePolicy(policy);
         } else if (LVQ_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setLastValueQueue(XMLUtil.parseBoolean(child));
         } else if (MAX_DELIVERY_ATTEMPTS.equalsIgnoreCase(name)) {
            addressSettings.setMaxDeliveryAttempts(XMLUtil.parseInt(child));
         } else if (REDISTRIBUTION_DELAY_NODE_NAME.equalsIgnoreCase(name)) {
            addressSettings.setRedistributionDelay(XMLUtil.parseLong(child));
         } else if (SEND_TO_DLA_ON_NO_ROUTE.equalsIgnoreCase(name)) {
            addressSettings.setSendToDLAOnNoRoute(XMLUtil.parseBoolean(child));
         } else if (SLOW_CONSUMER_THRESHOLD_NODE_NAME.equalsIgnoreCase(name)) {
            long slowConsumerThreshold = XMLUtil.parseLong(child);
            Validators.MINUS_ONE_OR_GT_ZERO.validate(SLOW_CONSUMER_THRESHOLD_NODE_NAME, slowConsumerThreshold);

            addressSettings.setSlowConsumerThreshold(slowConsumerThreshold);
         } else if (SLOW_CONSUMER_CHECK_PERIOD_NODE_NAME.equalsIgnoreCase(name)) {
            long slowConsumerCheckPeriod = XMLUtil.parseLong(child);
            Validators.GT_ZERO.validate(SLOW_CONSUMER_CHECK_PERIOD_NODE_NAME, slowConsumerCheckPeriod);

            addressSettings.setSlowConsumerCheckPeriod(slowConsumerCheckPeriod);
         } else if (SLOW_CONSUMER_POLICY_NODE_NAME.equalsIgnoreCase(name)) {
            String value = getTrimmedTextContent(child);
            Validators.SLOW_CONSUMER_POLICY_TYPE.validate(SLOW_CONSUMER_POLICY_NODE_NAME, value);
            SlowConsumerPolicy policy = Enum.valueOf(SlowConsumerPolicy.class, value);
            addressSettings.setSlowConsumerPolicy(policy);
         } else if (AUTO_CREATE_JMS_QUEUES.equalsIgnoreCase(name)) {
            addressSettings.setAutoCreateJmsQueues(XMLUtil.parseBoolean(child));
         } else if (AUTO_DELETE_JMS_QUEUES.equalsIgnoreCase(name)) {
            addressSettings.setAutoDeleteJmsQueues(XMLUtil.parseBoolean(child));
         } else if (AUTO_CREATE_JMS_TOPICS.equalsIgnoreCase(name)) {
            addressSettings.setAutoCreateJmsTopics(XMLUtil.parseBoolean(child));
         } else if (AUTO_DELETE_JMS_TOPICS.equalsIgnoreCase(name)) {
            addressSettings.setAutoDeleteJmsTopics(XMLUtil.parseBoolean(child));
         } else if (MANAGEMENT_BROWSE_PAGE_SIZE.equalsIgnoreCase(name)) {
            addressSettings.setManagementBrowsePageSize(XMLUtil.parseInt(child));
         }
         // NOTE(review): unknown child elements are silently ignored — confirm intended
      }
      return setting;
   }

   /**
    * Parses one resource-limit-setting element (match attribute plus limits).
    *
    * @param node the resource-limit-setting element
    * @return the populated settings
    */
   protected ResourceLimitSettings parseResourceLimitSettings(final Node node) {
      ResourceLimitSettings resourceLimitSettings = new ResourceLimitSettings();

      resourceLimitSettings.setMatch(SimpleString.toSimpleString(getAttributeValue(node, "match")));

      NodeList children = node.getChildNodes();

      for (int i = 0; i < children.getLength(); i++) {
         final Node child = children.item(i);
         final String name = child.getNodeName();
         if (MAX_CONNECTIONS_NODE_NAME.equalsIgnoreCase(name)) {
resourceLimitSettings.setMaxConnections(XMLUtil.parseInt(child));
         } else if (MAX_QUEUES_NODE_NAME.equalsIgnoreCase(name)) {
            resourceLimitSettings.setMaxQueues(XMLUtil.parseInt(child));
         }
      }
      return resourceLimitSettings;
   }

   /**
    * Parses one queue element into a core queue configuration. The queue name
    * comes from the {@code name} attribute; address, filter and durability come
    * from child elements (durable defaults to {@code true}).
    */
   protected CoreQueueConfiguration parseQueueConfiguration(final Node node) {
      String name = getAttributeValue(node, "name");
      String address = null;
      String filterString = null;
      boolean durable = true;

      NodeList children = node.getChildNodes();

      for (int j = 0; j < children.getLength(); j++) {
         Node child = children.item(j);

         if (child.getNodeName().equals("address")) {
            address = getTrimmedTextContent(child);
         } else if (child.getNodeName().equals("filter")) {
            filterString = getAttributeValue(child, "string");
         } else if (child.getNodeName().equals("durable")) {
            durable = XMLUtil.parseBoolean(child);
         }
      }

      return new CoreQueueConfiguration().setAddress(address).setName(name).setFilterString(filterString).setDurable(durable);
   }

   /**
    * Parses an acceptor element whose text content is the acceptor URI. When
    * password masking is enabled the masking properties are copied into the
    * transport params so masked passwords can be decoded.
    */
   private TransportConfiguration parseAcceptorTransportConfiguration(final Element e, final Configuration mainConfig) throws Exception {
      Node nameNode = e.getAttributes().getNamedItem("name");

      String name = nameNode != null ? nameNode.getNodeValue() : null;

      // the URI is the element's first text node
      String uri = e.getChildNodes().item(0).getNodeValue();

      AcceptorTransportConfigurationParser parser = new AcceptorTransportConfigurationParser();

      List<TransportConfiguration> configurations = parser.newObject(parser.expandURI(uri), name);

      Map<String, Object> params = configurations.get(0).getParams();

      if (mainConfig.isMaskPassword()) {
         params.put(ActiveMQDefaultConfiguration.getPropMaskPassword(), mainConfig.isMaskPassword());

         if (mainConfig.getPasswordCodec() != null) {
            params.put(ActiveMQDefaultConfiguration.getPropPasswordCodec(), mainConfig.getPasswordCodec());
         }
      }

      return configurations.get(0);
   }

   /**
    * Parses a connector element whose text content is the connector URI;
    * mirrors {@link #parseAcceptorTransportConfiguration} including the
    * password-masking propagation.
    */
   private TransportConfiguration parseConnectorTransportConfiguration(final Element e, final Configuration mainConfig) throws Exception {
      Node nameNode = e.getAttributes().getNamedItem("name");

      String name = nameNode != null ? nameNode.getNodeValue() : null;

      String uri = e.getChildNodes().item(0).getNodeValue();

      ConnectorTransportConfigurationParser parser = new ConnectorTransportConfigurationParser();

      List<TransportConfiguration> configurations = parser.newObject(parser.expandURI(uri), name);

      Map<String, Object> params = configurations.get(0).getParams();

      if (mainConfig.isMaskPassword()) {
         params.put(ActiveMQDefaultConfiguration.getPropMaskPassword(), mainConfig.isMaskPassword());

         if (mainConfig.getPasswordCodec() != null) {
            params.put(ActiveMQDefaultConfiguration.getPropPasswordCodec(), mainConfig.getPasswordCodec());
         }
      }

      return configurations.get(0);
   }

   // recognized HA policy element names
   private static final ArrayList<String> POLICY_LIST = new ArrayList<>();
   static {
      POLICY_LIST.add("colocated");
      POLICY_LIST.add("live-only");
      POLICY_LIST.add("replicated");
      POLICY_LIST.add("replica");
      POLICY_LIST.add("shared-store-master");
      POLICY_LIST.add("shared-store-slave");
   }

   // recognized ha-policy child element names
   private static final ArrayList<String> HA_LIST = new ArrayList<>();
   static {
      HA_LIST.add("live-only");
      HA_LIST.add("shared-store");
      HA_LIST.add("replication");
   }

   // recognized store child element names
   private static final ArrayList<String>
STORE_TYPE_LIST = new ArrayList<>();
   static {
      STORE_TYPE_LIST.add("database-store");
      STORE_TYPE_LIST.add("file-store");
   }

   /**
    * Parses the store section: either a database-store or a file-store element
    * selects the persistence implementation (the last match wins).
    */
   private void parseStoreConfiguration(final Element e, final Configuration mainConfig) {
      for (String storeType : STORE_TYPE_LIST) {
         NodeList storeNodeList = e.getElementsByTagName(storeType);
         if (storeNodeList.getLength() > 0) {
            Element storeNode = (Element) storeNodeList.item(0);
            if (storeNode.getTagName().equals("database-store")) {
               mainConfig.setStoreConfiguration(createDatabaseStoreConfig(storeNode));
            } else if (storeNode.getTagName().equals("file-store")) {
               mainConfig.setStoreConfiguration(createFileStoreConfig(storeNode));
            }
         }
      }
   }

   /**
    * Parses the ha-policy section. For replication and shared-store the nested
    * master / slave / colocated element decides the concrete policy; live-only
    * stands alone. A later match overwrites an earlier one.
    */
   private void parseHAPolicyConfiguration(final Element e, final Configuration mainConfig) {
      for (String haType : HA_LIST) {
         NodeList haNodeList = e.getElementsByTagName(haType);
         if (haNodeList.getLength() > 0) {
            Element haNode = (Element) haNodeList.item(0);
            if (haNode.getTagName().equals("replication")) {
               NodeList masterNodeList = e.getElementsByTagName("master");
               if (masterNodeList.getLength() > 0) {
                  Element masterNode = (Element) masterNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createReplicatedHaPolicy(masterNode));
               }
               NodeList slaveNodeList = e.getElementsByTagName("slave");
               if (slaveNodeList.getLength() > 0) {
                  Element slaveNode = (Element) slaveNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createReplicaHaPolicy(slaveNode));
               }
               NodeList colocatedNodeList = e.getElementsByTagName("colocated");
               if (colocatedNodeList.getLength() > 0) {
                  Element colocatedNode = (Element) colocatedNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createColocatedHaPolicy(colocatedNode, true));
               }
            } else if (haNode.getTagName().equals("shared-store")) {
               NodeList masterNodeList = e.getElementsByTagName("master");
               if (masterNodeList.getLength() > 0) {
                  Element masterNode = (Element) masterNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createSharedStoreMasterHaPolicy(masterNode));
               }
               NodeList slaveNodeList = e.getElementsByTagName("slave");
               if (slaveNodeList.getLength() > 0) {
                  Element slaveNode = (Element) slaveNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createSharedStoreSlaveHaPolicy(slaveNode));
               }
               NodeList colocatedNodeList = e.getElementsByTagName("colocated");
               if (colocatedNodeList.getLength() > 0) {
                  Element colocatedNode = (Element) colocatedNodeList.item(0);
                  mainConfig.setHAPolicyConfiguration(createColocatedHaPolicy(colocatedNode, false));
               }
            } else if (haNode.getTagName().equals("live-only")) {
               NodeList noneNodeList = e.getElementsByTagName("live-only");
               Element noneNode = (Element) noneNodeList.item(0);
               mainConfig.setHAPolicyConfiguration(createLiveOnlyHaPolicy(noneNode));
            }
         }
      }
   }

   /** Builds a live-only policy; only scale-down is configurable. */
   private LiveOnlyPolicyConfiguration createLiveOnlyHaPolicy(Element policyNode) {
      LiveOnlyPolicyConfiguration configuration = new LiveOnlyPolicyConfiguration();

      configuration.setScaleDownConfiguration(parseScaleDownConfig(policyNode));

      return configuration;
   }

   /** Builds a replicated (live) policy from a replication master element. */
   private ReplicatedPolicyConfiguration createReplicatedHaPolicy(Element policyNode) {
      ReplicatedPolicyConfiguration configuration = new ReplicatedPolicyConfiguration();

      configuration.setCheckForLiveServer(getBoolean(policyNode, "check-for-live-server", configuration.isCheckForLiveServer()));

      configuration.setGroupName(getString(policyNode, "group-name", configuration.getGroupName(), Validators.NO_CHECK));

      configuration.setClusterName(getString(policyNode, "cluster-name", configuration.getClusterName(), Validators.NO_CHECK));

      configuration.setInitialReplicationSyncTimeout(getLong(policyNode, "initial-replication-sync-timeout", configuration.getInitialReplicationSyncTimeout(), Validators.GT_ZERO));

      return configuration;
   }

   /** Builds a replica (backup) policy from a replication slave element. */
   private ReplicaPolicyConfiguration createReplicaHaPolicy(Element policyNode) {
      ReplicaPolicyConfiguration configuration = new ReplicaPolicyConfiguration();

      configuration.setRestartBackup(getBoolean(policyNode, "restart-backup", configuration.isRestartBackup()));
configuration.setGroupName(getString(policyNode, "group-name", configuration.getGroupName(), Validators.NO_CHECK));

      configuration.setAllowFailBack(getBoolean(policyNode, "allow-failback", configuration.isAllowFailBack()));

      configuration.setInitialReplicationSyncTimeout(getLong(policyNode, "initial-replication-sync-timeout", configuration.getInitialReplicationSyncTimeout(), Validators.GT_ZERO));

      configuration.setClusterName(getString(policyNode, "cluster-name", configuration.getClusterName(), Validators.NO_CHECK));

      configuration.setMaxSavedReplicatedJournalsSize(getInteger(policyNode, "max-saved-replicated-journals-size", configuration.getMaxSavedReplicatedJournalsSize(), Validators.MINUS_ONE_OR_GE_ZERO));

      configuration.setScaleDownConfiguration(parseScaleDownConfig(policyNode));

      return configuration;
   }

   /** Builds a shared-store live policy from a shared-store master element. */
   private SharedStoreMasterPolicyConfiguration createSharedStoreMasterHaPolicy(Element policyNode) {
      SharedStoreMasterPolicyConfiguration configuration = new SharedStoreMasterPolicyConfiguration();

      configuration.setFailoverOnServerShutdown(getBoolean(policyNode, "failover-on-shutdown", configuration.isFailoverOnServerShutdown()));

      return configuration;
   }

   /** Builds a shared-store backup policy from a shared-store slave element. */
   private SharedStoreSlavePolicyConfiguration createSharedStoreSlaveHaPolicy(Element policyNode) {
      SharedStoreSlavePolicyConfiguration configuration = new SharedStoreSlavePolicyConfiguration();

      configuration.setAllowFailBack(getBoolean(policyNode, "allow-failback", configuration.isAllowFailBack()));

      configuration.setFailoverOnServerShutdown(getBoolean(policyNode, "failover-on-shutdown", configuration.isFailoverOnServerShutdown()));

      configuration.setRestartBackup(getBoolean(policyNode, "restart-backup", configuration.isRestartBackup()));

      configuration.setScaleDownConfiguration(parseScaleDownConfig(policyNode));

      return configuration;
   }

   /**
    * Builds a colocated policy: a live server that can also request/host
    * backups. {@code replicated} selects replication vs shared-store for the
    * nested master/slave configurations.
    */
   private ColocatedPolicyConfiguration createColocatedHaPolicy(Element policyNode, boolean replicated) {
      ColocatedPolicyConfiguration configuration = new ColocatedPolicyConfiguration();

      boolean requestBackup = getBoolean(policyNode, "request-backup", configuration.isRequestBackup());

      configuration.setRequestBackup(requestBackup);

      int backupRequestRetries = getInteger(policyNode, "backup-request-retries", configuration.getBackupRequestRetries(), Validators.MINUS_ONE_OR_GE_ZERO);

      configuration.setBackupRequestRetries(backupRequestRetries);

      long backupRequestRetryInterval = getLong(policyNode, "backup-request-retry-interval", configuration.getBackupRequestRetryInterval(), Validators.GT_ZERO);

      configuration.setBackupRequestRetryInterval(backupRequestRetryInterval);

      int maxBackups = getInteger(policyNode, "max-backups", configuration.getMaxBackups(), Validators.GE_ZERO);

      configuration.setMaxBackups(maxBackups);

      int backupPortOffset = getInteger(policyNode, "backup-port-offset", configuration.getBackupPortOffset(), Validators.GT_ZERO);

      configuration.setBackupPortOffset(backupPortOffset);

      // connectors listed under "excludes" are never used for backup requests
      NodeList remoteConnectorNode = policyNode.getElementsByTagName("excludes");

      if (remoteConnectorNode != null && remoteConnectorNode.getLength() > 0) {
         NodeList remoteConnectors = remoteConnectorNode.item(0).getChildNodes();
         for (int i = 0; i < remoteConnectors.getLength(); i++) {
            Node child = remoteConnectors.item(i);
            if (child.getNodeName().equals("connector-ref")) {
               String connectorName = getTrimmedTextContent(child);
               configuration.getExcludedConnectors().add(connectorName);
            }
         }
      }

      NodeList masterNodeList = policyNode.getElementsByTagName("master");

      if (masterNodeList.getLength() > 0) {
         Element masterNode = (Element) masterNodeList.item(0);
         configuration.setLiveConfig(replicated ? createReplicatedHaPolicy(masterNode) : createSharedStoreMasterHaPolicy(masterNode));
      }

      NodeList slaveNodeList = policyNode.getElementsByTagName("slave");

      if (slaveNodeList.getLength() > 0) {
         Element slaveNode = (Element) slaveNodeList.item(0);
         configuration.setBackupConfig(replicated ? createReplicaHaPolicy(slaveNode) : createSharedStoreSlaveHaPolicy(slaveNode));
      }

      return configuration;
   }

   /**
    * Parses an optional scale-down child element; returns {@code null} when no
    * scale-down element is present.
    */
   private ScaleDownConfiguration parseScaleDownConfig(Element policyNode) {
      NodeList scaleDownNode = policyNode.getElementsByTagName("scale-down");

      if (scaleDownNode.getLength() > 0) {
         ScaleDownConfiguration scaleDownConfiguration = new ScaleDownConfiguration();

         Element scaleDownElement = (Element) scaleDownNode.item(0);

         scaleDownConfiguration.setEnabled(getBoolean(scaleDownElement, "enabled", scaleDownConfiguration.isEnabled()));

         NodeList discoveryGroupRef = scaleDownElement.getElementsByTagName("discovery-group-ref");

         if (discoveryGroupRef.item(0) != null) {
            scaleDownConfiguration.setDiscoveryGroup(discoveryGroupRef.item(0).getAttributes().getNamedItem("discovery-group-name").getNodeValue());
         }

         String scaleDownGroupName = getString(scaleDownElement, "group-name", scaleDownConfiguration.getGroupName(), Validators.NO_CHECK);

         scaleDownConfiguration.setGroupName(scaleDownGroupName);

         NodeList scaleDownConnectorNode = scaleDownElement.getElementsByTagName("connectors");

         if (scaleDownConnectorNode != null && scaleDownConnectorNode.getLength() > 0) {
            NodeList scaleDownConnectors = scaleDownConnectorNode.item(0).getChildNodes();
            for (int i = 0; i < scaleDownConnectors.getLength(); i++) {
               Node child = scaleDownConnectors.item(i);
               if (child.getNodeName().equals("connector-ref")) {
                  String connectorName = getTrimmedTextContent(child);

                  scaleDownConfiguration.getConnectors().add(connectorName);
               }
            }
         }
         return scaleDownConfiguration;
      }
      return null;
   }

   /** Builds a JDBC store configuration from a database-store element. */
   private DatabaseStorageConfiguration createDatabaseStoreConfig(Element storeNode) {
      DatabaseStorageConfiguration conf = new DatabaseStorageConfiguration();
      conf.setBindingsTableName(getString(storeNode, "bindings-table-name", conf.getBindingsTableName(), Validators.NO_CHECK));
      conf.setMessageTableName(getString(storeNode, "message-table-name", conf.getMessageTableName(), Validators.NO_CHECK));
conf.setLargeMessageTableName(getString(storeNode, "large-message-table-name", conf.getJdbcConnectionUrl(), Validators.NO_CHECK)); conf.setJdbcConnectionUrl(getString(storeNode, "jdbc-connection-url", conf.getJdbcConnectionUrl(), Validators.NO_CHECK)); conf.setJdbcDriverClassName(getString(storeNode, "jdbc-driver-class-name", conf.getJdbcDriverClassName(), Validators.NO_CHECK)); return conf; } private FileStorageConfiguration createFileStoreConfig(Element storeNode) { return new FileStorageConfiguration(); } private void parseBroadcastGroupConfiguration(final Element e, final Configuration mainConfig) { String name = e.getAttribute("name"); List<String> connectorNames = new ArrayList<>(); NodeList children = e.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Node child = children.item(j); if (child.getNodeName().equals("connector-ref")) { String connectorName = getString(e, "connector-ref", null, Validators.NOT_NULL_OR_EMPTY); connectorNames.add(connectorName); } } long broadcastPeriod = getLong(e, "broadcast-period", ActiveMQDefaultConfiguration.getDefaultBroadcastPeriod(), Validators.GT_ZERO); String localAddress = getString(e, "local-bind-address", null, Validators.NO_CHECK); int localBindPort = getInteger(e, "local-bind-port", -1, Validators.MINUS_ONE_OR_GT_ZERO); String groupAddress = getString(e, "group-address", null, Validators.NO_CHECK); int groupPort = getInteger(e, "group-port", -1, Validators.MINUS_ONE_OR_GT_ZERO); String jgroupsFile = getString(e, "jgroups-file", null, Validators.NO_CHECK); String jgroupsChannel = getString(e, "jgroups-channel", null, Validators.NO_CHECK); // TODO: validate if either jgroups or UDP is being filled BroadcastEndpointFactory endpointFactory; if (jgroupsFile != null) { endpointFactory = new JGroupsFileBroadcastEndpointFactory().setFile(jgroupsFile).setChannelName(jgroupsChannel); } else { endpointFactory = new 
UDPBroadcastEndpointFactory().setGroupAddress(groupAddress).setGroupPort(groupPort).setLocalBindAddress(localAddress).setLocalBindPort(localBindPort);
      }

      BroadcastGroupConfiguration config = new BroadcastGroupConfiguration().setName(name).setBroadcastPeriod(broadcastPeriod).setConnectorInfos(connectorNames).setEndpointFactory(endpointFactory);

      mainConfig.getBroadcastGroupConfigurations().add(config);
   }

   /**
    * Parses one discovery-group element, using either a JGroups or a UDP
    * broadcast endpoint. Duplicate group names are logged and the later
    * definition ignored.
    */
   private void parseDiscoveryGroupConfiguration(final Element e, final Configuration mainConfig) {
      String name = e.getAttribute("name");

      long discoveryInitialWaitTimeout = getLong(e, "initial-wait-timeout", ActiveMQClient.DEFAULT_DISCOVERY_INITIAL_WAIT_TIMEOUT, Validators.GT_ZERO);

      long refreshTimeout = getLong(e, "refresh-timeout", ActiveMQDefaultConfiguration.getDefaultBroadcastRefreshTimeout(), Validators.GT_ZERO);

      String localBindAddress = getString(e, "local-bind-address", null, Validators.NO_CHECK);

      int localBindPort = getInteger(e, "local-bind-port", -1, Validators.MINUS_ONE_OR_GT_ZERO);

      String groupAddress = getString(e, "group-address", null, Validators.NO_CHECK);

      int groupPort = getInteger(e, "group-port", -1, Validators.MINUS_ONE_OR_GT_ZERO);

      String jgroupsFile = getString(e, "jgroups-file", null, Validators.NO_CHECK);

      String jgroupsChannel = getString(e, "jgroups-channel", null, Validators.NO_CHECK);

      // TODO: validate if either jgroups or UDP is being filled
      BroadcastEndpointFactory endpointFactory;
      if (jgroupsFile != null) {
         endpointFactory = new JGroupsFileBroadcastEndpointFactory().setFile(jgroupsFile).setChannelName(jgroupsChannel);
      } else {
         endpointFactory = new UDPBroadcastEndpointFactory().setGroupAddress(groupAddress).setGroupPort(groupPort).setLocalBindAddress(localBindAddress).setLocalBindPort(localBindPort);
      }

      DiscoveryGroupConfiguration config = new DiscoveryGroupConfiguration().setName(name).setRefreshTimeout(refreshTimeout).setDiscoveryInitialWaitTimeout(discoveryInitialWaitTimeout).setBroadcastEndpointFactory(endpointFactory);

      if
(mainConfig.getDiscoveryGroupConfigurations().containsKey(name)) { ActiveMQServerLogger.LOGGER.discoveryGroupAlreadyDeployed(name); return; } else { mainConfig.getDiscoveryGroupConfigurations().put(name, config); } } private void parseClusterConnectionConfigurationURI(final Element e, final Configuration mainConfig) throws Exception { String name = e.getAttribute("name"); String uri = e.getAttribute("address"); ClusterConnectionConfiguration config = mainConfig.addClusterConfiguration(name, uri); System.out.println("Adding cluster connection :: " + config); } private void parseClusterConnectionConfiguration(final Element e, final Configuration mainConfig) throws Exception { String name = e.getAttribute("name"); String address = getString(e, "address", null, Validators.NOT_NULL_OR_EMPTY); String connectorName = getString(e, "connector-ref", null, Validators.NOT_NULL_OR_EMPTY); boolean duplicateDetection = getBoolean(e, "use-duplicate-detection", ActiveMQDefaultConfiguration.isDefaultClusterDuplicateDetection()); MessageLoadBalancingType messageLoadBalancingType; if (parameterExists(e, "forward-when-no-consumers")) { boolean forwardWhenNoConsumers = getBoolean(e, "forward-when-no-consumers", ActiveMQDefaultConfiguration.isDefaultClusterForwardWhenNoConsumers()); if (forwardWhenNoConsumers) { messageLoadBalancingType = MessageLoadBalancingType.STRICT; } else { messageLoadBalancingType = MessageLoadBalancingType.ON_DEMAND; } } else { messageLoadBalancingType = Enum.valueOf(MessageLoadBalancingType.class, getString(e, "message-load-balancing", ActiveMQDefaultConfiguration.getDefaultClusterMessageLoadBalancingType(), Validators.MESSAGE_LOAD_BALANCING_TYPE)); } int maxHops = getInteger(e, "max-hops", ActiveMQDefaultConfiguration.getDefaultClusterMaxHops(), Validators.GE_ZERO); long clientFailureCheckPeriod = getLong(e, "check-period", ActiveMQDefaultConfiguration.getDefaultClusterFailureCheckPeriod(), Validators.GT_ZERO); long connectionTTL = getLong(e, "connection-ttl", 
ActiveMQDefaultConfiguration.getDefaultClusterConnectionTtl(), Validators.GT_ZERO); long retryInterval = getLong(e, "retry-interval", ActiveMQDefaultConfiguration.getDefaultClusterRetryInterval(), Validators.GT_ZERO); long callTimeout = getLong(e, "call-timeout", ActiveMQClient.DEFAULT_CALL_TIMEOUT, Validators.GT_ZERO); long callFailoverTimeout = getLong(e, "call-failover-timeout", ActiveMQClient.DEFAULT_CALL_FAILOVER_TIMEOUT, Validators.MINUS_ONE_OR_GT_ZERO); double retryIntervalMultiplier = getDouble(e, "retry-interval-multiplier", ActiveMQDefaultConfiguration.getDefaultClusterRetryIntervalMultiplier(), Validators.GT_ZERO); int minLargeMessageSize = getInteger(e, "min-large-message-size", ActiveMQClient.DEFAULT_MIN_LARGE_MESSAGE_SIZE, Validators.GT_ZERO); long maxRetryInterval = getLong(e, "max-retry-interval", ActiveMQDefaultConfiguration.getDefaultClusterMaxRetryInterval(), Validators.GT_ZERO); int initialConnectAttempts = getInteger(e, "initial-connect-attempts", ActiveMQDefaultConfiguration.getDefaultClusterInitialConnectAttempts(), Validators.MINUS_ONE_OR_GE_ZERO); int reconnectAttempts = getInteger(e, "reconnect-attempts", ActiveMQDefaultConfiguration.getDefaultClusterReconnectAttempts(), Validators.MINUS_ONE_OR_GE_ZERO); int confirmationWindowSize = getInteger(e, "confirmation-window-size", ActiveMQDefaultConfiguration.getDefaultClusterConfirmationWindowSize(), Validators.GT_ZERO); int producerWindowSize = getInteger(e, "producer-window-size", ActiveMQDefaultConfiguration.getDefaultBridgeProducerWindowSize(), Validators.MINUS_ONE_OR_GT_ZERO); long clusterNotificationInterval = getLong(e, "notification-interval", ActiveMQDefaultConfiguration.getDefaultClusterNotificationInterval(), Validators.GT_ZERO); int clusterNotificationAttempts = getInteger(e, "notification-attempts", ActiveMQDefaultConfiguration.getDefaultClusterNotificationAttempts(), Validators.GT_ZERO); String scaleDownConnector = e.getAttribute("scale-down-connector"); String discoveryGroupName = 
null; List<String> staticConnectorNames = new ArrayList<>(); boolean allowDirectConnectionsOnly = false; NodeList children = e.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Node child = children.item(j); if (child.getNodeName().equals("discovery-group-ref")) { discoveryGroupName = child.getAttributes().getNamedItem("discovery-group-name").getNodeValue(); } else if (child.getNodeName().equals("static-connectors")) { Node attr = child.getAttributes().getNamedItem("allow-direct-connections-only"); if (attr != null) { allowDirectConnectionsOnly = "true".equalsIgnoreCase(attr.getNodeValue()) || allowDirectConnectionsOnly; } getStaticConnectors(staticConnectorNames, child); } } ClusterConnectionConfiguration config = new ClusterConnectionConfiguration() .setName(name) .setAddress(address) .setConnectorName(connectorName) .setMinLargeMessageSize(minLargeMessageSize) .setClientFailureCheckPeriod(clientFailureCheckPeriod) .setConnectionTTL(connectionTTL) .setRetryInterval(retryInterval) .setRetryIntervalMultiplier(retryIntervalMultiplier) .setMaxRetryInterval(maxRetryInterval) .setInitialConnectAttempts(initialConnectAttempts) .setReconnectAttempts(reconnectAttempts) .setCallTimeout(callTimeout) .setCallFailoverTimeout(callFailoverTimeout) .setDuplicateDetection(duplicateDetection) .setMessageLoadBalancingType(messageLoadBalancingType) .setMaxHops(maxHops) .setConfirmationWindowSize(confirmationWindowSize) .setProducerWindowSize(producerWindowSize) .setAllowDirectConnectionsOnly(allowDirectConnectionsOnly) .setClusterNotificationInterval(clusterNotificationInterval) .setClusterNotificationAttempts(clusterNotificationAttempts); if (discoveryGroupName == null) { config.setStaticConnectors(staticConnectorNames); } else { config.setDiscoveryGroupName(discoveryGroupName); } mainConfig.getClusterConfigurations().add(config); } private void parseGroupingHandlerConfiguration(final Element node, final Configuration mainConfiguration) { String name = 
node.getAttribute("name"); String type = getString(node, "type", null, Validators.NOT_NULL_OR_EMPTY); String address = getString(node, "address", null, Validators.NOT_NULL_OR_EMPTY); Integer timeout = getInteger(node, "timeout", ActiveMQDefaultConfiguration.getDefaultGroupingHandlerTimeout(), Validators.GT_ZERO); Long groupTimeout = getLong(node, "group-timeout", ActiveMQDefaultConfiguration.getDefaultGroupingHandlerGroupTimeout(), Validators.MINUS_ONE_OR_GT_ZERO); Long reaperPeriod = getLong(node, "reaper-period", ActiveMQDefaultConfiguration.getDefaultGroupingHandlerReaperPeriod(), Validators.GT_ZERO); mainConfiguration.setGroupingHandlerConfiguration(new GroupingHandlerConfiguration().setName(new SimpleString(name)).setType(type.equals(GroupingHandlerConfiguration.TYPE.LOCAL.getType()) ? GroupingHandlerConfiguration.TYPE.LOCAL : GroupingHandlerConfiguration.TYPE.REMOTE).setAddress(new SimpleString(address)).setTimeout(timeout).setGroupTimeout(groupTimeout).setReaperPeriod(reaperPeriod)); } private void parseBridgeConfiguration(final Element brNode, final Configuration mainConfig) throws Exception { String name = brNode.getAttribute("name"); String queueName = getString(brNode, "queue-name", null, Validators.NOT_NULL_OR_EMPTY); String forwardingAddress = getString(brNode, "forwarding-address", null, Validators.NO_CHECK); String transformerClassName = getString(brNode, "transformer-class-name", null, Validators.NO_CHECK); // Default bridge conf int confirmationWindowSize = getInteger(brNode, "confirmation-window-size", ActiveMQDefaultConfiguration.getDefaultBridgeConfirmationWindowSize(), Validators.GT_ZERO); int producerWindowSize = getInteger(brNode, "producer-window-size", ActiveMQDefaultConfiguration.getDefaultBridgeConfirmationWindowSize(), Validators.GT_ZERO); long retryInterval = getLong(brNode, "retry-interval", ActiveMQClient.DEFAULT_RETRY_INTERVAL, Validators.GT_ZERO); long clientFailureCheckPeriod = getLong(brNode, "check-period", 
ActiveMQClient.DEFAULT_CLIENT_FAILURE_CHECK_PERIOD, Validators.GT_ZERO); long connectionTTL = getLong(brNode, "connection-ttl", ActiveMQClient.DEFAULT_CONNECTION_TTL, Validators.GT_ZERO); int minLargeMessageSize = getInteger(brNode, "min-large-message-size", ActiveMQClient.DEFAULT_MIN_LARGE_MESSAGE_SIZE, Validators.GT_ZERO); long maxRetryInterval = getLong(brNode, "max-retry-interval", ActiveMQClient.DEFAULT_MAX_RETRY_INTERVAL, Validators.GT_ZERO); double retryIntervalMultiplier = getDouble(brNode, "retry-interval-multiplier", ActiveMQClient.DEFAULT_RETRY_INTERVAL_MULTIPLIER, Validators.GT_ZERO); int initialConnectAttempts = getInteger(brNode, "initial-connect-attempts", ActiveMQDefaultConfiguration.getDefaultBridgeInitialConnectAttempts(), Validators.MINUS_ONE_OR_GE_ZERO); int reconnectAttempts = getInteger(brNode, "reconnect-attempts", ActiveMQDefaultConfiguration.getDefaultBridgeReconnectAttempts(), Validators.MINUS_ONE_OR_GE_ZERO); int reconnectAttemptsSameNode = getInteger(brNode, "reconnect-attempts-same-node", ActiveMQDefaultConfiguration.getDefaultBridgeConnectSameNode(), Validators.MINUS_ONE_OR_GE_ZERO); boolean useDuplicateDetection = getBoolean(brNode, "use-duplicate-detection", ActiveMQDefaultConfiguration.isDefaultBridgeDuplicateDetection()); String user = getString(brNode, "user", ActiveMQDefaultConfiguration.getDefaultClusterUser(), Validators.NO_CHECK); NodeList clusterPassNodes = brNode.getElementsByTagName("password"); String password = null; boolean maskPassword = mainConfig.isMaskPassword(); SensitiveDataCodec<String> codec = null; if (clusterPassNodes.getLength() > 0) { Node passNode = clusterPassNodes.item(0); password = passNode.getTextContent(); } if (password != null) { if (maskPassword) { codec = PasswordMaskingUtil.getCodec(mainConfig.getPasswordCodec()); password = codec.decode(password); } } else { password = ActiveMQDefaultConfiguration.getDefaultClusterPassword(); } boolean ha = getBoolean(brNode, "ha", false); String filterString = 
null; List<String> staticConnectorNames = new ArrayList<>(); String discoveryGroupName = null; NodeList children = brNode.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Node child = children.item(j); if (child.getNodeName().equals("filter")) { filterString = child.getAttributes().getNamedItem("string").getNodeValue(); } else if (child.getNodeName().equals("discovery-group-ref")) { discoveryGroupName = child.getAttributes().getNamedItem("discovery-group-name").getNodeValue(); } else if (child.getNodeName().equals("static-connectors")) { getStaticConnectors(staticConnectorNames, child); } } BridgeConfiguration config = new BridgeConfiguration() .setName(name) .setQueueName(queueName) .setForwardingAddress(forwardingAddress) .setFilterString(filterString) .setTransformerClassName(transformerClassName) .setMinLargeMessageSize(minLargeMessageSize) .setClientFailureCheckPeriod(clientFailureCheckPeriod) .setConnectionTTL(connectionTTL) .setRetryInterval(retryInterval) .setMaxRetryInterval(maxRetryInterval) .setRetryIntervalMultiplier(retryIntervalMultiplier) .setInitialConnectAttempts(initialConnectAttempts) .setReconnectAttempts(reconnectAttempts) .setReconnectAttemptsOnSameNode(reconnectAttemptsSameNode) .setUseDuplicateDetection(useDuplicateDetection) .setConfirmationWindowSize(confirmationWindowSize) .setProducerWindowSize(producerWindowSize) .setHA(ha) .setUser(user) .setPassword(password); if (!staticConnectorNames.isEmpty()) { config.setStaticConnectors(staticConnectorNames); } else { config.setDiscoveryGroupName(discoveryGroupName); } mainConfig.getBridgeConfigurations().add(config); } private void getStaticConnectors(List<String> staticConnectorNames, Node child) { NodeList children2 = ((Element) child).getElementsByTagName("connector-ref"); for (int k = 0; k < children2.getLength(); k++) { Element child2 = (Element) children2.item(k); String connectorName = child2.getChildNodes().item(0).getNodeValue(); staticConnectorNames.add(connectorName); 
} } private void parseDivertConfiguration(final Element e, final Configuration mainConfig) { String name = e.getAttribute("name"); String routingName = getString(e, "routing-name", null, Validators.NO_CHECK); String address = getString(e, "address", null, Validators.NOT_NULL_OR_EMPTY); String forwardingAddress = getString(e, "forwarding-address", null, Validators.NOT_NULL_OR_EMPTY); boolean exclusive = getBoolean(e, "exclusive", ActiveMQDefaultConfiguration.isDefaultDivertExclusive()); String transformerClassName = getString(e, "transformer-class-name", null, Validators.NO_CHECK); String filterString = null; NodeList children = e.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Node child = children.item(j); if (child.getNodeName().equals("filter")) { filterString = getAttributeValue(child, "string"); } } DivertConfiguration config = new DivertConfiguration().setName(name).setRoutingName(routingName).setAddress(address).setForwardingAddress(forwardingAddress).setExclusive(exclusive).setFilterString(filterString).setTransformerClassName(transformerClassName); mainConfig.getDivertConfigurations().add(config); } private ConnectorServiceConfiguration parseConnectorService(final Element e) { Node nameNode = e.getAttributes().getNamedItem("name"); String name = nameNode != null ? nameNode.getNodeValue() : null; String clazz = getString(e, "factory-class", null, Validators.NOT_NULL_OR_EMPTY); Map<String, Object> params = new HashMap<>(); NodeList paramsNodes = e.getElementsByTagName("param"); for (int i = 0; i < paramsNodes.getLength(); i++) { Node paramNode = paramsNodes.item(i); NamedNodeMap attributes = paramNode.getAttributes(); Node nkey = attributes.getNamedItem("key"); String key = nkey.getTextContent(); Node nValue = attributes.getNamedItem("value"); params.put(key, nValue.getTextContent()); } return new ConnectorServiceConfiguration().setFactoryClassName(clazz).setParams(params).setName(name); } }
/*
 * Copyright 2014 - 2021 Blazebit.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.blazebit.persistence.impl.function.groupconcat;

import com.blazebit.persistence.impl.function.Order;
import com.blazebit.persistence.spi.FunctionRenderContext;
import com.blazebit.persistence.spi.JpqlFunction;
import com.blazebit.persistence.spi.TemplateRenderer;

import java.util.ArrayList;
import java.util.List;

/**
 * Base class for dialect-specific GROUP_CONCAT implementations. Parses the
 * function's argument list, which follows the grammar:
 * <pre>
 *   ['DISTINCT'] expression ['SEPARATOR' 'sep'] ['ORDER BY' expr dir ...]
 * </pre>
 * and hands the parsed {@link GroupConcat} model to the subclass renderer.
 *
 * @author Christian Beikov
 * @since 1.0.0
 */
public abstract class AbstractGroupConcatFunction implements JpqlFunction {

    public static final String FUNCTION_NAME = "group_concat";

    // Dialect-specific SQL template the subclass renders into.
    protected final TemplateRenderer renderer;

    public AbstractGroupConcatFunction(String template) {
        this.renderer = new TemplateRenderer(template);
    }

    @Override
    public boolean hasArguments() {
        return true;
    }

    @Override
    public boolean hasParenthesesIfNoArguments() {
        return true;
    }

    @Override
    public Class<?> getReturnType(Class<?> firstArgumentType) {
        // The concatenated result is always a string, regardless of input type.
        return String.class;
    }

    @Override
    public void render(FunctionRenderContext context) {
        render(context, getGroupConcat(context));
    }

    /**
     * Renders the parsed group-concat model into the context.
     * Implemented per SQL dialect.
     */
    public abstract void render(FunctionRenderContext context, GroupConcat groupConcat);

    /**
     * Parses the raw function arguments into a {@link GroupConcat} model.
     * Keyword arguments arrive as quoted string literals ('DISTINCT',
     * 'SEPARATOR', 'ORDER BY') which act as mode switches for the arguments
     * that follow them.
     *
     * @param context the render context holding the raw arguments
     * @return the parsed model
     * @throws RuntimeException if no argument or no expression is given
     * @throws IllegalArgumentException on malformed separator/keyword usage
     */
    protected GroupConcat getGroupConcat(FunctionRenderContext context) {
        if (context.getArgumentsSize() == 0) {
            throw new RuntimeException("The group concat function needs at least one argument! args=" + context);
        }

        boolean distinct = false;
        String expression;
        int startIndex = 0;
        int argsSize = context.getArgumentsSize();
        String maybeDistinct = context.getArgument(0);

        // Optional leading 'DISTINCT' keyword shifts the expression index.
        if ("'DISTINCT'".equalsIgnoreCase(maybeDistinct)) {
            distinct = true;
            startIndex++;
        }

        if (startIndex >= argsSize) {
            throw new RuntimeException("The group concat function needs at least one expression to concatenate! args=" + context);
        }

        expression = context.getArgument(startIndex);
        String separator = null;
        String orderExpression = null;
        List<Order> orders = new ArrayList<Order>();
        Mode mode = null;

        // State machine over the remaining arguments: 'SEPARATOR'/'ORDER BY'
        // switch modes; all other arguments are consumed by the current mode.
        for (int i = startIndex + 1; i < argsSize; i++) {
            String argument = context.getArgument(i);
            if ("'SEPARATOR'".equalsIgnoreCase(argument)) {
                mode = Mode.SEPARATOR;
            } else if ("'ORDER BY'".equalsIgnoreCase(argument)) {
                mode = Mode.ORDER_BY;
            } else {
                if (mode == Mode.ORDER_BY) {
                    // An argument is either a direction ('ASC', 'DESC NULLS LAST',
                    // ...) completing the pending expression, or a new order
                    // expression. A pending expression with no direction gets
                    // default ordering.
                    Order order = getOrder(argument, orderExpression);
                    if (order != null) {
                        orders.add(order);
                        orderExpression = null;
                    } else {
                        if (orderExpression != null) {
                            orders.add(new Order(orderExpression, null, null));
                        }
                        orderExpression = argument;
                    }
                } else if (mode == Mode.SEPARATOR) {
                    if (separator != null) {
                        throw new IllegalArgumentException("Illegal multiple separators for group concat '" + argument + "'. Expected 'ORDER BY'!");
                    }
                    // Strip the surrounding single quotes from the literal.
                    separator = argument.substring(argument.indexOf('\'') + 1, argument.lastIndexOf('\''));
                } else {
                    throw new IllegalArgumentException("Illegal input for group concat '" + argument + "'. Expected 'SEPARATOR' or 'ORDER BY'!");
                }
            }
        }

        // Flush a trailing order expression that never received a direction.
        if (orderExpression != null) {
            orders.add(new Order(orderExpression, null, null));
        }

        if (separator == null) {
            separator = ",";
        }

        return new GroupConcat(distinct, expression, orders, separator);
    }

    /**
     * Argument-parsing modes for {@link #getGroupConcat}.
     *
     * @author Christian Beikov
     * @since 1.2.0
     */
    private enum Mode {
        SEPARATOR, ORDER_BY
    }

    /**
     * Appends an ORDER BY element for dialects with native NULLS FIRST/LAST
     * support.
     */
    protected void render(StringBuilder sb, Order order) {
        sb.append(order.getExpression());
        if (order.isAscending()) {
            sb.append(" ASC");
        } else {
            sb.append(" DESC");
        }
        if (order.isNullsFirst()) {
            sb.append(" NULLS FIRST");
        } else {
            sb.append(" NULLS LAST");
        }
    }

    /**
     * Emulates NULLS FIRST/LAST via a leading CASE key for dialects without
     * native support (null rows sort by the synthetic 0/1 key first).
     */
    protected void appendEmulatedOrderByElementWithNulls(StringBuilder sb, Order element) {
        sb.append("case when ");
        sb.append(element.getExpression());
        sb.append(" is null then ");
        sb.append(element.isNullsFirst() ? 0 : 1);
        sb.append(" else ");
        sb.append(element.isNullsFirst() ? 1 : 0);
        sb.append(" end, ");
        sb.append(element.getExpression());
        sb.append(element.isAscending() ? " asc" : " desc");
    }

    /**
     * Interprets {@code s} as a direction literal for the pending
     * {@code expression}; returns {@code null} when {@code s} is not a known
     * direction (or no expression is pending), signalling the caller that
     * {@code s} starts a new order expression instead.
     */
    private static Order getOrder(String s, String expression) {
        if (expression == null) {
            return null;
        }

        String type = s.trim().toUpperCase();
        if ("'ASC'".equals(type)) {
            return new Order(expression, true, null);
        } else if ("'DESC'".equals(type)) {
            return new Order(expression, false, null);
        } else if ("'ASC NULLS FIRST'".equals(type)) {
            return new Order(expression, true, true);
        } else if ("'ASC NULLS LAST'".equals(type)) {
            return new Order(expression, true, false);
        } else if ("'DESC NULLS FIRST'".equals(type)) {
            return new Order(expression, false, true);
        } else if ("'DESC NULLS LAST'".equals(type)) {
            return new Order(expression, false, false);
        }

        return null;
    }

    /**
     * Immutable parse result of a group-concat invocation.
     *
     * @author Christian Beikov
     * @since 1.2.0
     */
    public static final class GroupConcat {

        private final boolean distinct;
        private final String expression;
        private final List<Order> orderBys;
        private final String separator;

        public GroupConcat(boolean distinct, String expression, List<Order> orderBys, String separator) {
            this.distinct = distinct;
            this.expression = expression;
            this.orderBys = orderBys;
            this.separator = separator;
        }

        public boolean isDistinct() {
            return distinct;
        }

        public String getExpression() {
            return expression;
        }

        public List<Order> getOrderBys() {
            return orderBys;
        }

        public String getSeparator() {
            return separator;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.rpc.filter; import org.apache.dubbo.common.extension.Activate; import org.apache.dubbo.common.utils.NetUtils; import org.apache.dubbo.rpc.AsyncRpcResult; import org.apache.dubbo.rpc.Filter; import org.apache.dubbo.rpc.Invocation; import org.apache.dubbo.rpc.Invoker; import org.apache.dubbo.rpc.Result; import org.apache.dubbo.rpc.RpcContext; import org.apache.dubbo.rpc.RpcException; import org.apache.dubbo.rpc.RpcInvocation; import org.apache.dubbo.rpc.TimeoutCountDown; import static org.apache.dubbo.common.constants.CommonConstants.APPLICATION_KEY; import static org.apache.dubbo.common.constants.CommonConstants.CONSUMER; import static org.apache.dubbo.common.constants.CommonConstants.REMOTE_APPLICATION_KEY; import static org.apache.dubbo.common.constants.CommonConstants.TIME_COUNTDOWN_KEY; /** * ConsumerContextFilter set current RpcContext with invoker,invocation, local host, remote host and port * for consumer invoker.It does it to make the requires info available to execution thread's RpcContext. 
* * @see org.apache.dubbo.rpc.Filter * @see RpcContext * * 该过滤器做的是在当前的RpcContext中记录本地调用的一次状态信息。 */ @Activate(group = CONSUMER, order = -10000) public class ConsumerContextFilter implements Filter { @Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { RpcContext context = RpcContext.getContext(); context.setInvoker(invoker) .setInvocation(invocation) .setLocalAddress(NetUtils.getLocalHost(), 0) .setRemoteAddress(invoker.getUrl().getHost(), invoker.getUrl().getPort()) .setRemoteApplicationName(invoker.getUrl().getParameter(REMOTE_APPLICATION_KEY)) .setAttachment(REMOTE_APPLICATION_KEY, invoker.getUrl().getParameter(APPLICATION_KEY)); if (invocation instanceof RpcInvocation) { ((RpcInvocation) invocation).setInvoker(invoker); } // pass default timeout set by end user (ReferenceConfig) Object countDown = context.get(TIME_COUNTDOWN_KEY); if (countDown != null) { TimeoutCountDown timeoutCountDown = (TimeoutCountDown) countDown; if (timeoutCountDown.isExpired()) { return AsyncRpcResult.newDefaultAsyncResult(new RpcException(RpcException.TIMEOUT_TERMINATE, "No time left for making the following call: " + invocation.getServiceName() + "." + invocation.getMethodName() + ", terminate directly."), invocation); } } return invoker.invoke(invocation); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.bookkeeper.stats.codahale;

import com.codahale.metrics.CsvReporter;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Slf4jReporter;
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteReporter;
import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
import com.google.common.base.Strings;
import com.google.common.net.HostAndPort;
import java.io.File;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.bookkeeper.stats.StatsProvider;
import org.apache.bookkeeper.stats.ThreadRegistry;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link StatsProvider} implemented based on <i>Codahale</i> metrics library.
 *
 * Supports four optional output sinks, each enabled by a config key:
 * Graphite ("codahaleStatsGraphiteEndpoint"), CSV files
 * ("codahaleStatsCSVEndpoint"), SLF4J ("codahaleStatsSlf4jEndpoint") and
 * JMX ("codahaleStatsJmxEndpoint").
 */
// Suppresses the deprecation of com.codahale.metrics.JmxReporter.
@SuppressWarnings("deprecation")
public class CodahaleMetricsProvider implements StatsProvider {

    static final Logger LOG = LoggerFactory.getLogger(CodahaleMetricsProvider.class);

    // Lazily created registry shared by all StatsLoggers from this provider.
    MetricRegistry metrics = null;
    // Periodic reporters started in start(); flushed and stopped in stop().
    List<ScheduledReporter> reporters = new ArrayList<ScheduledReporter>();
    // JMX reporter is not a ScheduledReporter, so it is tracked separately.
    JmxReporter jmx = null;

    /**
     * Creates the registry on first use and registers the JVM memory and GC
     * metric sets.
     */
    synchronized void initIfNecessary() {
        if (metrics == null) {
            metrics = new MetricRegistry();
            metrics.registerAll(new MemoryUsageGaugeSet());
            metrics.registerAll(new GarbageCollectorMetricSet());
        }
    }

    public synchronized MetricRegistry getMetrics() {
        return metrics;
    }

    /**
     * Configures and starts every reporter enabled in {@code conf}, reporting
     * every "codahaleStatsOutputFrequencySeconds" seconds (default 60).
     *
     * @param conf provider configuration (endpoints, prefix, frequency)
     */
    @Override
    public void start(Configuration conf) {
        initIfNecessary();

        int metricsOutputFrequency = conf.getInt("codahaleStatsOutputFrequencySeconds", 60);
        String prefix = conf.getString("codahaleStatsPrefix", "");
        String graphiteHost = conf.getString("codahaleStatsGraphiteEndpoint");
        String csvDir = conf.getString("codahaleStatsCSVEndpoint");
        String slf4jCat = conf.getString("codahaleStatsSlf4jEndpoint");
        String jmxDomain = conf.getString("codahaleStatsJmxEndpoint");

        if (!Strings.isNullOrEmpty(graphiteHost)) {
            LOG.info("Configuring stats with graphite");
            // Endpoint is given as "host:port".
            HostAndPort addr = HostAndPort.fromString(graphiteHost);
            final Graphite graphite = new Graphite(
                    new InetSocketAddress(addr.getHost(), addr.getPort()));
            reporters.add(GraphiteReporter.forRegistry(getMetrics())
                          .prefixedWith(prefix)
                          .convertRatesTo(TimeUnit.SECONDS)
                          .convertDurationsTo(TimeUnit.MILLISECONDS)
                          .filter(MetricFilter.ALL)
                          .build(graphite));
        }
        if (!Strings.isNullOrEmpty(csvDir)) {
            // NOTE: 1/ metrics output files are exclusive to a given process
            // 2/ the output directory must exist
            // 3/ if output files already exist they are not overwritten and there is no metrics output
            File outdir;
            if (!Strings.isNullOrEmpty(prefix)) {
                outdir = new File(csvDir, prefix);
            } else {
                outdir = new File(csvDir);
            }
            LOG.info("Configuring stats with csv output to directory [{}]", outdir.getAbsolutePath());
            reporters.add(CsvReporter.forRegistry(getMetrics())
                          .convertRatesTo(TimeUnit.SECONDS)
                          .convertDurationsTo(TimeUnit.MILLISECONDS)
                          .build(outdir));
        }
        if (!Strings.isNullOrEmpty(slf4jCat)) {
            LOG.info("Configuring stats with slf4j");
            reporters.add(Slf4jReporter.forRegistry(getMetrics())
                          .outputTo(LoggerFactory.getLogger(slf4jCat))
                          .convertRatesTo(TimeUnit.SECONDS)
                          .convertDurationsTo(TimeUnit.MILLISECONDS)
                          .build());
        }
        if (!Strings.isNullOrEmpty(jmxDomain)) {
            LOG.info("Configuring stats with jmx");
            jmx = JmxReporter.forRegistry(getMetrics())
                  .inDomain(jmxDomain)
                  .convertRatesTo(TimeUnit.SECONDS)
                  .convertDurationsTo(TimeUnit.MILLISECONDS)
                  .build();
            // JMX reporting starts immediately; scheduled reporters below.
            jmx.start();
        }

        for (ScheduledReporter r : reporters) {
            r.start(metricsOutputFrequency, TimeUnit.SECONDS);
        }
    }

    /**
     * Flushes one final report from each scheduled reporter, then stops all
     * reporters and clears the thread registry.
     */
    @Override
    public void stop() {
        for (ScheduledReporter r : reporters) {
            r.report();
            r.stop();
        }
        if (jmx != null) {
            jmx.stop();
        }
        ThreadRegistry.clear();
    }

    @Override
    public StatsLogger getStatsLogger(String name) {
        initIfNecessary();
        return new CodahaleStatsLogger(getMetrics(), name);
    }

    /**
     * Joins stat name components with the registry's dotted naming scheme.
     * Returns "" for an empty component list.
     */
    @Override
    public String getStatsName(String... statsComponents) {
        if (statsComponents.length == 0) {
            return "";
        }
        String baseName = statsComponents[0];
        String[] names = new String[statsComponents.length - 1];
        System.arraycopy(statsComponents, 1, names, 0, names.length);
        return MetricRegistry.name(baseName, names);
    }
}
package com.linkedin.datahub.graphql.types.mlmodel.mappers;

import com.linkedin.common.DataPlatformInstance;
import com.linkedin.common.Deprecation;
import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTerms;
import com.linkedin.common.InstitutionalMemory;
import com.linkedin.common.Ownership;
import com.linkedin.common.Status;
import com.linkedin.data.DataMap;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.datahub.graphql.generated.DataPlatform;
import com.linkedin.datahub.graphql.generated.Domain;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.MLFeatureTable;
import com.linkedin.datahub.graphql.generated.MLFeatureTableEditableProperties;
import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper;
import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper;
import com.linkedin.domain.Domains;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.metadata.key.MLFeatureTableKey;
import com.linkedin.ml.metadata.EditableMLFeatureTableProperties;
import com.linkedin.ml.metadata.MLFeatureTableProperties;
import javax.annotation.Nonnull;

import static com.linkedin.metadata.Constants.*;

/**
 * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
 * Each known aspect of the entity response is translated by a dedicated
 * handler registered on a {@link MappingHelper}; aspects absent from the
 * response are simply skipped.
 */
public class MLFeatureTableMapper implements ModelMapper<EntityResponse, MLFeatureTable> {

    public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper();

    /** Static convenience entry point delegating to the singleton. */
    public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) {
        return INSTANCE.apply(entityResponse);
    }

    /**
     * Builds the GraphQL {@link MLFeatureTable} from the entity response by
     * dispatching each enveloped aspect to its mapper.
     *
     * @param entityResponse the GMS entity response for an ML feature table
     * @return the populated GraphQL model
     */
    @Override
    public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) {
        final MLFeatureTable result = new MLFeatureTable();
        result.setUrn(entityResponse.getUrn().toString());
        result.setType(EntityType.MLFEATURE_TABLE);
        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
        MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result);
        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) ->
            mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap))));
        mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey);
        mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureTableProperties);
        mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) ->
            mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap))));
        mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) ->
            mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap))));
        mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) ->
            mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
        mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, this::mapGlobalTags);
        mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) ->
            entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap))));
        mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
        mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
        mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) ->
            dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
        return mappingHelper.getResult();
    }

    /**
     * Copies name and platform out of the key aspect. Only the platform URN is
     * filled in here; the full platform entity is resolved elsewhere.
     */
    private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) {
        MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap);
        mlFeatureTable.setName(mlFeatureTableKey.getName());
        DataPlatform partialPlatform = new DataPlatform();
        partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString());
        mlFeatureTable.setPlatform(partialPlatform);
    }

    /**
     * Maps the properties aspect.
     * NOTE(review): the same mapped value is written to both
     * featureTableProperties and properties — presumably one field is the
     * legacy name kept for schema compatibility; confirm before removing.
     */
    private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) {
        MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap);
        mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties));
        mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties));
        mlFeatureTable.setDescription(featureTableProperties.getDescription());
    }

    /** Translates the global tags aspect into the GraphQL tag model. */
    private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap) {
        GlobalTags globalTags = new GlobalTags(dataMap);
        com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags);
        entity.setTags(graphQlGlobalTags);
    }

    /** Maps the domains aspect onto the single-domain GraphQL field. */
    private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) {
        final Domains domains = new Domains(dataMap);
        // Currently we only take the first domain if it exists.
        if (domains.getDomains().size() > 0) {
            entity.setDomain(Domain.builder()
                .setType(EntityType.DOMAIN)
                .setUrn(domains.getDomains().get(0).toString()).build());
        }
    }

    /** Copies the user-editable description, when present, onto the entity. */
    private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) {
        EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap);
        MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties();
        if (input.hasDescription()) {
            editableProperties.setDescription(input.getDescription());
        }
        entity.setEditableProperties(editableProperties);
    }
}
package com.jnape.palatable.lambda.semigroup.builtin; import com.jnape.palatable.lambda.functions.Fn1; import com.jnape.palatable.lambda.semigroup.Semigroup; import static com.jnape.palatable.lambda.functions.builtin.fn1.Id.id; import static com.jnape.palatable.lambda.semigroup.builtin.MinBy.minBy; /** * A {@link Semigroup} over <code>A</code> that chooses between two values <code>x</code> and <code>y</code> via the * following rules: * <ul> * <li>If <code>x</code> is strictly greater than <code>y</code>, return <code>y</code></li> * <li>Otherwise, return <code>x</code></li> * </ul> * * @param <A> the value type * @see MinBy * @see Max */ public final class Min<A extends Comparable<A>> implements Semigroup<A> { private static final Min<?> INSTANCE = new Min<>(); private Min() { } @Override public A apply(A x, A y) { return minBy(id(), x, y); } @SuppressWarnings("unchecked") public static <A extends Comparable<A>> Min<A> min() { return (Min<A>) INSTANCE; } public static <A extends Comparable<A>> Fn1<A, A> min(A x) { return Min.<A>min().apply(x); } public static <A extends Comparable<A>> A min(A x, A y) { return min(x).apply(y); } }
/////////////////////////////////////////////////////////////////////////////// // Copyright (c) 2002, Eric D. Friedman All Rights Reserved. // Copyright (c) 2009, Robert D. Eden All Rights Reserved. // Copyright (c) 2009, Jeff Randall All Rights Reserved. // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. /////////////////////////////////////////////////////////////////////////////// package gnu.trove.decorator; import gnu.trove.map.TShortObjectMap; import gnu.trove.iterator.TShortObjectIterator; import java.io.*; import java.util.*; ////////////////////////////////////////////////// // THIS IS A GENERATED CLASS. DO NOT HAND EDIT! // ////////////////////////////////////////////////// /** * Wrapper class to make a TShortObjectMap conform to the <tt>java.util.Map</tt> API. * This class simply decorates an underlying TShortObjectMap and translates the Object-based * APIs into their Trove primitive analogs. * <p/> * Note that wrapping and unwrapping primitive values is extremely inefficient. If * possible, users of this class should override the appropriate methods in this class * and use a table of canonical values. * <p/> * Created: Mon Sep 23 22:07:40 PDT 2002 * * @author Eric D. Friedman * @author Robert D. 
Eden
 * @author Jeff Randall
 */
public class TShortObjectMapDecorator<V> extends AbstractMap<Short, V>
    implements Map<Short, V>, Externalizable, Cloneable {

    static final long serialVersionUID = 1L;

    /** the wrapped primitive map */
    protected TShortObjectMap<V> _map;


    /**
     * FOR EXTERNALIZATION ONLY!!
     */
    public TShortObjectMapDecorator() {}


    /**
     * Creates a wrapper that decorates the specified primitive map.
     *
     * @param map the <tt>TShortObjectMap</tt> to wrap.
     */
    public TShortObjectMapDecorator( TShortObjectMap<V> map ) {
        super();
        this._map = map;
    }


    /**
     * Returns a reference to the map wrapped by this decorator.
     *
     * @return the wrapped <tt>TShortObjectMap</tt> instance.
     */
    public TShortObjectMap<V> getMap() {
        return _map;
    }


    /**
     * Inserts a key/value pair into the map.
     * A <tt>null</tt> key is translated to the map's no-entry key sentinel.
     *
     * @param key an <code>Short</code> value
     * @param value an <code>Object</code> value
     * @return the previous value associated with <tt>key</tt>,
     *         or <tt>null</tt> if none was found.
     */
    public V put( Short key, V value ) {
        short k;
        if ( key == null ) {
            k = _map.getNoEntryKey();
        } else {
            k = unwrapKey( key );
        }
        return _map.put( k, value );
    }


    /**
     * Retrieves the value for <tt>key</tt>.
     * Non-<tt>Short</tt> keys can never match and return <tt>null</tt> immediately.
     *
     * @param key an <code>Object</code> value
     * @return the value of <tt>key</tt> or null if no such mapping exists.
     */
    public V get( Object key ) {
        short k;
        if ( key != null ) {
            if ( key instanceof Short ) {
                k = unwrapKey( ( Short ) key );
            } else {
                return null;
            }
        } else {
            k = _map.getNoEntryKey();
        }
        return _map.get( k );
    }


    /**
     * Empties the map.
     */
    public void clear() {
        this._map.clear();
    }


    /**
     * Deletes a key/value pair from the map.
     *
     * @param key an <code>Object</code> value
     * @return the removed value, or <tt>null</tt> if it was not found in the map
     */
    public V remove( Object key ) {
        short k;
        if ( key != null ) {
            if ( key instanceof Short ) {
                k = unwrapKey( ( Short ) key );
            } else {
                return null;
            }
        } else {
            k = _map.getNoEntryKey();
        }
        return _map.remove( k );
    }


    /**
     * Returns a Set view on the entries of the map.
     *
     * @return a <code>Set</code> value
     */
    public Set<Map.Entry<Short,V>> entrySet() {
        return new AbstractSet<Map.Entry<Short,V>>() {
            public int size() {
                return _map.size();
            }

            public boolean isEmpty() {
                return TShortObjectMapDecorator.this.isEmpty();
            }

            public boolean contains( Object o ) {
                if ( o instanceof Map.Entry ) {
                    Object k = ( ( Map.Entry ) o ).getKey();
                    Object v = ( ( Map.Entry ) o ).getValue();
                    // NOTE(review): get( k ) returns null for absent keys, so equals( v )
                    // can NPE when the key is missing; generated code, left as-is.
                    return TShortObjectMapDecorator.this.containsKey( k )
                           && TShortObjectMapDecorator.this.get( k ).equals( v );
                } else {
                    return false;
                }
            }

            public Iterator<Map.Entry<Short,V>> iterator() {
                return new Iterator<Map.Entry<Short,V>>() {
                    private final TShortObjectIterator<V> it = _map.iterator();

                    public Map.Entry<Short,V> next() {
                        it.advance();
                        short k = it.key();
                        // The no-entry sentinel maps back to a null boxed key.
                        final Short key = (k == _map.getNoEntryKey()) ? null : wrapKey( k );
                        final V v = it.value();
                        return new Map.Entry<Short,V>() {
                            private V val = v;

                            public boolean equals( Object o ) {
                                return o instanceof Map.Entry
                                        && ( ( Map.Entry ) o ).getKey().equals( key )
                                        && ( ( Map.Entry ) o ).getValue().equals( val );
                            }

                            public Short getKey() {
                                return key;
                            }

                            public V getValue() {
                                return val;
                            }

                            public int hashCode() {
                                return key.hashCode() + val.hashCode();
                            }

                            public V setValue( V value ) {
                                // Mutates the local snapshot and writes through to the map.
                                val = value;
                                return put( key, value );
                            }
                        };
                    }

                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    public void remove() {
                        it.remove();
                    }
                };
            }

            public boolean add( Map.Entry<Short,V> o ) {
                throw new UnsupportedOperationException();
            }

            public boolean remove( Object o ) {
                boolean modified = false;
                if ( contains( o ) ) {
                    //noinspection unchecked
                    Short key = ( ( Map.Entry<Short,V> ) o ).getKey();
                    _map.remove( unwrapKey( key ) );
                    modified = true;
                }
                return modified;
            }

            public boolean addAll( Collection<? extends Map.Entry<Short,V>> c ) {
                throw new UnsupportedOperationException();
            }

            public void clear() {
                TShortObjectMapDecorator.this.clear();
            }
        };
    }


    /**
     * Checks for the presence of <tt>val</tt> in the values of the map.
     *
     * @param val an <code>Object</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsValue( Object val ) {
        return _map.containsValue( val );
    }


    /**
     * Checks for the presence of <tt>key</tt> in the keys of the map.
     * A <tt>null</tt> key is checked against the no-entry sentinel.
     *
     * @param key an <code>Object</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsKey( Object key ) {
        if ( key == null ) return _map.containsKey( _map.getNoEntryKey() );
        return key instanceof Short && _map.containsKey( ( ( Short ) key ).shortValue() );
    }


    /**
     * Returns the number of entries in the map.
     *
     * @return the map's size.
     */
    public int size() {
        return this._map.size();
    }


    /**
     * Indicates whether map has any entries.
     *
     * @return true if the map is empty
     */
    public boolean isEmpty() {
        return size() == 0;
    }


    /**
     * Copies the key/value mappings in <tt>map</tt> into this map.
     * Note that this will be a <b>deep</b> copy, as storage is by
     * primitive value.
     *
     * @param map a <code>Map</code> value
     */
    public void putAll( Map<? extends Short, ? extends V> map ) {
        Iterator<? extends Entry<? extends Short,? extends V>> it = map.entrySet().iterator();
        // Iterate a fixed number of times rather than via hasNext().
        for ( int i = map.size(); i-- > 0; ) {
            Entry<? extends Short,? extends V> e = it.next();
            this.put( e.getKey(), e.getValue() );
        }
    }


    /**
     * Wraps a key
     *
     * @param k key in the underlying map
     * @return an Object representation of the key
     */
    protected Short wrapKey( short k ) {
        return Short.valueOf( k );
    }


    /**
     * Unwraps a key
     *
     * @param key wrapped key
     * @return an unwrapped representation of the key
     */
    protected short unwrapKey( Short key ) {
        return key.shortValue();
    }


    // Implements Externalizable
    public void readExternal( ObjectInput in )
        throws IOException, ClassNotFoundException {

        // VERSION
        in.readByte();

        // MAP
        //noinspection unchecked
        _map = ( TShortObjectMap<V> ) in.readObject();
    }


    // Implements Externalizable
    public void writeExternal( ObjectOutput out ) throws IOException {
        // VERSION
        out.writeByte( 0 );

        // MAP
        out.writeObject( _map );
    }

} // TShortObjectHashMapDecorator
/* * Copyright © 2017 Packt Publishing - All Rights Reserved. * Unauthorized copying of this file, via any medium is strictly prohibited. */ package com.packtpub.rest.ch6.ext; import java.io.IOException; import java.util.logging.Level; import java.util.logging.Logger; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerRequestFilter; /** * This filter will be registered dynamically from DynamicFeatureRegister * * @author Jobinesh */ public class RequestLoggerFilter implements ContainerRequestFilter { private static final Logger logger = Logger.getLogger(RequestLoggerFilter.class.getName()); @Override public void filter(ContainerRequestContext requestContext) throws IOException { logger.log(Level.INFO, "----Requested---{0}", requestContext.getUriInfo().toString()); } }
/** * Java programming exercise 3-1 * @reproduced by Jason Oh */ public class Ex03_1 { public static void main(String[] args) { int x = 1; System.out.println("x: " + x); for (int i = 1; i <= 3; i++) { System.out.println("x: " + x++ + " "); } System.out.println("\nx: " + x); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Shell; import java.io.BufferedReader; import java.io.File; import java.io.IOException; /** Filesystem disk space usage statistics. 
Uses the unix 'du' program */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class DU extends CachingGetSpaceUsed { private DUShell duShell; @VisibleForTesting public DU(File path, long interval, long initialUsed) throws IOException { super(path, interval, initialUsed); } public DU(CachingGetSpaceUsed.Builder builder) throws IOException { this(builder.getPath(), builder.getInterval(), builder.getInitialUsed()); } @Override protected synchronized void refresh() { if (duShell == null) { duShell = new DUShell(); } try { duShell.startRefresh(); } catch (IOException ioe) { LOG.warn("Could not get disk usage information", ioe); } } private final class DUShell extends Shell { void startRefresh() throws IOException { run(); } @Override public String toString() { return "du -sk " + getDirPath() + "\n" + used.get() + "\t" + getDirPath(); } @Override protected String[] getExecString() { return new String[]{"du", "-sk", getDirPath()}; } @Override protected void parseExecResult(BufferedReader lines) throws IOException { String line = lines.readLine(); if (line == null) { throw new IOException("Expecting a line not the end of stream"); } String[] tokens = line.split("\t"); if (tokens.length == 0) { throw new IOException("Illegal du output"); } setUsed(Long.parseLong(tokens[0]) * 1024); } } public static void main(String[] args) throws Exception { String path = "."; if (args.length > 0) { path = args[0]; } GetSpaceUsed du = new GetSpaceUsed.Builder().setPath(new File(path)) .setConf(new Configuration()) .build(); String duResult = du.toString(); System.out.println(duResult); } }
package com.java.study; import static org.junit.Assert.assertTrue; import org.junit.Test; /** * Unit test for simple App. */ public class AppTest { /** * Rigorous Test :-) */ @Test public void shouldAnswerWithTrue() { assertTrue( true ); } }
/* * (C) Copyright 2006-2016 Nuxeo SA (http://nuxeo.com/) and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Contributors: * Tiago Cardoso <tcardoso@nuxeo.com> * Miguel Nixo */ package org.nuxeo.ecm.platform.threed; import java.util.ArrayList; import java.util.List; public class ThreeDConstants { public static final String THREED_TYPE = "ThreeD"; public static final String THREED_FACET = "ThreeD"; public static final String THREED_SCHEMA = "threed"; public static final String THREED_CHANGED_EVENT = "threeDChanged"; public static final String EXTENSION_COLLADA = "dae"; public static final String EXTENSION_3DSTUDIO = "3ds"; public static final String EXTENSION_FILMBOX = "fbx"; public static final String EXTENSION_STANFORD = "ply"; public static final String EXTENSION_WAVEFRONT = "obj"; public static final String EXTENSION_EXTENSIBLE_3D_GRAPHICS = "x3d"; public static final String EXTENSION_STEREOLITHOGRAPHY = "stl"; public static final String EXTENSION_GLTF = "gltf"; public static final String EXTENSION_RENDER = "png"; public static final String THUMBNAIL_PICTURE_TITLE = "Thumbnail"; public static final String STATIC_3D_PCTURE_TITLE = "Static3D"; public static final List SUPPORTED_EXTENSIONS = new ArrayList<String>() { { add(EXTENSION_COLLADA); add(EXTENSION_3DSTUDIO); add(EXTENSION_FILMBOX); add(EXTENSION_STANFORD); add(EXTENSION_WAVEFRONT); add(EXTENSION_EXTENSIBLE_3D_GRAPHICS); add(EXTENSION_STEREOLITHOGRAPHY); add(EXTENSION_GLTF); } }; // Constant utility class 
private ThreeDConstants() { } }
/** Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.heliosapm.streams.metrics.router.nodes; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.Initializer; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KStreamBuilder; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.KeyValueMapper; import org.apache.kafka.streams.kstream.TimeWindows; import org.apache.kafka.streams.kstream.Window; import org.apache.kafka.streams.kstream.Windowed; import org.cliffc.high_scale_lib.NonBlockingHashMap; 
import org.cliffc.high_scale_lib.NonBlockingHashSet;
import org.springframework.jmx.export.annotation.ManagedAttribute;

import com.heliosapm.streams.common.kafka.interceptor.SwitchableMonitoringInterceptor;
import com.heliosapm.streams.metrics.StreamedMetric;
import com.heliosapm.streams.metrics.StreamedMetricValue;
import com.heliosapm.streams.metrics.router.StreamHubKafkaClientSupplier;
import com.heliosapm.streams.serialization.HeliosSerdes;
import com.heliosapm.utils.io.StdInCommandHandler;
import com.heliosapm.utils.jmx.JMXHelper;
import com.heliosapm.utils.tuples.NVP;

/**
 * <p>Title: KMetricAggreagator</p>
 * <p>Description: Aggregates metered metrics into windows of a defined period</p>
 * <p>NOTE(review): class name carries an "Aggreagator" typo; kept because it is
 * part of the public type name referenced elsewhere.</p>
 * @author Whitehead (nwhitehead AT heliosdev DOT org)
 * <p><code>com.heliosapm.streams.metrics.router.nodes.KMetricAggreagator</code></p>
 */
public class KMetricAggreagator extends AbstractMetricStreamNode implements Runnable {
	/** The aggregation window duration */
	protected long windowDuration = 1000 * 5;
	/** The rate divisor to get TPS (window length expressed in seconds) */
	protected double rateDivisor = TimeUnit.MILLISECONDS.toSeconds(windowDuration);
	/** The idle time during which we send zero values for formerly active metrics */
	protected long idleDuration = 1000 * 60 * 5;
	/** The aggregation table */
	protected KTable<Windowed<String>, StreamedMetricValue> window = null;
	/** The last delivered metric by key (one accumulator map per flatMap instance; see newlastEntry()) */
	protected final Set<NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>>> lastEntries =
			new NonBlockingHashSet<NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>>>();
	/** The producer used to send messages from the background tasks */
	protected Producer<String, StreamedMetricValue> producer = null;
	/** The background task scheduler (daemon threads so it never blocks JVM exit) */
	protected ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2, new ThreadFactory(){
		final AtomicInteger serial = new AtomicInteger();
		@Override
		public Thread newThread(final Runnable r) {
			final Thread t = new Thread(r, "KMetricAggregatorScheduler#" + serial.incrementAndGet());
			t.setDaemon(true);
			return t;
		}
	});

	/** The store name */
	public static final String STORE_NAME = "StreamedMetricAggWindow";
	/** The internal topic name where idle metrics are published */
	public static final String IDLE_TOPIC_NAME = "__idle_metrics";
	/** An empty key/value list const (returned when a window has nothing to emit) */
	public static final List<KeyValue<String, StreamedMetricValue>> EMPTY_KVS =
			Collections.unmodifiableList(Collections.emptyList());

	/**
	 * Stand-alone smoke-test entry point: wires this aggregator into a local
	 * Kafka Streams topology and runs until "stop" is typed on stdin.
	 * @param args (unused)
	 */
	public static void main(String[] args) {
		JMXHelper.fireUpJMXMPServer(1423);
		System.setProperty("streams.debug", "true");
		Properties streamsConfiguration = new Properties();
		streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "KMetricAggreagatorX");
		streamsConfiguration.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, SwitchableMonitoringInterceptor.class.getName());
		//streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "pdk-pt-cltsdb-02.intcx.net:9092,pdk-pt-cltsdb-04.intcx.net:9092,pdk-pt-cltsdb-03.intcx.net:9092");
		//streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094");
		//streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "10.5.202.251:9092,10.5.202.251:9093,10.5.202.251:9094");
		//streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "10.22.114.37:9092,10.22.114.37:9093,10.22.114.37:9094");
		streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		//streamsConfiguration.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "pdk-pt-cltsdb-02.intcx.net:2181,pdk-pt-cltsdb-04.intcx.net:2181,pdk-pt-cltsdb-03.intcx.net:2181");
		streamsConfiguration.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
		streamsConfiguration.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 3);
		// Specify default (de)serializers for record keys and for record values.
		streamsConfiguration.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
		streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
		streamsConfiguration.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, "com.heliosapm.streams.metrics.StreamedMetricTimestampExtractor");
		//streamsConfiguration.put("auto.offset.reset", "earliest");
		KStreamBuilder builder = new KStreamBuilder();
		KMetricAggreagator aggregator = new KMetricAggreagator();
		aggregator.sinkTopic = "tsdb.metrics.binary";
		aggregator.sourceTopics = new String[]{"tsdb.metrics.meter"};
		aggregator.configure(builder);
		// Bridge: text metrics are parsed and re-keyed by metricKey onto the meter topic.
		builder.stream(HeliosSerdes.STRING_SERDE, HeliosSerdes.STRING_SERDE, "tsdb.metrics.text.meter")
			.map((k,v) -> {
				final StreamedMetric sm = StreamedMetric.fromString(v);
				return new KeyValue<String, StreamedMetric>(sm.metricKey(), sm);
			})
			.to(HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_SERDE, "tsdb.metrics.meter");
		KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
		streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
			@Override
			public void uncaughtException(final Thread t, final Throwable e) {
				System.err.println("Uncaught exception on [" + t + "]");
				e.printStackTrace(System.err);
			}
		});
		streams.start();
		aggregator.onStart(new StreamHubKafkaClientSupplier(new StreamsConfig(streamsConfiguration), "KMetricAggreagatorX"), streams);
		StdInCommandHandler.getInstance().registerCommand("stop", new Runnable(){
			@Override
			public void run() {
				System.err.println("\n\tSTOPPING....");
				try { streams.close(); } catch (Exception x) {/* No Op */}
				System.exit(0);
			}
		}).run();
	}

	/** Initializer that starts each aggregation window with a null accumulator. */
	static class SMAggInit implements Initializer<StreamedMetricValue> {
		@Override
		public StreamedMetricValue apply() {
			return null;
		}
	}

	/** Counts occurrences per key within a window: each record contributes 1. */
	class SMAgg implements org.apache.kafka.streams.kstream.Aggregator<String, StreamedMetric, StreamedMetricValue> {
		@Override
		public StreamedMetricValue apply(final String aggKey, final StreamedMetric value, final StreamedMetricValue aggregate) {
			if(aggregate==null) {
				//log.info("Initializing [{}]", aggKey);
				// First record of the window seeds the count at 1.
				return value.forValue(1L);
			}
			return aggregate.forceToLong().increment(
					value.forValue(1L).getValueNumber().longValue()
			);
		}
	}

	@Override
	public void onStart(final StreamHubKafkaClientSupplier clientSupplier, final KafkaStreams kafkaStreams) {
		super.onStart(clientSupplier, kafkaStreams);
		producer = clientSupplier.getProducer(HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_VALUE_SERDE);
		// The reaper (this.run()) fires once per window period.
		scheduler.scheduleAtFixedRate(this, windowDuration, windowDuration, TimeUnit.MILLISECONDS);
		log.info("Scheduled background reaper every [{}] ms.", windowDuration);
	}

	/**
	 * {@inheritDoc}
	 * @see com.heliosapm.streams.metrics.router.nodes.AbstractMetricStreamNode#close()
	 */
	@Override
	public void close() {
		super.close();
		scheduler.shutdownNow();
		try { producer.close(); } catch (Exception x) {/* No Op */}
	}

	// Creates a fresh per-flatMap-instance accumulator and registers it so the
	// background reaper (run()) can see it.
	private NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>> newlastEntry() {
		NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>> acc = new NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>>();
		lastEntries.add(acc);
		return acc;
	}

	/**
	 * {@inheritDoc}
	 * @see com.heliosapm.streams.metrics.router.nodes.MetricStreamNode#configure(org.apache.kafka.streams.kstream.KStreamBuilder)
	 */
	@Override
	public void configure(final KStreamBuilder streamBuilder) {
		rateDivisor = TimeUnit.MILLISECONDS.toSeconds(windowDuration);
		KStream<String, StreamedMetric> rawMetrics = streamBuilder.stream(HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_SERDE, sourceTopics);
		window = rawMetrics.aggregateByKey(new SMAggInit(), new SMAgg(), TimeWindows.of(STORE_NAME, windowDuration), HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_VALUE_SERDE);
		window.toStream()
			.flatMap(new KeyValueMapper<Windowed<String>, StreamedMetricValue, Iterable<KeyValue<String,StreamedMetricValue>>>() {
				protected final NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>> lastEntry = newlastEntry();
				@Override
				public Iterable<KeyValue<String, StreamedMetricValue>> apply(final Windowed<String> key, final StreamedMetricValue value) {
					// Remember the latest (window, value) per key; only emit the PRIOR
					// value once its window has rolled over (i.e. the window changed).
					final NVP<Window, StreamedMetricValue> prior = lastEntry.put(key.key(), new NVP<Window, StreamedMetricValue>(key.window(), value));
					if(prior!=null && !prior.getKey().equals(key.window())) {
						final StreamedMetricValue smv = prior.getValue();
						// log.info("[{}]-[{}]:{}", new Date(smv.getTimestamp()), smv.metricKey(), smv.getValueNumber());
						return Collections.singletonList(new KeyValue<String, StreamedMetricValue>(key.key(), adjust(smv, key.window())));
					}
					return EMPTY_KVS;
				}
			}).to(HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_VALUE_SERDE, sinkTopic);
		if(System.getProperties().containsKey("streams.debug")) {
			// Debug tap: echoes everything written to the sink topic to stderr.
			streamBuilder.stream(HeliosSerdes.STRING_SERDE, HeliosSerdes.STREAMED_METRIC_SERDE, sinkTopic)
				.foreach((k,v) -> System.err.println("[" + new Date() + "<" + Thread.currentThread() + ">] WWWW: [" + new Date(v.getTimestamp()) + "] [" + v.metricKey() + "]:" + v.forValue(0D).getValueNumber()));
		}
	}

	// Re-stamps the metric at the window end and converts the raw count to TPS.
	private StreamedMetricValue adjust(final StreamedMetricValue smv, final Window window) {
		return StreamedMetricValue.fromKey(window.end(), smv.metricKey(), calcTps(smv.getValueNumber().doubleValue(), rateDivisor));
	}

	// count/time with a zero guard to avoid division by zero (and NaN/Infinity).
	private static double calcTps(final double count, final double time) {
		if(count==0D || time==0D) return 0D;
		return count/time;
	}

	/**
	 * <p>Runs the background task to send closing values, fill in zeroes, when the metric is idle
	 * and purges the last entry when they expire.</p>
	 * {@inheritDoc}
	 * @see java.lang.Runnable#run()
	 */
	public void run() {
		for(NonBlockingHashMap<String, NVP<Window, StreamedMetricValue>> lastEntry: lastEntries) {
			try {
				final long now = System.currentTimeMillis();
				for(Map.Entry<String, NVP<Window, StreamedMetricValue>> entry: lastEntry.entrySet()) {
					final String key = entry.getKey();
					final NVP<Window, StreamedMetricValue> value = entry.getValue();
					final Window win = value.getKey();
					final StreamedMetricValue smv = value.getValue();
					final long diff = now - win.end();
					// Only act on windows that have already closed.
					if(diff > windowDuration) {
						if(smv.getValueNumber().doubleValue() > 0D) {
							// Flush the final value, then reset the entry to zero.
							producer.send(new ProducerRecord<String, StreamedMetricValue>(sinkTopic, key, adjust(smv, win)));
							entry.setValue(new NVP<Window, StreamedMetricValue>(win, StreamedMetricValue.fromKey(now, smv.metricKey(), 0D)));
							log.debug("Sent closing value for [{}]", smv.metricKey());
							continue;
						}
						if(diff > idleDuration) {
							// Idle past the grace period: drop the key entirely.
							lastEntry.remove(key);
							log.info("Removed expired key [{}]", key);
						} else {
							// Still within the idle grace period: emit a zero placeholder.
							producer.send(new ProducerRecord<String, StreamedMetricValue>(sinkTopic, key, StreamedMetricValue.fromKey(now, smv.metricKey(), 0L)));
							log.debug("Filling in for key [{}] until [{}]", key, new Date(win.end() + idleDuration));
						}
					}
				}
			} catch (Exception ex) {
				log.error("Background task failure", ex);
			}
		}
	}

	/**
	 * Returns the aggregation window duration in ms.
	 * @return the aggregation window duration in ms.
	 */
	@ManagedAttribute(description="The aggregation window duration in ms.")
	public long getWindowDuration() {
		return windowDuration;
	}

	/**
	 * Sets the aggregation window duration in ms.
	 * @param windowDuration the aggregation window duration in ms.
	 */
	public void setWindowDuration(final long windowDuration) {
		if(windowDuration < 0) throw new IllegalArgumentException("The aggregation window duration [" + windowDuration + "] is invalid");
		this.windowDuration = windowDuration;
	}

	/**
	 * Returns the idle duration in ms.
	 * @return the idle duration in ms.
	 */
	@ManagedAttribute(description="The idle duration in ms.")
	public long getIdleDuration() {
		return idleDuration;
	}

	/**
	 * Sets the time in ms. that a metric can be idle (with zeroes being sent every period)
	 * before it is purged from {@link #lastEntry}
	 * @param idleDuration the idle duration in ms.
	 */
	public void setIdleDuration(final long idleDuration) {
		if(idleDuration < 0) throw new IllegalArgumentException("The idle duration [" + idleDuration + "] is invalid");
		this.idleDuration = idleDuration;
	}

	/**
	 * Returns the number of windows in the last entry cache
	 * @return the number of windows in the last entry cache
	 */
	@ManagedAttribute(description="The number of windows in the last entry cache.")
	public int getStateEntryCount() {
		return lastEntries.stream().mapToInt(NonBlockingHashMap::size).sum();
	}

	/**
	 * Returns the number of accumulators created
	 * @return the number of accumulators created
	 */
	@ManagedAttribute(description="The the number of accumulators created.")
	public int getAccumulatorCount() {
		return lastEntries.size();
	}
}
/****************************************************************************** * Copyright 2009-2018 Exactpro (Exactpro Systems Limited) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.exactpro.sf.configuration.netdumper; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @Retention (RetentionPolicy.RUNTIME) public @interface NetDumperListenHost { int index() default 0; }
package de.pkro; public class GreatestCommonDivisor { public static int getGreatestCommonDivisor(int first, int second) { if (first < 10 || second < 10) { return -1; } int divisor = first > second ? second : first; while (divisor > 1) { if (first % divisor == 0 && second % divisor == 0) { return divisor; } divisor--; } return 1; } }
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package cz.msebera.android.httpclient.impl.conn.tsccm;

import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;

import cz.msebera.android.httpclient.conn.routing.HttpRoute;
import cz.msebera.android.httpclient.util.Args;

/**
 * A weak reference to a {@link BasicPoolEntry BasicPoolEntry}.
 * This reference explicitly keeps the planned route, so the connection
 * can be reclaimed if it is lost to garbage collection.
 *
 * @since 4.0
 *
 * @deprecated (4.2) do not use
 */
@Deprecated
public class BasicPoolEntryRef extends WeakReference<BasicPoolEntry> {

    /** The planned route of the entry. */
    private final HttpRoute route; // HttpRoute is @Immutable


    /**
     * Creates a new reference to a pool entry.
     *
     * @param entry   the pool entry, must not be {@code null}
     * @param queue   the reference queue, or {@code null}
     */
    public BasicPoolEntryRef(final BasicPoolEntry entry,
                             final ReferenceQueue<Object> queue) {
        // super(...) must be the first statement (Java requirement), so the
        // null check can only run afterwards; a null entry then fails fast here.
        super(entry, queue);
        Args.notNull(entry, "Pool entry");
        // Snapshot the route now: it stays available after the referent is collected.
        route = entry.getPlannedRoute();
    }


    /**
     * Obtain the planned route for the referenced entry.
     * The planned route is still available, even if the entry is gone.
     *
     * @return      the planned route
     */
    public final HttpRoute getRoute() {
        return this.route;
    }

} // class BasicPoolEntryRef
// Copyright 2020 Goldman Sachs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.finos.legend.pure.runtime.java.compiled.support;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.eclipse.collections.api.block.function.Function;
import org.eclipse.collections.api.block.procedure.primitive.IntProcedure;
import org.eclipse.collections.impl.block.factory.Functions;
import org.eclipse.collections.impl.list.mutable.FastList;
import org.eclipse.collections.impl.list.primitive.IntInterval;
import org.finos.legend.pure.m3.exception.PureAssertFailException;
import org.finos.legend.pure.m3.exception.PureExecutionException;
import org.finos.legend.pure.m4.coreinstance.SourceInformation;
import org.finos.legend.pure.runtime.java.compiled.generation.processors.support.map.PureCacheMap;
import org.finos.legend.pure.runtime.java.compiled.generation.processors.support.map.PureCacheMapGetException;
import org.finos.legend.pure.runtime.java.compiled.generation.processors.support.map.PureMap;
import org.junit.Assert;
import org.junit.Test;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Unit tests for {@code PureCacheMap}: cache hit/miss accounting,
 * single- and multi-threaded access, and exception wrapping into
 * {@code PureCacheMapGetException}.
 */
public class TestPureCacheMap
{
    /**
     * Builds a fresh Guava cache with stats recording enabled so the tests
     * below can assert exact hit/miss counts.
     */
    private Cache<Integer, String> getCache()
    {
        return CacheBuilder.newBuilder().recordStats().concurrencyLevel(1).expireAfterWrite(10, TimeUnit.MINUTES).build();
    }

    @Test
    public void testSingleThreadedSimpleAccessWithDeprecatedConstructor()
    {
        // Exercises the deprecated (concurrencyLevel, duration, unit) constructor.
        PureCacheMap<Integer, String> cacheMap = new PureCacheMap<>(1, 10, TimeUnit.MINUTES);
        // Three full passes over keys 1..10: first pass misses, next two hit.
        for (int i = 0; i <= 2; i++)
        {
            accessIntegerCacheKeys(cacheMap, 10);
        }
        Assert.assertEquals(10, cacheMap.getCache().stats().missCount());
        Assert.assertEquals(20, cacheMap.getCache().stats().hitCount());
    }

    @Test
    public void testSingleThreadedSimpleAccess()
    {
        PureCacheMap<Integer, String> cacheMap = PureCacheMap.newCacheMap(getCache());
        // Same access pattern as above, but via the factory method.
        for (int i = 0; i <= 2; i++)
        {
            accessIntegerCacheKeys(cacheMap, 10);
        }
        Assert.assertEquals(10, cacheMap.getCache().stats().missCount());
        Assert.assertEquals(20, cacheMap.getCache().stats().hitCount());
    }

    @Test
    public void testToStringDoesNotRaiseException()
    {
        // Regression guard: wrapping a populated PureCacheMap in a PureMap
        // and stringifying it must not throw.
        PureCacheMap<Integer, String> cacheMap = PureCacheMap.newCacheMap(getCache());
        accessIntegerCacheKeys(cacheMap, 10);
        PureMap pureMap = new PureMap(cacheMap);
        String result = pureMap.toString();
        Assert.assertNotNull(result);
    }

    @Test
    public void testMultiThreadedSimpleAccess()
    {
        PureCacheMap<Integer, String> cacheMap = PureCacheMap.newCacheMap(getCache());
        // Warm the cache on the test thread (10 misses), then two concurrent
        // passes that should be all hits (20 hits total).
        accessIntegerCacheKeys(cacheMap, 10);
        ExecutorService exec = Executors.newFixedThreadPool(4);
        try
        {
            exec.invokeAll(FastList.newListWith(getAccessIntegerCacheKeysCallable(cacheMap, 10), getAccessIntegerCacheKeysCallable(cacheMap, 10)));
            Assert.assertEquals(10, cacheMap.getCache().stats().missCount());
            Assert.assertEquals(20, cacheMap.getCache().stats().hitCount());
        }
        catch (InterruptedException e)
        {
            Assert.fail(e.getMessage());
        }
        finally
        {
            exec.shutdownNow();
        }
    }

    @Test
    public void testSingleThreadMultipleOperations()
    {
        PureCacheMap<Integer, String> cacheMap = PureCacheMap.newCacheMap(getCache());
        int count = 10;
        for (int i = 0; i < count; i++)
        {
            String expected = Integer.toString(i);
            // Each iteration performs 4 misses and 4 hits (invalidate resets the key).
            Assert.assertNull(cacheMap.get(i)); // miss
            Assert.assertEquals(expected, cacheMap.getIfAbsentPutWithKey(i, Functions.getToString())); // miss
            Assert.assertEquals(expected, cacheMap.get(i)); // hit
            Assert.assertEquals(expected, cacheMap.getIfAbsentPutWithKey(i, Functions.getToString())); // hit
            cacheMap.invalidate(i);
            Assert.assertNull(cacheMap.get(i)); // miss
            Assert.assertEquals(expected, cacheMap.getIfAbsentPutWithKey(i, Functions.getToString())); // miss
            Assert.assertEquals(expected, cacheMap.get(i)); // hit
            Assert.assertEquals(expected, cacheMap.getIfAbsentPutWithKey(i, Functions.getToString())); // hit
        }
        Assert.assertEquals(4 * count, cacheMap.getCache().stats().missCount());
        Assert.assertEquals(4 * count, cacheMap.getCache().stats().hitCount());
        Assert.assertEquals(count, cacheMap.size());
        cacheMap.clear();
        Assert.assertEquals(0, cacheMap.size());
    }

    @Test
    public void testPureException()
    {
        // Exceptions thrown by the value function must surface as
        // PureCacheMapGetException with the key and original cause attached.
        PureCacheMap<Integer, String> cacheMap = PureCacheMap.newCacheMap(getCache());
        SourceInformation sourceInfo = new SourceInformation("/fake/file.pure", 1, 2, 3, 4, 5, 6);
        assertGetException("Exception fetching Cache value for Key 1: Something bad just happened!", cacheMap, 1, new PureAssertFailException(sourceInfo, "Something bad just happened!"));
        assertGetException("Exception fetching Cache value for Key 2: Now something REALLY BAD just happened!", cacheMap, 2, new PureExecutionException(sourceInfo, "Now something REALLY BAD just happened!"));
        assertGetException("Exception fetching Cache value for Key 3: Oh no! The WORST!", cacheMap, 3, new RuntimeException("Oh no! The WORST!"));
        // NullPointerException has no message, so no ": ..." suffix is expected.
        assertGetException("Exception fetching Cache value for Key 4", cacheMap, 4, new NullPointerException());
    }

    /**
     * Asserts that fetching {@code key} with a value function that throws
     * {@code exception} produces a {@code PureCacheMapGetException} carrying
     * the same key, the original cause, and the expected info string.
     */
    private <K, V> void assertGetException(String expectedInfo, PureCacheMap<K, V> cacheMap, K key, final RuntimeException exception)
    {
        try
        {
            cacheMap.getIfAbsentPutWithKey(key, new Function<K, V>()
            {
                @Override
                public V valueOf(K key)
                {
                    throw exception;
                }
            });
            Assert.fail("Expected exception: " + exception);
        }
        catch (PureCacheMapGetException e)
        {
            Assert.assertSame(key, e.getKey());
            Assert.assertSame(exception, e.getCause());
            Assert.assertEquals(expectedInfo, e.getInfo());
        }
        catch (Exception e)
        {
            Assert.fail("Expected PureCacheMapGetException, got: " + e);
        }
    }

    /**
     * Touches keys 1..numberOfInts, populating any absent entries with their
     * string form.
     */
    private void accessIntegerCacheKeys(final PureCacheMap<Integer, String> cacheMap, int numberOfInts)
    {
        IntInterval.oneTo(numberOfInts).forEach(new IntProcedure()
        {
            @Override
            public void value(int key)
            {
                cacheMap.getIfAbsentPutWithKey(key, Functions.getToString());
            }
        });
    }

    /**
     * Wraps {@link #accessIntegerCacheKeys} in a Callable for use with an
     * ExecutorService in the multi-threaded test.
     */
    private Callable<Void> getAccessIntegerCacheKeysCallable(final PureCacheMap<Integer, String> cacheMap, final int numberOfInts)
    {
        return new Callable<Void>()
        {
            @Override
            public Void call()
            {
                accessIntegerCacheKeys(cacheMap, numberOfInts);
                return null;
            }
        };
    }
}
package com.google.android.gms.internal.ads;

/**
 * Obfuscated (ProGuard-renamed) wrapper that holds a single {@code zzavd}
 * delegate. Original member names are recorded in the "renamed from"
 * comments emitted by the deobfuscation mapping.
 */
@zzard
public final class zzauw {

    /* renamed from: a */
    // Assigned once in the constructor and never reassigned; marked final
    // to make the immutability explicit.
    private final zzavd f24848a;

    public zzauw(zzavd zzavd) {
        this.f24848a = zzavd;
    }
}
package me.kaloyankys.wilderworld.entity.render; import me.kaloyankys.wilderworld.client.WilderworldClient; import me.kaloyankys.wilderworld.entity.ButterflyEntity; import net.minecraft.client.render.entity.EntityRendererFactory; import net.minecraft.client.render.entity.MobEntityRenderer; import net.minecraft.util.Identifier; public class ButterflyEntityRenderer extends MobEntityRenderer<ButterflyEntity, ButterflyModel> { private static final Identifier[] TEXTURE = { new Identifier("wilderworld", "textures/entity/butterfly/flame.png"), new Identifier("wilderworld", "textures/entity/butterfly/honey.png"), new Identifier("wilderworld", "textures/entity/butterfly/aqua.png"), new Identifier("wilderworld", "textures/entity/butterfly/pixie.png") }; private static final Identifier ROXANNE = new Identifier("wilderworld", "textures/entity/butterfly/roxanne.png"); public ButterflyEntityRenderer(EntityRendererFactory.Context context) { super(context, new ButterflyModel(context.getPart(WilderworldClient.BUTTERFLY_RENDER_LAYER)), 0.5f); } @Override public Identifier getTexture(ButterflyEntity entity) { if (entity.getCustomName() != null && entity.getCustomName().asString().equals("Roxanne")) { return ROXANNE; } else { return TEXTURE[entity.getVariant() % TEXTURE.length]; } } }
package net.minecraft.inventory.container;

import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import net.minecraft.block.Blocks;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.item.ExperienceOrbEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.inventory.CraftResultInventory;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Inventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.util.IWorldPosCallable;
import net.minecraft.world.World;

/**
 * Container for the grindstone block: two input slots, one output slot, and
 * the player inventory. Grinding combines/repairs two like items or strips
 * non-curse enchantments from a single item, refunding enchantment XP when
 * the output is taken.
 */
public class GrindstoneContainer extends Container {
   /** The inventory slot that stores the output of the crafting recipe. */
   private final IInventory outputInventory = new CraftResultInventory();
   /** Two-slot input inventory; any change re-derives the output. */
   private final IInventory inputInventory = new Inventory(2) {
      /**
       * For tile entities, ensures the chunk containing the tile entity is saved to disk later - the game won't think
       * it hasn't changed and skip it.
       */
      public void markDirty() {
         super.markDirty();
         // Recompute the output whenever either input slot changes.
         GrindstoneContainer.this.onCraftMatrixChanged(this);
      }
   };
   // Wraps the block position/world so logic can run server-side only
   // (DUMMY on the client).
   private final IWorldPosCallable worldPosCallable;

   public GrindstoneContainer(int p_i50080_1_, PlayerInventory playerInventoryIn) {
      this(p_i50080_1_, playerInventoryIn, IWorldPosCallable.DUMMY);
   }

   public GrindstoneContainer(int windowIdIn, PlayerInventory p_i50081_2_, final IWorldPosCallable worldPosCallableIn) {
      super(ContainerType.GRINDSTONE, windowIdIn);
      this.worldPosCallable = worldPosCallableIn;
      // Upper input slot: accepts damageable, enchanted, or enchanted-book items.
      this.addSlot(new Slot(this.inputInventory, 0, 49, 19) {
         /**
          * Check if the stack is allowed to be placed in this slot, used for armor slots as well as furnace fuel.
          */
         public boolean isItemValid(ItemStack stack) {
            return stack.isDamageable() || stack.getItem() == Items.ENCHANTED_BOOK || stack.isEnchanted();
         }
      });
      // Lower input slot: same acceptance rule as the upper slot.
      this.addSlot(new Slot(this.inputInventory, 1, 49, 40) {
         /**
          * Check if the stack is allowed to be placed in this slot, used for armor slots as well as furnace fuel.
          */
         public boolean isItemValid(ItemStack stack) {
            return stack.isDamageable() || stack.getItem() == Items.ENCHANTED_BOOK || stack.isEnchanted();
         }
      });
      // Output slot: take-only; taking it pays out enchantment XP and clears the inputs.
      this.addSlot(new Slot(this.outputInventory, 2, 129, 34) {
         /**
          * Check if the stack is allowed to be placed in this slot, used for armor slots as well as furnace fuel.
          */
         public boolean isItemValid(ItemStack stack) {
            return false;
         }

         public ItemStack onTake(PlayerEntity thePlayer, ItemStack stack) {
            worldPosCallableIn.consume((p_216944_1_, p_216944_2_) -> {
               int l = this.getEnchantmentXpFromInputs(p_216944_1_);

               // Spawn the refunded XP as orbs, splitting into vanilla orb sizes.
               while(l > 0) {
                  int i1 = ExperienceOrbEntity.getXPSplit(l);
                  l -= i1;
                  p_216944_1_.addEntity(new ExperienceOrbEntity(p_216944_1_, (double)p_216944_2_.getX(), (double)p_216944_2_.getY() + 0.5D, (double)p_216944_2_.getZ() + 0.5D, i1));
               }

               // 1042 = grindstone-use world event (sound/particles).
               p_216944_1_.playEvent(1042, p_216944_2_, 0);
            });
            GrindstoneContainer.this.inputInventory.setInventorySlotContents(0, ItemStack.EMPTY);
            GrindstoneContainer.this.inputInventory.setInventorySlotContents(1, ItemStack.EMPTY);
            return stack;
         }

         /**
          * Returns the total amount of XP stored in all of the input slots of this container. The return value is
          * randomized, so that it returns between 50% and 100% of the total XP.
          */
         private int getEnchantmentXpFromInputs(World worldIn) {
            int l = 0;
            l = l + this.getEnchantmentXp(GrindstoneContainer.this.inputInventory.getStackInSlot(0));
            l = l + this.getEnchantmentXp(GrindstoneContainer.this.inputInventory.getStackInSlot(1));
            if (l > 0) {
               int i1 = (int)Math.ceil((double)l / 2.0D);
               return i1 + worldIn.rand.nextInt(i1);
            } else {
               return 0;
            }
         }

         /**
          * Returns the total amount of XP stored in the enchantments of this stack. Curses contribute nothing
          * (they are also not removed by grinding).
          */
         private int getEnchantmentXp(ItemStack stack) {
            int l = 0;
            Map<Enchantment, Integer> map = EnchantmentHelper.getEnchantments(stack);

            for(Entry<Enchantment, Integer> entry : map.entrySet()) {
               Enchantment enchantment = entry.getKey();
               Integer integer = entry.getValue();
               if (!enchantment.isCurse()) {
                  l += enchantment.getMinEnchantability(integer);
               }
            }

            return l;
         }
      });

      // Player main inventory (3 rows x 9 columns).
      for(int i = 0; i < 3; ++i) {
         for(int j = 0; j < 9; ++j) {
            this.addSlot(new Slot(p_i50081_2_, j + i * 9 + 9, 8 + j * 18, 84 + i * 18));
         }
      }

      // Player hotbar (9 slots).
      for(int k = 0; k < 9; ++k) {
         this.addSlot(new Slot(p_i50081_2_, k, 8 + k * 18, 142));
      }

   }

   /**
    * Callback for when the crafting matrix is changed.
    */
   public void onCraftMatrixChanged(IInventory inventoryIn) {
      super.onCraftMatrixChanged(inventoryIn);
      if (inventoryIn == this.inputInventory) {
         this.updateRecipeOutput();
      }

   }

   /**
    * Derives the output slot from the two input slots: either merges/repairs
    * two identical items or strips enchantments from a single item. Invalid
    * combinations clear the output.
    */
   private void updateRecipeOutput() {
      ItemStack itemstack = this.inputInventory.getStackInSlot(0);
      ItemStack itemstack1 = this.inputInventory.getStackInSlot(1);
      // flag: at least one input present; flag1: both inputs present.
      boolean flag = !itemstack.isEmpty() || !itemstack1.isEmpty();
      boolean flag1 = !itemstack.isEmpty() && !itemstack1.isEmpty();
      if (!flag) {
         this.outputInventory.setInventorySlotContents(0, ItemStack.EMPTY);
      } else {
         // flag2: some input is neither enchanted, an enchanted book, nor damageable-relevant —
         // i.e. there is nothing the grindstone could do with it alone.
         boolean flag2 = !itemstack.isEmpty() && itemstack.getItem() != Items.ENCHANTED_BOOK && !itemstack.isEnchanted() || !itemstack1.isEmpty() && itemstack1.getItem() != Items.ENCHANTED_BOOK && !itemstack1.isEnchanted();
         if (itemstack.getCount() > 1 || itemstack1.getCount() > 1 || !flag1 && flag2) {
            this.outputInventory.setInventorySlotContents(0, ItemStack.EMPTY);
            this.detectAndSendChanges();
            return;
         }

         int j = 1;
         int i;
         ItemStack itemstack2;
         if (flag1) {
            // Two items: they must be the same item type to be combined.
            if (itemstack.getItem() != itemstack1.getItem()) {
               this.outputInventory.setInventorySlotContents(0, ItemStack.EMPTY);
               this.detectAndSendChanges();
               return;
            }

            Item item = itemstack.getItem();
            // Combine remaining durability of both items plus a 5% bonus.
            int k = itemstack.getMaxDamage() - itemstack.getDamage();
            int l = itemstack.getMaxDamage() - itemstack1.getDamage();
            int i1 = k + l + itemstack.getMaxDamage() * 5 / 100;
            i = Math.max(itemstack.getMaxDamage() - i1, 0);
            itemstack2 = this.copyEnchantments(itemstack, itemstack1);
            if (!itemstack2.isRepairable()) i = itemstack.getDamage();
            if (!itemstack2.isDamageable() || !itemstack2.isRepairable()) {
               // Non-repairable pair (e.g. two enchanted books): they must be
               // identical stacks, and the result doubles up (count 2).
               if (!ItemStack.areItemStacksEqual(itemstack, itemstack1)) {
                  this.outputInventory.setInventorySlotContents(0, ItemStack.EMPTY);
                  this.detectAndSendChanges();
                  return;
               }

               j = 2;
            }
         } else {
            // Single item: keep its damage, just strip enchantments.
            boolean flag3 = !itemstack.isEmpty();
            i = flag3 ? itemstack.getDamage() : itemstack1.getDamage();
            itemstack2 = flag3 ? itemstack : itemstack1;
         }

         this.outputInventory.setInventorySlotContents(0, this.removeEnchantments(itemstack2, i, j));
      }

      this.detectAndSendChanges();
   }

   /**
    * Copies non-curse enchantments from {@code copyFrom} onto a copy of
    * {@code copyTo}; curses are carried over only if the target does not
    * already have them.
    */
   private ItemStack copyEnchantments(ItemStack copyTo, ItemStack copyFrom) {
      ItemStack itemstack = copyTo.copy();
      Map<Enchantment, Integer> map = EnchantmentHelper.getEnchantments(copyFrom);

      for(Entry<Enchantment, Integer> entry : map.entrySet()) {
         Enchantment enchantment = entry.getKey();
         if (!enchantment.isCurse() || EnchantmentHelper.getEnchantmentLevel(enchantment, itemstack) == 0) {
            itemstack.addEnchantment(enchantment, entry.getValue());
         }
      }

      return itemstack;
   }

   /**
    * Removes all enchantments from the {@link ItemStack}. Note that the curses are not removed.
    */
   private ItemStack removeEnchantments(ItemStack stack, int damage, int count) {
      ItemStack itemstack = stack.copy();
      itemstack.removeChildTag("Enchantments");
      itemstack.removeChildTag("StoredEnchantments");
      if (damage > 0) {
         itemstack.setDamage(damage);
      } else {
         itemstack.removeChildTag("Damage");
      }

      itemstack.setCount(count);
      // Keep only the curses; everything else is ground off.
      Map<Enchantment, Integer> map = EnchantmentHelper.getEnchantments(stack).entrySet().stream().filter((p_217012_0_) -> {
         return p_217012_0_.getKey().isCurse();
      }).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
      EnchantmentHelper.setEnchantments(map, itemstack);
      itemstack.setRepairCost(0);
      // A fully stripped enchanted book reverts to a plain book (name preserved).
      if (itemstack.getItem() == Items.ENCHANTED_BOOK && map.size() == 0) {
         itemstack = new ItemStack(Items.BOOK);
         if (stack.hasDisplayName()) {
            itemstack.setDisplayName(stack.getDisplayName());
         }
      }

      // Re-accumulate the anvil repair cost once per remaining (curse) enchantment.
      for(int i = 0; i < map.size(); ++i) {
         itemstack.setRepairCost(RepairContainer.getNewRepairCost(itemstack.getRepairCost()));
      }

      return itemstack;
   }

   /**
    * Called when the container is closed. Returns any items left in the
    * input slots to the player.
    */
   public void onContainerClosed(PlayerEntity playerIn) {
      super.onContainerClosed(playerIn);
      this.worldPosCallable.consume((p_217009_2_, p_217009_3_) -> {
         this.clearContainer(playerIn, p_217009_2_, this.inputInventory);
      });
   }

   /**
    * Determines whether supplied player can use this container
    */
   public boolean canInteractWith(PlayerEntity playerIn) {
      return isWithinUsableDistance(this.worldPosCallable, playerIn, Blocks.GRINDSTONE);
   }

   /**
    * Handle when the stack in slot {@code index} is shift-clicked. Normally this moves the stack between the player
    * inventory and the other inventory(s).
    */
   public ItemStack transferStackInSlot(PlayerEntity playerIn, int index) {
      ItemStack itemstack = ItemStack.EMPTY;
      Slot slot = this.inventorySlots.get(index);
      if (slot != null && slot.getHasStack()) {
         ItemStack itemstack1 = slot.getStack();
         itemstack = itemstack1.copy();
         ItemStack itemstack2 = this.inputInventory.getStackInSlot(0);
         ItemStack itemstack3 = this.inputInventory.getStackInSlot(1);
         if (index == 2) {
            // From output slot: move to player inventory (slots 3..38).
            if (!this.mergeItemStack(itemstack1, 3, 39, true)) {
               return ItemStack.EMPTY;
            }

            slot.onSlotChange(itemstack1, itemstack);
         } else if (index != 0 && index != 1) {
            // From player inventory: prefer the input slots; if both are
            // occupied, shuttle between main inventory and hotbar instead.
            if (!itemstack2.isEmpty() && !itemstack3.isEmpty()) {
               if (index >= 3 && index < 30) {
                  if (!this.mergeItemStack(itemstack1, 30, 39, false)) {
                     return ItemStack.EMPTY;
                  }
               } else if (index >= 30 && index < 39 && !this.mergeItemStack(itemstack1, 3, 30, false)) {
                  return ItemStack.EMPTY;
               }
            } else if (!this.mergeItemStack(itemstack1, 0, 2, false)) {
               return ItemStack.EMPTY;
            }
         } else if (!this.mergeItemStack(itemstack1, 3, 39, false)) {
            // From an input slot: move to player inventory.
            return ItemStack.EMPTY;
         }

         if (itemstack1.isEmpty()) {
            slot.putStack(ItemStack.EMPTY);
         } else {
            slot.onSlotChanged();
         }

         if (itemstack1.getCount() == itemstack.getCount()) {
            return ItemStack.EMPTY;
         }

         slot.onTake(playerIn, itemstack1);
      }

      return itemstack;
   }
}
package com.sap.cloud.lm.sl.cf.core.persistence.query;

import java.util.Date;

import com.sap.cloud.lm.sl.cf.core.persistence.OrderDirection;
import com.sap.cloud.lm.sl.cf.persistence.model.ProgressMessage;
import com.sap.cloud.lm.sl.cf.persistence.model.ProgressMessage.ProgressMessageType;

/**
 * Fluent query over persisted {@link ProgressMessage} records. Each method
 * adds a filter (or ordering) criterion and returns the query itself so
 * criteria can be chained before execution via the base {@link Query} API.
 */
public interface ProgressMessageQuery extends Query<ProgressMessage, ProgressMessageQuery> {

    /** Filters by the message's database identifier. */
    ProgressMessageQuery id(Long id);

    /** Filters by the owning process identifier. */
    ProgressMessageQuery processId(String processId);

    /** Filters by the task that produced the message. */
    ProgressMessageQuery taskId(String taskId);

    /** Filters by message type (exact match). */
    ProgressMessageQuery type(ProgressMessageType type);

    /** Filters to messages whose type differs from the given type. */
    ProgressMessageQuery typeNot(ProgressMessageType type);

    /** Filters by the message text. */
    ProgressMessageQuery text(String text);

    /** Filters to messages persisted strictly before the given time. */
    ProgressMessageQuery olderThan(Date time);

    /** Orders results by identifier in the given direction. */
    ProgressMessageQuery orderById(OrderDirection orderDirection);

}