repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
xasx/camunda-bpm-platform
engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/dto/runtime/EventSubscriptionDto.java
2151
/*
 * Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.dto.runtime;

import java.util.Date;

import org.camunda.bpm.engine.runtime.EventSubscription;

/**
 * REST transfer object for a runtime {@link EventSubscription}.
 * Instances are populated exclusively through {@link #fromEventSubscription(EventSubscription)};
 * the fields are read-only from the caller's point of view.
 */
public class EventSubscriptionDto {

  private String id;
  private String eventType;
  private String eventName;
  private String executionId;
  private String processInstanceId;
  private String activityId;
  private Date createdDate;
  private String tenantId;

  /** @return the subscription id */
  public String getId() {
    return id;
  }

  /** @return the event type (e.g. message, signal) as reported by the engine */
  public String getEventType() {
    return eventType;
  }

  /** @return the name of the subscribed event */
  public String getEventName() {
    return eventName;
  }

  /** @return id of the execution owning this subscription */
  public String getExecutionId() {
    return executionId;
  }

  /** @return id of the owning process instance */
  public String getProcessInstanceId() {
    return processInstanceId;
  }

  /** @return id of the activity the subscription is attached to */
  public String getActivityId() {
    return activityId;
  }

  /** @return creation timestamp of the subscription */
  public Date getCreatedDate() {
    return createdDate;
  }

  /** @return tenant id, or null for the default tenant */
  public String getTenantId() {
    return tenantId;
  }

  /**
   * Builds a DTO from the engine's runtime representation.
   *
   * @param eventSubscription the engine object to copy from
   * @return a fully populated DTO
   */
  public static EventSubscriptionDto fromEventSubscription(EventSubscription eventSubscription) {
    EventSubscriptionDto result = new EventSubscriptionDto();

    // identifier and event classification
    result.id = eventSubscription.getId();
    result.eventType = eventSubscription.getEventType();
    result.eventName = eventSubscription.getEventName();

    // execution context
    result.executionId = eventSubscription.getExecutionId();
    result.processInstanceId = eventSubscription.getProcessInstanceId();
    result.activityId = eventSubscription.getActivityId();

    // bookkeeping
    result.createdDate = eventSubscription.getCreated();
    result.tenantId = eventSubscription.getTenantId();

    return result;
  }

}
apache-2.0
MICommunity/psi-jami
jami-bridges/jami-ontology-manager/src/main/java/psidev/psi/mi/jami/bridges/ontologymanager/impl/OntologyTermWrapper.java
9469
package psidev.psi.mi.jami.bridges.ontologymanager.impl;

import psidev.psi.mi.jami.bridges.ontologymanager.MIOntologyTermI;
import psidev.psi.mi.jami.model.Alias;
import psidev.psi.mi.jami.model.OntologyTerm;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Read-only adapter exposing a JAMI {@link psidev.psi.mi.jami.model.OntologyTerm}
 * through the {@link MIOntologyTermI} interface. On construction, the term's
 * definition is scanned for an OBSOLETE message and possible replacement
 * accessions (MI:/MOD: identifiers).
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>01/11/11</pre>
 */
public class OntologyTermWrapper implements MIOntologyTermI {

    private OntologyTerm delegate;

    /** Obsolete message extracted from the definition; null when the term is not obsolete. */
    private String obsoleteMessage;
    /** The single replacement accession, set only when exactly one candidate was found. */
    private String remappedTerm;
    /** Candidate replacement accessions when the remapping is ambiguous (or no single match). */
    private Set<String> possibleTermsToRemapTo = new HashSet<String>();

    private static final String LINE_BREAK = "\n";
    private static final String OBSOLETE_DEF = "OBSOLETE";
    private static final String REMAP = "REMAP TO";
    private static final String MAP = "MAP TO";
    private static final String REPLACE = "REPLACE BY";

    /** Constant <code>MOD_REGEXP</code> */
    public final static Pattern MOD_REGEXP = Pattern.compile("MOD:[0-9]{5}+");
    /** Constant <code>MI_REGEXP</code> */
    public final static Pattern MI_REGEXP = Pattern.compile("MI:[0-9]{4}+");
    /** Constant <code>ECO_REGEXP</code> */
    public final static Pattern ECO_REGEXP = Pattern.compile("ECO:[0-9]+");

    /**
     * Wraps the given term and parses its definition for obsolete/remapping information.
     *
     * @param cv a {@link psidev.psi.mi.jami.model.OntologyTerm} object, must not be null
     * @throws IllegalArgumentException if cv is null
     */
    public OntologyTermWrapper(OntologyTerm cv){
        if (cv == null){
            throw new IllegalArgumentException("The cv term cannot be null");
        }
        this.delegate = cv;

        // NOTE: the original also cached the synonyms in a private field here, but that
        // field was never read (getNameSynonyms recomputes); the dead cache was removed.

        // initialise obsolete annotation from the definition, if any
        String def = cv.getDefinition();
        if (def != null && !def.isEmpty()){
            processDefinition(def);
        }
    }

    /**
     * Returns the preferred accession for this term: MI, then MOD, then PAR,
     * then the first generic identifier; null when the term has no identifiers.
     *
     * @return a {@link java.lang.String} object, or null
     */
    public String getTermAccession() {
        if (this.delegate.getMIIdentifier() != null){
            return this.delegate.getMIIdentifier();
        }
        else if (this.delegate.getMODIdentifier() != null){
            return this.delegate.getMODIdentifier();
        }
        else if (this.delegate.getPARIdentifier() != null){
            return this.delegate.getPARIdentifier();
        }
        else if (!this.delegate.getIdentifiers().isEmpty()){
            return this.delegate.getIdentifiers().iterator().next().getId();
        }
        return null;
    }

    /**
     * <p>getPreferredName.</p>
     *
     * @return the delegate's short name
     */
    public String getPreferredName() {
        return this.delegate.getShortName();
    }

    /** {@inheritDoc} */
    public void setTermAccession(String accession) {
        throw new UnsupportedOperationException("The OntologyTerm Wrapper is readonly and cannot be modified");
    }

    /** {@inheritDoc} */
    public void setPreferredName(String preferredName) {
        throw new UnsupportedOperationException("The OntologyTerm Wrapper is readonly and cannot be modified");
    }

    /**
     * Returns a fresh snapshot of the delegate's synonym names.
     *
     * @return a {@link java.util.Collection} object
     */
    public Collection<String> getNameSynonyms() {
        // renamed from "synonyms" in the original to avoid confusion with the old field name
        List<String> names = new ArrayList<String>(this.delegate.getSynonyms().size());
        for (Alias alias : this.delegate.getSynonyms()){
            names.add(alias.getName());
        }
        return names;
    }

    /** {@inheritDoc} */
    public void setNameSynonyms(Collection<String> nameSynonyms) {
        throw new UnsupportedOperationException("The OntologyTerm Wrapper is readonly and cannot be modified");
    }

    /**
     * <p>Getter for the field <code>obsoleteMessage</code>.</p>
     *
     * @return the obsolete message, or null when the term is not obsolete
     */
    public String getObsoleteMessage() {
        return this.obsoleteMessage;
    }

    /**
     * <p>Getter for the field <code>remappedTerm</code>.</p>
     *
     * @return the single replacement accession, or null when none/ambiguous
     */
    public String getRemappedTerm() {
        return this.remappedTerm;
    }

    /**
     * <p>Getter for the field <code>possibleTermsToRemapTo</code>.</p>
     *
     * @return candidate replacement accessions (live set, empty when a unique remap was found)
     */
    public Set<String> getPossibleTermsToRemapTo() {
        return this.possibleTermsToRemapTo;
    }

    /**
     * <p>Getter for the field <code>delegate</code>.</p>
     *
     * @return the wrapped {@link psidev.psi.mi.jami.model.OntologyTerm}
     */
    public OntologyTerm getDelegate() {
        return this.delegate;
    }

    /**
     * <p>Setter for the field <code>remappedTerm</code>.</p>
     *
     * @param remappedTerm a {@link java.lang.String} object
     */
    public void setRemappedTerm(String remappedTerm) {
        this.remappedTerm = remappedTerm;
    }

    /**
     * <p>Setter for the field <code>obsoleteMessage</code>.</p>
     *
     * @param obsoleteMessage a {@link java.lang.String} object
     */
    public void setObsoleteMessage(String obsoleteMessage) {
        this.obsoleteMessage = obsoleteMessage;
    }

    /**
     * Processes the definition string: when multi-line, every line after the first
     * may carry obsolete information; otherwise the OBSOLETE marker is searched
     * within the single line.
     *
     * @param definition the term definition, never null/empty here
     */
    private void processDefinition(String definition) {
        if ( definition.contains( LINE_BREAK ) ) {
            String[] defArray = definition.split( LINE_BREAK );
            // the original special-cased length == 2; the loop below covers it identically
            for (int i = 1; i < defArray.length; i++){
                processInfoInDescription(defArray[i]);
            }
        }
        else if (definition.contains( OBSOLETE_DEF )){
            String[] defArray = definition.split( OBSOLETE_DEF );
            if ( defArray.length >= 2 ) {
                // keep everything from the first OBSOLETE marker onwards
                // (equivalent to the original's two branches for length == 2 and > 2)
                this.obsoleteMessage = definition.substring(defArray[0].length());
                // obsoleteMessage is non-null by construction — the original's null check was dead
                processObsoleteMessage();
            }
        }
        else {
            processInfoInDescription(definition);
        }
    }

    /**
     * Records an obsolete message when the description line starts with OBSOLETE.
     *
     * @param otherInfoString one line of the description, never null here
     */
    private void processInfoInDescription(String otherInfoString) {
        // obsolete message
        if ( otherInfoString.startsWith( OBSOLETE_DEF )) {
            this.obsoleteMessage = otherInfoString;
            // otherInfoString cannot be null here (startsWith was just called) —
            // the original's redundant null check was removed
            processObsoleteMessage();
        }
    }

    /**
     * Extracts remapping candidates from the obsolete message. When a
     * "MAP TO"/"REMAP TO"/"REPLACE BY" marker is present, only the text after
     * the marker is scanned; a single match becomes {@link #remappedTerm}.
     */
    private void processObsoleteMessage() {
        // Locale.ROOT keeps the ASCII markers and MI:/MOD: prefixes stable
        // regardless of the default locale (e.g. Turkish dotless-i casing)
        String upperObsoleteMessage = this.obsoleteMessage.toUpperCase(Locale.ROOT);

        String remappingString = null;
        // NOTE: MAP ("MAP TO") is tested first and also matches inside "REMAP TO",
        // so the REMAP branch is effectively unreachable; the extracted substring
        // is the same either way, so the original order is preserved.
        if (upperObsoleteMessage.contains(MAP)){
            remappingString = upperObsoleteMessage.substring(upperObsoleteMessage.indexOf(MAP) + MAP.length());
        }
        else if (upperObsoleteMessage.contains(REMAP)){
            remappingString = upperObsoleteMessage.substring(upperObsoleteMessage.indexOf(REMAP) + REMAP.length());
        }
        else if (upperObsoleteMessage.contains(REPLACE)){
            remappingString = upperObsoleteMessage.substring(upperObsoleteMessage.indexOf(REPLACE) + REPLACE.length());
        }

        if (remappingString != null){
            collectRemapCandidates(remappingString);

            if (this.possibleTermsToRemapTo.size() == 1){
                this.remappedTerm = this.possibleTermsToRemapTo.iterator().next();
                // we do not need the remapped term to be kept twice
                this.possibleTermsToRemapTo.clear();
            }
        }
        else {
            collectRemapCandidates(upperObsoleteMessage);
        }
    }

    /** Adds every MI:/MOD: accession found in the given text to the candidate set. */
    private void collectRemapCandidates(String text) {
        Matcher miMatcher = MI_REGEXP.matcher(text);
        while (miMatcher.find()){
            this.possibleTermsToRemapTo.add(miMatcher.group());
        }
        Matcher modMatcher = MOD_REGEXP.matcher(text);
        while (modMatcher.find()){
            this.possibleTermsToRemapTo.add(modMatcher.group());
        }
    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof OntologyTermWrapper){
            return getDelegate().equals(((OntologyTermWrapper)obj).getDelegate());
        }
        return false;
    }

    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        return getDelegate().hashCode();
    }
}
apache-2.0
realityforge/arez
processor/src/test/fixtures/input/com/example/observe/DeriveTrackedModel.java
354
package com.example.observe;

import arez.Observer;
import arez.annotations.ArezComponent;
import arez.annotations.ObserverRef;
import arez.annotations.OnDepsChange;

// Annotation-processor input fixture.
// NOTE(review): appears to verify that the processor derives the tracked
// method ("render") from the naming conventions of the annotated members below
// (onRenderDepsChange / getRenderObserver) — confirm against the matching
// expected-output fixture.
@ArezComponent
abstract class DeriveTrackedModel
{
  // The tracked method implied by the member names below; deliberately empty.
  void render()
  {
  }

  // Hook named after render(); invoked when render()'s dependencies change.
  @OnDepsChange
  void onRenderDepsChange()
  {
  }

  // Accessor for the Observer backing render(); implementation generated by the processor.
  @ObserverRef
  abstract Observer getRenderObserver();
}
apache-2.0
nagyistoce/camunda-bpmn-model
src/main/java/org/camunda/bpm/model/bpmn/impl/instance/BusinessRuleTaskImpl.java
6503
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.model.bpmn.impl.instance;

import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.camunda.bpm.model.bpmn.builder.BusinessRuleTaskBuilder;
import org.camunda.bpm.model.bpmn.instance.BusinessRuleTask;
import org.camunda.bpm.model.bpmn.instance.Rendering;
import org.camunda.bpm.model.bpmn.instance.Task;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.attribute.Attribute;
import org.camunda.bpm.model.xml.type.child.ChildElementCollection;

import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.*;
import static org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;

/**
 * The BPMN businessRuleTask element.
 *
 * Attribute access goes through static {@link Attribute} handles that are
 * populated once in {@link #registerType(ModelBuilder)}; instances only carry
 * the XML element context.
 *
 * @author Sebastian Menski
 */
public class BusinessRuleTaskImpl extends TaskImpl implements BusinessRuleTask {

  protected static Attribute<String> implementationAttribute;
  // NOTE(review): declared but never assigned in registerType below — rendering
  // child elements are not registered for this type; confirm whether intentional.
  protected static ChildElementCollection<Rendering> renderingCollection;

  /** camunda extensions */
  protected static Attribute<String> camundaClassAttribute;
  protected static Attribute<String> camundaDelegateExpressionAttribute;
  protected static Attribute<String> camundaExpressionAttribute;
  protected static Attribute<String> camundaResultVariableAttribute;
  protected static Attribute<String> camundaTypeAttribute;
  protected static Attribute<String> camundaDecisionRefAttribute;
  protected static Attribute<String> camundaDecisionRefBindingAttribute;
  protected static Attribute<String> camundaDecisionRefVersionAttribute;

  /**
   * Registers the businessRuleTask element type, its BPMN attributes and the
   * camunda extension attributes with the model builder. Must run before any
   * instance of this type is created.
   */
  public static void registerType(ModelBuilder modelBuilder) {
    // businessRuleTask extends the plain task type
    ModelElementTypeBuilder typeBuilder = modelBuilder.defineType(BusinessRuleTask.class, BPMN_ELEMENT_BUSINESS_RULE_TASK)
      .namespaceUri(BPMN20_NS)
      .extendsType(Task.class)
      .instanceProvider(new ModelTypeInstanceProvider<BusinessRuleTask>() {
        public BusinessRuleTask newInstance(ModelTypeInstanceContext instanceContext) {
          return new BusinessRuleTaskImpl(instanceContext);
        }
      });

    // standard BPMN attribute with the spec's default value
    implementationAttribute = typeBuilder.stringAttribute(BPMN_ATTRIBUTE_IMPLEMENTATION)
      .defaultValue("##unspecified")
      .build();

    /** camunda extensions */

    camundaClassAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_CLASS)
      .namespace(CAMUNDA_NS)
      .build();

    camundaDelegateExpressionAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_DELEGATE_EXPRESSION)
      .namespace(CAMUNDA_NS)
      .build();

    camundaExpressionAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_EXPRESSION)
      .namespace(CAMUNDA_NS)
      .build();

    camundaResultVariableAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_RESULT_VARIABLE)
      .namespace(CAMUNDA_NS)
      .build();

    camundaTypeAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_TYPE)
      .namespace(CAMUNDA_NS)
      .build();

    camundaDecisionRefAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_DECISION_REF)
      .namespace(CAMUNDA_NS)
      .build();

    camundaDecisionRefBindingAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_DECISION_REF_BINDING)
      .namespace(CAMUNDA_NS)
      .build();

    camundaDecisionRefVersionAttribute = typeBuilder.stringAttribute(CAMUNDA_ATTRIBUTE_DECISION_REF_VERSION)
      .namespace(CAMUNDA_NS)
      .build();

    typeBuilder.build();
  }

  public BusinessRuleTaskImpl(ModelTypeInstanceContext context) {
    super(context);
  }

  /** Fluent builder for this element, bound to the owning model instance. */
  public BusinessRuleTaskBuilder builder() {
    return new BusinessRuleTaskBuilder((BpmnModelInstance) modelInstance, this);
  }

  public String getImplementation() {
    return implementationAttribute.getValue(this);
  }

  public void setImplementation(String implementation) {
    implementationAttribute.setValue(this, implementation);
  }

  /** camunda extensions — plain get/set delegation to the static attribute handles */

  public String getCamundaClass() {
    return camundaClassAttribute.getValue(this);
  }

  public void setCamundaClass(String camundaClass) {
    camundaClassAttribute.setValue(this, camundaClass);
  }

  public String getCamundaDelegateExpression() {
    return camundaDelegateExpressionAttribute.getValue(this);
  }

  public void setCamundaDelegateExpression(String camundaExpression) {
    camundaDelegateExpressionAttribute.setValue(this, camundaExpression);
  }

  public String getCamundaExpression() {
    return camundaExpressionAttribute.getValue(this);
  }

  public void setCamundaExpression(String camundaExpression) {
    camundaExpressionAttribute.setValue(this, camundaExpression);
  }

  public String getCamundaResultVariable() {
    return camundaResultVariableAttribute.getValue(this);
  }

  public void setCamundaResultVariable(String camundaResultVariable) {
    camundaResultVariableAttribute.setValue(this, camundaResultVariable);
  }

  public String getCamundaType() {
    return camundaTypeAttribute.getValue(this);
  }

  public void setCamundaType(String camundaType) {
    camundaTypeAttribute.setValue(this, camundaType);
  }

  public String getCamundaDecisionRef() {
    return camundaDecisionRefAttribute.getValue(this);
  }

  public void setCamundaDecisionRef(String camundaDecisionRef) {
    camundaDecisionRefAttribute.setValue(this, camundaDecisionRef);
  }

  public String getCamundaDecisionRefBinding() {
    return camundaDecisionRefBindingAttribute.getValue(this);
  }

  public void setCamundaDecisionRefBinding(String camundaDecisionRefBinding) {
    camundaDecisionRefBindingAttribute.setValue(this, camundaDecisionRefBinding);
  }

  public String getCamundaDecisionRefVersion() {
    return camundaDecisionRefVersionAttribute.getValue(this);
  }

  public void setCamundaDecisionRefVersion(String camundaDecisionRefVersion) {
    camundaDecisionRefVersionAttribute.setValue(this, camundaDecisionRefVersion);
  }

}
apache-2.0
ryoenji/libgdx
tests/gdx-tests/src/com/badlogic/gdx/tests/gles3/MultipleDrawBuffersTest.java
5996
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.tests.gles3;

import java.nio.IntBuffer;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.GL30;
import com.badlogic.gdx.graphics.GLTexture;
import com.badlogic.gdx.graphics.Texture.TextureFilter;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.tests.utils.GdxTest;
import com.badlogic.gdx.utils.BufferUtils;

/** This test demonstrates rendering to multiple draw buffers (render targets) through the use of a frame buffer object (FBO). This
 * functionality is the key component of most deferred shading approaches.
 * <p>
 * The test first draws blue and red to 2 textures using a shader with 2 fragment outputs. This is done once, after which the
 * textures are used to fill in a quad and triangle shape, to show that the operation was successful.
 * @author mattijs driel */
public class MultipleDrawBuffersTest extends AbstractES3test {
	ShaderProgramES3 drawTexProgram;
	ShaderProgramES3 resultProgram;
	GenericTexture tex0;
	GenericTexture tex1;
	VBOGeometry quad;
	VBOGeometry tri;

	// Pass-through vertex shader shared by both programs.
	private final String vertexShader = "#version 300 es \n"
		+ "layout(location = 0)in vec4 vPos; \n"
		+ "void main() \n"
		+ "{ \n"
		+ "	gl_Position = vPos; \n"
		+ "} \n";

	// Writes constant red/blue to two separate color attachments (MRT).
	private final String drawTexturesShader = "#version 300 es \n"
		+ "precision highp float; \n"
		+ "layout(location = 0)out vec4 redOutput; \n"
		+ "layout(location = 1)out vec4 blueOutput; \n"
		+ "void main() \n"
		+ "{ \n"
		+ "	redOutput = vec4(1,0,0,1); \n"
		+ "	blueOutput = vec4(0,0,1,1); \n"
		+ "} \n";

	// FIX: GLSL ES 3.00 ("#version 300 es") removed texture2D(); the overloaded
	// texture() builtin must be used instead — the original shader failed to
	// compile on conforming drivers.
	// NOTE(review): gl_FragCoord.xy is in window pixels, not normalized texture
	// coordinates; sampling looks intentional-but-odd here — confirm.
	private final String showResultShader = "#version 300 es \n"
		+ "precision mediump float; \n"
		+ "uniform sampler2D mytexture; \n"
		+ "out vec4 fragColor; \n"
		+ "void main() \n"
		+ "{ \n"
		+ "	fragColor = texture(mytexture, gl_FragCoord.xy); \n"
		+ "} \n";

	@Override
	public boolean createLocal () {
		// create empty draw target textures
		TextureFormatES3 format = new TextureFormatES3();
		format.width = format.height = 256;
		tex0 = new GenericTexture(format);
		tex1 = new GenericTexture(format);

		// load the shaders; abort (returning false) on any compile failure
		drawTexProgram = new ShaderProgramES3(vertexShader, drawTexturesShader);
		if (!drawTexProgram.isCompiled()) {
			System.out.println(drawTexProgram.getErrorLog());
			return false;
		}
		resultProgram = new ShaderProgramES3(vertexShader, showResultShader);
		if (!resultProgram.isCompiled()) {
			System.out.println(resultProgram.getErrorLog());
			return false;
		}
		// result program samples from texture unit 0
		resultProgram.registerTextureSampler("mytexture").setBinding(0);

		tri = VBOGeometry.triangle(Usage.Position);
		quad = VBOGeometry.quad(Usage.Position);

		Gdx.gl30.glClearColor(0, 0, 0, 0);

		// one-time render-to-texture pass
		return writeColorToTextures();
	}

	/** Renders once into both color attachments via an FBO, then releases the
	 * temporary FBO and geometry. The textures keep the rendered content. */
	boolean writeColorToTextures () {
		// generate and bind FBO with two draw buffers
		FrameBufferObject fbo = new FrameBufferObject(GL30.GL_COLOR_ATTACHMENT0, GL30.GL_COLOR_ATTACHMENT1);

		// bind textures to current FBO
		fbo.bind();
		tex0.setFBOBinding(GL30.GL_COLOR_ATTACHMENT0);
		tex1.setFBOBinding(GL30.GL_COLOR_ATTACHMENT1);

		// full-screen quad so every texel of both attachments is written
		VBOGeometry geom = VBOGeometry.fsQuad(Usage.Position);

		// render to texture
		drawTexProgram.use();
		geom.bind();
		geom.draw();
		geom.dispose();

		// bind back to normal buffer
		fbo.unbind();
		fbo.dispose();

		return true;
	}

	@Override
	public void renderLocal () {
		Gdx.gl30.glClear(GL30.GL_COLOR_BUFFER_BIT);

		// render results: triangle textured with tex0, quad with tex1
		resultProgram.use();
		tex0.bind();
		tri.bind();
		tri.draw();
		tex1.bind();
		quad.bind();
		quad.draw();
	}

	@Override
	protected void disposeLocal () {
		drawTexProgram.dispose();
		resultProgram.dispose();
		tex0.dispose();
		tex1.dispose();
		quad.dispose();
		tri.dispose();
	}
}
apache-2.0
google/webauthndemo
src/main/java/com/google/webauthn/gaedemo/objects/PackedAttestationStatement.java
5011
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.webauthn.gaedemo.objects;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import com.googlecode.objectify.annotation.Subclass;

import co.nstant.in.cbor.CborDecoder;
import co.nstant.in.cbor.CborException;
import co.nstant.in.cbor.model.Array;
import co.nstant.in.cbor.model.ByteString;
import co.nstant.in.cbor.model.DataItem;
import co.nstant.in.cbor.model.Map;
import co.nstant.in.cbor.model.NegativeInteger;
import co.nstant.in.cbor.model.UnicodeString;

/**
 * WebAuthn "packed" attestation statement: a signature plus either an X.509
 * certificate chain (x5c) or an ECDAA key id, never both.
 */
@Subclass
public class PackedAttestationStatement extends AttestationStatement {
  public byte[] sig;
  public byte[] attestnCert;
  public List<byte[]> caCert;
  public Algorithm alg;
  public byte[] ecdaaKeyId;

  /**
   * Full-certificate-chain form (ecdaaKeyId is left null).
   *
   * @param sig raw signature bytes
   * @param attestnCert leaf attestation certificate (DER)
   * @param caCert remaining chain certificates (DER)
   * @param alg COSE algorithm name
   */
  public PackedAttestationStatement(byte[] sig, byte[] attestnCert, List<byte[]> caCert,
      String alg) {
    super();
    this.sig = sig;
    this.attestnCert = attestnCert;
    this.caCert = caCert;
    this.alg = Algorithm.decode(alg);
    this.ecdaaKeyId = null;
  }

  /**
   * ECDAA form (certificate fields are left null).
   * (Fixed javadoc: the original copy-pasted params from the x5c constructor.)
   *
   * @param sig raw signature bytes
   * @param ecdaaKeyId ECDAA key identifier
   * @param alg COSE algorithm name
   */
  public PackedAttestationStatement(byte[] sig, byte[] ecdaaKeyId, String alg) {
    super();
    this.sig = sig;
    this.ecdaaKeyId = ecdaaKeyId;
    this.alg = Algorithm.decode(alg);
    this.caCert = null;
    this.attestnCert = null;
  }

  /** No-arg constructor for Objectify / decode(). All fields start null. */
  public PackedAttestationStatement() {
    this.sig = null;
    this.attestnCert = null;
    this.caCert = null;
    this.alg = null;
    this.ecdaaKeyId = null;
  }

  /**
   * Decodes a packed attestation statement from a CBOR data item (either a
   * raw ByteString containing an encoded map, or the map itself).
   *
   * @param attStmt CBOR item to decode
   * @return decoded PackedAttestationStatement
   * @throws IllegalArgumentException if a ByteString payload is not valid CBOR
   */
  public static PackedAttestationStatement decode(DataItem attStmt) {
    PackedAttestationStatement result = new PackedAttestationStatement();
    Map given = null;

    if (attStmt instanceof ByteString) {
      byte[] temp = ((ByteString) attStmt).getBytes();
      List<DataItem> dataItems;
      try {
        dataItems = CborDecoder.decode(temp);
      } catch (CborException e) {
        // The original swallowed this exception and then crashed on
        // dataItems.get(0) with an uninformative NullPointerException.
        throw new IllegalArgumentException("Unable to CBOR-decode attestation statement", e);
      }
      given = (Map) dataItems.get(0);
    } else {
      given = (Map) attStmt;
    }

    for (DataItem data : given.getKeys()) {
      if (data instanceof UnicodeString) {
        switch (((UnicodeString) data).getString()) {
          case "x5c":
            // first entry is the leaf attestation cert, the rest form the chain
            Array array = (Array) given.get(data);
            List<DataItem> list = array.getDataItems();
            if (list.size() > 0) {
              result.attestnCert = ((ByteString) list.get(0)).getBytes();
            }
            result.caCert = new ArrayList<byte[]>();
            for (int i = 1; i < list.size(); i++) {
              result.caCert.add(((ByteString) list.get(i)).getBytes());
            }
            break;
          case "sig":
            result.sig = ((ByteString) (given.get(data))).getBytes();
            break;
          case "alg":
            // COSE algorithm ids are negative integers
            int algInt = new BigDecimal(((NegativeInteger) (given.get(data))).getValue())
                .intValueExact();
            result.alg = Algorithm.decode(algInt);
            break;
          case "ecdaaKeyId":
            result.ecdaaKeyId = ((ByteString) (given.get(data))).getBytes();
            break;
        }
      }
    }
    return result;
  }

  /**
   * Encodes this statement as a CBOR map.
   *
   * NOTE(review): "alg" is written here as a UnicodeString while decode() above
   * expects a NegativeInteger — a round-trip through encode/decode will not
   * restore alg. Left as-is to avoid changing the wire format; confirm intent.
   */
  @Override
  DataItem encode() throws CborException {
    Map result = new Map();
    if (attestnCert != null) {
      Array x5c = new Array();
      x5c.add(new ByteString(attestnCert));
      for (byte[] cert : this.caCert) {
        x5c.add(new ByteString(cert));
      }
      result.put(new UnicodeString("x5c"), x5c);
    }
    if (ecdaaKeyId != null) {
      result.put(new UnicodeString("ecdaaKeyId"), new ByteString(ecdaaKeyId));
    }
    result.put(new UnicodeString("sig"), new ByteString(sig));
    result.put(new UnicodeString("alg"), new UnicodeString(alg.toString()));
    return result;
  }

  @Override
  public int hashCode() {
    // arrays hashed by content, not identity
    return Objects.hash(Arrays.hashCode(sig), Arrays.hashCode(attestnCert), caCert, alg,
        Arrays.hashCode(ecdaaKeyId));
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof PackedAttestationStatement)) {
      return false;
    }
    PackedAttestationStatement other = (PackedAttestationStatement) obj;
    try {
      // compare via canonical CBOR encoding
      return encode().equals(other.encode());
    } catch (CborException ignored) {
      // an unencodable statement is considered equal to nothing
    }
    return false;
  }

  @Override
  public String getName() {
    return "Packed Attestation";
  }
}
apache-2.0
chris115379/Recipr
data/src/main/java/de/androidbytes/recipr/data/repository/data/StepDataRepository.java
1907
package de.androidbytes.recipr.data.repository.data;

import android.content.ContentResolver;
import android.net.Uri;
import de.androidbytes.recipr.data.entity.StepEntity;
import de.androidbytes.recipr.data.exception.DatabaseException;
import de.androidbytes.recipr.data.provider.step.StepColumns;
import de.androidbytes.recipr.data.provider.step.StepContentValues;
import de.androidbytes.recipr.data.provider.step.StepCursor;
import de.androidbytes.recipr.data.provider.step.StepSelection;

import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;

/**
 * Content-provider backed repository for recipe steps.
 */
public class StepDataRepository {

    private ContentResolver contentResolver;

    @Inject
    public StepDataRepository(ContentResolver contentResolver) {
        this.contentResolver = contentResolver;
    }

    /**
     * Inserts the given step and returns the persisted entity re-read from the provider.
     *
     * @param stepEntity step to persist
     * @return the stored entity
     * @throws DatabaseException if the insert or the re-read fails
     */
    public StepEntity createStep(StepEntity stepEntity) {

        StepContentValues contentValues = stepEntity.getContentValues();
        Uri createdStepUri = contentResolver.insert(StepColumns.CONTENT_URI, contentValues.values());

        if (createdStepUri != null) {
            // FIX: the cursor was previously never closed (resource leak).
            // NOTE(review): assumes StepCursor exposes the wrapped android Cursor's
            // close() — generated cursor wrappers do; confirm.
            StepCursor stepCursor = new StepCursor(contentResolver.query(createdStepUri, null, null, null, null));
            try {
                if (stepCursor.moveToFirst()) {
                    return new StepEntity(stepCursor);
                }
            } finally {
                stepCursor.close();
            }
        }

        throw new DatabaseException("Could not create step with number '" + stepEntity.getNumber() + "'");
    }

    /**
     * Loads all steps belonging to the given recipe.
     *
     * @param recipeId id of the owning recipe
     * @return steps in cursor order; empty list when the recipe has no steps
     */
    public List<StepEntity> findStepsOfRecipe(long recipeId) {

        StepSelection stepSelection = new StepSelection();
        stepSelection.recipeId(recipeId);

        // FIX: close the query cursor in all cases (previously leaked)
        StepCursor stepCursor = stepSelection.query(contentResolver);
        try {
            List<StepEntity> stepEntities = new ArrayList<>(stepCursor.getCount());
            while (stepCursor.moveToNext()) {
                stepEntities.add(new StepEntity(stepCursor));
            }
            return stepEntities;
        } finally {
            stepCursor.close();
        }
    }
}
apache-2.0
joewalnes/idea-community
java/idea-ui/src/com/intellij/peer/impl/PeerFactoryImpl.java
9923
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.peer.impl;

import com.intellij.ide.ui.SplitterProportionsDataImpl;
import com.intellij.ide.util.PackageChooserDialog;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.PsiBuilderFactory;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diff.DiffRequestFactory;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.fileChooser.FileSystemTreeFactory;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.module.ModuleConfigurationEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.projectRoots.impl.ProjectJdkImpl;
import com.intellij.openapi.roots.ui.configuration.JavaContentEntriesEditor;
import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationState;
import com.intellij.openapi.ui.DialogWrapperPeerFactory;
import com.intellij.openapi.ui.PackageChooser;
import com.intellij.openapi.ui.SplitterProportionsData;
import com.intellij.openapi.vcs.FileStatusFactory;
import com.intellij.openapi.vcs.actions.VcsContextFactory;
import com.intellij.peer.PeerFactory;
import com.intellij.psi.*;
import com.intellij.psi.search.scope.packageSet.PackageSetFactory;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.ui.*;
import com.intellij.ui.TextComponent;
import com.intellij.ui.content.ContentFactory;
import com.intellij.ui.errorView.ErrorViewFactory;
import com.intellij.ui.treeStructure.treetable.TreeTable;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.EditSourceOnEnterKeyHandler;
import com.intellij.util.Function;
import com.intellij.util.containers.Convertor;
import com.intellij.util.ui.Table;
import com.intellij.util.ui.UIUtil;
import org.apache.xmlrpc.IdeaAwareWebServer;
import org.apache.xmlrpc.IdeaAwareXmlRpcServer;
import org.apache.xmlrpc.WebServer;
import org.apache.xmlrpc.XmlRpcServer;

import javax.swing.*;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.net.InetAddress;

/**
 * Default PeerFactory implementation: a facade over many IDE services.
 * Almost every method simply delegates to the corresponding service/factory
 * singleton; the one piece of local state is the shared MyUIHelper instance.
 */
public class PeerFactoryImpl extends PeerFactory {
  private final UIHelper myUIHelper = new MyUIHelper();

  public FileStatusFactory getFileStatusFactory() {
    return ServiceManager.getService(FileStatusFactory.class);
  }

  public DialogWrapperPeerFactory getDialogWrapperPeerFactory() {
    return DialogWrapperPeerFactory.getInstance();
  }

  public PackageSetFactory getPackageSetFactory() {
    return PackageSetFactory.getInstance();
  }

  public UIHelper getUIHelper() {
    return myUIHelper;
  }

  public ErrorViewFactory getErrorViewFactory() {
    return ErrorViewFactory.SERVICE.getInstance();
  }

  public ContentFactory getContentFactory() {
    return ServiceManager.getService(ContentFactory.class);
  }

  public FileSystemTreeFactory getFileSystemTreeFactory() {
    return FileSystemTreeFactory.SERVICE.getInstance();
  }

  public DiffRequestFactory getDiffRequestFactory() {
    return DiffRequestFactory.getInstance();
  }

  /**
   * UI helper delegating to the platform's tree/table utilities.
   * Stateless; the speed-search installers rely on the constructors'
   * side effects (the created objects attach themselves to the component).
   */
  private static class MyUIHelper implements UIHelper {
    public void installToolTipHandler(JTree tree) {
      TreeUIHelper.getInstance().installToolTipHandler(tree);
    }

    public void installToolTipHandler(JTable table) {
      TreeUIHelper.getInstance().installToolTipHandler(table);
    }

    public void installEditSourceOnDoubleClick(JTree tree) {
      EditSourceOnDoubleClickHandler.install(tree);
    }

    public void installEditSourceOnDoubleClick(TreeTable tree) {
      EditSourceOnDoubleClickHandler.install(tree);
    }

    public void installEditSourceOnDoubleClick(Table table) {
      EditSourceOnDoubleClickHandler.install(table);
    }

    public void installTreeTableSpeedSearch(TreeTable treeTable) {
      // constructor installs itself on the component
      new TreeTableSpeedSearch(treeTable);
    }

    public void installTreeTableSpeedSearch(final TreeTable treeTable, final Convertor<TreePath, String> convertor) {
      new TreeTableSpeedSearch(treeTable, convertor);
    }

    public void installTreeSpeedSearch(JTree tree) {
      new TreeSpeedSearch(tree);
    }

    public void installTreeSpeedSearch(final JTree tree, final Convertor<TreePath, String> convertor) {
      new TreeSpeedSearch(tree, convertor);
    }

    public void installListSpeedSearch(JList list) {
      new ListSpeedSearch(list);
    }

    public void installListSpeedSearch(final JList list, final Function<Object, String> elementTextDelegate) {
      new ListSpeedSearch(list, elementTextDelegate);
    }

    public void installEditSourceOnEnterKeyHandler(JTree tree) {
      EditSourceOnEnterKeyHandler.install(tree);
    }

    public SplitterProportionsData createSplitterProportionsData() {
      return new SplitterProportionsDataImpl();
    }

    // Renders a fixed PSI element (text + icon), ignoring the cell value.
    public TableCellRenderer createPsiElementRenderer(final PsiElement psiElement, final Project project) {
      return new ColoredTableCellRenderer() {
        protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
          append(getPsiElementText(psiElement), SimpleTextAttributes.REGULAR_ATTRIBUTES);
          setIcon(psiElement.getIcon(0));
        }
      };
    }

    public TreeCellRenderer createHighlightableTreeCellRenderer() {
      return new HighlightableCellRenderer();
    }

    public void drawDottedRectangle(Graphics g, int x, int y, int i, int i1) {
      UIUtil.drawDottedRectangle(g,x,y,i,i1);
    }

    public void installSmartExpander(JTree tree) {
      SmartExpander.installOn(tree);
    }

    public void installSelectionSaver(JTree tree) {
      SelectionSaver.installOn(tree);
    }

    // Builds an editor-backed text field over a Java expression code fragment
    // typed to the given PsiType in the given context.
    public TextComponent createTypedTextField(final String text, PsiType type, PsiElement context, final Project project) {
      final PsiExpressionCodeFragment fragment = JavaPsiFacade.getInstance(project).getElementFactory().createExpressionCodeFragment(text, context, type, true);
      final Document document = PsiDocumentManager.getInstance(project).getDocument(fragment);
      return new EditorTextField(document, project, StdFileTypes.JAVA);
    }

    public PackageChooser createPackageChooser(String title, Project project) {
      return new PackageChooserDialog(title, project);
    }

    // Human-readable label for a class/method/field; falls back to toString().
    private static String getPsiElementText(PsiElement psiElement) {
      if (psiElement instanceof PsiClass) {
        return PsiFormatUtil.formatClass((PsiClass)psiElement, PsiFormatUtil.SHOW_NAME |
                                                              PsiFormatUtil.SHOW_FQ_NAME);
      }
      else if (psiElement instanceof PsiMethod) {
        return PsiFormatUtil.formatMethod((PsiMethod)psiElement, PsiSubstitutor.EMPTY,
                                          PsiFormatUtil.SHOW_NAME | PsiFormatUtil.SHOW_PARAMETERS | PsiFormatUtil.SHOW_CONTAINING_CLASS,
                                          0);
      }
      else if (psiElement instanceof PsiField) {
        return PsiFormatUtil.formatVariable((PsiField)psiElement,
                                            PsiFormatUtil.SHOW_NAME | PsiFormatUtil.SHOW_TYPE | PsiFormatUtil.SHOW_CONTAINING_CLASS,
                                            PsiSubstitutor.EMPTY);
      }
      else {
        return psiElement.toString();
      }
    }
  }

  public VcsContextFactory getVcsContextFactory() {
    return VcsContextFactory.SERVICE.getInstance();
  }

  public PsiBuilder createBuilder(ASTNode tree, Language lang, CharSequence seq, final Project project) {
    return PsiBuilderFactory.getInstance().createBuilder(project, tree, lang, seq);
  }

  public PsiBuilder createBuilder(final ASTNode tree, final Lexer lexer, final Language lang, final CharSequence seq, final Project project) {
    return PsiBuilderFactory.getInstance().createBuilder(project, tree, lexer, lang, seq);
  }

  public XmlRpcServer createRpcServer() {
    return new IdeaAwareXmlRpcServer();
  }

  public WebServer createWebServer(final int port, final InetAddress addr, final XmlRpcServer xmlrpc) {
    return new IdeaAwareWebServer(port, addr, xmlrpc);
  }

  public EditorHighlighter createEditorHighlighter(final SyntaxHighlighter syntaxHighlighter, final EditorColorsScheme colors) {
    return EditorHighlighterFactory.getInstance().createEditorHighlighter(syntaxHighlighter, colors);
  }

  // Builds an unregistered Sdk instance; callers are responsible for adding it
  // to the jdk table if persistence is desired.
  public Sdk createProjectJdk(final String name, final String version, final String homePath, final SdkType sdkType) {
    final ProjectJdkImpl projectJdk = new ProjectJdkImpl(name, sdkType);
    projectJdk.setHomePath(homePath);
    projectJdk.setVersionString(version);
    return projectJdk;
  }

  public ModuleConfigurationEditor createModuleConfigurationEditor(final String moduleName, ModuleConfigurationState state) {
    return new JavaContentEntriesEditor(moduleName, state);
  }
}
apache-2.0
magnoliales/magnolia-annotated-content-apps
magnolia-annotated-content-apps/src/main/java/com/magnoliales/annotatedapp/actions/ExportActionDefinitions.java
2365
package com.magnoliales.annotatedapp.actions; import info.magnolia.ui.api.action.ActionDefinition; import info.magnolia.ui.api.availability.AvailabilityDefinition; import info.magnolia.ui.api.availability.ConfiguredAvailabilityDefinition; import info.magnolia.ui.framework.action.ExportActionDefinition; import info.magnolia.ui.framework.action.OpenCreateDialogActionDefinition; import org.apache.commons.lang.WordUtils; import java.util.ArrayList; import java.util.List; public class ExportActionDefinitions extends AnnotatedActionDefinitions { @Override protected List<ActionDefinitionGroup> createActionDefinitionGroups() { List<ActionDefinitionGroup> groups = new ArrayList<>(); groups.add(new ActionDefinitionGroup("export", getExportActionDefinition(), getImportActionDefinition() )); return groups; } protected ActionDefinition getExportActionDefinition() { ExportActionDefinition actionDefinition = new ExportActionDefinition(); String actionName = "export" + WordUtils.capitalize(getAppName()); actionDefinition.setName(actionName); actionDefinition.setCommand("export"); actionDefinition.setIcon("icon-export"); actionDefinition.setAvailability(getExportActionAvailabilityDefinition()); return actionDefinition; } protected AvailabilityDefinition getExportActionAvailabilityDefinition() { ConfiguredAvailabilityDefinition availabilityDefinition = new ConfiguredAvailabilityDefinition(); availabilityDefinition.setRoot(true); availabilityDefinition.setNodes(true); return availabilityDefinition; } protected ActionDefinition getImportActionDefinition() { OpenCreateDialogActionDefinition actionDefinition = new OpenCreateDialogActionDefinition(); String actionName = "import" + WordUtils.capitalize(getAppName()); actionDefinition.setName(actionName); actionDefinition.setDialogName("ui-admincentral:import"); actionDefinition.setIcon("icon-import"); actionDefinition.setAvailability(getImportActionAvailabilityDefinition()); return actionDefinition; } protected AvailabilityDefinition 
getImportActionAvailabilityDefinition() { return getExportActionAvailabilityDefinition(); } }
apache-2.0
jkrasnay/panelized
src/main/java/ca/krasnay/panelized/AjaxAction.java
438
package ca.krasnay.panelized; import java.io.Serializable; import org.apache.wicket.ajax.AjaxRequestTarget;

/**
 * Action invoked by AJAX.
 *
 * @author <a href="mailto:john@krasnay.ca">John Krasnay</a>
 */
public interface AjaxAction extends Serializable {

    /**
     * Invokes the action.
     *
     * @param target
     *            AjaxRequestTarget for the action.
     */
    void invoke(AjaxRequestTarget target);
}
apache-2.0
SpannaProject/SpannaAPI
src/main/java/org/spanna/block/BrewingStand.java
500
package org.spanna.block; import org.spanna.inventory.BrewerInventory;

/**
 * Represents a brewing stand.
 */
public interface BrewingStand extends BlockState, ContainerBlock {

    /**
     * How much time is left in the brewing cycle.
     *
     * @return remaining brew time
     */
    int getBrewingTime();

    /**
     * Sets the time left before brewing completes.
     *
     * @param brewTime remaining brew time
     */
    void setBrewingTime(int brewTime);

    /**
     * Returns the brewer inventory backing this stand.
     *
     * @return the stand's inventory
     */
    BrewerInventory getInventory();
}
apache-2.0
daedafusion/aniketos
aniketos-core/src/main/java/com/daedafusion/aniketos/services/admin/IdentityAdminService.java
11589
package com.daedafusion.aniketos.services.admin; import com.daedafusion.aniketos.exceptions.NotFoundException; import com.daedafusion.aniketos.exceptions.ServiceErrorException; import com.daedafusion.sf.ServiceFramework; import com.daedafusion.sf.ServiceFrameworkException; import com.daedafusion.sf.ServiceFrameworkFactory; import com.daedafusion.security.admin.IdentityAdmin; import com.daedafusion.security.authentication.Subject; import com.daedafusion.security.authentication.Token; import com.daedafusion.security.authentication.TokenExchange; import com.daedafusion.security.common.Capability; import com.daedafusion.security.common.Identity; import com.daedafusion.security.exceptions.*; import org.apache.log4j.Logger; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import java.util.List; /** * Created by mphilpot on 7/19/14. */ @Path("admin/identity") public class IdentityAdminService { private static final Logger log = Logger.getLogger(IdentityAdminService.class); @GET @Path("{domain}") @Produces(MediaType.APPLICATION_JSON) public List<Identity> listIdentitiesForDomain(@HeaderParam("authorization") String tokenString, @PathParam("domain") String domain) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); return admin.listIdentitiesForDomain(subject, domain); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } } @POST @Path("{domain}") @Consumes(MediaType.APPLICATION_JSON) 
@Produces(MediaType.APPLICATION_JSON) public Identity createIdentity(@HeaderParam("authorization") String tokenString, @PathParam("domain") String domain, Identity identity) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); return admin.createIdentity(subject, identity); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } } @PUT @Path("{domain}") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Identity updateIdentity(@HeaderParam("authorization") String tokenString, @PathParam("domain") String domain, Identity identity) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); return admin.updateIdentity(subject, identity); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } catch (com.daedafusion.security.exceptions.NotFoundException e) { log.error("", e); throw new 
NotFoundException(e.getMessage()); } } @DELETE @Path("{domain}/{user}") public void removeIdentity(@HeaderParam("authorization") String tokenString, @PathParam("domain") String domain, @PathParam("user") String user) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); admin.removeIdentity(subject, user, domain); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } catch (com.daedafusion.security.exceptions.NotFoundException e) { log.error("", e); throw new NotFoundException(e.getMessage()); } } @GET @Path("capabilities") @Produces(MediaType.APPLICATION_JSON) public List<Capability> getCapabilities(@HeaderParam("authorization") String tokenString) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); return admin.listCapabilities(subject); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } } @POST @Path("capabilities") 
@Consumes(MediaType.APPLICATION_JSON) public void addCapability(@HeaderParam("authorization") String tokenString, Capability capability) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); admin.addCapability(subject, capability); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } } @PUT @Path("capabilities") @Consumes(MediaType.APPLICATION_JSON) public void updateCapability(@HeaderParam("authorization") String tokenString, Capability capability) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); admin.updateCapability(subject, capability); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } catch (com.daedafusion.security.exceptions.NotFoundException e) { log.error("", e); throw new NotFoundException(e.getMessage()); } } @DELETE @Path("capabilities") public void deleteCapability(@HeaderParam("authorization") String 
tokenString, @HeaderParam("x-identity-capability") String capability) { try { ServiceFramework framework = ServiceFrameworkFactory.getInstance().getFramework(); TokenExchange tokenExchange = framework.getService(TokenExchange.class); IdentityAdmin admin = framework.getService(IdentityAdmin.class); Token token = tokenExchange.getToken(tokenString); Subject subject = tokenExchange.exchange(token); admin.deleteCapability(subject, capability); } catch (ServiceFrameworkException e) { log.error("", e); throw new ServiceErrorException(); } catch (UnauthorizedException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.UnauthorizedException(); } catch (InvalidTokenException e) { log.error("", e); throw new com.daedafusion.aniketos.exceptions.BadRequestException("Invalid Token"); } catch (com.daedafusion.security.exceptions.NotFoundException e) { log.error("", e); throw new NotFoundException(e.getMessage()); } } }
apache-2.0
howepeng/isis
core/viewer-wicket-ui/src/main/java/org/apache/isis/viewer/wicket/ui/panels/PanelAbstract.java
6931
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.apache.isis.viewer.wicket.ui.panels;

import org.apache.wicket.Component;
import org.apache.wicket.Session;
import org.apache.wicket.markup.html.IHeaderContributor;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;

import org.apache.isis.core.commons.authentication.AuthenticationSession;
import org.apache.isis.core.commons.authentication.AuthenticationSessionProvider;
import org.apache.isis.core.commons.authentication.AuthenticationSessionProviderAware;
import org.apache.isis.core.metamodel.deployment.DeploymentCategory;
import org.apache.isis.core.metamodel.deployment.DeploymentCategoryProvider;
import org.apache.isis.core.metamodel.deployment.DeploymentCategoryAware;
import org.apache.isis.core.runtime.system.context.IsisContext;
import org.apache.isis.core.runtime.system.persistence.PersistenceSession;
import org.apache.isis.viewer.wicket.model.hints.UiHintContainer;
import org.apache.isis.viewer.wicket.model.isis.PersistenceSessionProvider;
import org.apache.isis.viewer.wicket.ui.ComponentType;
import org.apache.isis.viewer.wicket.ui.app.registry.ComponentFactoryRegistry;
import org.apache.isis.viewer.wicket.ui.app.registry.ComponentFactoryRegistryAccessor;
import org.apache.isis.viewer.wicket.ui.util.Components;

/**
 * Convenience adapter for {@link Panel}s built up using {@link ComponentType}s.
 *
 * <p>
 * Base class for the viewer's panels: wires a panel to its
 * {@link ComponentType}, exposes its typed model, and provides accessors
 * to the Isis runtime (authentication session, persistence session,
 * deployment category) via static {@link IsisContext} lookups and the
 * current Wicket {@link Session}.
 */
// TODO mgrigorov: extend GenericPanel and make T the type of the model object, not the model
public abstract class PanelAbstract<T extends IModel<?>> extends Panel implements IHeaderContributor, PersistenceSessionProvider, AuthenticationSessionProvider, DeploymentCategoryProvider {

    private static final long serialVersionUID = 1L;

    // The component type this panel renders; null when the wicket id used at
    // construction does not correspond to a known ComponentType (see lookup below).
    private ComponentType componentType;

    public PanelAbstract(final ComponentType componentType) {
        this(componentType, null);
    }

    public PanelAbstract(final String id) {
        this(id, null);
    }

    public PanelAbstract(final ComponentType componentType, final T model) {
        // The ComponentType's wicket id doubles as the panel's wicket id.
        this(componentType.getWicketId(), model);
    }

    public PanelAbstract(final String id, final T model) {
        super(id, model);
        // Reverse-map the wicket id back to a ComponentType (may yield null).
        this.componentType = ComponentType.lookup(id);
    }

    /**
     * Will be null if created using {@link #PanelAbstract(String, IModel)}.
     */
    public ComponentType getComponentType() {
        return componentType;
    }

    /**
     * The panel's model, cast to the declared model type {@code T}.
     */
    @SuppressWarnings("unchecked")
    public T getModel() {
        return (T) getDefaultModel();
    }

    /**
     * For subclasses: adds (or replaces) the child component for the given
     * {@link ComponentType}, resolved through the component factory registry.
     *
     * @return the component that was added or replaced
     */
    protected Component addOrReplace(final ComponentType componentType, final IModel<?> model) {
        return getComponentFactoryRegistry().addOrReplaceComponent(this, componentType, model);
    }

    /**
     * For subclasses: permanently hides the children for the given component types.
     */
    protected void permanentlyHide(final ComponentType... componentIds) {
        Components.permanentlyHide(this, componentIds);
    }

    /**
     * For subclasses: permanently hides the children with the given wicket ids.
     */
    public void permanentlyHide(final String... ids) {
        Components.permanentlyHide(this, ids);
    }

    // ///////////////////////////////////////////////////////////////////
    // Hint support
    // ///////////////////////////////////////////////////////////////////

    /**
     * The nearest {@link UiHintContainer} for this panel, if any.
     */
    public UiHintContainer getUiHintContainer() {
        return UiHintContainer.Util.hintContainerOf(this);
    }

    // NOTE(review): this method's type parameter T shadows the class-level T;
    // here T is the UiHintContainer subtype being searched for.
    public <T extends UiHintContainer> T getUiHintContainer(final Class<T> additionalConstraint) {
        return UiHintContainer.Util.hintContainerOf(this, additionalConstraint);
    }

    // ///////////////////////////////////////////////////////////////////
    // Convenience
    // ///////////////////////////////////////////////////////////////////

    /**
     * The underlying {@link AuthenticationSession Isis session} wrapped in the
     * {@link #getWebSession() Wicket session}.
     *
     * <p>
     * NOTE(review): assumes the application's Wicket {@link Session} subclass
     * implements {@link AuthenticationSessionProvider}; the cast fails otherwise.
     */
    @Override
    public AuthenticationSession getAuthenticationSession() {
        final AuthenticationSessionProvider asa = (AuthenticationSessionProvider) Session.get();
        return asa.getAuthenticationSession();
    }

    /**
     * The deployment category, derived from the current deployment type
     * held by {@link IsisContext}.
     */
    @Override
    public DeploymentCategory getDeploymentCategory() {
        return IsisContext.getDeploymentType().getDeploymentCategory();
    }

    // ///////////////////////////////////////////////////////////////////
    // Dependencies (from IsisContext)
    // ///////////////////////////////////////////////////////////////////

    /**
     * The current {@link PersistenceSession}, looked up statically from
     * {@link IsisContext}.
     */
    @Override
    public PersistenceSession getPersistenceSession() {
        return IsisContext.getPersistenceSession();
    }

    // /////////////////////////////////////////////////
    // Dependency Injection
    // /////////////////////////////////////////////////

    // The registry is held by the Wicket Application; assumes the application
    // implements ComponentFactoryRegistryAccessor.
    protected ComponentFactoryRegistry getComponentFactoryRegistry() {
        return ((ComponentFactoryRegistryAccessor) getApplication()).getComponentFactoryRegistry();
    }

    // /////////////////////////////////////////////////
    // *Provider impl.
    // /////////////////////////////////////////////////

    /**
     * Injects this panel as the {@link AuthenticationSessionProvider} and its
     * {@link DeploymentCategory} into the candidate, when the candidate
     * implements the corresponding *Aware interfaces. Other candidates are
     * left untouched.
     */
    @Override
    public void injectInto(final Object candidate) {
        if (AuthenticationSessionProviderAware.class.isAssignableFrom(candidate.getClass())) {
            final AuthenticationSessionProviderAware cast = AuthenticationSessionProviderAware.class.cast(candidate);
            cast.setAuthenticationSessionProvider(this);
        }
        if (DeploymentCategoryAware.class.isAssignableFrom(candidate.getClass())) {
            final DeploymentCategoryAware cast = DeploymentCategoryAware.class.cast(candidate);
            cast.setDeploymentCategory(this.getDeploymentCategory());
        }
    }

    /**
     * Helper method that looks up a domain service by type
     *
     * @param serviceClass The class of the domain service to lookup
     * @param <S> The type of the domain service to lookup
     * @return The found domain service
     */
    protected <S> S lookupService(final Class<S> serviceClass) {
        return getPersistenceSession().getServicesInjector().lookupService(serviceClass);
    }

}
apache-2.0
allanbank/mongodb-async-driver
src/main/java/com/allanbank/mongodb/bson/element/DocumentElement.java
25331
/* * #%L * DocumentElement.java - mongodb-async-driver - Allanbank Consulting, Inc. * %% * Copyright (C) 2011 - 2014 Allanbank Consulting, Inc. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.allanbank.mongodb.bson.element; import static com.allanbank.mongodb.util.Assertions.assertNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.annotation.concurrent.Immutable; import javax.annotation.concurrent.ThreadSafe; import com.allanbank.mongodb.bson.Document; import com.allanbank.mongodb.bson.DocumentReference; import com.allanbank.mongodb.bson.Element; import com.allanbank.mongodb.bson.ElementType; import com.allanbank.mongodb.bson.Visitor; import com.allanbank.mongodb.bson.builder.BuilderFactory; import com.allanbank.mongodb.bson.impl.EmptyDocument; import com.allanbank.mongodb.bson.impl.RootDocument; import com.allanbank.mongodb.bson.io.StringEncoder; import com.allanbank.mongodb.util.PatternUtils; /** * Wraps a single BSON document that may contain nested documents. * * @api.yes This class is part of the driver's API. Public and protected members * will be deprecated for at least 1 non-bugfix release (version * numbers are &lt;major&gt;.&lt;minor&gt;.&lt;bugfix&gt;) before being * removed or modified. 
* @copyright 2011-2013, Allanbank Consulting, Inc., All Rights Reserved */ @Immutable @ThreadSafe public class DocumentElement extends AbstractElement implements Document { /** * The {@link DocumentElement}'s class to avoid the * {@link Class#forName(String) Class.forName(...)} overhead. */ public static final Class<DocumentElement> DOCUMENT_CLASS = DocumentElement.class; /** The empty list of elements. */ public static final List<Element> EMPTY_ELEMENTS = Collections.emptyList(); /** The BSON type for a document. */ public static final ElementType TYPE = ElementType.DOCUMENT; /** Serialization version for the class. */ private static final long serialVersionUID = -564259598403040796L; /** * Computes and returns the number of bytes that are used to encode the * element. * * @param name * The name for the BSON array. * @param entries * The entries in the array. * @return The size of the element when encoded in bytes. */ private static long computeSize(final String name, final Collection<Element> entries) { long result = 7; // type (1) + name null byte (1) + int length (4) + // element null byte (1). result += StringEncoder.utf8Size(name); if ((entries != null) && !entries.isEmpty()) { for (final Element element : entries) { result += element.size(); } } return result; } /** * Computes and returns the number of bytes that are used to encode the * element. * * @param name * The name for the BSON array. * @param documentSize * The size of the document used to construct the element. * @return The size of the element when encoded in bytes. */ private static long computeSize(final String name, final long documentSize) { long result = 2; // type (1) + name null byte (1) result += StringEncoder.utf8Size(name); result += documentSize; return result; } /** * Constructed when a user tries to access the elements of the document by * name. */ private Map<String, Element> myElementMap; /** The elements of the document. 
*/ private final List<Element> myElements; /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. * @param elements * The sub-elements for the document. * @throws IllegalArgumentException * If the {@code name} is <code>null</code>. */ public DocumentElement(final String name, final Collection<Element> elements) { this(name, (elements != null) ? new ArrayList<Element>(elements) : EMPTY_ELEMENTS, true); } /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. * @param value * The document to copy elements from. * @throws IllegalArgumentException * If the {@code name} or {@code value} is <code>null</code>. */ public DocumentElement(final String name, final Document value) { this(name, (value == null) ? EMPTY_ELEMENTS : value.getElements(), true, computeSize(name, (value == null) ? EmptyDocument.SIZE : value.size())); assertNotNull(value, "Document element's sub-document cannot be null."); } /** * Constructs a new {@link DocumentElement} with a single sub-document * element. * * @param name * The name for the BSON document. * @param value * The document to copy elements from. * @throws IllegalArgumentException * If the {@code name} or {@code value} is <code>null</code>. */ public DocumentElement(final String name, final DocumentElement value) { this(name, (value != null) ? Collections.singletonList((Element) value) : EMPTY_ELEMENTS, true); assertNotNull(value, "Document element's sub-document cannot be null."); } /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. * @param elements * The sub-elements for the document. * @throws IllegalArgumentException * If the {@code name} is <code>null</code>. */ public DocumentElement(final String name, final Element... elements) { this(name, Arrays.asList(elements)); } /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. 
* @param elements * The sub-elements for the document. * @throws IllegalArgumentException * If the {@code name} is <code>null</code>. */ public DocumentElement(final String name, final List<Element> elements) { this(name, elements, false); } /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. * @param elements * The sub-elements for the document. * @param takeOwnership * If true this element takes ownership of the list to avoid a * copy of the list. */ public DocumentElement(final String name, final List<Element> elements, final boolean takeOwnership) { this(name, elements, takeOwnership, computeSize(name, elements)); } /** * Constructs a new {@link DocumentElement}. * * @param name * The name for the BSON document. * @param elements * The sub-elements for the document. * @param takeOwnership * If true this element takes ownership of the list to avoid a * copy of the list. * @param size * The size of the element when encoded in bytes. If not known * then use the * {@link DocumentElement#DocumentElement(String, List, boolean)} * constructor instead. */ public DocumentElement(final String name, final List<Element> elements, final boolean takeOwnership, final long size) { super(name, size); if ((elements != null) && !elements.isEmpty()) { if (takeOwnership) { myElements = Collections.unmodifiableList(elements); } else { myElements = Collections .unmodifiableList(new ArrayList<Element>(elements)); } } else { myElements = EMPTY_ELEMENTS; } } /** * Accepts the visitor and calls the {@link Visitor#visitDocument} method. * * @see Element#accept(Visitor) */ @Override public void accept(final Visitor visitor) { if (visitor instanceof SizeAwareVisitor) { ((SizeAwareVisitor) visitor).visitDocument(getName(), getElements(), size()); } else { visitor.visitDocument(getName(), getElements()); } } /** * {@inheritDoc} * <p> * Returns this element. 
* </p> */ @Override public Document asDocument() { return this; } /** * Returns this sub-document as a {@link DocumentReference} if it conforms * to the MongoDB DBRef convention. Returns <code>null</code> otherwise. * <p> * A DocumentReference contains (order matters): * <ol> * <li>The name of the collection where the referenced document resides: * {@code $ref}.</li> * <li>The value of the _id field in the referenced document: {@code $id}.</li> * <li>The name of the database where the referenced document resides: * {@code $db} (Optional).</li> * </ol> * * @return This sub-document as a {@link DocumentReference} if it conforms * to the MongoDB DBRef convention. Returns <code>null</code> * otherwise. * * @see #isDocumentReference() * @see <a * href="http://docs.mongodb.org/manual/applications/database-references/#dbref">MongoDB * DBRef Information</a> */ public DocumentReference asDocumentReference() { final int elementCount = myElements.size(); if (elementCount == 2) { final Element element1 = myElements.get(0); final Element element2 = myElements.get(1); final String element1Name = element1.getName(); final ElementType element1Type = element1.getType(); final String element2Name = element2.getName(); if (DocumentReference.COLLECTION_FIELD_NAME.equals(element1Name) && DocumentReference.ID_FIELD_NAME.equals(element2Name)) { if (element1Type == ElementType.STRING) { return new DocumentReference( ((StringElement) element1).getValue(), element2); } else if (element1Type == ElementType.SYMBOL) { return new DocumentReference( ((SymbolElement) element1).getSymbol(), element2); } } } else if (myElements.size() == 3) { final Element element1 = myElements.get(0); final Element element2 = myElements.get(1); final Element element3 = myElements.get(2); final String element1Name = element1.getName(); final ElementType element1Type = element1.getType(); final String element2Name = element2.getName(); final String element3Name = element3.getName(); final ElementType element3Type = 
element3.getType(); if (DocumentReference.COLLECTION_FIELD_NAME.equals(element1Name) && DocumentReference.ID_FIELD_NAME.equals(element2Name) && DocumentReference.DATABASE_FIELD_NAME .equals(element3Name)) { if (element1Type == ElementType.STRING) { if (element3Type == ElementType.STRING) { return new DocumentReference( ((StringElement) element3).getValue(), ((StringElement) element1).getValue(), element2); } else if (element3Type == ElementType.SYMBOL) { return new DocumentReference( ((SymbolElement) element3).getSymbol(), ((StringElement) element1).getValue(), element2); } } else if (element1Type == ElementType.SYMBOL) { if (element3Type == ElementType.STRING) { return new DocumentReference( ((StringElement) element3).getValue(), ((SymbolElement) element1).getSymbol(), element2); } else if (element3Type == ElementType.SYMBOL) { return new DocumentReference( ((SymbolElement) element3).getSymbol(), ((SymbolElement) element1).getSymbol(), element2); } } } } return null; } /** * {@inheritDoc} * <p> * Overridden to compare the elements of the document if the base class * comparison is equals. * </p> */ @Override public int compareTo(final Element otherElement) { int result = super.compareTo(otherElement); if (result == 0) { final DocumentElement other = (DocumentElement) otherElement; final int length = Math.min(myElements.size(), other.myElements.size()); for (int i = 0; i < length; ++i) { result = myElements.get(i).compareTo(other.myElements.get(i)); if (result != 0) { return result; } } result = myElements.size() - other.myElements.size(); } return result; } /** * Returns true if the document contains an element with the specified name. * * @param name * The name of the element to locate. * @return True if the document contains an element with the given name, * false otherwise. 
* @see Document#contains(String) */ @Override public boolean contains(final String name) { return getElementMap().containsKey(name); } /** * Determines if the passed object is of this same type as this object and * if so that its fields are equal. * * @param object * The object to compare to. * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(final Object object) { boolean result = false; if (this == object) { result = true; } else if ((object != null) && (getClass() == object.getClass())) { final DocumentElement other = (DocumentElement) object; result = super.equals(object) && myElements.equals(other.myElements); } return result; } /** * {@inheritDoc} * <p> * Searches this sub-elements for matching elements on the path and are of * the right type. * </p> * * @see Element#find */ @Override public <E extends Element> List<E> find(final Class<E> clazz, final String... nameRegexs) { List<E> elements = Collections.emptyList(); if (0 < nameRegexs.length) { final String nameRegex = nameRegexs[0]; final String[] subNameRegexs = Arrays.copyOfRange(nameRegexs, 1, nameRegexs.length); elements = new ArrayList<E>(); try { final Pattern pattern = PatternUtils.toPattern(nameRegex); for (final Element element : myElements) { if (pattern.matcher(element.getName()).matches()) { elements.addAll(element.find(clazz, subNameRegexs)); } } } catch (final PatternSyntaxException pse) { // Assume a non-pattern? for (final Element element : myElements) { if (nameRegex.equals(element.getName())) { elements.addAll(element.find(clazz, subNameRegexs)); } } } } else { // End of the path -- are we the right type/element? if (clazz.isAssignableFrom(this.getClass())) { elements = Collections.singletonList(clazz.cast(this)); } } return elements; } /** * {@inheritDoc} * <p> * Searches this sub-elements for matching elements on the path and are of * the right type. 
* </p> * * @see Element#findFirst */ @Override public <E extends Element> E findFirst(final Class<E> clazz, final String... nameRegexs) { E element = null; if (0 < nameRegexs.length) { final String nameRegex = nameRegexs[0]; final String[] subNameRegexs = Arrays.copyOfRange(nameRegexs, 1, nameRegexs.length); try { final Pattern pattern = PatternUtils.toPattern(nameRegex); final Iterator<Element> iter = myElements.iterator(); while (iter.hasNext() && (element == null)) { final Element docElement = iter.next(); if (pattern.matcher(docElement.getName()).matches()) { element = docElement.findFirst(clazz, subNameRegexs); } } } catch (final PatternSyntaxException pse) { // Assume a non-pattern? final Iterator<Element> iter = myElements.iterator(); while (iter.hasNext() && (element == null)) { final Element docElement = iter.next(); if (nameRegex.equals(docElement.getName())) { element = docElement.findFirst(clazz, subNameRegexs); } } } } else { // End of the path -- are we the right type/element? if (clazz.isAssignableFrom(this.getClass())) { element = clazz.cast(this); } } return element; } /** * Returns the element with the specified name and type or null if no * element with that name and type exists. * * @param <E> * The type of element to get. * @param clazz * The class of element to get. * @param name * The name of the element to locate. * @return The sub-element in the document with the given name or null if * element exists with the given name. * @see Document#get(Class, String) */ @Override public <E extends Element> E get(final Class<E> clazz, final String name) { final Element element = get(name); if ((element != null) && clazz.isAssignableFrom(element.getClass())) { return clazz.cast(element); } return null; } /** * Returns the element with the specified name or null if no element with * that name exists. * * @param name * The name of the element to locate. 
* @return The sub-element in the document with the given name or null if * element exists with the given name. * @see Document#get(String) */ @Override public Element get(final String name) { return getElementMap().get(name); } /** * Returns the element's document. * * @return The document contained within the element. */ public Document getDocument() { return new RootDocument(myElements); } /** * Returns the elements in the document. * * @return The elements in the document. */ @Override public List<Element> getElements() { return myElements; } /** * {@inheritDoc} */ @Override public ElementType getType() { return TYPE; } /** * {@inheritDoc} * <p> * Returns a stand-alone {@link Document}. * </p> */ @Override public Document getValueAsObject() { return BuilderFactory.start(this).build(); } /** * Computes a reasonable hash code. * * @return The hash code value. */ @Override public int hashCode() { int result = 1; result = (31 * result) + super.hashCode(); result = (31 * result) + myElements.hashCode(); return result; } /** * Returns true if this sub-document conforms to the MongoDB DBRef * convention, false otherwise. * <p> * A DocumentReference contains (order matters): * <ol> * <li>The name (string or symbol) of the collection where the referenced * document resides: {@code $ref}.</li> * <li>The value of the _id field in the referenced document: {@code $id}.</li> * <li>The name (string or symbol) of the database where the referenced * document resides: {@code $db} (Optional).</li> * </ol> * * @return True if this sub-document conforms to the MongoDB DBRef * convention, false otherwise. 
* * @see #asDocumentReference() * @see DocumentReference * @see <a * href="http://docs.mongodb.org/manual/applications/database-references/#dbref">MongoDB * DBRef Information</a> */ public boolean isDocumentReference() { final int elementCount = myElements.size(); if (elementCount == 2) { final Element element1 = myElements.get(0); final Element element2 = myElements.get(1); final String element1Name = element1.getName(); final ElementType element1Type = element1.getType(); final String element2Name = element2.getName(); return DocumentReference.COLLECTION_FIELD_NAME.equals(element1Name) && ((element1Type == ElementType.STRING) || (element1Type == ElementType.SYMBOL)) && DocumentReference.ID_FIELD_NAME.equals(element2Name); } else if (myElements.size() == 3) { final Element element1 = myElements.get(0); final Element element2 = myElements.get(1); final Element element3 = myElements.get(2); final String element1Name = element1.getName(); final ElementType element1Type = element1.getType(); final String element2Name = element2.getName(); final String element3Name = element3.getName(); final ElementType element3Type = element3.getType(); return DocumentReference.COLLECTION_FIELD_NAME.equals(element1Name) && ((element1Type == ElementType.STRING) || (element1Type == ElementType.SYMBOL)) && DocumentReference.ID_FIELD_NAME.equals(element2Name) && DocumentReference.DATABASE_FIELD_NAME .equals(element3Name) && ((element3Type == ElementType.STRING) || (element3Type == ElementType.SYMBOL)); } return false; } /** * Returns an iterator over the documents elements. * * @see Iterable#iterator() */ @Override public Iterator<Element> iterator() { return getElements().iterator(); } /** * {@inheritDoc} * <p> * Returns a new {@link DocumentElement}. 
* </p> */ @Override public DocumentElement withName(final String name) { if (getName().equals(name)) { return this; } return new DocumentElement(name, myElements); } /** * Returns a map from the element names to the elements in the document. * Used for faster by-name access. * * @return The element name to element mapping. */ private Map<String, Element> getElementMap() { if (myElementMap == null) { final List<Element> elements = myElements; final Map<String, Element> mapping = new HashMap<String, Element>( elements.size() + elements.size()); for (final Element element : elements) { mapping.put(element.getName(), element); } // Swap the finished map into position. myElementMap = mapping; } return myElementMap; } }
apache-2.0
valikir/vturbin
chapter_004/src/main/java/list/ContainerList.java
1479
package list; import net.jcip.annotations.GuardedBy; import net.jcip.annotations.ThreadSafe; import java.util.*; @ThreadSafe public class ContainerList<E> implements DynamicContainer { @GuardedBy("container") private int size = 10; // default size Object[] container = new Object[size]; int index = 0; private void ensureCapacity(){ if (index == container.length - 1){ container = Arrays.copyOf(container,size = size + 10); } } private void rangeCheck(int index){ if (index > size){ throw new IndexOutOfBoundsException(); } } @Override synchronized public void add(Object value) { ensureCapacity(); container[index++] = value; } @Override synchronized public Object get(int index) { rangeCheck(index); return container[index]; } @Override public Iterator iterator() { return new Itr(); } private class Itr implements Iterator<E> { int cursor; // index of next element to return int lastRet = -1; // index of last element returned; -1 if no such public boolean hasNext() { return cursor < index; } public E next() { int i = cursor; if (i >= index) throw new NoSuchElementException(); Object[] data = container; cursor = i + 1; return (E) data[lastRet = i]; } } }
apache-2.0
alanfgates/hive
ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
22867
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import junit.framework.TestCase;

import org.apache.hadoop.hive.metastore.Warehouse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import
org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.TextInputFormat;

/**
 * Mimics the actual query compiler in generating end to end plans and testing
 * them out.
 *
 */
public class TestExecDriver extends TestCase {

  static QueryState queryState;
  static HiveConf conf;

  private static final String tmpdir;
  private static final Logger LOG = LoggerFactory.getLogger(TestExecDriver.class);
  private static final Path tmppath;
  private static Hive db;
  private static FileSystem fs;
  private static CompilationOpContext ctx = null;

  // One-time setup shared by every test in this class: configure and start a
  // session, prepare the scratch directory, copy the kv1/kv2 fixtures into
  // HDFS, and load them into the "src" and "src2" tables.
  static {
    try {
      queryState = new QueryState.Builder().withHiveConf(new HiveConf(ExecDriver.class)).build();
      conf = queryState.getConf();
      conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
      conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);
      conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
          "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
      SessionState.start(conf);

      tmpdir = System.getProperty("test.tmp.dir");
      tmppath = new Path(tmpdir);

      fs = FileSystem.get(conf);
      if (fs.exists(tmppath) &&
          !ShimLoader.getHadoopShims().isDirectory(fs.getFileStatus(tmppath))) {
        throw
        new RuntimeException(tmpdir + " exists but is not a directory");
      }

      if (!fs.exists(tmppath)) {
        if (!fs.mkdirs(tmppath)) {
          throw new RuntimeException("Could not make scratch directory " + tmpdir);
        }
      }

      LOG.info("Directory of actual files: " + tmppath);

      // Remove any stale output directories left over from a previous run.
      for (Object one : Utilities.makeList("mapplan1.out", "mapplan2.out",
          "mapredplan1.out", "mapredplan2.out", "mapredplan3.out",
          "mapredplan4.out", "mapredplan5.out", "mapredplan6.out")) {
        Path onedir = new Path(tmppath, (String) one);
        if (fs.exists(onedir)) {
          fs.delete(onedir, true);
        }
      }

      // copy the test files into hadoop if required.
      int i = 0;
      Path[] hadoopDataFile = new Path[2];
      String[] testFiles = {"kv1.txt", "kv2.txt"};
      String testFileDir = new Path(conf.get("test.data.files")).toUri().getPath();
      LOG.info("Directory of expected files: " + testFileDir);
      for (String oneFile : testFiles) {
        Path localDataFile = new Path(testFileDir, oneFile);
        hadoopDataFile[i] = new Path(tmppath, oneFile);
        fs.copyFromLocalFile(false, true, localDataFile, hadoopDataFile[i]);
        i++;
      }

      // load the test files into tables
      i = 0;
      db = Hive.get(conf);
      String[] srctables = {"src", "src2"};
      LinkedList<String> cols = new LinkedList<String>();
      cols.add("key");
      cols.add("value");
      for (String src : srctables) {
        db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, src, true, true);
        db.createTable(src, cols, null, TextInputFormat.class,
            HiveIgnoreKeyTextOutputFormat.class);
        db.loadTable(hadoopDataFile[i], src, LoadFileType.KEEP_EXISTING, true, false, false,
            true, null, 0, false);
        i++;
      }

    } catch (Throwable e) {
      throw new RuntimeException("Encountered throwable", e);
    }
  }

  // The plan under construction; rebuilt for every test by setUp().
  MapredWork mr;

  /** Starts each test with a fresh plan and compilation context. */
  @Override
  protected void setUp() {
    mr = PlanUtils.getMapRedWork();
    ctx = new CompilationOpContext();
  }

  /** Attaches the given operator tree to the plan as the map work for tbl. */
  public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work) {
    mr.getMapWork().addMapWork(tbl.getDataLocation(), alias, work,
        new PartitionDesc(Utilities.getTableDesc(tbl), null));
  }

  // Compares the single output file under tmppath/testdir against the golden
  // file "datafile"; fails the test on mismatch.
  private static void fileDiff(String datafile, String testdir) throws
Exception { String testFileDir = conf.get("test.data.files"); // inbuilt assumption that the testdir has only one output file. Path di_test = new Path(tmppath, testdir); if (!fs.exists(di_test)) { throw new RuntimeException(tmpdir + File.separator + testdir + " does not exist"); } if (!ShimLoader.getHadoopShims().isDirectory(fs.getFileStatus(di_test))) { throw new RuntimeException(tmpdir + File.separator + testdir + " is not a directory"); } FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath()); FileInputStream fi_gold = new FileInputStream(new File(testFileDir,datafile)); if (!Utilities.contentsEqual(fi_gold, fi_test, false)) { LOG.error(di_test.toString() + " does not match " + datafile); assertEquals(false, true); } } private FilterDesc getTestFilterDesc(String column) throws Exception { ArrayList<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(); children1.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false)); ExprNodeDesc lhs = new ExprNodeGenericFuncDesc( TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo( serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children1); ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>(); children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long .valueOf(100))); ExprNodeDesc rhs = new ExprNodeGenericFuncDesc( TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo( serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children2); ArrayList<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>(); children3.add(lhs); children3.add(rhs); ExprNodeDesc desc = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo("<") .getGenericUDF(), children3); return new FilterDesc(desc, false); } @SuppressWarnings("unchecked") private void populateMapPlan1(Table src) throws Exception { Operator<FileSinkDesc> op2 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapplan1.out"), 
Utilities.defaultTd, true)); Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"), op2); addMapWork(mr, src, "a", op1); } @SuppressWarnings("unchecked") private void populateMapPlan2(Table src) throws Exception { Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapplan2.out"), Utilities.defaultTd, false)); Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordWriter.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordReader.class, TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op3); Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"), op2); addMapWork(mr, src, "a", op1); } @SuppressWarnings("unchecked") private void populateMapRedPlan1(Table src) throws SemanticException { ArrayList<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } // map-side work Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities.makeList(getStringColumn("value")), outputColumns, true, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); addMapWork(mr, src, "a", op1); ReduceWork rWork = new ReduceWork(); rWork.setNumReduceTasks(Integer.valueOf(1)); rWork.setKeyDesc(op1.getConf().getKeySerializeInfo()); rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); mr.setReduceWork(rWork); // reduce side work Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan1.out"), Utilities.defaultTd, false)); List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>(); cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1))); List<String> colNames = new ArrayList<String>(); 
colNames.add(HiveConf.getColumnInternalName(2)); Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, colNames), op3); rWork.setReducer(op2); } @SuppressWarnings("unchecked") private void populateMapRedPlan2(Table src) throws Exception { ArrayList<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } // map-side work Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities .makeList(getStringColumn("key"), getStringColumn("value")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); addMapWork(mr, src, "a", op1); ReduceWork rWork = new ReduceWork(); rWork.setNumReduceTasks(Integer.valueOf(1)); rWork.setKeyDesc(op1.getConf().getKeySerializeInfo()); rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); mr.setReduceWork(rWork); // reduce side work Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan2.out"), Utilities.defaultTd, false)); Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4); List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>(); cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0)); cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1))); Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3); rWork.setReducer(op2); } /** * test reduce with multiple tagged inputs. 
*/ @SuppressWarnings("unchecked") private void populateMapRedPlan3(Table src, Table src2) throws SemanticException { List<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } // map-side work Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities.makeList(getStringColumn("value")), outputColumns, true, Byte.valueOf((byte) 0), 1, -1, AcidUtils.Operation.NOT_ACID)); addMapWork(mr, src, "a", op1); Operator<ReduceSinkDesc> op2 = OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")), Utilities.makeList(getStringColumn("key")), outputColumns, true, Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1, AcidUtils.Operation.NOT_ACID)); addMapWork(mr, src2, "b", op2); ReduceWork rWork = new ReduceWork(); rWork.setNumReduceTasks(Integer.valueOf(1)); rWork.setNeedsTagging(true); rWork.setKeyDesc(op1.getConf().getKeySerializeInfo()); rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); mr.setReduceWork(rWork); rWork.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo()); // reduce side work Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan3.out"), Utilities.defaultTd, false)); Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities .makeList(new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo, new ExprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo), Utilities.ReduceField.VALUE.toString(), "", false), "0", false)), Utilities.makeList(outputColumns.get(0))), op4); rWork.setReducer(op5); } @SuppressWarnings("unchecked") private void populateMapRedPlan4(Table src) throws SemanticException { // map-side work ArrayList<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } Operator<ReduceSinkDesc> op1 = 
OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")), Utilities.makeList(getStringColumn("tkey"), getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc("cat", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordWriter.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class, TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op1); Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities .makeList(getStringColumn("key"), getStringColumn("value")), outputColumns), op0); addMapWork(mr, src, "a", op4); ReduceWork rWork = new ReduceWork(); rWork.setKeyDesc(op1.getConf().getKeySerializeInfo()); rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo()); rWork.setNumReduceTasks(Integer.valueOf(1)); mr.setReduceWork(rWork); // reduce side work Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan4.out"), Utilities.defaultTd, false)); List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>(); cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0)); cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1))); Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3); rWork.setReducer(op2); } public static ExprNodeColumnDesc getStringColumn(String columnName) { return new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false); } @SuppressWarnings("unchecked") private void populateMapRedPlan5(Table src) throws SemanticException { // map-side work ArrayList<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } Operator<ReduceSinkDesc> op0 = OperatorFactory.get(ctx, PlanUtils 
.getReduceSinkDesc(Utilities.makeList(getStringColumn("0")), Utilities .makeList(getStringColumn("0"), getStringColumn("1")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities .makeList(getStringColumn("key"), getStringColumn("value")), outputColumns), op0); addMapWork(mr, src, "a", op4); ReduceWork rWork = new ReduceWork(); mr.setReduceWork(rWork); rWork.setNumReduceTasks(Integer.valueOf(1)); rWork.setKeyDesc(op0.getConf().getKeySerializeInfo()); rWork.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo()); // reduce side work Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan5.out"), Utilities.defaultTd, false)); List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>(); cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0)); cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1))); Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3); rWork.setReducer(op2); } @SuppressWarnings("unchecked") private void populateMapRedPlan6(Table src) throws Exception { // map-side work ArrayList<String> outputColumns = new ArrayList<String>(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); } Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")), Utilities.makeList(getStringColumn("tkey"), getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc( "\'cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"), TextRecordWriter.class, PlanUtils .getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class, TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op1); 
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
        .makeList(getStringColumn("key"), getStringColumn("value")),
        outputColumns), op0);

    addMapWork(mr, src, "a", op4);

    ReduceWork rWork = new ReduceWork();
    mr.setReduceWork(rWork);
    rWork.setNumReduceTasks(Integer.valueOf(1));
    rWork.setKeyDesc(op1.getConf().getKeySerializeInfo());
    rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());

    // reduce side work
    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx,
        new FileSinkDesc(new Path(tmpdir + File.separator + "mapredplan6.out"),
        Utilities.defaultTd, false));

    Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);

    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString() + "." + outputColumns.get(1)));
    Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op2);

    rWork.setReducer(op5);
  }

  /**
   * Runs the plan assembled in {@link #mr} as a MapRedTask and fails the
   * calling test when the task's exit status is non-zero. The test name is
   * recovered from the caller's stack frame purely for logging.
   */
  private void executePlan() throws Exception {
    String testName = new Exception().getStackTrace()[1].getMethodName();
    MapRedTask mrtask = new MapRedTask();
    DriverContext dctx = new DriverContext ();
    mrtask.setWork(mr);
    mrtask.initialize(queryState, null, dctx, null);
    int exitVal = mrtask.execute(dctx);

    if (exitVal != 0) {
      LOG.error(testName + " execution failed with exit status: " + exitVal);
      assertEquals(true, false);
    }
    LOG.info(testName + " execution completed successfully");
  }

  public void testMapPlan1() throws Exception {

    LOG.info("Beginning testMapPlan1");

    populateMapPlan1(db.getTable(Warehouse.DEFAULT_DATABASE_NAME, "src"));
    executePlan();
    fileDiff("lt100.txt.deflate", "mapplan1.out");
  }

  public void testMapPlan2() throws Exception {

    LOG.info("Beginning testMapPlan2");

    populateMapPlan2(db.getTable(Warehouse.DEFAULT_DATABASE_NAME, "src"));
    executePlan();
    fileDiff("lt100.txt", "mapplan2.out");
  }

  public void testMapRedPlan1() throws Exception {

    LOG.info("Beginning testMapRedPlan1");

    populateMapRedPlan1(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"));
    executePlan();
    fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
  }

  public void testMapRedPlan2() throws Exception {

    LOG.info("Beginning testMapPlan2");

    populateMapRedPlan2(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"));
    executePlan();
    fileDiff("lt100.sorted.txt", "mapredplan2.out");
  }

  public void testMapRedPlan3() throws Exception {

    LOG.info("Beginning testMapPlan3");

    populateMapRedPlan3(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"), db.getTable(Warehouse.DEFAULT_DATABASE_NAME, "src2"));
    executePlan();
    fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
  }

  public void testMapRedPlan4() throws Exception {

    LOG.info("Beginning testMapPlan4");

    populateMapRedPlan4(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"));
    executePlan();
    fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
  }

  public void testMapRedPlan5() throws Exception {

    LOG.info("Beginning testMapPlan5");

    populateMapRedPlan5(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"));
    executePlan();
    fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
  }

  public void testMapRedPlan6() throws Exception {

    LOG.info("Beginning testMapPlan6");

    populateMapRedPlan6(db.getTable(Warehouse.DEFAULT_DATABASE_NAME,
        "src"));
    executePlan();
    fileDiff("lt100.sorted.txt", "mapredplan6.out");
  }
}
apache-2.0
remibergsma/cosmic
cosmic-core/api/src/main/java/com/cloud/api/command/admin/network/ListStorageNetworkIpRangeCmd.java
4077
package com.cloud.api.command.admin.network; import com.cloud.api.APICommand; import com.cloud.api.ApiConstants; import com.cloud.api.ApiErrorCode; import com.cloud.api.BaseListCmd; import com.cloud.api.Parameter; import com.cloud.api.ServerApiException; import com.cloud.api.response.ListResponse; import com.cloud.api.response.PodResponse; import com.cloud.api.response.StorageNetworkIpRangeResponse; import com.cloud.api.response.ZoneResponse; import com.cloud.dc.StorageNetworkIpRange; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.ResourceAllocationException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.user.Account; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @APICommand(name = "listStorageNetworkIpRange", description = "List a storage network IP range.", responseObject = StorageNetworkIpRangeResponse.class, since = "3.0.0", requestHasSensitiveInfo = false, responseHasSensitiveInfo = false) public class ListStorageNetworkIpRangeCmd extends BaseListCmd { public static final Logger s_logger = LoggerFactory.getLogger(ListStorageNetworkIpRangeCmd.class); String _name = "liststoragenetworkiprangeresponse"; ///////////////////////////////////////////////////// //////////////// API parameters ///////////////////// ///////////////////////////////////////////////////// @Parameter(name = ApiConstants.ID, type = CommandType.UUID, entityType = StorageNetworkIpRangeResponse.class, description = "optional parameter. Storaget network IP range uuid, if specicied, using it to search the range.") private Long rangeId; @Parameter(name = ApiConstants.POD_ID, type = CommandType.UUID, entityType = PodResponse.class, description = "optional parameter. 
Pod uuid, if specicied and range uuid is absent, using it to search the range.") private Long podId; @Parameter(name = ApiConstants.ZONE_ID, type = CommandType.UUID, entityType = ZoneResponse.class, description = "optional parameter. Zone uuid, if specicied and both pod uuid and range uuid are absent, using it to search the range.") private Long zoneId; ///////////////////////////////////////////////////// /////////////////// Accessors /////////////////////// ///////////////////////////////////////////////////// @Override public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, ConcurrentOperationException, ResourceAllocationException { try { final List<StorageNetworkIpRange> results = _storageNetworkService.listIpRange(this); final ListResponse<StorageNetworkIpRangeResponse> response = new ListResponse<>(); final List<StorageNetworkIpRangeResponse> resList = new ArrayList<>(results.size()); for (final StorageNetworkIpRange r : results) { final StorageNetworkIpRangeResponse resp = _responseGenerator.createStorageNetworkIpRangeResponse(r); resList.add(resp); } response.setResponses(resList); response.setResponseName(getCommandName()); this.setResponseObject(response); } catch (final Exception e) { s_logger.warn("Failed to list storage network ip range for rangeId=" + getRangeId() + " podId=" + getPodId() + " zoneId=" + getZoneId()); throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getMessage()); } } public Long getRangeId() { return rangeId; } public Long getPodId() { return podId; } public Long getZoneId() { return zoneId; } @Override public String getCommandName() { return _name; } @Override public long getEntityOwnerId() { return Account.ACCOUNT_ID_SYSTEM; } }
apache-2.0
aglne/dubbo
dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/filter/GenericImplFilter.java
10135
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dubbo.rpc.filter;

import org.apache.dubbo.common.beanutil.JavaBeanAccessor;
import org.apache.dubbo.common.beanutil.JavaBeanDescriptor;
import org.apache.dubbo.common.beanutil.JavaBeanSerializeUtil;
import org.apache.dubbo.common.constants.CommonConstants;
import org.apache.dubbo.common.extension.Activate;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.PojoUtils;
import org.apache.dubbo.common.utils.ReflectUtils;
import org.apache.dubbo.rpc.Invocation;
import org.apache.dubbo.rpc.Invoker;
import org.apache.dubbo.rpc.ListenableFilter;
import org.apache.dubbo.rpc.Result;
import org.apache.dubbo.rpc.RpcException;
import org.apache.dubbo.rpc.RpcInvocation;
import org.apache.dubbo.rpc.service.GenericException;
import org.apache.dubbo.rpc.support.ProtocolUtils;
import org.apache.dubbo.rpc.support.RpcUtils;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Type;

import static org.apache.dubbo.common.constants.RpcConstants.$INVOKE;
import static org.apache.dubbo.common.constants.RpcConstants.$INVOKE_ASYNC;
import static org.apache.dubbo.rpc.Constants.GENERIC_KEY;

/**
 * GenericImplInvokerFilter
 *
 * Consumer-side filter active when the reference is configured with the
 * "generic" attribute. Two directions are handled:
 * 1. A typed call made through a generic-configured invoker is rewritten into
 *    a $invoke / $invokeAsync call (method name + type names + generalized
 *    arguments); the listener later realizes the generic result back into the
 *    declared return type.
 * 2. An explicit $invoke / $invokeAsync call has its arguments validated
 *    against the configured serialization mode (nativejava expects byte[],
 *    bean expects JavaBeanDescriptor) before being forwarded.
 */
@Activate(group = CommonConstants.CONSUMER, value = GENERIC_KEY, order = 20000)
public class GenericImplFilter extends ListenableFilter {

    private static final Logger logger = LoggerFactory.getLogger(GenericImplFilter.class);

    // Signature of the generic entry point: $invoke(String method, String[] types, Object[] args).
    private static final Class<?>[] GENERIC_PARAMETER_TYPES = new Class<?>[]{String.class, String[].class, Object[].class};

    public GenericImplFilter() {
        // Register the response listener that de-generalizes results and exceptions.
        super.listener = new GenericImplListener();
    }

    @Override
    public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
        String generic = invoker.getUrl().getParameter(GENERIC_KEY);
        // Case 1: typed call on a generic invoker -> rewrite into $invoke/$invokeAsync.
        if (ProtocolUtils.isGeneric(generic)
                && (!$INVOKE.equals(invocation.getMethodName()) && !$INVOKE_ASYNC.equals(invocation.getMethodName()))
                && invocation instanceof RpcInvocation) {
            RpcInvocation invocation2 = new RpcInvocation(invocation);
            String methodName = invocation2.getMethodName();
            Class<?>[] parameterTypes = invocation2.getParameterTypes();
            Object[] arguments = invocation2.getArguments();

            // Parameter types are transmitted as their string names.
            String[] types = new String[parameterTypes.length];
            for (int i = 0; i < parameterTypes.length; i++) {
                types[i] = ReflectUtils.getName(parameterTypes[i]);
            }

            // Generalize the arguments according to the configured serialization mode:
            // bean mode serializes each argument into a JavaBeanDescriptor, otherwise
            // POJOs are generalized into maps/primitives.
            Object[] args;
            if (ProtocolUtils.isBeanGenericSerialization(generic)) {
                args = new Object[arguments.length];
                for (int i = 0; i < arguments.length; i++) {
                    args[i] = JavaBeanSerializeUtil.serialize(arguments[i], JavaBeanAccessor.METHOD);
                }
            } else {
                args = PojoUtils.generalize(arguments);
            }

            // Preserve async semantics: future-returning methods go through $invokeAsync.
            if (RpcUtils.isReturnTypeFuture(invocation)) {
                invocation2.setMethodName($INVOKE_ASYNC);
            } else {
                invocation2.setMethodName($INVOKE);
            }
            invocation2.setParameterTypes(GENERIC_PARAMETER_TYPES);
            invocation2.setArguments(new Object[]{methodName, types, args});
            return invoker.invoke(invocation2);
        }
        // Case 2: explicit $invoke/$invokeAsync call -> validate argument payload types.
        else if ((invocation.getMethodName().equals($INVOKE) || invocation.getMethodName().equals($INVOKE_ASYNC))
                && invocation.getArguments() != null
                && invocation.getArguments().length == 3
                && ProtocolUtils.isGeneric(generic)) {

            Object[] args = (Object[]) invocation.getArguments()[2];
            if (ProtocolUtils.isJavaGenericSerialization(generic)) {
                // nativejava mode requires each argument to already be serialized bytes.
                for (Object arg : args) {
                    if (!(byte[].class == arg.getClass())) {
                        error(generic, byte[].class.getName(), arg.getClass().getName());
                    }
                }
            } else if (ProtocolUtils.isBeanGenericSerialization(generic)) {
                // bean mode requires each argument to be a JavaBeanDescriptor.
                for (Object arg : args) {
                    if (!(arg instanceof JavaBeanDescriptor)) {
                        error(generic, JavaBeanDescriptor.class.getName(), arg.getClass().getName());
                    }
                }
            }

            // Propagate the generic mode to the provider side as an attachment.
            invocation.setAttachment(GENERIC_KEY, invoker.getUrl().getParameter(GENERIC_KEY));
        }
        return invoker.invoke(invocation);
    }

    /**
     * Raises an RpcException describing a message-type mismatch for the given
     * generic serialization mode.
     */
    private void error(String generic, String expected, String actual) throws RpcException {
        throw new RpcException("Generic serialization [" + generic + "] only support message type " + expected + " and your message type is " + actual);
    }

    static class GenericImplListener implements Listener {
        /**
         * Converts the generic response back into typed form for calls that were
         * rewritten by the filter above: bean-serialized values are deserialized,
         * other values are realized via the declared return types, and
         * GenericException payloads are reconstructed into the original
         * exception class where possible.
         */
        @Override
        public void onResponse(Result appResponse, Invoker<?> invoker, Invocation invocation) {
            String generic = invoker.getUrl().getParameter(GENERIC_KEY);
            String methodName = invocation.getMethodName();
            Class<?>[] parameterTypes = invocation.getParameterTypes();
            if (ProtocolUtils.isGeneric(generic)
                    && (!$INVOKE.equals(invocation.getMethodName()) && !$INVOKE_ASYNC.equals(invocation.getMethodName()))
                    && invocation instanceof RpcInvocation) {
                if (!appResponse.hasException()) {
                    Object value = appResponse.getValue();
                    try {
                        Method method = invoker.getInterface().getMethod(methodName, parameterTypes);
                        if (ProtocolUtils.isBeanGenericSerialization(generic)) {
                            if (value == null) {
                                appResponse.setValue(value);
                            } else if (value instanceof JavaBeanDescriptor) {
                                appResponse.setValue(JavaBeanSerializeUtil.deserialize((JavaBeanDescriptor) value));
                            } else {
                                // Anything other than a descriptor indicates a protocol violation.
                                throw new RpcException("The type of result value is " + value.getClass().getName() + " other than " +
                                        JavaBeanDescriptor.class.getName() + ", and the result is " + value);
                            }
                        } else {
                            // Realize the generalized value into the method's declared return type.
                            Type[] types = ReflectUtils.getReturnTypes(method);
                            appResponse.setValue(PojoUtils.realize(value, (Class<?>) types[0], types[1]));
                        }
                    } catch (NoSuchMethodException e) {
                        throw new RpcException(e.getMessage(), e);
                    }
                } else if (appResponse.getException() instanceof GenericException) {
                    GenericException exception = (GenericException) appResponse.getException();
                    try {
                        String className = exception.getExceptionClass();
                        Class<?> clazz = ReflectUtils.forName(className);
                        Throwable targetException = null;
                        Throwable lastException = null;
                        // First try the no-arg constructor, then fall back to any public
                        // constructor invoked with null/zero default arguments.
                        try {
                            targetException = (Throwable) clazz.newInstance();
                        } catch (Throwable e) {
                            lastException = e;
                            for (Constructor<?> constructor : clazz.getConstructors()) {
                                try {
                                    targetException = (Throwable) constructor.newInstance(new Object[constructor.getParameterTypes().length]);
                                    break;
                                } catch (Throwable e1) {
                                    lastException = e1;
                                }
                            }
                        }
                        if (targetException != null) {
                            // Best-effort: inject the original message into the reconstructed
                            // exception via Throwable.detailMessage; failure here is non-fatal.
                            try {
                                Field field = Throwable.class.getDeclaredField("detailMessage");
                                if (!field.isAccessible()) {
                                    field.setAccessible(true);
                                }
                                field.set(targetException, exception.getExceptionMessage());
                            } catch (Throwable e) {
                                logger.warn(e.getMessage(), e);
                            }
                            appResponse.setException(targetException);
                        } else if (lastException != null) {
                            throw lastException;
                        }
                    } catch (Throwable e) {
                        throw new RpcException("Can not deserialize exception " + exception.getExceptionClass() + ", message: " + exception.getExceptionMessage(), e);
                    }
                }
            }
        }

        @Override
        public void onError(Throwable t, Invoker<?> invoker, Invocation invocation) {
            // No error handling needed on the listener side.
        }
    }
}
apache-2.0
scarabresearch/EmarsysPredictSDKAndroid
predictsdk/src/main/java/com/emarsys/predict/ErrorParameter.java
946
/*
 * Copyright 2016 Scarab Research Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.emarsys.predict;

/**
 * Immutable value object carrying the error information recorded for a query.
 */
class ErrorParameter {

    // Category of the error.
    final String type;
    // Command during which the error occurred.
    final String command;
    // Human-readable description of the error.
    final String message;

    /**
     * Creates a new error record; all three fields are fixed at construction.
     */
    ErrorParameter(String errorType, String errorCommand, String errorMessage) {
        this.message = errorMessage;
        this.command = errorCommand;
        this.type = errorType;
    }
}
apache-2.0
googleapis/java-dataproc
proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SparkBatch.java
67633
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataproc/v1/batches.proto package com.google.cloud.dataproc.v1; /** * * * <pre> * A configuration for running an [Apache Spark](http://spark.apache.org/) * batch workload. * </pre> * * Protobuf type {@code google.cloud.dataproc.v1.SparkBatch} */ public final class SparkBatch extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.SparkBatch) SparkBatchOrBuilder { private static final long serialVersionUID = 0L; // Use SparkBatch.newBuilder() to construct. 
private SparkBatch(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SparkBatch() { args_ = com.google.protobuf.LazyStringArrayList.EMPTY; jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY; fileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY; archiveUris_ = com.google.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SparkBatch(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SparkBatch( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); driverCase_ = 1; driver_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); driverCase_ = 2; driver_ = s; break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000001) != 0)) { args_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } args_.add(s); break; } case 34: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000002) != 0)) { jarFileUris_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000002; } jarFileUris_.add(s); break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000004) != 0)) { fileUris_ = new com.google.protobuf.LazyStringArrayList(); 
mutable_bitField0_ |= 0x00000004; } fileUris_.add(s); break; } case 50: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000008) != 0)) { archiveUris_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000008; } archiveUris_.add(s); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { args_ = args_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000002) != 0)) { jarFileUris_ = jarFileUris_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000004) != 0)) { fileUris_ = fileUris_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000008) != 0)) { archiveUris_ = archiveUris_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1.BatchesProto .internal_static_google_cloud_dataproc_v1_SparkBatch_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1.BatchesProto .internal_static_google_cloud_dataproc_v1_SparkBatch_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1.SparkBatch.class, com.google.cloud.dataproc.v1.SparkBatch.Builder.class); } private int driverCase_ = 0; private java.lang.Object driver_; public enum DriverCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { MAIN_JAR_FILE_URI(1), MAIN_CLASS(2), DRIVER_NOT_SET(0); private final int value; private DriverCase(int value) { 
this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static DriverCase valueOf(int value) { return forNumber(value); } public static DriverCase forNumber(int value) { switch (value) { case 1: return MAIN_JAR_FILE_URI; case 2: return MAIN_CLASS; case 0: return DRIVER_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public DriverCase getDriverCase() { return DriverCase.forNumber(driverCase_); } public static final int MAIN_JAR_FILE_URI_FIELD_NUMBER = 1; /** * * * <pre> * Optional. The HCFS URI of the jar file that contains the main class. * </pre> * * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the mainJarFileUri field is set. */ public boolean hasMainJarFileUri() { return driverCase_ == 1; } /** * * * <pre> * Optional. The HCFS URI of the jar file that contains the main class. * </pre> * * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The mainJarFileUri. */ public java.lang.String getMainJarFileUri() { java.lang.Object ref = ""; if (driverCase_ == 1) { ref = driver_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (driverCase_ == 1) { driver_ = s; } return s; } } /** * * * <pre> * Optional. The HCFS URI of the jar file that contains the main class. * </pre> * * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for mainJarFileUri. 
*/ public com.google.protobuf.ByteString getMainJarFileUriBytes() { java.lang.Object ref = ""; if (driverCase_ == 1) { ref = driver_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (driverCase_ == 1) { driver_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int MAIN_CLASS_FIELD_NUMBER = 2; /** * * * <pre> * Optional. The name of the driver main class. The jar file that contains the class * must be in the classpath or specified in `jar_file_uris`. * </pre> * * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the mainClass field is set. */ public boolean hasMainClass() { return driverCase_ == 2; } /** * * * <pre> * Optional. The name of the driver main class. The jar file that contains the class * must be in the classpath or specified in `jar_file_uris`. * </pre> * * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The mainClass. */ public java.lang.String getMainClass() { java.lang.Object ref = ""; if (driverCase_ == 2) { ref = driver_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (driverCase_ == 2) { driver_ = s; } return s; } } /** * * * <pre> * Optional. The name of the driver main class. The jar file that contains the class * must be in the classpath or specified in `jar_file_uris`. * </pre> * * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for mainClass. 
*/ public com.google.protobuf.ByteString getMainClassBytes() { java.lang.Object ref = ""; if (driverCase_ == 2) { ref = driver_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (driverCase_ == 2) { driver_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ARGS_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList args_; /** * * * <pre> * Optional. The arguments to pass to the driver. Do not include arguments * that can be set as batch properties, such as `--conf`, since a collision * can occur that causes an incorrect batch submission. * </pre> * * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return A list containing the args. */ public com.google.protobuf.ProtocolStringList getArgsList() { return args_; } /** * * * <pre> * Optional. The arguments to pass to the driver. Do not include arguments * that can be set as batch properties, such as `--conf`, since a collision * can occur that causes an incorrect batch submission. * </pre> * * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The count of args. */ public int getArgsCount() { return args_.size(); } /** * * * <pre> * Optional. The arguments to pass to the driver. Do not include arguments * that can be set as batch properties, such as `--conf`, since a collision * can occur that causes an incorrect batch submission. * </pre> * * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the element to return. * @return The args at the given index. */ public java.lang.String getArgs(int index) { return args_.get(index); } /** * * * <pre> * Optional. The arguments to pass to the driver. 
Do not include arguments * that can be set as batch properties, such as `--conf`, since a collision * can occur that causes an incorrect batch submission. * </pre> * * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the value to return. * @return The bytes of the args at the given index. */ public com.google.protobuf.ByteString getArgsBytes(int index) { return args_.getByteString(index); } public static final int JAR_FILE_URIS_FIELD_NUMBER = 4; private com.google.protobuf.LazyStringList jarFileUris_; /** * * * <pre> * Optional. HCFS URIs of jar files to add to the classpath of the * Spark driver and tasks. * </pre> * * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return A list containing the jarFileUris. */ public com.google.protobuf.ProtocolStringList getJarFileUrisList() { return jarFileUris_; } /** * * * <pre> * Optional. HCFS URIs of jar files to add to the classpath of the * Spark driver and tasks. * </pre> * * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The count of jarFileUris. */ public int getJarFileUrisCount() { return jarFileUris_.size(); } /** * * * <pre> * Optional. HCFS URIs of jar files to add to the classpath of the * Spark driver and tasks. * </pre> * * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the element to return. * @return The jarFileUris at the given index. */ public java.lang.String getJarFileUris(int index) { return jarFileUris_.get(index); } /** * * * <pre> * Optional. HCFS URIs of jar files to add to the classpath of the * Spark driver and tasks. * </pre> * * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the value to return. * @return The bytes of the jarFileUris at the given index. 
*/ public com.google.protobuf.ByteString getJarFileUrisBytes(int index) { return jarFileUris_.getByteString(index); } public static final int FILE_URIS_FIELD_NUMBER = 5; private com.google.protobuf.LazyStringList fileUris_; /** * * * <pre> * Optional. HCFS URIs of files to be placed in the working directory of * each executor. * </pre> * * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return A list containing the fileUris. */ public com.google.protobuf.ProtocolStringList getFileUrisList() { return fileUris_; } /** * * * <pre> * Optional. HCFS URIs of files to be placed in the working directory of * each executor. * </pre> * * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The count of fileUris. */ public int getFileUrisCount() { return fileUris_.size(); } /** * * * <pre> * Optional. HCFS URIs of files to be placed in the working directory of * each executor. * </pre> * * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the element to return. * @return The fileUris at the given index. */ public java.lang.String getFileUris(int index) { return fileUris_.get(index); } /** * * * <pre> * Optional. HCFS URIs of files to be placed in the working directory of * each executor. * </pre> * * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the value to return. * @return The bytes of the fileUris at the given index. */ public com.google.protobuf.ByteString getFileUrisBytes(int index) { return fileUris_.getByteString(index); } public static final int ARCHIVE_URIS_FIELD_NUMBER = 6; private com.google.protobuf.LazyStringList archiveUris_; /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. 
* </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return A list containing the archiveUris. */ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { return archiveUris_; } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The count of archiveUris. */ public int getArchiveUrisCount() { return archiveUris_.size(); } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the element to return. * @return The archiveUris at the given index. */ public java.lang.String getArchiveUris(int index) { return archiveUris_.get(index); } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the value to return. * @return The bytes of the archiveUris at the given index. 
*/ public com.google.protobuf.ByteString getArchiveUrisBytes(int index) { return archiveUris_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (driverCase_ == 1) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, driver_); } if (driverCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, driver_); } for (int i = 0; i < args_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, args_.getRaw(i)); } for (int i = 0; i < jarFileUris_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, jarFileUris_.getRaw(i)); } for (int i = 0; i < fileUris_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, fileUris_.getRaw(i)); } for (int i = 0; i < archiveUris_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, archiveUris_.getRaw(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (driverCase_ == 1) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, driver_); } if (driverCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, driver_); } { int dataSize = 0; for (int i = 0; i < args_.size(); i++) { dataSize += computeStringSizeNoTag(args_.getRaw(i)); } size += dataSize; size += 1 * getArgsList().size(); } { int dataSize = 0; for (int i = 0; i < jarFileUris_.size(); i++) { dataSize += computeStringSizeNoTag(jarFileUris_.getRaw(i)); } size += dataSize; size += 1 * getJarFileUrisList().size(); } { int dataSize = 0; for (int i = 0; i < fileUris_.size(); i++) 
      { // tail of getSerializedSize(): accumulate byte size of repeated file_uris
        dataSize += computeStringSizeNoTag(fileUris_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getFileUrisList().size(); // one tag byte per element
    }
    {
      // byte size of repeated archive_uris, same scheme
      int dataSize = 0;
      for (int i = 0; i < archiveUris_.size(); i++) {
        dataSize += computeStringSizeNoTag(archiveUris_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getArchiveUrisList().size();
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size; // cache: message is immutable, size never changes
    return size;
  }

  // Value equality over all repeated fields, the driver oneof (case and value),
  // and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.SparkBatch)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.SparkBatch other = (com.google.cloud.dataproc.v1.SparkBatch) obj;

    if (!getArgsList().equals(other.getArgsList())) return false;
    if (!getJarFileUrisList().equals(other.getJarFileUrisList())) return false;
    if (!getFileUrisList().equals(other.getFileUrisList())) return false;
    if (!getArchiveUrisList().equals(other.getArchiveUrisList())) return false;
    if (!getDriverCase().equals(other.getDriverCase())) return false;
    switch (driverCase_) {
      case 1: // main_jar_file_uri
        if (!getMainJarFileUri().equals(other.getMainJarFileUri())) return false;
        break;
      case 2: // main_class
        if (!getMainClass().equals(other.getMainClass())) return false;
        break;
      case 0: // oneof not set
      default:
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  // Hash code consistent with equals(); memoized because the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getArgsCount() > 0) {
      hash = (37 * hash) + ARGS_FIELD_NUMBER;
      hash = (53 * hash) + getArgsList().hashCode();
    }
    if (getJarFileUrisCount() > 0) {
      hash = (37 * hash) + JAR_FILE_URIS_FIELD_NUMBER;
      hash = (53 * hash) + getJarFileUrisList().hashCode();
    }
    if (getFileUrisCount() > 0) {
      hash = (37 * hash) + FILE_URIS_FIELD_NUMBER;
      hash = (53 * hash) + getFileUrisList().hashCode();
    }
    if (getArchiveUrisCount() > 0) {
      hash = (37 * hash) + ARCHIVE_URIS_FIELD_NUMBER;
      hash = (53 * hash) + getArchiveUrisList().hashCode();
    }
    switch (driverCase_) {
      case 1: // main_jar_file_uri
        hash = (37 * hash) + MAIN_JAR_FILE_URI_FIELD_NUMBER;
        hash = (53 * hash) + getMainJarFileUri().hashCode();
        break;
      case 2: // main_class
        hash = (37 * hash) + MAIN_CLASS_FIELD_NUMBER;
        hash = (53 * hash) + getMainClass().hashCode();
        break;
      case 0: // oneof not set
      default:
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points: every overload delegates to PARSER
  // (binary protobuf wire format), with or without an extension registry.
  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.dataproc.v1.SparkBatch parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkBatch parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.dataproc.v1.SparkBatch prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance skips the mergeFrom to avoid copying empty state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * A configuration for running an [Apache Spark](http://spark.apache.org/)
   * batch workload.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataproc.v1.SparkBatch}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.SparkBatch)
      com.google.cloud.dataproc.v1.SparkBatchOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.BatchesProto
          .internal_static_google_cloud_dataproc_v1_SparkBatch_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.BatchesProto
          .internal_static_google_cloud_dataproc_v1_SparkBatch_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.SparkBatch.class,
              com.google.cloud.dataproc.v1.SparkBatch.Builder.class);
    }

    // Construct using com.google.cloud.dataproc.v1.SparkBatch.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No message-typed fields, so there are no field builders to force.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    /** Resets every field to its default and clears the driver oneof. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      fileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      archiveUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000008);
      driverCase_ = 0;
      driver_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.BatchesProto
          .internal_static_google_cloud_dataproc_v1_SparkBatch_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkBatch getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.SparkBatch.getDefaultInstance();
    }

    /** Builds the message, throwing if it is uninitialized (no required fields here). */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkBatch build() {
      com.google.cloud.dataproc.v1.SparkBatch result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Builds without the initialization check. Repeated-string lists are frozen
     * to unmodifiable views before being handed to the message; the matching
     * bitField0_ bit is cleared so a later mutation re-copies the list.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkBatch buildPartial() {
      com.google.cloud.dataproc.v1.SparkBatch result =
          new com.google.cloud.dataproc.v1.SparkBatch(this);
      int from_bitField0_ = bitField0_;
      if (driverCase_ == 1) {
        result.driver_ = driver_;
      }
      if (driverCase_ == 2) {
        result.driver_ = driver_;
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        args_ = args_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.args_ = args_;
      if (((bitField0_ & 0x00000002) != 0)) {
        jarFileUris_ = jarFileUris_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.jarFileUris_ = jarFileUris_;
      if (((bitField0_ & 0x00000004) != 0)) {
        fileUris_ = fileUris_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.fileUris_ = fileUris_;
      if (((bitField0_ & 0x00000008) != 0)) {
        archiveUris_ = archiveUris_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000008);
      }
      result.archiveUris_ = archiveUris_;
      result.driverCase_ = driverCase_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // Reflective field mutators: straight delegation to the superclass, present
    // so subclass return types stay covariant.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.SparkBatch) {
        return mergeFrom((com.google.cloud.dataproc.v1.SparkBatch) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /**
     * Merges {@code other} into this builder: repeated fields are concatenated
     * (adopting the other message's list wholesale when ours is empty), and a
     * set driver oneof in {@code other} overwrites whatever was set here.
     */
    public Builder mergeFrom(com.google.cloud.dataproc.v1.SparkBatch other) {
      if (other == com.google.cloud.dataproc.v1.SparkBatch.getDefaultInstance()) return this;
      if (!other.args_.isEmpty()) {
        if (args_.isEmpty()) {
          args_ = other.args_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureArgsIsMutable();
          args_.addAll(other.args_);
        }
        onChanged();
      }
      if (!other.jarFileUris_.isEmpty()) {
        if (jarFileUris_.isEmpty()) {
          jarFileUris_ = other.jarFileUris_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureJarFileUrisIsMutable();
          jarFileUris_.addAll(other.jarFileUris_);
        }
        onChanged();
      }
      if (!other.fileUris_.isEmpty()) {
        if (fileUris_.isEmpty()) {
          fileUris_ = other.fileUris_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureFileUrisIsMutable();
          fileUris_.addAll(other.fileUris_);
        }
        onChanged();
      }
      if (!other.archiveUris_.isEmpty()) {
        if (archiveUris_.isEmpty()) {
          archiveUris_ = other.archiveUris_;
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          ensureArchiveUrisIsMutable();
          archiveUris_.addAll(other.archiveUris_);
        }
        onChanged();
      }
      switch (other.getDriverCase()) {
        case MAIN_JAR_FILE_URI:
          {
            driverCase_ = 1;
            driver_ = other.driver_;
            onChanged();
            break;
          }
        case MAIN_CLASS:
          {
            driverCase_ = 2;
            driver_ = other.driver_;
            onChanged();
            break;
          }
        case DRIVER_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean
        isInitialized() {
      return true; // proto3: no required fields, always initialized
    }

    /**
     * Parses from a stream and merges into this builder. On a parse failure the
     * partially-read message is still merged (via finally) before rethrowing.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dataproc.v1.SparkBatch parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.dataproc.v1.SparkBatch) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // driver oneof state: driverCase_ selects the set member (1 = main_jar_file_uri,
    // 2 = main_class, 0 = unset); driver_ holds its value (String or ByteString).
    private int driverCase_ = 0;
    private java.lang.Object driver_;

    public DriverCase getDriverCase() {
      return DriverCase.forNumber(driverCase_);
    }

    public Builder clearDriver() {
      driverCase_ = 0;
      driver_ = null;
      onChanged();
      return this;
    }

    // Bit i of bitField0_ records whether the i-th repeated field's list is a
    // private mutable copy owned by this builder.
    private int bitField0_;

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the mainJarFileUri field is set.
     */
    @java.lang.Override
    public boolean hasMainJarFileUri() {
      return driverCase_ == 1;
    }

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The mainJarFileUri.
     */
    @java.lang.Override
    public java.lang.String getMainJarFileUri() {
      java.lang.Object ref = "";
      if (driverCase_ == 1) {
        ref = driver_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (driverCase_ == 1) {
          // cache the decoded String in place of the ByteString
          driver_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for mainJarFileUri.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getMainJarFileUriBytes() {
      java.lang.Object ref = "";
      if (driverCase_ == 1) {
        ref = driver_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (driverCase_ == 1) {
          // cache the encoded ByteString in place of the String
          driver_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The mainJarFileUri to set.
     * @return This builder for chaining.
     */
    public Builder setMainJarFileUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      driverCase_ = 1;
      driver_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMainJarFileUri() {
      if (driverCase_ == 1) {
        driverCase_ = 0;
        driver_ = null;
        onChanged();
      }
      return this;
    }

    /**
     * <pre>
     * Optional. The HCFS URI of the jar file that contains the main class.
     * </pre>
     *
     * <code>string main_jar_file_uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for mainJarFileUri to set.
     * @return This builder for chaining.
     */
    public Builder setMainJarFileUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      driverCase_ = 1;
      driver_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The name of the driver main class.
     * The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the mainClass field is set.
     */
    @java.lang.Override
    public boolean hasMainClass() {
      return driverCase_ == 2;
    }

    /**
     * <pre>
     * Optional. The name of the driver main class. The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The mainClass.
     */
    @java.lang.Override
    public java.lang.String getMainClass() {
      java.lang.Object ref = "";
      if (driverCase_ == 2) {
        ref = driver_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (driverCase_ == 2) {
          // cache the decoded String in place of the ByteString
          driver_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Optional. The name of the driver main class. The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for mainClass.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getMainClassBytes() {
      java.lang.Object ref = "";
      if (driverCase_ == 2) {
        ref = driver_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (driverCase_ == 2) {
          driver_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Optional. The name of the driver main class. The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The mainClass to set.
     * @return This builder for chaining.
     */
    public Builder setMainClass(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      driverCase_ = 2;
      driver_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The name of the driver main class. The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMainClass() {
      if (driverCase_ == 2) {
        driverCase_ = 0;
        driver_ = null;
        onChanged();
      }
      return this;
    }

    /**
     * <pre>
     * Optional. The name of the driver main class. The jar file that contains the class
     * must be in the classpath or specified in `jar_file_uris`.
     * </pre>
     *
     * <code>string main_class = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for mainClass to set.
     * @return This builder for chaining.
     */
    public Builder setMainClassBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      driverCase_ = 2;
      driver_ = value;
      onChanged();
      return this;
    }

    // Backing list for repeated string args (field 3); starts as the shared
    // immutable EMPTY and is copied on first mutation (see ensureArgsIsMutable).
    private com.google.protobuf.LazyStringList args_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    private void ensureArgsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        args_ = new com.google.protobuf.LazyStringArrayList(args_);
        bitField0_ |= 0x00000001;
      }
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return A list containing the args.
     */
    public com.google.protobuf.ProtocolStringList getArgsList() {
      return args_.getUnmodifiableView();
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver.
     * Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The count of args.
     */
    public int getArgsCount() {
      return args_.size();
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the element to return.
     * @return The args at the given index.
     */
    public java.lang.String getArgs(int index) {
      return args_.get(index);
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the args at the given index.
     */
    public com.google.protobuf.ByteString getArgsBytes(int index) {
      return args_.getByteString(index);
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index to set the value at.
     * @param value The args to set.
     * @return This builder for chaining.
     */
    public Builder setArgs(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureArgsIsMutable();
      args_.set(index, value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional.
     * The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The args to add.
     * @return This builder for chaining.
     */
    public Builder addArgs(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureArgsIsMutable();
      args_.add(value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param values The args to add.
     * @return This builder for chaining.
     */
    public Builder addAllArgs(java.lang.Iterable<java.lang.String> values) {
      ensureArgsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, args_);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearArgs() {
      args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. The arguments to pass to the driver. Do not include arguments
     * that can be set as batch properties, such as `--conf`, since a collision
     * can occur that causes an incorrect batch submission.
     * </pre>
     *
     * <code>repeated string args = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes of the args to add.
     * @return This builder for chaining.
     */
    public Builder addArgsBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureArgsIsMutable();
      args_.add(value);
      onChanged();
      return this;
    }

    // Backing list for repeated string jar_file_uris (field 4); copy-on-write
    // guarded by bit 0x00000002 of bitField0_.
    private com.google.protobuf.LazyStringList jarFileUris_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    private void ensureJarFileUrisIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        jarFileUris_ = new com.google.protobuf.LazyStringArrayList(jarFileUris_);
        bitField0_ |= 0x00000002;
      }
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return A list containing the jarFileUris.
     */
    public com.google.protobuf.ProtocolStringList getJarFileUrisList() {
      return jarFileUris_.getUnmodifiableView();
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The count of jarFileUris.
     */
    public int getJarFileUrisCount() {
      return jarFileUris_.size();
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the element to return.
     * @return The jarFileUris at the given index.
     */
    public java.lang.String getJarFileUris(int index) {
      return jarFileUris_.get(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the jarFileUris at the given index.
     */
    public com.google.protobuf.ByteString getJarFileUrisBytes(int index) {
      return jarFileUris_.getByteString(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index to set the value at.
     * @param value The jarFileUris to set.
     * @return This builder for chaining.
     */
    public Builder setJarFileUris(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureJarFileUrisIsMutable();
      jarFileUris_.set(index, value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addJarFileUris(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureJarFileUrisIsMutable();
      jarFileUris_.add(value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param values The jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addAllJarFileUris(java.lang.Iterable<java.lang.String> values) {
      ensureJarFileUrisIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, jarFileUris_);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearJarFileUris() {
      jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of jar files to add to the classpath of the
     * Spark driver and tasks.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes of the jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addJarFileUrisBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureJarFileUrisIsMutable();
      jarFileUris_.add(value);
      onChanged();
      return this;
    }

    // Backing list for repeated string file_uris (field 5); copy-on-write
    // guarded by bit 0x00000004 of bitField0_.
    private com.google.protobuf.LazyStringList fileUris_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    private void ensureFileUrisIsMutable() {
      if (!((bitField0_ & 0x00000004) != 0)) {
        fileUris_ = new com.google.protobuf.LazyStringArrayList(fileUris_);
        bitField0_ |= 0x00000004;
      }
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return A list containing the fileUris.
     */
    public com.google.protobuf.ProtocolStringList getFileUrisList() {
      return fileUris_.getUnmodifiableView();
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The count of fileUris.
     */
    public int getFileUrisCount() {
      return fileUris_.size();
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the element to return.
     * @return The fileUris at the given index.
     */
    public java.lang.String getFileUris(int index) {
      return fileUris_.get(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the fileUris at the given index.
     */
    public com.google.protobuf.ByteString getFileUrisBytes(int index) {
      return fileUris_.getByteString(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index to set the value at.
     * @param value The fileUris to set.
     * @return This builder for chaining.
     */
    public Builder setFileUris(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFileUrisIsMutable();
      fileUris_.set(index, value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The fileUris to add.
     * @return This builder for chaining.
     */
    public Builder addFileUris(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFileUrisIsMutable();
      fileUris_.add(value);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param values The fileUris to add.
     * @return This builder for chaining.
     */
    public Builder addAllFileUris(java.lang.Iterable<java.lang.String> values) {
      ensureFileUrisIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fileUris_);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFileUris() {
      fileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Optional. HCFS URIs of files to be placed in the working directory of
     * each executor.
     * </pre>
     *
     * <code>repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes of the fileUris to add.
     * @return This builder for chaining.
     */
    public Builder addFileUrisBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureFileUrisIsMutable();
      fileUris_.add(value);
      onChanged();
      return this;
    }

    // Backing list for repeated string archive_uris (field 6); copy-on-write
    // guarded by bit 0x00000008 of bitField0_.
    private com.google.protobuf.LazyStringList archiveUris_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    private void ensureArchiveUrisIsMutable() {
      if (!((bitField0_ & 0x00000008) != 0)) {
        archiveUris_ = new com.google.protobuf.LazyStringArrayList(archiveUris_);
        bitField0_ |= 0x00000008;
      }
    }

    /**
     * <pre>
     * Optional. HCFS URIs of archives to be extracted into the working directory
     * of each executor. Supported file types:
     * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
     * </pre>
     *
     * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return A list containing the archiveUris.
     */
    public com.google.protobuf.ProtocolStringList getArchiveUrisList() {
      return archiveUris_.getUnmodifiableView();
    }

    /**
     * <pre>
     * Optional.
     * HCFS URIs of archives to be extracted into the working directory
     * of each executor. Supported file types:
     * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
     * </pre>
     *
     * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The count of archiveUris.
     */
    public int getArchiveUrisCount() {
      return archiveUris_.size();
    }

    /**
     * <pre>
     * Optional. HCFS URIs of archives to be extracted into the working directory
     * of each executor. Supported file types:
     * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
     * </pre>
     *
     * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the element to return.
     * @return The archiveUris at the given index.
     */
    public java.lang.String getArchiveUris(int index) {
      return archiveUris_.get(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of archives to be extracted into the working directory
     * of each executor. Supported file types:
     * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
     * </pre>
     *
     * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the archiveUris at the given index.
     */
    public com.google.protobuf.ByteString getArchiveUrisBytes(int index) {
      return archiveUris_.getByteString(index);
    }

    /**
     * <pre>
     * Optional. HCFS URIs of archives to be extracted into the working directory
     * of each executor. Supported file types:
     * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
     * </pre>
     *
     * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index to set the value at.
     * @param value The archiveUris to set.
     * @return This builder for chaining.
*/ public Builder setArchiveUris(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureArchiveUrisIsMutable(); archiveUris_.set(index, value); onChanged(); return this; } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The archiveUris to add. * @return This builder for chaining. */ public Builder addArchiveUris(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureArchiveUrisIsMutable(); archiveUris_.add(value); onChanged(); return this; } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param values The archiveUris to add. * @return This builder for chaining. */ public Builder addAllArchiveUris(java.lang.Iterable<java.lang.String> values) { ensureArchiveUrisIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, archiveUris_); onChanged(); return this; } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearArchiveUris() { archiveUris_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. HCFS URIs of archives to be extracted into the working directory * of each executor. 
Supported file types: * `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. * </pre> * * <code>repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes of the archiveUris to add. * @return This builder for chaining. */ public Builder addArchiveUrisBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureArchiveUrisIsMutable(); archiveUris_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.SparkBatch) } // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkBatch) private static final com.google.cloud.dataproc.v1.SparkBatch DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.SparkBatch(); } public static com.google.cloud.dataproc.v1.SparkBatch getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SparkBatch> PARSER = new com.google.protobuf.AbstractParser<SparkBatch>() { @java.lang.Override public SparkBatch parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SparkBatch(input, extensionRegistry); } }; public static com.google.protobuf.Parser<SparkBatch> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SparkBatch> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataproc.v1.SparkBatch getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
SVADemoAPP/webServer
src/test/java/com/sva/test/dao/LocationDelayDaoTest.java
2381
/**
 * @Title: LocationDelayDaoTest.java
 * @Package com.sva.test.dao
 * @Description: Unit tests for LocationDelayDao
 * @author: LabelCS
 * @date: 2016-09-03 21:20:39
 * @version V1.0
 */
package com.sva.test.dao;

import java.math.BigDecimal;
import java.util.List;

import javax.annotation.Resource;

import org.junit.Assert;
import org.junit.Test;

import com.sva.dao.LocationDelayDao;
import com.sva.model.LocationDelayModel;
import com.sva.model.MapsModel;
import com.sva.model.StoreModel;

/**
 * Exercises every query and the single insert method of {@link LocationDelayDao}
 * against the (empty) test database wired up by {@link BasicDaoTest}.
 */
public class LocationDelayDaoTest extends BasicDaoTest {

    @Resource
    LocationDelayDao locationDelayDao;

    /** Querying all rows from the empty table yields an empty list. */
    @Test
    public void getAllDataTest() {
        final List<LocationDelayModel> rows = locationDelayDao.getAllData();
        Assert.assertEquals("结果为0", 0, rows.size());
    }

    /** A time-window query over the empty table yields an empty list. */
    @Test
    public void getAllDatasTest() {
        final long startTime = 1L;
        final long endTime = 1476925335001L;
        final List<LocationDelayModel> rows = locationDelayDao.getAllDatas(startTime, endTime);
        Assert.assertEquals("结果为0", 0, rows.size());
    }

    /** Filtering by place id and time window on the empty table yields an empty list. */
    @Test
    public void getDataByPlaceIdTimeTest() {
        final String placeId = "1";
        final long startTime = 1L;
        final long endTime = 2L;
        final List<LocationDelayModel> rows =
                locationDelayDao.getDataByPlaceIdTime(startTime, endTime, placeId);
        Assert.assertEquals("结果为0", 0, rows.size());
    }

    /** Filtering by store id on the empty table yields an empty list. */
    @Test
    public void getAllDataByStoreidTest() {
        final int storeid = 1;
        final List<LocationDelayModel> rows = locationDelayDao.getAllDataByStoreid(storeid);
        Assert.assertEquals("结果为0", 0, rows.size());
    }

    /** Inserting one fully-populated model reports exactly one affected row. */
    @Test
    public void savaMessagePushTest() {
        final MapsModel maps = new MapsModel();
        maps.setFloorNo(new BigDecimal(1));

        final StoreModel store = new StoreModel();
        store.setId(1);

        final LocationDelayModel model = new LocationDelayModel();
        model.setDataDelay(2D);
        model.setPositionDelay(2D);
        model.setUpdateTime(222L);
        model.setMaps(maps);
        model.setStore(store);

        final int affected = locationDelayDao.savaMessagePush(model);
        Assert.assertEquals("结果为1", 1, affected);
    }
}
apache-2.0
LeeKyoungIl/illuminati
illuminati/illuminati-common/src/main/java/me/phoboslabs/illuminati/common/properties/IlluminatiBaseProperties.java
1718
/*
 * Copyright 2017 Phoboslabs.me
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package me.phoboslabs.illuminati.common.properties;

import me.phoboslabs.illuminati.common.constant.IlluminatiConstant;
import me.phoboslabs.illuminati.common.util.StringObjectUtils;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.Properties;

/**
 * Base class for Illuminati property holders. For every known property key it
 * copies the value from a {@link Properties} instance into the same-named
 * declared field of the concrete subclass via reflection.
 */
public abstract class IlluminatiBaseProperties implements IlluminatiProperties, Serializable {

    protected IlluminatiBaseProperties() {
    }

    protected IlluminatiBaseProperties(final Properties prop) {
        this.setProperties(prop);
    }

    /**
     * Copies each key listed in {@code IlluminatiConstant.PROPERTIES_KEYS} whose
     * value is present and valid into the matching declared field of this instance.
     *
     * @param prop source properties; a {@code null} argument is a no-op
     */
    @Override
    public void setProperties(final Properties prop) {
        if (prop == null) {
            return;
        }
        for (final String key : IlluminatiConstant.PROPERTIES_KEYS) {
            if (!prop.containsKey(key)) {
                continue;
            }
            final String value = prop.getProperty(key);
            if (StringObjectUtils.isValid(value)) {
                this.injectField(key, value);
            }
        }
    }

    /**
     * Reflectively assigns {@code value} to the declared field named
     * {@code fieldName}. Missing or inaccessible fields are skipped silently,
     * matching the original best-effort design.
     */
    private void injectField(final String fieldName, final String value) {
        try {
            final Field field = this.getClass().getDeclaredField(fieldName);
            field.setAccessible(true);
            field.set(this, value);
        } catch (Exception ignored) {
            // best-effort: a key with no matching field in the subclass is simply ignored
        }
    }
}
apache-2.0
shiver-me-timbers/smt-http-mock-parent
smt-http-mock-tomcat-parent/smt-http-mock-tomcat-common/src/main/java/shiver/me/timbers/http/servlet/tomcat/FileCleaner.java
1209
/*
 * Copyright 2016 Karl Bennett
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package shiver.me.timbers.http.servlet.tomcat;

import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;

/**
 * Removes the temporary directory (or stray file) that tomcat was started with.
 *
 * @author Karl Bennett
 */
class FileCleaner {

    private final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * Deletes {@code tempDir}: if the path points at a plain file it is removed
     * first, then any directory at the path is deleted recursively.
     *
     * @param tempDir path of the temporary tomcat directory to remove
     * @throws IOException if the file or directory could not be deleted
     */
    void cleanUp(String tempDir) throws IOException {
        final File file = new File(tempDir);
        // Bug fix: the original ignored File.delete()'s boolean result, silently
        // leaving the file behind on failure. Surface that as an IOException,
        // which this method already declares.
        if (file.isFile() && !file.delete()) {
            throw new IOException("Could not delete the temporary tomcat file (" + tempDir + ").");
        }
        FileUtils.deleteDirectory(file);
        log.info("Cleaned up the temporary tomcat directory ({}).", tempDir);
    }
}
apache-2.0
rpmoore/ds3_autogen
ds3-autogen-java/src/test/java/com/spectralogic/ds3autogen/java/generators/responsemodels/CodesResponseGenerator_Test.java
9033
/*
 * ******************************************************************************
 *   Copyright 2014-2015 Spectra Logic Corporation. All Rights Reserved.
 *   Licensed under the Apache License, Version 2.0 (the "License"). You may not use
 *   this file except in compliance with the License. A copy of the License is located at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 *   or in the "license" file accompanying this file.
 *   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 *   CONDITIONS OF ANY KIND, either express or implied. See the License for the
 *   specific language governing permissions and limitations under the License.
 * ****************************************************************************
 */
package com.spectralogic.ds3autogen.java.generators.responsemodels;

import com.google.common.collect.ImmutableList;
import com.spectralogic.ds3autogen.api.models.*;
import org.junit.Test;

import static com.spectralogic.ds3autogen.java.generators.responsemodels.CodesResponseGenerator.getResponseCodes;
import static com.spectralogic.ds3autogen.java.generators.responsemodels.CodesResponseGenerator.hasExpectedResponseCodes;
import static com.spectralogic.ds3autogen.testutil.Ds3ModelFixtures.getRequestMultiFileDelete;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@code CodesResponseGenerator}: the static helpers
 * {@code getResponseCodes}/{@code hasExpectedResponseCodes} and the instance
 * method {@code toResponseCodes}, which is exercised both with well-formed
 * requests and with "malformed" requests whose response-code lists do not
 * match the codes expected for that handler (those must throw
 * {@link IllegalArgumentException}).
 */
public class CodesResponseGenerator_Test {

    private final static CodesResponseGenerator generator = new CodesResponseGenerator();

    /** Shared fixture: five response codes spanning success (2xx), redirect (3xx) and error (4xx). */
    private static ImmutableList<Ds3ResponseCode> getTestResponseCodes() {
        return ImmutableList.of(
                new Ds3ResponseCode(200, null),
                new Ds3ResponseCode(206, null),
                new Ds3ResponseCode(307, null),
                new Ds3ResponseCode(400, null),
                new Ds3ResponseCode(404, null));
    }

    // --- getResponseCodes: null and empty inputs yield an empty list, never null ---

    @Test
    public void getResponseCodes_NullList_Test() {
        final ImmutableList<Integer> result = getResponseCodes(null);
        assertThat(result.size(), is(0));
    }

    @Test
    public void getResponseCodes_EmptyList_Test() {
        final ImmutableList<Integer> result = getResponseCodes(ImmutableList.of());
        assertThat(result.size(), is(0));
    }

    /** Every code in the fixture must appear in the extracted integer list. */
    @Test
    public void getResponseCodes_FullList_Test() {
        final ImmutableList<Ds3ResponseCode> responseCodes = getTestResponseCodes();
        final ImmutableList<Integer> result = getResponseCodes(responseCodes);
        assertThat(result.size(), is(5));
        assertTrue(result.contains(200));
        assertTrue(result.contains(206));
        assertTrue(result.contains(307));
        assertTrue(result.contains(400));
        assertTrue(result.contains(404));
    }

    // --- hasExpectedResponseCodes: false for null/empty, true when all expected codes are present ---

    @Test
    public void hasExpectedResponseCodes_NullList_Test() {
        assertFalse(hasExpectedResponseCodes(
                CodesResponseGenerator.ALLOCATE_JOB_CHUNK_EXPECTED_RESPONSE_CODES,
                null));
    }

    @Test
    public void hasExpectedResponseCodes_EmptyList_Test() {
        assertFalse(hasExpectedResponseCodes(
                CodesResponseGenerator.ALLOCATE_JOB_CHUNK_EXPECTED_RESPONSE_CODES,
                ImmutableList.of()));
    }

    @Test
    public void hasExpectedResponseCodes_FullList_Test() {
        assertTrue(hasExpectedResponseCodes(
                CodesResponseGenerator.ALLOCATE_JOB_CHUNK_EXPECTED_RESPONSE_CODES,
                getTestResponseCodes()));
    }

    // --- toResponseCodes: requests not handled by this generator, or with unexpected codes, must throw ---

    /** The multi-file-delete fixture is not a request this generator supports. */
    @Test(expected = IllegalArgumentException.class)
    public void toResponseCodes_Exception1_Test() {
        final Ds3Request multiFileDelete = getRequestMultiFileDelete();
        generator.toResponseCodes(multiFileDelete);
    }

    /** AllocateJobChunk with a 504 instead of the expected 404 must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void toResponseCodes_Exception2_Test() {
        final Ds3Request malformedAllocateJobChunk = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.spectrads3.job.AllocateJobChunkRequestHandler",
                HttpVerb.PUT,
                Classification.spectrads3,
                null,
                null,
                Action.MODIFY,
                Resource.JOB_CHUNK,
                ResourceType.NON_SINGLETON,
                Operation.ALLOCATE,
                true,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(400, null),
                        new Ds3ResponseCode(504, null)),
                null,
                null);
        generator.toResponseCodes(malformedAllocateJobChunk);
    }

    /** HeadObject with a 504 instead of the expected 404 must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void toResponseCodes_Exception3_Test() {
        final Ds3Request malformedHeadObject = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.amazons3.HeadObjectRequestHandler",
                HttpVerb.HEAD,
                Classification.amazons3,
                Requirement.REQUIRED,
                Requirement.REQUIRED,
                null,
                null,
                null,
                null,
                false,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(504, null)),
                null,
                null);
        generator.toResponseCodes(malformedHeadObject);
    }

    /** HeadBucket with a 504 instead of the expected 404 must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void toResponseCodes_Exception4_Test() {
        final Ds3Request malformedHeadBucket = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.amazons3.HeadBucketRequestHandler",
                HttpVerb.HEAD,
                Classification.amazons3,
                Requirement.REQUIRED,
                Requirement.NOT_ALLOWED,
                null,
                null,
                null,
                null,
                false,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(403, null),
                        new Ds3ResponseCode(504, null)),
                null,
                null);
        generator.toResponseCodes(malformedHeadBucket);
    }

    // --- toResponseCodes happy paths: only the 200 code survives the conversion ---

    @Test
    public void toResponseCodes_AllocateJobChunk_Test() {
        final Ds3Request allocateJobChunk = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.spectrads3.job.AllocateJobChunkRequestHandler",
                HttpVerb.PUT,
                Classification.spectrads3,
                null,
                null,
                Action.MODIFY,
                Resource.JOB_CHUNK,
                ResourceType.NON_SINGLETON,
                Operation.ALLOCATE,
                true,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(400, null),
                        new Ds3ResponseCode(404, null)),
                null,
                null);
        final ImmutableList<Ds3ResponseCode> result = generator.toResponseCodes(allocateJobChunk);
        assertThat(result.size(), is(1));
        assertThat(result.get(0).getCode(), is(200));
    }

    @Test
    public void toResponseCodes_HeadObject_Test() {
        final Ds3Request headObject = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.amazons3.HeadObjectRequestHandler",
                HttpVerb.HEAD,
                Classification.amazons3,
                Requirement.REQUIRED,
                Requirement.REQUIRED,
                null,
                null,
                null,
                null,
                false,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(404, null)),
                null,
                null);
        final ImmutableList<Ds3ResponseCode> result = generator.toResponseCodes(headObject);
        assertThat(result.size(), is(1));
        assertThat(result.get(0).getCode(), is(200));
    }

    @Test
    public void toResponseCodes_HeadBucket_Test() {
        final Ds3Request headBucket = new Ds3Request(
                "com.spectralogic.s3.server.handler.reqhandler.amazons3.HeadBucketRequestHandler",
                HttpVerb.HEAD,
                Classification.amazons3,
                Requirement.REQUIRED,
                Requirement.NOT_ALLOWED,
                null,
                null,
                null,
                null,
                false,
                ImmutableList.of(
                        new Ds3ResponseCode(200, null),
                        new Ds3ResponseCode(403, null),
                        new Ds3ResponseCode(404, null)),
                null,
                null);
        final ImmutableList<Ds3ResponseCode> result = generator.toResponseCodes(headBucket);
        assertThat(result.size(), is(1));
        assertThat(result.get(0).getCode(), is(200));
    }
}
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-dialogflow/v2/1.31.0/com/google/api/services/dialogflow/v2/model/GoogleCloudDialogflowCxV3RunContinuousTestMetadata.java
2383
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
package com.google.api.services.dialogflow.v2.model;

/**
 * Metadata returned for the Environments.RunContinuousTest long running operation.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Dialogflow API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleCloudDialogflowCxV3RunContinuousTestMetadata extends com.google.api.client.json.GenericJson {

  /**
   * The test errors.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDialogflowCxV3TestError> errors;

  /**
   * The test errors.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDialogflowCxV3TestError> getErrors() {
    return errors;
  }

  /**
   * The test errors.
   * @param errors errors or {@code null} for none
   * @return this object, for setter chaining
   */
  public GoogleCloudDialogflowCxV3RunContinuousTestMetadata setErrors(java.util.List<GoogleCloudDialogflowCxV3TestError> errors) {
    this.errors = errors;
    return this;
  }

  // Covariant overrides narrow GenericJson's return type so chained calls keep this class's type.

  @Override
  public GoogleCloudDialogflowCxV3RunContinuousTestMetadata set(String fieldName, Object value) {
    return (GoogleCloudDialogflowCxV3RunContinuousTestMetadata) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudDialogflowCxV3RunContinuousTestMetadata clone() {
    return (GoogleCloudDialogflowCxV3RunContinuousTestMetadata) super.clone();
  }
}
apache-2.0
Cao-Bin/oo
business/users/users-service/src/main/java/com/cb/users/service/disruptor/DisruptorEvent.java
376
package com.cb.users.service.disruptor;

import com.cb.users.entity.kafka.UserKafka;

/**
 * Mutable event slot for the Disruptor ring buffer carrying a single
 * {@link UserKafka} payload between publisher and consumers.
 */
public class DisruptorEvent {

    private UserKafka userKafka;

    /** @return the payload currently held by this slot, or {@code null} after {@link #clear()} */
    public UserKafka getUserKafka() {
        return this.userKafka;
    }

    /** @param userKafka the payload to publish through this slot */
    public void setUserKafka(UserKafka userKafka) {
        this.userKafka = userKafka;
    }

    /** Drops the payload reference so the recycled ring-buffer slot does not pin it for GC. */
    public void clear() {
        this.userKafka = null;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-sagemaker/src/main/java/com/amazonaws/services/sagemaker/model/transform/RStudioServerProDomainSettingsJsonUnmarshaller.java
3784
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.sagemaker.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.sagemaker.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * RStudioServerProDomainSettings JSON Unmarshaller
 *
 * <p>Generated code: walks the JSON token stream and populates a
 * {@code RStudioServerProDomainSettings} from the fields found one level
 * below the object's starting depth.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RStudioServerProDomainSettingsJsonUnmarshaller implements Unmarshaller<RStudioServerProDomainSettings, JsonUnmarshallerContext> {

    public RStudioServerProDomainSettings unmarshall(JsonUnmarshallerContext context) throws Exception {
        RStudioServerProDomainSettings rStudioServerProDomainSettings = new RStudioServerProDomainSettings();

        // Only fields at exactly one level below the current object are consumed;
        // the depth bookkeeping below detects when the object has been fully read.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null in place of the object maps to a null result.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Simple string members are unmarshalled in place; DefaultResourceSpec
                // delegates to its own generated unmarshaller.
                if (context.testExpression("DomainExecutionRoleArn", targetDepth)) {
                    context.nextToken();
                    rStudioServerProDomainSettings.setDomainExecutionRoleArn(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("RStudioConnectUrl", targetDepth)) {
                    context.nextToken();
                    rStudioServerProDomainSettings.setRStudioConnectUrl(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("RStudioPackageManagerUrl", targetDepth)) {
                    context.nextToken();
                    rStudioServerProDomainSettings.setRStudioPackageManagerUrl(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DefaultResourceSpec", targetDepth)) {
                    context.nextToken();
                    rStudioServerProDomainSettings.setDefaultResourceSpec(ResourceSpecJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the stream has closed back up to (or above) the depth
                // at which this object started.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return rStudioServerProDomainSettings;
    }

    private static RStudioServerProDomainSettingsJsonUnmarshaller instance;

    // Lazy singleton; generated SDK code relies on single-threaded or benign-race access here.
    public static RStudioServerProDomainSettingsJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new RStudioServerProDomainSettingsJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
bpervan/master-thesis-app
src/main/java/hr/bpervan/mt/model/Item.java
3942
package hr.bpervan.mt.model;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.*;

/**
 * An item with an id, a display name, a location id and a characteristic
 * vector. On construction the vector is normalized in place: every non-zero
 * entry becomes 1/sqrt(n) where n is the number of non-zero entries.
 *
 * Created by Branimir on 2.6.2015..
 */
public class Item {

    private int itemId;
    private String itemName;
    private int location;

    // Normalized characteristic vector (mutated in place by normalizeMap()).
    public Map<String, Double> characteristicsMap;

    // Names of all characteristic columns seen by fromCsv(); shared, mutable
    // class-level state — populated once per CSV header line.
    public static Set<String> cNames = new HashSet<>();

    public Item(int itemId, String itemName, int location, Map<String, Double> inMap) {
        this.itemId = itemId;
        this.itemName = itemName;
        this.location = location;
        characteristicsMap = inMap;
        this.normalizeMap();
    }

    /**
     * Replaces every non-zero characteristic value with 1/sqrt(n), where n is
     * the count of non-zero entries, yielding a unit-length binary vector.
     * A vector with no non-zero entries is left untouched (fix: previously
     * this computed 1/sqrt(0) == Infinity, which just happened never to be
     * written back because the forEach below filters the same entries).
     */
    private void normalizeMap() {
        long nonZeroCount = characteristicsMap
                .entrySet()
                .stream()
                .filter(p -> Double.compare(p.getValue(), 0) != 0)
                .count();
        if (nonZeroCount == 0) {
            return;
        }
        double norm = 1 / Math.sqrt(nonZeroCount);
        characteristicsMap
                .entrySet()
                .stream()
                .filter(p -> Double.compare(p.getValue(), 0) != 0)
                .forEach(e -> e.setValue(norm));
    }

    public int getItemId() {
        return itemId;
    }

    public void setItemId(int itemId) {
        this.itemId = itemId;
    }

    public String getItemName() {
        return itemName;
    }

    public void setItemName(String itemName) {
        this.itemName = itemName;
    }

    public int getLocation() {
        return location;
    }

    public void setLocation(int location) {
        this.location = location;
    }

    /**
     * Loads items from a classpath resource of the form
     * {@code id;name;location#v1;v2;...} with a header line carrying the
     * characteristic names after the '#'. Best-effort: on a parse error the
     * items read so far are returned; a missing resource yields an empty list
     * (fix: previously a missing resource caused an NPE that was swallowed).
     *
     * @param relativePath classpath-relative path of the CSV resource
     * @return the parsed items, possibly empty
     */
    public static List<Item> fromCsv(String relativePath) {
        List<Item> helperList = new ArrayList<>();

        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        InputStream inputStream = classLoader.getResourceAsStream(relativePath);
        if (inputStream == null) {
            // Preserve the original error-path result (empty list) but report clearly.
            System.err.println("Resource not found on classpath: " + relativePath);
            return helperList;
        }

        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
            // Header: "<ignored>#name1;name2;..."
            String line = bufferedReader.readLine();
            String[] partsPreliminary = line.split("#");
            String[] characteristicsNames = partsPreliminary[1].split(";");
            Item.cNames.addAll(Arrays.asList(characteristicsNames));

            while ((line = bufferedReader.readLine()) != null) {
                String[] parts = line.split("#");
                String itemData = parts[0];
                String characteristicsData = parts[1];

                String[] item = itemData.split(";");
                String[] characteristic = characteristicsData.split(";");

                Map<String, Double> tempMap = new HashMap<>();
                for (int i = 0; i < characteristic.length; ++i) {
                    tempMap.put(characteristicsNames[i], Double.parseDouble(characteristic[i]));
                }

                helperList.add(new Item(
                        Integer.parseInt(item[0]),
                        item[1],
                        Integer.parseInt(item[2]),
                        tempMap
                ));
            }
            // Fix: the redundant manual close() inside try-with-resources was removed;
            // the reader is closed automatically.
        } catch (Exception e) {
            // Best-effort as in the original design: keep whatever parsed successfully.
            e.printStackTrace();
        }

        return helperList;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Item)) return false;

        Item item = (Item) o;

        // Identity is (itemId, itemName); location and characteristics are excluded.
        if (getItemId() != item.getItemId()) return false;
        return !(getItemName() != null ? !getItemName().equals(item.getItemName()) : item.getItemName() != null);
    }

    @Override
    public int hashCode() {
        int result = getItemId();
        result = 31 * result + (getItemName() != null ? getItemName().hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "Item{" +
                "itemId=" + itemId +
                ", itemName='" + itemName + '\'' +
                '}';
    }
}
apache-2.0
fransfilastap/pm5
src/main/java/id/franspratama/geol/core/services/IGisService.java
2846
package id.franspratama.geol.core.services;

import java.util.List;
import java.util.Set;

import id.franspratama.geol.core.pojo.ActiveAlarmExport;
import id.franspratama.geol.core.pojo.LebaranRoute;
import id.franspratama.geol.core.pojo.Site;
import id.franspratama.geol.core.pojo.VipGroup;
import id.franspratama.geol.web.api.ActiveAlarmDTO;
import id.franspratama.geol.web.api.GisDTO;
import id.franspratama.geol.web.api.LocationDTO;
import id.franspratama.geol.web.api.SiteAvailabilityDTO;

/**
 * GIS lookup service: site status, active alarms, alarm exports and
 * availability, queried by coordinates, by site, by VIP group or by route.
 *
 * NOTE(review): the javadoc below is reconstructed from signatures and names;
 * radius units and the exact semantics of each overload should be confirmed
 * against the implementation.
 *
 * @author fransfilastap
 */
public interface IGisService {

    // Default search radius; the original inline comment says kilometers.
    public double DEFAULT_RADIUS = 0.5; // in kilometer

    /**
     * Status of sites around a coordinate.
     *
     * @param lat    latitude of the center point
     * @param lng    longitude of the center point
     * @param radius search radius (presumably kilometers, cf. DEFAULT_RADIUS)
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatus(double lat, double lng, double radius);

    /**
     * Status of sites near a path described by an ordered list of locations.
     *
     * @param dto path waypoints
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatusNearPath(List<LocationDTO> dto);

    /**
     * Status of sites around the given site.
     *
     * @param site   center site
     * @param radius search radius
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatus(Site site, double radius);

    /**
     * Status of sites around the site identified by the given id/name.
     *
     * @param site   site identifier
     * @param radius search radius
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatus(String site, double radius);

    /**
     * Status of the sites belonging to a VIP group.
     *
     * @param group VIP group
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatus(VipGroup group);

    /**
     * Status of the sites along a Lebaran route.
     *
     * @param route route of interest
     * @return matching site statuses
     */
    public Set<GisDTO> getSiteStatus(LebaranRoute route);

    /**
     * Active alarms raised for a site.
     *
     * @param site site identifier
     * @return active alarms
     */
    public Set<ActiveAlarmDTO> getSiteAlarms(String site);

    /**
     * Exportable active alarms around a coordinate.
     *
     * @param latitude  latitude of the center point
     * @param longitude longitude of the center point
     * @param radius    search radius
     * @return exportable alarm records
     */
    public Set<ActiveAlarmExport> getActiveAlarmExport(double latitude, double longitude, double radius);

    /**
     * Exportable active alarms near a path of locations.
     *
     * @param dto path waypoints
     * @return exportable alarm records
     */
    public Set<ActiveAlarmExport> getActiveAlarmExport(List<LocationDTO> dto);

    /**
     * Exportable active alarms for a VIP group.
     *
     * @param group VIP group
     * @return exportable alarm records
     */
    public Set<ActiveAlarmExport> getActiveAlarmExport(VipGroup group);

    /**
     * Exportable active alarms along a Lebaran route.
     *
     * @param route route of interest
     * @return exportable alarm records
     */
    public Set<ActiveAlarmExport> getActiveAlarmExport(LebaranRoute route);

    /**
     * Brief (summary) information for a site.
     *
     * @param site site identifier
     * @return summary GIS record
     */
    public GisDTO getBriefInformationOfSite(String site);

    /**
     * Full site record.
     *
     * @param siteId site identifier
     * @return the full site entity
     */
    public Site getFullInformationOfSite(String siteId);

    /**
     * Availability figures for a site, filtered by technology.
     *
     * @param siteid     site identifier
     * @param technology technology filter (e.g. radio technology — TODO confirm accepted values)
     * @return availability records
     */
    public Set<SiteAvailabilityDTO> getSiteAvailability(String siteid, String technology);
}
apache-2.0
Breinify/brein-time-utilities
src/com/brein/time/timeintervals/intervals/IdInterval.java
4806
package com.brein.time.timeintervals.intervals;

import com.brein.time.exceptions.IllegalConfiguration;
import com.brein.time.exceptions.IllegalTimeInterval;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.util.Objects;

/**
 * An {@link IInterval} decorated with an identifier. Ordering first follows the
 * wrapped interval; only when two intervals are equal is the identifier used as
 * a tie-breaker (see {@link #compareId(IdInterval)}). The identifier may be
 * {@code null}.
 */
public class IdInterval<I extends Comparable<I> & Serializable, T extends Comparable<T> & Serializable>
        implements IInterval<T>, Externalizable {
    private I id;
    private IInterval<T> wrappedInterval;

    public IdInterval() {
        // just used for serialization
    }

    @SuppressWarnings("unchecked")
    public IdInterval(final I id, final T start, final T end) {
        final Class<T> clazz;

        // Derive the value type from whichever bound is present; both null is unresolvable.
        if (start == null && end == null) {
            throw new IllegalTimeInterval("Please use the constructor with specified clazz, " +
                    "if start and end are null.");
        } else if (start == null) {
            clazz = (Class<T>) end.getClass();
        } else {
            clazz = (Class<T>) start.getClass();
        }

        init(id, createInterval(start, end, clazz));
    }

    public IdInterval(final I id, final T start, final T end, final Class<T> clazz) {
        init(id, createInterval(start, end, clazz));
    }

    public IdInterval(final I id, final IInterval<T> wrappedInterval) {
        init(id, wrappedInterval);
    }

    /**
     * Creates the interval to wrap. Only {@link Number}-based intervals have a
     * default implementation; override for other value types.
     *
     * @throws IllegalConfiguration if no default interval type exists for {@code clazz}
     */
    @SuppressWarnings("unchecked")
    protected IInterval<T> createInterval(final T start, final T end, final Class<T> clazz) {
        if (Number.class.isAssignableFrom(clazz)) {
            final Class<? extends Number> numberClazz = (Class<? extends Number>) clazz;
            return new NumberInterval(numberClazz, numberClazz.cast(start), numberClazz.cast(end));
        } else {
            throw new IllegalConfiguration("There is currently no default implementation available for the specified " +
                    "clazz '" + clazz + "', you can override the `createInterval` method, if an interval-type is " +
                    "known.");
        }
    }

    protected void init(final I id, final IInterval<T> wrappedInterval) {
        this.id = id;
        this.wrappedInterval = wrappedInterval;
    }

    public I getId() {
        return this.id;
    }

    @Override
    public T getNormStart() {
        return wrappedInterval.getNormStart();
    }

    @Override
    public T getNormEnd() {
        return wrappedInterval.getNormEnd();
    }

    @Override
    public String getUniqueIdentifier() {
        return wrappedInterval.getUniqueIdentifier();
    }

    @Override
    @SuppressWarnings("NullableProblems")
    public int compareTo(final IInterval i) {
        final int cmp = this.wrappedInterval.compareTo(i);

        if (cmp == 0) {
            // the intervals are equal, so we must use the identifiers
            if (i instanceof IdInterval) {
                return compareId(IdInterval.class.cast(i));
            }
            // we don't have any identifiers (the instance is of a different type)
            else {
                return getClass().getName().compareTo(i.getClass().getName());
            }
        } else {
            return cmp;
        }
    }

    /**
     * Tie-breaker comparison on the identifiers; {@code null} ids sort first.
     *
     * Bug fix: the original null-checked the wrapper ({@code iId == null})
     * instead of the other instance's id, so a non-null {@code IdInterval}
     * carrying a null id caused an NPE at {@code iId.id.toString()}, and two
     * null ids compared as -1 rather than 0.
     */
    public int compareId(final IdInterval iId) {
        final Object otherId = iId == null ? null : iId.id;

        if (this.id == null && otherId == null) {
            return 0;
        } else if (this.id == null) {
            return -1;
        } else if (otherId == null) {
            return 1;
        } else if (this.id.getClass().isInstance(otherId)) {
            //noinspection unchecked
            return this.id.compareTo((I) otherId);
        } else {
            // ids of unrelated types: fall back to a stable string comparison
            return this.id.toString().compareTo(otherId.toString());
        }
    }

    @SuppressWarnings("unchecked")
    public <X extends IInterval<T>> X interval() {
        return (X) wrappedInterval;
    }

    @Override
    public int hashCode() {
        // Bug fix: Objects.hashCode tolerates a null id, which compareId explicitly supports.
        return Objects.hashCode(this.id);
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        } else if (obj == null) {
            return false;
        } else if (IInterval.class.isInstance(obj)) {
            return compareTo(IInterval.class.cast(obj)) == 0;
        } else {
            return false;
        }
    }

    @Override
    public String toString() {
        return String.format("%s@%s", this.id, this.wrappedInterval);
    }

    @Override
    public void writeExternal(final ObjectOutput out) throws IOException {
        out.writeObject(this.id);
        out.writeObject(this.wrappedInterval);
    }

    @Override
    @SuppressWarnings("unchecked")
    public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
        this.id = (I) in.readObject();
        this.wrappedInterval = (IInterval<T>) in.readObject();
    }
}
apache-2.0
ueno-t/datacopy
src/jp/scriptkidie/datacopy/sql/model/educationdb/BulkReceptionsManages.java
399
package jp.scriptkidie.datacopy.sql.model.educationdb;

/**
 * Table model extending {@code BulkReceptionsManagesKey} with the
 * bulk-reception-number column.
 */
public class BulkReceptionsManages extends BulkReceptionsManagesKey {

    /** Bulk reception number; always stored trimmed (or null). */
    private String bulkReceptionNo;

    /**
     * Returns the bulk reception number.
     *
     * @return the trimmed bulk reception number, or null if never set
     */
    public String getBulkReceptionNo() {
        return bulkReceptionNo;
    }

    /**
     * Stores the bulk reception number, trimming surrounding whitespace.
     * A null argument is stored as null.
     *
     * @param bulkReceptionNo value to store; may be null
     */
    public void setBulkReceptionNo(String bulkReceptionNo) {
        if (bulkReceptionNo == null) {
            this.bulkReceptionNo = null;
        } else {
            this.bulkReceptionNo = bulkReceptionNo.trim();
        }
    }
}
apache-2.0
J360-ME/j360-rpc
src/main/java/me/j360/rpc/codec/protostuff/RpcRequest.java
1454
package me.j360.rpc.codec.protostuff;

import java.util.Map;

/**
 * RPC request message: identifies the remote method to invoke
 * (class name, method name, parameter types) together with the
 * argument values and a free-form header map.
 *
 * @author huangyong
 * @author min_xu
 */
public class RpcRequest {

    /** Correlation id used to match a response to this request. */
    private Long requestId;

    /** Fully qualified name of the target class. */
    private String className;

    /** Name of the target method. */
    private String methodName;

    /** Declared parameter types of the target method. */
    private Class<?>[] parameterTypes;

    /** Argument values for the invocation. */
    private Object[] parameters;

    /**
     * Extra metadata reserved for later use (e.g. tracing links, TCC
     * transaction info).
     */
    private Map<String, String> headers;

    public Long getRequestId() {
        return requestId;
    }

    public void setRequestId(Long requestId) {
        this.requestId = requestId;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public Class<?>[] getParameterTypes() {
        return parameterTypes;
    }

    public void setParameterTypes(Class<?>[] parameterTypes) {
        this.parameterTypes = parameterTypes;
    }

    public Object[] getParameters() {
        return parameters;
    }

    public void setParameters(Object[] parameters) {
        this.parameters = parameters;
    }

    public Map<String, String> getHeaders() {
        return headers;
    }

    public void setHeaders(Map<String, String> headers) {
        this.headers = headers;
    }
}
apache-2.0
hastebrot/spock
spock-specs/src/test/java/org/spockframework/mock/InterfaceWithNestedClass.java
212
package org.spockframework.mock;

/**
 * Interface declaring a nested class; presumably a fixture exercising
 * mocking of interfaces that contain nested types — confirm against the
 * Spock mock specs that reference it.
 */
public interface InterfaceWithNestedClass {

  // Nested class with a private constructor: it cannot be instantiated
  // directly, and the factory deliberately returns null.
  class Service {
    private Service() {}

    public static InterfaceWithNestedClass getInstance() {
      return null;
    }
  }
}
apache-2.0
futabooo/conference-app-2017
app/src/main/java/io/github/droidkaigi/confsched2017/viewmodel/ContributorsViewModel.java
4956
package io.github.droidkaigi.confsched2017.viewmodel;

import android.databinding.BaseObservable;
import android.databinding.Bindable;
import android.databinding.ObservableArrayList;
import android.databinding.ObservableList;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.view.View;

import com.annimon.stream.Stream;

import java.util.List;

import javax.inject.Inject;

import io.github.droidkaigi.confsched2017.BR;
import io.github.droidkaigi.confsched2017.R;
import io.github.droidkaigi.confsched2017.di.scope.FragmentScope;
import io.github.droidkaigi.confsched2017.repository.contributors.ContributorsRepository;
import io.github.droidkaigi.confsched2017.view.helper.ResourceResolver;
import io.github.droidkaigi.confsched2017.view.helper.Navigator;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
import timber.log.Timber;

/**
 * View model backing the contributors screen: loads contributor data from
 * {@link ContributorsRepository}, exposes the resulting row view models and
 * loading/refreshing flags via data binding, and updates the toolbar title
 * with the contributor count.
 */
@FragmentScope
public final class ContributorsViewModel extends BaseObservable implements ViewModel {

    public static final String TAG = ContributorsViewModel.class.getSimpleName();

    private final ResourceResolver resourceResolver;

    private final Navigator navigator;

    private final ToolbarViewModel toolbarViewModel;

    private final ContributorsRepository contributorsRepository;

    // Holds the repository subscription; cleared in destroy().
    private final CompositeDisposable compositeDisposable;

    // Live list bound to the RecyclerView; mutated in place by renderContributors().
    private ObservableList<ContributorViewModel> viewModels;

    // Visibility flag for the loading indicator (View.VISIBLE / View.GONE).
    private int loadingVisibility;

    // Backs the swipe-to-refresh spinner state.
    private boolean refreshing;

    // Optional UI callback for error display; null until setCallback() is called.
    @Nullable
    private Callback callback;

    @Inject
    ContributorsViewModel(
            ResourceResolver resourceResolver,
            Navigator navigator,
            ToolbarViewModel toolbarViewModel,
            ContributorsRepository contributorsRepository,
            CompositeDisposable compositeDisposable) {
        this.resourceResolver = resourceResolver;
        this.navigator = navigator;
        this.toolbarViewModel = toolbarViewModel;
        this.contributorsRepository = contributorsRepository;
        this.compositeDisposable = compositeDisposable;
        this.viewModels = new ObservableArrayList<>();
    }

    /** Registers the UI callback used to surface load errors. */
    public void setCallback(@NonNull Callback callback) {
        this.callback = callback;
    }

    /** Kicks off the initial (non-refresh) load. */
    public void start() {
        loadContributors(false);
    }

    /** Cancels any in-flight repository subscription. */
    @Override
    public void destroy() {
        compositeDisposable.clear();
    }

    @Bindable
    public int getLoadingVisibility() {
        return loadingVisibility;
    }

    // Private: visibility is driven internally by the load lifecycle only.
    private void setLoadingVisibility(int visibility) {
        this.loadingVisibility = visibility;
        notifyPropertyChanged(BR.loadingVisibility);
    }

    @Bindable
    public boolean getRefreshing() {
        return refreshing;
    }

    private void setRefreshing(boolean refreshing) {
        this.refreshing = refreshing;
        notifyPropertyChanged(BR.refreshing);
    }

    /** Swipe-to-refresh entry point: forces a refresh from the repository. */
    public void onSwipeRefresh() {
        loadContributors(true);
    }

    /** Retry after a failed load (non-refresh path, shows the loading view). */
    public void retry() {
        loadContributors(false);
    }

    public ObservableList<ContributorViewModel> getContributorViewModels() {
        return this.viewModels;
    }

    /**
     * Loads contributors on the IO scheduler and renders them on the main
     * thread. When {@code refresh} is true the repository cache is marked
     * dirty instead of showing the full-screen loading indicator.
     */
    private void loadContributors(boolean refresh) {
        if (refresh) {
            contributorsRepository.setDirty(true);
        } else {
            setLoadingVisibility(View.VISIBLE);
        }
        Disposable disposable = contributorsRepository.findAll()
                .map(contributors -> Stream.of(contributors)
                        .map(contributor -> new ContributorViewModel(navigator, contributor))
                        .toList())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(
                        this::renderContributors,
                        throwable -> {
                            // On failure: hide the loader, notify the UI (if attached), and log.
                            setLoadingVisibility(View.GONE);
                            if (callback != null) {
                                callback.showError(R.string.contributors_load_failed);
                            }
                            Timber.tag(TAG).e(throwable, "Failed to show contributors.");
                        });
        compositeDisposable.add(disposable);
    }

    // Replaces the bound list contents and updates the toolbar title with the
    // contributor count; also clears both loading/refreshing indicators.
    private void renderContributors(List<ContributorViewModel> contributorViewModels) {
        viewModels.clear();
        viewModels.addAll(contributorViewModels);
        String title = resourceResolver.getString(R.string.contributors)
                + " "
                + resourceResolver.getString(R.string.contributors_people, contributorViewModels.size());
        toolbarViewModel.setToolbarTitle(title);
        setLoadingVisibility(View.GONE);
        setRefreshing(false);
    }

    /** UI hook for showing a load-error message. */
    public interface Callback {

        void showError(@StringRes int textRes);
    }
}
apache-2.0
citygml4j/citygml4j
src-gen/main/java/net/opengis/citygml/building/_2/CeilingSurfaceType.java
3519
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.3.2
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated: 2019.02.03 at 11:14:53 PM CET
//

package net.opengis.citygml.building._2;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for CeilingSurfaceType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="CeilingSurfaceType"&gt;
 *   &lt;complexContent&gt;
 *     &lt;extension base="{http://www.opengis.net/citygml/building/2.0}AbstractBoundarySurfaceType"&gt;
 *       &lt;sequence&gt;
 *         &lt;element ref="{http://www.opengis.net/citygml/building/2.0}_GenericApplicationPropertyOfCeilingSurface" maxOccurs="unbounded" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *     &lt;/extension&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CeilingSurfaceType", propOrder = {
    "_GenericApplicationPropertyOfCeilingSurface"
})
public class CeilingSurfaceType
    extends AbstractBoundarySurfaceType
{

    @XmlElementRef(name = "_GenericApplicationPropertyOfCeilingSurface", namespace = "http://www.opengis.net/citygml/building/2.0", type = JAXBElement.class, required = false)
    protected List<JAXBElement<Object>> _GenericApplicationPropertyOfCeilingSurface;

    /**
     * Gets the value of the genericApplicationPropertyOfCeilingSurface property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the genericApplicationPropertyOfCeilingSurface property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    get_GenericApplicationPropertyOfCeilingSurface().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link JAXBElement }{@code <}{@link Object }{@code >}
     *
     *
     */
    public List<JAXBElement<Object>> get_GenericApplicationPropertyOfCeilingSurface() {
        // Lazily initialized so the field stays null (and thus unmarshalled
        // as absent) until the list is first accessed.
        if (_GenericApplicationPropertyOfCeilingSurface == null) {
            _GenericApplicationPropertyOfCeilingSurface = new ArrayList<JAXBElement<Object>>();
        }
        return this._GenericApplicationPropertyOfCeilingSurface;
    }

    /** Returns true when the property has at least one element. */
    public boolean isSet_GenericApplicationPropertyOfCeilingSurface() {
        return ((this._GenericApplicationPropertyOfCeilingSurface!= null)&&(!this._GenericApplicationPropertyOfCeilingSurface.isEmpty()));
    }

    /** Resets the property to its unset (null) state. */
    public void unset_GenericApplicationPropertyOfCeilingSurface() {
        this._GenericApplicationPropertyOfCeilingSurface = null;
    }

    /** Replaces the backing list wholesale; the list is stored, not copied. */
    public void set_GenericApplicationPropertyOfCeilingSurface(List<JAXBElement<Object>> value) {
        this._GenericApplicationPropertyOfCeilingSurface = value;
    }

}
apache-2.0
wind-clothes/web-common
src/main/java/com/web/common/web/common/util/msg/service/WeixinTemplateMsgService.java
1330
package com.web.common.web.common.util.msg.service;

import java.util.LinkedList;

import com.web.common.web.common.util.msg.data.TemplateMsg;
import com.web.common.web.common.util.msg.send.result.WeixinTemplateMsgSendResult;

/**
 * WeChat (Weixin) template-message service interface.
 *
 * @author xiongchengwei
 */
public interface WeixinTemplateMsgService {

    /**
     * Sends a single WeChat template message.
     *
     * @param toUser     openid of the user receiving the template message
     * @param templateId template id
     * @param url        developer-configured server URL
     * @param topColor   color value
     * @param data       JSON-formatted payload to send
     * @param userId     id of the receiving user
     * @param username   name of the receiving user
     * @param sysName    name of the system sending the message
     * @return the send result
     */
    WeixinTemplateMsgSendResult send(String toUser, String templateId, String url, String topColor, String data,
            Long userId, String username, String sysName);

    /**
     * Sends WeChat template messages in list order.
     *
     * @param tms     template messages, in the order they should be sent
     * @param sysName name of the sending system
     * @return the send result
     */
    WeixinTemplateMsgSendResult sendByOrder(LinkedList<TemplateMsg> tms, String sysName);
}
apache-2.0
uniqueid001/enunciate
jaxb/src/main/java/com/webcohesion/enunciate/modules/jaxb/model/types/XmlPrimitiveType.java
1653
/*
 * Copyright 2006-2008 Web Cohesion
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.webcohesion.enunciate.modules.jaxb.model.types;

import javax.lang.model.type.PrimitiveType;
import javax.xml.namespace.QName;

/**
 * Maps a Java primitive type to its XML Schema built-in simple type.
 *
 * @author Ryan Heaton
 */
public class XmlPrimitiveType implements XmlType {

  private final PrimitiveType type;

  public XmlPrimitiveType(PrimitiveType delegate) {
    this.type = delegate;
  }

  /**
   * The XSD local name for the wrapped primitive kind.
   *
   * <p>Note: CHAR maps to "unsignedShort" — presumably because XML Schema has
   * no char type and a Java char is a 16-bit unsigned value; confirm against
   * the JAXB default type mapping. Returns null for any kind not listed
   * (e.g. VOID).
   */
  public String getName() {
    switch (this.type.getKind()) {
      case BOOLEAN:
        return "boolean";
      case BYTE:
        return "byte";
      case DOUBLE:
        return "double";
      case FLOAT:
        return "float";
      case INT:
        return "int";
      case LONG:
        return "long";
      case SHORT:
        return "short";
      case CHAR:
        return "unsignedShort";
    }
    return null;
  }

  /** All primitives live in the XML Schema built-in namespace. */
  public String getNamespace() {
    return "http://www.w3.org/2001/XMLSchema";
  }

  /** Qualified name combining the XSD namespace and the local name. */
  public QName getQname() {
    return new QName(getNamespace(), getName());
  }

  /** Built-in schema types are never anonymous. */
  public boolean isAnonymous() {
    return false;
  }

  /** Primitives are always simple types. */
  public boolean isSimple() {
    return true;
  }
}
apache-2.0
bonprix/solidsearch-manager
src/main/java/de/solidsearch/manager/dao/SignalManager.java
11866
package de.solidsearch.manager.dao;

import java.io.Serializable;
import java.math.BigInteger;
import java.util.List;

import org.apache.log4j.Logger;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.transform.Transformers;
import org.hibernate.type.IntegerType;
import org.hibernate.type.LongType;
import org.hibernate.type.ShortType;
import org.hibernate.type.StringType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import de.solidsearch.manager.data.Domain;
import de.solidsearch.manager.data.Signal;

/**
 * DAO for the hand-partitioned "signals" table: a parent table plus
 * PARTITIONS child tables (PostgreSQL table inheritance), partitioned by
 * sourcePartitionKey over the full int range.
 *
 * NOTE(review): several methods build SQL by concatenating URI/domain values
 * directly into the statement (see per-method notes). If those values come
 * from crawled pages this is a SQL-injection risk and will also break on
 * values containing single quotes — should be converted to bind parameters.
 */
@Component("SignalManager")
@Scope(value = "prototype")
public class SignalManager implements Serializable
{
	private static final long serialVersionUID = 2109936053042116371L;

	@Autowired
	SessionFactory sessionFactory;

	// Column definition fragment used when creating the parent table.
	private final String tableRows = "(managerid smallint NOT NULL,"+
			"sourceDomainName varchar(300) NOT NULL,"+
			"destinationDomainName varchar(300) NOT NULL,"+
			"sourceuri varchar(4000) NOT NULL,"+
			"destinationuri varchar(4000) NOT NULL,"+
			"sourcePartitionKey integer NOT NULL,"+
			"lastfoundtimestamp bigint,"+
			"signalPower smallint NOT NULL)";

	// Column list fragment for INSERT statements; order must match
	// setQueryParams() and insertParameter.
	private final String insertRows = "(managerid,"+
			"sourceDomainName,"+
			"destinationDomainName,"+
			"sourceuri,"+
			"destinationuri,"+
			"sourcePartitionKey,"+
			"lastfoundtimestamp,"+
			"signalPower)";

	private static final Logger logger = Logger.getLogger(SignalManager.class.getName());

	// Partitioning: 8 children, keys start at Integer.MIN_VALUE and advance in
	// 2^29 steps, covering the whole int range.
	private final int PARTITIONS = 8;
	private final int PARTITION_SIZE = -2147483648;
	private final int PARTITION_STEPS = 536870912;

	// Positional placeholders matching the 8 columns in insertRows.
	private final String insertParameter = "(?,?,?,?,?,?,?,?)";

	/**
	 * Creates the parent "signals" table and its child partitions if the
	 * parent does not exist yet. Each child gets a CHECK constraint on its
	 * sourcePartitionKey range and a primary key on (sourceuri, destinationuri).
	 */
	public void createSignalTablesIfNotExists()
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			String sql = "SELECT EXISTS( SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'signals')";

			boolean tableExists = (Boolean)session.createSQLQuery(sql).list().get(0);

			if (!tableExists)
			{
				sql = "create table signals " + tableRows;
				session.createSQLQuery(sql).executeUpdate();

				int lowerEnd = PARTITION_SIZE;
				int steps = PARTITION_STEPS;

				for (int i = 0; i < PARTITIONS; i++)
				{
					if (i == (PARTITIONS - 1))
					{
						// Last partition: shrink the step by one and use an
						// inclusive upper bound so Integer.MAX_VALUE is covered.
						steps--;
						sql = "create table signals_child_" + i + " (CHECK ( sourcePartitionKey >= " + lowerEnd + " AND sourcePartitionKey <= " + (lowerEnd + steps) + " )) inherits (signals)";
					}
					else
					{
						sql = "create table signals_child_" + i + " (CHECK ( sourcePartitionKey >= " + lowerEnd + " AND sourcePartitionKey < " + (lowerEnd + steps) + " )) inherits (signals)";
					}
					lowerEnd = lowerEnd + steps;

					session.createSQLQuery(sql).executeUpdate();
					sql = "alter table signals_child_" + i + " add primary key (sourceuri,destinationuri)";
					session.createSQLQuery(sql).executeUpdate();
					session.flush();
				}
			}
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Inserts a signal directly into the child partition whose key range
	 * contains the signal's sourcePartitionKey. Values are bound via
	 * positional parameters (see setQueryParams). Failures are logged and
	 * swallowed.
	 */
	public void saveSignal(Signal signal)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			SQLQuery query;

			int lowerEnd = PARTITION_SIZE;
			int steps = PARTITION_STEPS;

			for (int y = 0; y < PARTITIONS; y++)
			{
				boolean lastPartition = false;
				if (y == (PARTITIONS - 1))
				{
					steps--;
					lastPartition = true;
				}
				if (lastPartition)
				{
					// Inclusive upper bound on the last partition.
					if ((signal.getSourcePartitionKey() >= lowerEnd) && (signal.getSourcePartitionKey() <= (lowerEnd + steps)))
					{
						sql.append("insert into signals_child_");
						sql.append(y);
						sql.append(insertRows);
						sql.append(" values ");
						sql.append(insertParameter);

						query = session.createSQLQuery(sql.toString());
						setQueryParams(0, query, signal);
						query.executeUpdate();
						session.flush();
						break;
					}
				}
				else
				{
					if ((signal.getSourcePartitionKey() >= lowerEnd) && (signal.getSourcePartitionKey() < (lowerEnd + steps)))
					{
						sql.append("insert into signals_child_");
						sql.append(y);
						sql.append(insertRows);
						sql.append(" values ");
						sql.append(insertParameter);

						query = session.createSQLQuery(sql.toString());
						setQueryParams(0, query, signal);
						query.executeUpdate();
						session.flush();
						break;
					}
				}
				lowerEnd = lowerEnd + steps;
			}
		}
		catch (Exception e)
		{
			logger.error("Insert failed for signal-source: " + signal.getSourceUri() + " destination:" + signal.getDestinationDomainName());
			e.printStackTrace();
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Updates signalpower and lastFoundTimestamp for one signal row.
	 *
	 * NOTE(review): the two new values are bound as parameters, but sourceUri
	 * and destinationUri are concatenated into the WHERE clause — injection
	 * risk and breaks on URIs containing a single quote.
	 */
	public void updateSignalPowerAndTimestamp(String sourceUri, String destinationUri, int sourcePartitionKey, short newSignalPower, long lastFoundTimestamp)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			SQLQuery query;

			sql.append("update signals ")
					.append(" set signalpower=?,lastFoundTimestamp=? where sourceuri ='").append(sourceUri).append("' AND destinationuri ='").append(destinationUri).append("' AND sourcePartitionKey =").append(sourcePartitionKey);

			query = session.createSQLQuery(sql.toString());
			query.setParameter(0, newSignalPower);
			query.setParameter(1, lastFoundTimestamp);
			query.executeUpdate();
			session.flush();
		}
		catch (Exception e)
		{
			logger.error("Problems during updateSignal. src: " + sourceUri + " dest:" + destinationUri);
			e.printStackTrace();
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Deletes one signal row identified by source/destination URI and
	 * partition key.
	 *
	 * NOTE(review): the SQL contains no '?' placeholders, yet
	 * setQueryParams(0, query, signal) binds 8 positional parameters — this
	 * looks like it would throw at runtime (then be caught and only logged).
	 * Also the URI values are concatenated into the WHERE clause (injection
	 * risk). Verify against the Hibernate version in use.
	 */
	public void deleteSignal(Signal signal)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			SQLQuery query;

			sql.append("delete from signals ")
					.append(" where sourceuri ='").append(signal.getSourceUri()).append("' AND destinationuri ='").append(signal.getDestinationUri()).append("' AND sourcePartitionKey =").append(signal.getSourcePartitionKey());

			query = session.createSQLQuery(sql.toString());
			setQueryParams(0, query, signal);
			query.executeUpdate();
			session.flush();
		}
		catch (Exception e)
		{
			logger.error("Problems during deleteSignal. src: " + signal.getSourceUri() + " dest:" + signal.getDestinationUri());
			e.printStackTrace();
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Deletes all signals of the given source domain (within one partition)
	 * whose lastFoundTimestamp differs from the domain's last crawl — i.e.
	 * links that were not seen again in the most recent crawl.
	 *
	 * NOTE(review): domain name is concatenated into the WHERE clause —
	 * injection risk.
	 */
	public void deleteLostSignalsForSourceDomain(Domain sourceDomain, int sourcePartitionKey)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			SQLQuery query;

			sql.append("delete from signals ")
					.append(" where sourceDomainName ='").append(sourceDomain.getDomainName()).append("' AND lastFoundTimestamp <>").append(sourceDomain.getLastCrawledTimestamp()).append(" AND sourcePartitionKey =").append(sourcePartitionKey);

			query = session.createSQLQuery(sql.toString());
			query.executeUpdate();
			session.flush();
		}
		catch (Exception e)
		{
			logger.error("Problems during deleteSignals. src: " + sourceDomain.getDomainName());
			e.printStackTrace();
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Loads a single signal by source URI, destination URI and partition key.
	 * Returns null when not found or on error (errors are logged).
	 *
	 * NOTE(review): values concatenated into the WHERE clause — injection risk.
	 */
	public Signal getSignal(String sourceUri, String destinationUri, int sourcePartitionKey)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			sql.append("select * from signals ").append(" where sourceuri ='").append(sourceUri).append("'").append(" AND destinationuri ='").append(destinationUri).append("'");
			sql.append(" AND sourcePartitionKey =").append(sourcePartitionKey);

			SQLQuery query = session.createSQLQuery(sql.toString());
			addScalars(query);
			query.setResultTransformer(Transformers.aliasToBean(Signal.class));

			@SuppressWarnings("unchecked")
			List<Signal> signalList = (List<Signal>) query.list();
			session.flush();
			if (signalList.isEmpty())
				return null;

			return signalList.get(0);
		}
		catch (Exception e)
		{
			logger.error("Problems during getSignal. sourceUri: " +sourceUri + " destinationUri: " + destinationUri + " sourcePartitionKey: " + sourcePartitionKey);
			e.printStackTrace();
			return null;
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/**
	 * Loads all signals of a source domain within one partition; returns null
	 * when there are none.
	 *
	 * NOTE(review): the sourcePartitionKey predicate is appended twice (once
	 * quoted as a string literal, once unquoted) — redundant at best; confirm
	 * the quoted form is accepted by the target database. Domain name is
	 * concatenated into the WHERE clause — injection risk.
	 */
	public List<Signal> getSignalsForSourceDomainName(String sourceDomainName, int sourcePartitionKey)
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			sql.append("select * from signals ").append(" where sourceDomainName ='").append(sourceDomainName).append("'").append(" AND sourcePartitionKey ='").append(sourcePartitionKey).append("'");
			sql.append(" AND sourcePartitionKey =").append(sourcePartitionKey);

			SQLQuery query = session.createSQLQuery(sql.toString());
			addScalars(query);
			query.setResultTransformer(Transformers.aliasToBean(Signal.class));

			@SuppressWarnings("unchecked")
			List<Signal> signalList = (List<Signal>) query.list();
			session.flush();
			if (signalList.isEmpty())
				return null;

			return signalList;
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/** Counts all rows across the parent and child tables. */
	public long getTotalCountOfSignals()
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();

			StringBuffer sql = new StringBuffer();
			sql.append("select count(*) from signals");

			SQLQuery query = session.createSQLQuery(sql.toString());
			Object value = query.uniqueResult();
			if (value == null)
			{
				return 0;
			}
			session.flush();
			// PostgreSQL returns count(*) as a BigInteger through the native query.
			return ((BigInteger) value).longValue();
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	/** Truncates every child partition (the parent itself holds no rows here). */
	public void truncateSignals()
	{
		Session session = null;
		try
		{
			session = sessionFactory.openSession();
			for (int i = 0; i < PARTITIONS; i++)
			{
				String sql = "truncate signals_child_" + i;
				session.createSQLQuery(sql).executeUpdate();
				session.flush();
			}
		}
		finally
		{
			if (session != null)
				session.close();
		}
	}

	// Declares the result column types so Hibernate can map the native query
	// onto Signal bean properties via the alias-to-bean transformer.
	private void addScalars(SQLQuery query)
	{
		query.addScalar("managerId", ShortType.INSTANCE);
		query.addScalar("sourceDomainName", StringType.INSTANCE);
		query.addScalar("destinationDomainName", StringType.INSTANCE);
		query.addScalar("sourceUri", StringType.INSTANCE);
		query.addScalar("destinationUri", StringType.INSTANCE);
		query.addScalar("sourcePartitionKey", IntegerType.INSTANCE);
		query.addScalar("lastFoundTimestamp", LongType.INSTANCE);
		query.addScalar("signalPower", ShortType.INSTANCE);
	}

	// Binds the 8 insert parameters starting at position i; order must match
	// insertRows. Returns the next free parameter index.
	private int setQueryParams(int i, SQLQuery query, Signal entry)
	{
		query.setParameter(i++, entry.getManagerId());
		query.setParameter(i++, entry.getSourceDomainName());
		query.setParameter(i++, entry.getDestinationDomainName());
		query.setParameter(i++, entry.getSourceUri());
		query.setParameter(i++, entry.getDestinationUri());
		query.setParameter(i++, entry.getSourcePartitionKey());
		query.setParameter(i++, entry.getLastFoundTimestamp());
		query.setParameter(i++, entry.getSignalPower());
		return i;
	}
}
apache-2.0
W0mpRat/JavaProjects
src/game/Holder.java
41
package game;

/**
 * Empty class with no fields or behavior in this chunk.
 * NOTE(review): looks like a placeholder/stub awaiting implementation —
 * confirm intent before removing.
 */
public class Holder {

}
apache-2.0
csgordon/SJS
sjsc/src/main/java/com/samsung/sjs/backend/asts/c/DoLoop.java
1238
/*
 * Copyright 2014-2016 Samsung Research America, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Representation of C do-while loop
 *
 * @author colin.gordon
 */
package com.samsung.sjs.backend.asts.c;

public class DoLoop extends BlockStatement {
    // Loop condition, printed after the body in the emitted C source.
    private Expression test;

    public DoLoop(Expression test) {
        super();
        this.test = test;
    }

    /**
     * Emits {@code do { <body> } while(<test>);} at indentation level x;
     * body statements are rendered one level deeper.
     */
    @Override
    public String toSource(int x) {
        StringBuilder out = new StringBuilder();
        indent(x, out);
        out.append("do {\n");
        for (Statement stmt : body) {
            out.append(stmt.toSource(x + 1));
        }
        indent(x, out);
        out.append("} while(");
        out.append(test.toSource(0));
        out.append(");\n");
        return out.toString();
    }
}
apache-2.0
PATRIC3/p3_solr
solr/core/src/java/org/apache/solr/response/transform/ElevatedMarkerFactory.java
1876
package org.apache.solr.response.transform;
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.util.Set;

import org.apache.solr.common.params.SolrParams;
import org.apache.solr.handler.component.QueryElevationComponent;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;

/**
 * Used to mark whether a document has been elevated or not
 * @since solr 4.0
 */
public class ElevatedMarkerFactory extends TransformerFactory {

  /**
   * Creates a transformer that marks each document as elevated or not,
   * keyed by the schema's unique-key field.
   */
  @Override
  public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) {
    SchemaField uniqueKeyField = req.getSchema().getUniqueKeyField();
    String idfield = uniqueKeyField.getName();
    return new MarkTransformer(field,idfield, uniqueKeyField.getType());
  }
}

// Transformer whose id-set is the collection of elevated ("boosted") ids
// stashed in the request context by QueryElevationComponent.
class MarkTransformer extends BaseEditorialTransformer {

  MarkTransformer(String name, String idFieldName, FieldType ft) {
    super(name, idFieldName, ft);
  }

  @Override
  protected Set<String> getIdSet() {
    // Unchecked cast: the context map is untyped; QueryElevationComponent
    // is presumed to store a Set<String> under BOOSTED — confirm upstream.
    return (Set<String>) context.req.getContext().get(QueryElevationComponent.BOOSTED);
  }
}
apache-2.0
adragomir/hbaseindex
src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
39232
/** * Copyright 2009 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeSet; import java.util.WeakHashMap; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.ipc.HBaseRPC; import org.apache.hadoop.hbase.ipc.HBaseRPCProtocolVersion; 
import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.MetaUtils; import org.apache.hadoop.hbase.util.SoftValueSortedMap; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper; import org.apache.hadoop.ipc.RemoteException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher.Event.KeeperState; /** * A non-instantiable class that manages connections to multiple tables in * multiple HBase instances. * * Used by {@link HTable} and {@link HBaseAdmin} */ public class HConnectionManager implements HConstants { /* * Not instantiable. */ protected HConnectionManager() { super(); } // A Map of master HBaseConfiguration -> connection information for that // instance. Note that although the Map is synchronized, the objects it // contains are mutable and hence require synchronized access to them private static final Map<HBaseConfiguration, TableServers> HBASE_INSTANCES = new WeakHashMap<HBaseConfiguration, TableServers>(); /** * Get the connection object for the instance specified by the configuration * If no current connection exists, create a new connection for that instance * @param conf * @return HConnection object for the instance specified by the configuration */ public static HConnection getConnection(HBaseConfiguration conf) { TableServers connection; synchronized (HBASE_INSTANCES) { connection = HBASE_INSTANCES.get(conf); if (connection == null) { connection = new TableServers(conf); HBASE_INSTANCES.put(conf, connection); } } return connection; } /** * Delete connection information for the instance specified by configuration * @param conf * @param stopProxy */ public static void deleteConnectionInfo(HBaseConfiguration conf, boolean stopProxy) { synchronized (HBASE_INSTANCES) { TableServers t = HBASE_INSTANCES.remove(conf); if (t 
!= null) { t.close(stopProxy); } } } /** * Delete information for all connections. * @param stopProxy */ public static void deleteAllConnections(boolean stopProxy) { synchronized (HBASE_INSTANCES) { for (TableServers t : HBASE_INSTANCES.values()) { if (t != null) { t.close(stopProxy); } } } } /* Encapsulates finding the servers for an HBase instance */ private static class TableServers implements ServerConnection, HConstants, Watcher { private static final Log LOG = LogFactory.getLog(TableServers.class); private final Class<? extends HRegionInterface> serverInterfaceClass; private final long pause; private final int numRetries; private final int maxRPCAttempts; private final long rpcTimeout; private final Object masterLock = new Object(); private volatile boolean closed; private volatile HMasterInterface master; private volatile boolean masterChecked; private final Object rootRegionLock = new Object(); private final Object metaRegionLock = new Object(); private final Object userRegionLock = new Object(); private volatile HBaseConfiguration conf; // Known region HServerAddress.toString() -> HRegionInterface private final Map<String, HRegionInterface> servers = new ConcurrentHashMap<String, HRegionInterface>(); // Used by master and region servers during safe mode only private volatile HRegionLocation rootRegionLocation; private final Map<Integer, SoftValueSortedMap<byte [], HRegionLocation>> cachedRegionLocations = new HashMap<Integer, SoftValueSortedMap<byte [], HRegionLocation>>(); private ZooKeeperWrapper zooKeeperWrapper; /** * constructor * @param conf Configuration object */ @SuppressWarnings("unchecked") public TableServers(HBaseConfiguration conf) { this.conf = conf; String serverClassName = conf.get(REGION_SERVER_CLASS, DEFAULT_REGION_SERVER_CLASS); this.closed = false; try { this.serverInterfaceClass = (Class<? 
extends HRegionInterface>) Class.forName(serverClassName); } catch (ClassNotFoundException e) { throw new UnsupportedOperationException( "Unable to find region server interface " + serverClassName, e); } this.pause = conf.getLong("hbase.client.pause", 2 * 1000); this.numRetries = conf.getInt("hbase.client.retries.number", 10); this.maxRPCAttempts = conf.getInt("hbase.client.rpc.maxattempts", 1); this.rpcTimeout = conf.getLong("hbase.regionserver.lease.period", 60000); this.master = null; this.masterChecked = false; } private long getPauseTime(int tries) { int ntries = tries; if (ntries >= HConstants.RETRY_BACKOFF.length) ntries = HConstants.RETRY_BACKOFF.length - 1; return this.pause * HConstants.RETRY_BACKOFF[ntries]; } /** * Called by ZooKeeper when an event occurs on our connection. We use this to * detect our session expiring. When our session expires, we have lost our * connection to ZooKeeper. Our handle is dead, and we need to recreate it. * * See http://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions * for more information. * * @param event WatchedEvent witnessed by ZooKeeper. 
*/ public void process(WatchedEvent event) { KeeperState state = event.getState(); LOG.debug("Got ZooKeeper event, state: " + state + ", type: " + event.getType() + ", path: " + event.getPath()); if (state == KeeperState.Expired) { resetZooKeeper(); } } private synchronized void resetZooKeeper() { if (zooKeeperWrapper != null) { zooKeeperWrapper.close(); zooKeeperWrapper = null; } } // Used by master and region servers during safe mode only public void unsetRootRegionLocation() { this.rootRegionLocation = null; } // Used by master and region servers during safe mode only public void setRootRegionLocation(HRegionLocation rootRegion) { if (rootRegion == null) { throw new IllegalArgumentException( "Cannot set root region location to null."); } this.rootRegionLocation = rootRegion; } public HMasterInterface getMaster() throws MasterNotRunningException { ZooKeeperWrapper zk = null; try { zk = getZooKeeperWrapper(); } catch (IOException e) { throw new MasterNotRunningException(e); } HServerAddress masterLocation = null; synchronized (this.masterLock) { for (int tries = 0; !this.closed && !this.masterChecked && this.master == null && tries < numRetries; tries++) { try { masterLocation = zk.readMasterAddressOrThrow(); HMasterInterface tryMaster = (HMasterInterface)HBaseRPC.getProxy( HMasterInterface.class, HBaseRPCProtocolVersion.versionID, masterLocation.getInetSocketAddress(), this.conf); if (tryMaster.isMasterRunning()) { this.master = tryMaster; break; } } catch (IOException e) { if (tries == numRetries - 1) { // This was our last chance - don't bother sleeping break; } LOG.info("getMaster attempt " + tries + " of " + this.numRetries + " failed; retrying after sleep of " + getPauseTime(tries), e); } // Cannot connect to master or it is not running. 
Sleep & retry try { Thread.sleep(getPauseTime(tries)); } catch (InterruptedException e) { // continue } } this.masterChecked = true; } if (this.master == null) { if (masterLocation == null) { throw new MasterNotRunningException(); } throw new MasterNotRunningException(masterLocation.toString()); } return this.master; } public boolean isMasterRunning() { if (this.master == null) { try { getMaster(); } catch (MasterNotRunningException e) { return false; } } return true; } public boolean tableExists(final byte [] tableName) throws MasterNotRunningException { getMaster(); if (tableName == null) { throw new IllegalArgumentException("Table name cannot be null"); } if (isMetaTableName(tableName)) { return true; } boolean exists = false; try { HTableDescriptor[] tables = listTables(); for (int i = 0; i < tables.length; i++) { if (Bytes.equals(tables[i].getName(), tableName)) { exists = true; } } } catch (IOException e) { LOG.warn("Testing for table existence threw exception", e); } return exists; } /* * @param n * @return Truen if passed tablename <code>n</code> is equal to the name * of a catalog table. */ private static boolean isMetaTableName(final byte [] n) { return MetaUtils.isMetaTableName(n); } public HRegionLocation getRegionLocation(final byte [] name, final byte [] row, boolean reload) throws IOException { return reload? 
relocateRegion(name, row): locateRegion(name, row); } public HTableDescriptor[] listTables() throws IOException { getMaster(); final TreeSet<HTableDescriptor> uniqueTables = new TreeSet<HTableDescriptor>(); MetaScannerVisitor visitor = new MetaScannerVisitor() { public boolean processRow(Result result) throws IOException { HRegionInfo info = Writables.getHRegionInfo( result.getValue(CATALOG_FAMILY, REGIONINFO_QUALIFIER)); // Only examine the rows where the startKey is zero length if (info != null && info.getStartKey().length == 0) { uniqueTables.add(info.getTableDesc()); } return true; } }; MetaScanner.metaScan(conf, visitor); return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]); } public boolean isTableEnabled(byte[] tableName) throws IOException { return testTableOnlineState(tableName, true); } public boolean isTableDisabled(byte[] tableName) throws IOException { return testTableOnlineState(tableName, false); } /* * If online == true * Returns true if all regions are online * Returns false in any other case * If online == false * Returns true if all regions are offline * Returns false in any other case */ private boolean testTableOnlineState(byte[] tableName, boolean online) throws IOException { if (!tableExists(tableName)) { throw new TableNotFoundException(Bytes.toString(tableName)); } if (Bytes.equals(tableName, HConstants.ROOT_TABLE_NAME)) { // The root region is always enabled return true; } int rowsScanned = 0; int rowsOffline = 0; byte[] startKey = HRegionInfo.createRegionName(tableName, null, HConstants.ZEROES); byte[] endKey = null; HRegionInfo currentRegion = null; Scan scan = new Scan(startKey); scan.addColumn(CATALOG_FAMILY, REGIONINFO_QUALIFIER); ScannerCallable s = new ScannerCallable(this, (Bytes.equals(tableName, HConstants.META_TABLE_NAME) ? 
HConstants.ROOT_TABLE_NAME : HConstants.META_TABLE_NAME), scan.getStartRow(), scan); try { // Open scanner getRegionServerWithRetries(s); do { HRegionInfo oldRegion = currentRegion; if (oldRegion != null) { startKey = oldRegion.getEndKey(); } currentRegion = s.getHRegionInfo(); Result r = null; Result [] rrs = null; while ((rrs = getRegionServerWithRetries(s)) != null) { r = rrs[0]; byte [] value = r.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); if (value != null) { HRegionInfo info = Writables.getHRegionInfoOrNull(value); if (info != null) { if (Bytes.equals(info.getTableDesc().getName(), tableName)) { rowsScanned += 1; rowsOffline += info.isOffline() ? 1 : 0; } } } } endKey = currentRegion.getEndKey(); } while (!(endKey == null || Bytes.equals(endKey, HConstants.EMPTY_BYTE_ARRAY))); } finally { s.setClose(); } boolean onlineOffline = online ? rowsOffline == 0 : rowsOffline == rowsScanned; return rowsScanned > 0 && onlineOffline; } private static class HTableDescriptorFinder implements MetaScanner.MetaScannerVisitor { byte[] tableName; HTableDescriptor result; protected HTableDescriptorFinder(byte[] tableName) { this.tableName = tableName; } public boolean processRow(Result rowResult) throws IOException { HRegionInfo info = Writables.getHRegionInfo( rowResult.getValue(CATALOG_FAMILY, REGIONINFO_QUALIFIER)); HTableDescriptor desc = info.getTableDesc(); if (Bytes.compareTo(desc.getName(), tableName) == 0) { result = desc; return false; } return true; } HTableDescriptor getResult() { return result; } } public HTableDescriptor getHTableDescriptor(final byte[] tableName) throws IOException { if (Bytes.equals(tableName, HConstants.ROOT_TABLE_NAME)) { return new UnmodifyableHTableDescriptor(HTableDescriptor.ROOT_TABLEDESC); } if (Bytes.equals(tableName, HConstants.META_TABLE_NAME)) { return HTableDescriptor.META_TABLEDESC; } HTableDescriptorFinder finder = new HTableDescriptorFinder(tableName); MetaScanner.metaScan(conf, finder); HTableDescriptor 
result = finder.getResult(); if (result == null) { throw new TableNotFoundException(Bytes.toString(tableName)); } return result; } public HRegionLocation locateRegion(final byte [] tableName, final byte [] row) throws IOException{ return locateRegion(tableName, row, true); } public HRegionLocation relocateRegion(final byte [] tableName, final byte [] row) throws IOException{ return locateRegion(tableName, row, false); } private HRegionLocation locateRegion(final byte [] tableName, final byte [] row, boolean useCache) throws IOException{ if (tableName == null || tableName.length == 0) { throw new IllegalArgumentException( "table name cannot be null or zero length"); } if (Bytes.equals(tableName, ROOT_TABLE_NAME)) { synchronized (rootRegionLock) { // This block guards against two threads trying to find the root // region at the same time. One will go do the find while the // second waits. The second thread will not do find. if (!useCache || rootRegionLocation == null) { return locateRootRegion(); } return rootRegionLocation; } } else if (Bytes.equals(tableName, META_TABLE_NAME)) { synchronized (metaRegionLock) { // This block guards against two threads trying to load the meta // region at the same time. The first will load the meta region and // the second will use the value that the first one found. return locateRegionInMeta(ROOT_TABLE_NAME, tableName, row, useCache); } } else { synchronized(userRegionLock){ return locateRegionInMeta(META_TABLE_NAME, tableName, row, useCache); } } } /* * Search one of the meta tables (-ROOT- or .META.) for the HRegionLocation * info that contains the table and row we're seeking. */ private HRegionLocation locateRegionInMeta(final byte [] parentTable, final byte [] tableName, final byte [] row, boolean useCache) throws IOException{ HRegionLocation location = null; // If supposed to be using the cache, then check it for a possible hit. // Otherwise, delete any existing cached location so it won't interfere. 
if (useCache) { location = getCachedLocation(tableName, row); if (location != null) { return location; } } else { deleteCachedLocation(tableName, row); } // build the key of the meta region we should be looking for. // the extra 9's on the end are necessary to allow "exact" matches // without knowing the precise region names. byte [] metaKey = HRegionInfo.createRegionName(tableName, row, HConstants.NINES); for (int tries = 0; true; tries++) { if (tries >= numRetries) { throw new NoServerForRegionException("Unable to find region for " + Bytes.toStringBinary(row) + " after " + numRetries + " tries."); } try { // locate the root or meta region HRegionLocation metaLocation = locateRegion(parentTable, metaKey); HRegionInterface server = getHRegionConnection(metaLocation.getServerAddress()); // Query the root or meta region for the location of the meta region Result regionInfoRow = server.getClosestRowBefore( metaLocation.getRegionInfo().getRegionName(), metaKey, HConstants.CATALOG_FAMILY); if (regionInfoRow == null) { throw new TableNotFoundException(Bytes.toString(tableName)); } byte [] value = regionInfoRow.getValue(CATALOG_FAMILY, REGIONINFO_QUALIFIER); if (value == null || value.length == 0) { throw new IOException("HRegionInfo was null or empty in " + Bytes.toString(parentTable)); } // convert the row result into the HRegionLocation we need! HRegionInfo regionInfo = (HRegionInfo) Writables.getWritable( value, new HRegionInfo()); // possible we got a region of a different table... 
if (!Bytes.equals(regionInfo.getTableDesc().getName(), tableName)) { throw new TableNotFoundException( "Table '" + Bytes.toString(tableName) + "' was not found."); } if (regionInfo.isOffline()) { throw new RegionOfflineException("region offline: " + regionInfo.getRegionNameAsString()); } value = regionInfoRow.getValue(CATALOG_FAMILY, SERVER_QUALIFIER); String serverAddress = ""; if(value != null) { serverAddress = Bytes.toString(value); } if (serverAddress.equals("")) { throw new NoServerForRegionException("No server address listed " + "in " + Bytes.toString(parentTable) + " for region " + regionInfo.getRegionNameAsString()); } // instantiate the location location = new HRegionLocation(regionInfo, new HServerAddress(serverAddress)); LOG.debug(location); cacheLocation(tableName, location); return location; } catch (TableNotFoundException e) { // if we got this error, probably means the table just plain doesn't // exist. rethrow the error immediately. this should always be coming // from the HTable constructor. throw e; } catch (IOException e) { if (e instanceof RemoteException) { e = RemoteExceptionHandler.decodeRemoteException( (RemoteException) e); } if (tries < numRetries - 1) { if (LOG.isDebugEnabled()) { LOG.debug("locateRegionInMeta attempt " + tries + " of " + this.numRetries + " failed; retrying after sleep of " + getPauseTime(tries), e); } relocateRegion(parentTable, metaKey); } else { throw e; } } try{ Thread.sleep(getPauseTime(tries)); } catch (InterruptedException e){ // continue } } } /* * Search the cache for a location that fits our table and row key. * Return null if no suitable region is located. TODO: synchronization note * * <p>TODO: This method during writing consumes 15% of CPU doing lookup * into the Soft Reference SortedMap. Improve. * * @param tableName * @param row * @return Null or region location found in cache. 
*/ private HRegionLocation getCachedLocation(final byte [] tableName, final byte [] row) { SoftValueSortedMap<byte [], HRegionLocation> tableLocations = getTableLocations(tableName); // start to examine the cache. we can only do cache actions // if there's something in the cache for this table. if (tableLocations.isEmpty()) { return null; } HRegionLocation rl = tableLocations.get(row); if (rl != null) { if (LOG.isDebugEnabled()) { LOG.debug("Cache hit for row <" + Bytes.toString(row) + "> in tableName " + Bytes.toString(tableName) + ": location server " + rl.getServerAddress() + ", location region name " + rl.getRegionInfo().getRegionNameAsString()); } return rl; } // Cut the cache so that we only get the part that could contain // regions that match our key SoftValueSortedMap<byte[], HRegionLocation> matchingRegions = tableLocations.headMap(row); // if that portion of the map is empty, then we're done. otherwise, // we need to examine the cached location to verify that it is // a match by end key as well. if (!matchingRegions.isEmpty()) { HRegionLocation possibleRegion = matchingRegions.get(matchingRegions.lastKey()); // there is a possibility that the reference was garbage collected // in the instant since we checked isEmpty(). if (possibleRegion != null) { byte[] endKey = possibleRegion.getRegionInfo().getEndKey(); // make sure that the end key is greater than the row we're looking // for, otherwise the row actually belongs in the next region, not // this one. the exception case is when the endkey is EMPTY_START_ROW, // signifying that the region we're checking is actually the last // region in the table. if (Bytes.equals(endKey, HConstants.EMPTY_END_ROW) || KeyValue.getRowComparator(tableName).compareRows(endKey, 0, endKey.length, row, 0, row.length) > 0) { return possibleRegion; } } } // Passed all the way through, so we got nothin - complete cache miss return null; } /* * Delete a cached location, if it satisfies the table name and row * requirements. 
*/ private void deleteCachedLocation(final byte [] tableName, final byte [] row) { SoftValueSortedMap<byte [], HRegionLocation> tableLocations = getTableLocations(tableName); // start to examine the cache. we can only do cache actions // if there's something in the cache for this table. if (!tableLocations.isEmpty()) { // cut the cache so that we only get the part that could contain // regions that match our key SoftValueSortedMap<byte [], HRegionLocation> matchingRegions = tableLocations.headMap(row); // if that portion of the map is empty, then we're done. otherwise, // we need to examine the cached location to verify that it is // a match by end key as well. if (!matchingRegions.isEmpty()) { HRegionLocation possibleRegion = matchingRegions.get(matchingRegions.lastKey()); byte [] endKey = possibleRegion.getRegionInfo().getEndKey(); // by nature of the map, we know that the start key has to be < // otherwise it wouldn't be in the headMap. if (KeyValue.getRowComparator(tableName).compareRows(endKey, 0, endKey.length, row, 0, row.length) <= 0) { // delete any matching entry HRegionLocation rl = tableLocations.remove(matchingRegions.lastKey()); if (rl != null && LOG.isDebugEnabled()) { LOG.debug("Removed " + rl.getRegionInfo().getRegionNameAsString() + " for tableName=" + Bytes.toString(tableName) + " from cache " + "because of " + Bytes.toString(row)); } } } } } /* * @param tableName * @return Map of cached locations for passed <code>tableName</code> */ private SoftValueSortedMap<byte [], HRegionLocation> getTableLocations( final byte [] tableName) { // find the map of cached locations for this table Integer key = Bytes.mapKey(tableName); SoftValueSortedMap<byte [], HRegionLocation> result = null; synchronized (this.cachedRegionLocations) { result = this.cachedRegionLocations.get(key); // if tableLocations for this table isn't built yet, make one if (result == null) { result = new SoftValueSortedMap<byte [], HRegionLocation>( Bytes.BYTES_COMPARATOR); 
this.cachedRegionLocations.put(key, result); } } return result; } /* * Put a newly discovered HRegionLocation into the cache. */ private void cacheLocation(final byte [] tableName, final HRegionLocation location) { byte [] startKey = location.getRegionInfo().getStartKey(); SoftValueSortedMap<byte [], HRegionLocation> tableLocations = getTableLocations(tableName); tableLocations.put(startKey, location); } public HRegionInterface getHRegionConnection( HServerAddress regionServer, boolean getMaster) throws IOException { if(getMaster) { getMaster(); } HRegionInterface server; synchronized (this.servers) { // See if we already have a connection server = this.servers.get(regionServer.toString()); if (server == null) { // Get a connection try { server = (HRegionInterface)HBaseRPC.waitForProxy( serverInterfaceClass, HBaseRPCProtocolVersion.versionID, regionServer.getInetSocketAddress(), this.conf, this.maxRPCAttempts, this.rpcTimeout); } catch (RemoteException e) { throw RemoteExceptionHandler.decodeRemoteException(e); } this.servers.put(regionServer.toString(), server); } } return server; } public HRegionInterface getHRegionConnection( HServerAddress regionServer) throws IOException { return getHRegionConnection(regionServer, false); } public synchronized ZooKeeperWrapper getZooKeeperWrapper() throws IOException { if (zooKeeperWrapper == null) { zooKeeperWrapper = new ZooKeeperWrapper(conf, this); } return zooKeeperWrapper; } /* * Repeatedly try to find the root region in ZK * @return HRegionLocation for root region if found * @throws NoServerForRegionException - if the root region can not be * located after retrying * @throws IOException */ private HRegionLocation locateRootRegion() throws IOException { // We lazily instantiate the ZooKeeper object because we don't want to // make the constructor have to throw IOException or handle it itself. 
ZooKeeperWrapper zk = getZooKeeperWrapper(); HServerAddress rootRegionAddress = null; for (int tries = 0; tries < numRetries; tries++) { int localTimeouts = 0; // ask the master which server has the root region while (rootRegionAddress == null && localTimeouts < numRetries) { // Don't read root region until we're out of safe mode so we know // that the meta regions have been assigned. boolean outOfSafeMode = zk.checkOutOfSafeMode(); if (outOfSafeMode) { rootRegionAddress = zk.readRootRegionLocation(); } if (rootRegionAddress == null) { try { if (LOG.isDebugEnabled()) { LOG.debug("Sleeping " + getPauseTime(tries) + "ms, waiting for root region."); } Thread.sleep(getPauseTime(tries)); } catch (InterruptedException iex) { // continue } localTimeouts++; } } if (rootRegionAddress == null) { throw new NoServerForRegionException( "Timed out trying to locate root region"); } // get a connection to the region server HRegionInterface server = getHRegionConnection(rootRegionAddress); try { // if this works, then we're good, and we have an acceptable address, // so we can stop doing retries and return the result. server.getRegionInfo(HRegionInfo.ROOT_REGIONINFO.getRegionName()); if (LOG.isDebugEnabled()) { LOG.debug("Found ROOT at " + rootRegionAddress); } break; } catch (IOException e) { if (tries == numRetries - 1) { // Don't bother sleeping. We've run out of retries. if (e instanceof RemoteException) { e = RemoteExceptionHandler.decodeRemoteException( (RemoteException) e); } throw e; } // Sleep and retry finding root region. try { if (LOG.isDebugEnabled()) { LOG.debug("Root region location changed. Sleeping."); } Thread.sleep(getPauseTime(tries)); if (LOG.isDebugEnabled()) { LOG.debug("Wake. 
Retry finding root region."); } } catch (InterruptedException iex) { // continue } } rootRegionAddress = null; } // if the address is null by this point, then the retries have failed, // and we're sort of sunk if (rootRegionAddress == null) { throw new NoServerForRegionException( "unable to locate root region server"); } // return the region location return new HRegionLocation( HRegionInfo.ROOT_REGIONINFO, rootRegionAddress); } public <T> T getRegionServerWithRetries(ServerCallable<T> callable) throws IOException, RuntimeException { List<Throwable> exceptions = new ArrayList<Throwable>(); for(int tries = 0; tries < numRetries; tries++) { try { callable.instantiateServer(tries != 0); return callable.call(); } catch (Throwable t) { if (t instanceof UndeclaredThrowableException) { t = t.getCause(); } if (t instanceof RemoteException) { t = RemoteExceptionHandler.decodeRemoteException((RemoteException)t); } if (t instanceof DoNotRetryIOException) { throw (DoNotRetryIOException)t; } exceptions.add(t); if (tries == numRetries - 1) { throw new RetriesExhaustedException(callable.getServerName(), callable.getRegionName(), callable.getRow(), tries, exceptions); } } try { Thread.sleep(getPauseTime(tries)); } catch (InterruptedException e) { // continue } } return null; } public <T> T getRegionServerForWithoutRetries(ServerCallable<T> callable) throws IOException, RuntimeException { try { callable.instantiateServer(false); return callable.call(); } catch (Throwable t) { if (t instanceof UndeclaredThrowableException) { t = t.getCause(); } if (t instanceof RemoteException) { t = RemoteExceptionHandler.decodeRemoteException((RemoteException) t); } if (t instanceof DoNotRetryIOException) { throw (DoNotRetryIOException) t; } } return null; } private HRegionLocation getRegionLocationForRowWithRetries(byte[] tableName, byte[] rowKey, boolean reload) throws IOException { boolean reloadFlag = reload; List<Throwable> exceptions = new ArrayList<Throwable>(); HRegionLocation location = 
null; int tries = 0; while (tries < numRetries) { try { location = getRegionLocation(tableName, rowKey, reloadFlag); } catch (Throwable t) { exceptions.add(t); } if (location != null) { break; } reloadFlag = true; tries++; try { Thread.sleep(getPauseTime(tries)); } catch (InterruptedException e) { // continue } } if (location == null) { throw new RetriesExhaustedException("Some server", HConstants.EMPTY_BYTE_ARRAY, rowKey, tries, exceptions); } return location; } public void processBatchOfRows(ArrayList<Put> list, byte[] tableName) throws IOException { if (list.isEmpty()) { return; } boolean retryOnlyOne = false; int tries = 0; Collections.sort(list); List<Put> currentPuts = new ArrayList<Put>(); HRegionLocation location = getRegionLocationForRowWithRetries(tableName, list.get(0).getRow(), false); byte [] currentRegion = location.getRegionInfo().getRegionName(); byte [] region = currentRegion; boolean isLastRow = false; for (int i = 0; i < list.size() && tries < numRetries; i++) { Put put = list.get(i); currentPuts.add(put); isLastRow = (i + 1) == list.size(); if (!isLastRow) { location = getRegionLocationForRowWithRetries(tableName, list.get(i+1).getRow(), false); region = location.getRegionInfo().getRegionName(); } if (!Bytes.equals(currentRegion, region) || isLastRow || retryOnlyOne) { final Put [] puts = currentPuts.toArray(new Put[0]); int index = getRegionServerWithRetries(new ServerCallable<Integer>( this, tableName, put.getRow()) { public Integer call() throws IOException { int i = server.put(location.getRegionInfo() .getRegionName(), puts); return i; } }); if (index != -1) { if (tries == numRetries - 1) { throw new RetriesExhaustedException("Some server", currentRegion, put.getRow(), tries, new ArrayList<Throwable>()); } long sleepTime = getPauseTime(tries); if (LOG.isDebugEnabled()) { LOG.debug("Reloading region " + Bytes.toString(currentRegion) + " location because regionserver didn't accept updates; " + "tries=" + tries + " of max=" + this.numRetries + 
", waiting=" + sleepTime + "ms"); } try { Thread.sleep(sleepTime); tries++; } catch (InterruptedException e) { // continue } i = i - puts.length + index; retryOnlyOne = true; location = getRegionLocationForRowWithRetries(tableName, list.get(i + 1).getRow(), true); region = location.getRegionInfo().getRegionName(); } else { retryOnlyOne = false; } currentRegion = region; currentPuts.clear(); } } } void close(boolean stopProxy) { if (master != null) { if (stopProxy) { HBaseRPC.stopProxy(master); } master = null; masterChecked = false; } resetZooKeeper(); if (stopProxy) { synchronized (servers) { for (HRegionInterface i: servers.values()) { HBaseRPC.stopProxy(i); } } } } } }
apache-2.0
GuardTime/ksi-java-sdk
ksi-api/src/main/java/com/guardtime/ksi/unisignature/verifier/policies/UserProvidedPublicationBasedVerificationPolicy.java
4694
/* * Copyright 2013-2018 Guardtime, Inc. * * This file is part of the Guardtime client SDK. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES, CONDITIONS, OR OTHER LICENSES OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * "Guardtime" and "KSI" are trademarks or registered trademarks of * Guardtime, Inc., and no license to trademarks is granted; Guardtime * reserves and retains all trademark rights. * */ package com.guardtime.ksi.unisignature.verifier.policies; import com.guardtime.ksi.unisignature.verifier.rules.CalendarAuthenticationRecordAggregationHashRule; import com.guardtime.ksi.unisignature.verifier.rules.CalendarHashChainAlgorithmDeprecatedRule; import com.guardtime.ksi.unisignature.verifier.rules.CompositeRule; import com.guardtime.ksi.unisignature.verifier.rules.ExtendingPermittedVerificationRule; import com.guardtime.ksi.unisignature.verifier.rules.NotRule; import com.guardtime.ksi.unisignature.verifier.rules.Rule; import com.guardtime.ksi.unisignature.verifier.rules.SignaturePublicationRecordExistenceRule; import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationCalendarHashChainAlgorithmDeprecatedRule; import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationCreationTimeVerificationRule; import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationExistenceRule; import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationExtendedSignatureInputHashRule; import 
com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationHashEqualsToSignaturePublicationHashRule;
import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationHashMatchesExtendedResponseRule;
import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationTimeEqualsToSignaturePublicationTimeRule;
import com.guardtime.ksi.unisignature.verifier.rules.UserProvidedPublicationTimeMatchesExtendedResponseRule;

/**
 * KSI signature verification policy that checks a signature against a publication
 * supplied by the user. The signature is accepted when either the publication record
 * embedded in the signature equals the user-provided publication, or (when extending
 * is permitted) the signature can be verified against an extended calendar response
 * matching the user-provided publication.
 */
public class UserProvidedPublicationBasedVerificationPolicy extends InternalVerificationPolicy {

    private static final String TYPE_USER_PROVIDED_PUBLICATION_BASED_POLICY = "USER_PROVIDED_PUBLICATION_POLICY";

    public UserProvidedPublicationBasedVerificationPolicy() {
        // Extend the signature and compare the calendar response against the
        // user-provided publication (only used when extending is permitted).
        final Rule verifyViaExtendedCalendar = new CompositeRule(false,
                new UserProvidedPublicationCreationTimeVerificationRule(),
                new ExtendingPermittedVerificationRule(),
                new UserProvidedPublicationCalendarHashChainAlgorithmDeprecatedRule(),
                new UserProvidedPublicationHashMatchesExtendedResponseRule(),
                new UserProvidedPublicationTimeMatchesExtendedResponseRule(),
                new UserProvidedPublicationExtendedSignatureInputHashRule());

        // The signature's own publication record matches the user-provided publication.
        final Rule embeddedPublicationMatches = new CompositeRule(false,
                new UserProvidedPublicationExistenceRule(),
                new SignaturePublicationRecordExistenceRule(),
                new UserProvidedPublicationTimeEqualsToSignaturePublicationTimeRule(),
                new UserProvidedPublicationHashEqualsToSignaturePublicationHashRule(),
                new CalendarHashChainAlgorithmDeprecatedRule());

        // Publication record exists but its time differs: fall back to extending.
        final Rule differentPublicationTimeSoExtend = new CompositeRule(false,
                new UserProvidedPublicationExistenceRule(),
                new SignaturePublicationRecordExistenceRule(),
                new NotRule(new UserProvidedPublicationTimeEqualsToSignaturePublicationTimeRule()),
                verifyViaExtendedCalendar);

        // No publication record in the signature at all: fall back to extending.
        final Rule missingPublicationRecordSoExtend = new CompositeRule(false,
                new UserProvidedPublicationExistenceRule(),
                new NotRule(new SignaturePublicationRecordExistenceRule()),
                verifyViaExtendedCalendar);

        // Any one of the three alternatives succeeding verifies the signature.
        addRule(new CompositeRule(true,
                embeddedPublicationMatches,
                differentPublicationTimeSoExtend,
                missingPublicationRecordSoExtend));
    }

    public String getName() {
        return "User provided publication based verification policy";
    }

    public String getType() {
        return TYPE_USER_PROVIDED_PUBLICATION_BASED_POLICY;
    }
}
apache-2.0
internetisalie/lua-for-idea
src/main/java/com/sylvanaar/idea/Lua/lang/psi/impl/statements/LuaBlockImpl.java
7457
/* * Copyright 2010 Jon S Akhtar (Sylvanaar) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sylvanaar.idea.Lua.lang.psi.impl.statements; import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.ResolveState; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.util.CachedValue; import com.intellij.psi.util.CachedValueProvider; import com.intellij.psi.util.CachedValuesManager; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.util.IncorrectOperationException; import com.sylvanaar.idea.Lua.lang.psi.controlFlow.Instruction; import com.sylvanaar.idea.Lua.lang.psi.controlFlow.impl.ControlFlowBuilder; import com.sylvanaar.idea.Lua.lang.psi.expressions.LuaDeclarationExpression; import com.sylvanaar.idea.Lua.lang.psi.impl.LuaPsiElementImpl; import com.sylvanaar.idea.Lua.lang.psi.lists.LuaExpressionList; import com.sylvanaar.idea.Lua.lang.psi.statements.LuaBlock; import com.sylvanaar.idea.Lua.lang.psi.statements.LuaDeclarationStatement; import com.sylvanaar.idea.Lua.lang.psi.statements.LuaReturnStatement; import com.sylvanaar.idea.Lua.lang.psi.statements.LuaStatementElement; import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaIdentifier; import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaLocalDeclaration; import com.sylvanaar.idea.Lua.lang.psi.util.LuaBlockVariablesProvider; import com.sylvanaar.idea.Lua.lang.psi.util.LuaPsiUtils; import 
com.sylvanaar.idea.Lua.lang.psi.visitor.LuaElementVisitor;
import com.sylvanaar.idea.Lua.lang.psi.visitor.LuaRecursiveElementVisitor;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;

/**
 * PSI implementation of a Lua block: a scoped sequence of statements
 * (function body, do...end, loop body, etc.).
 *
 * @author Jon S Akhtar (Sylvanaar)
 */
public class LuaBlockImpl extends LuaPsiElementImpl implements LuaBlock {
    public LuaBlockImpl(ASTNode node) {
        super(node);
    }

    public void accept(LuaElementVisitor visitor) {
        visitor.visitBlock(this);
    }

    public void accept(@NotNull PsiElementVisitor visitor) {
        // Dispatch to the Lua-specific visitor when possible, otherwise
        // fall back to the generic PSI element visit.
        if (visitor instanceof LuaElementVisitor) {
            ((LuaElementVisitor) visitor).visitBlock(this);
        } else {
            visitor.visitElement(this);
        }
    }

    /** Returns the direct child statements of this block. */
    public LuaStatementElement[] getStatements() {
        return findChildrenByClass(LuaStatementElement.class);
    }

    /**
     * Returns the expression list of a trailing {@code return} statement,
     * or {@code null} when the block's last statement is not a return.
     */
    @Override
    public LuaExpressionList getReturnedValue() {
        // Only the last statement of the block can be a value-producing return.
        LuaStatementElement[] statements = getStatements();
        if (statements.length == 0) return null;

        LuaStatementElement last = statements[statements.length - 1];
        if (!(last instanceof LuaReturnStatement)) return null;

        return ((LuaReturnStatement) last).getReturnValue();
    }

    /** The element that opens this block (e.g. "do"); taken as the previous sibling. */
    @Override
    public PsiElement getOpenElement() {
        return getPrevSibling();
    }

    /** The element that closes this block (e.g. "end"); taken as the next sibling. */
    @Override
    public PsiElement getCloseElement() {
        return getNextSibling();
    }

    /**
     * Collects the local declarations visible in this block: declarations found
     * directly in this block (nested blocks are not descended into) plus any
     * variables provided by an enclosing {@link LuaBlockVariablesProvider}.
     */
    @Override
    public LuaLocalDeclaration[] getLocals() {
        List<LuaLocalDeclaration> locals = new ArrayList<>();
        LuaElementVisitor visitor = new LuaRecursiveElementVisitor() {
            @Override
            public void visitBlock(LuaBlock e) {
                // Intentionally empty: do not recurse into nested blocks,
                // their locals are not visible at this block's scope.
            }

            @Override
            public void visitDeclarationExpression(LuaDeclarationExpression e) {
                super.visitDeclarationExpression(e);
                if (e instanceof LuaLocalDeclaration) {
                    locals.add((LuaLocalDeclaration) e);
                }
            }
        };
        visitor.visitLuaElement(this);

        final LuaBlockVariablesProvider provider =
                PsiTreeUtil.getParentOfType(this, LuaBlockVariablesProvider.class);
        if (provider != null) {
            locals.addAll(provider.getProvidedVariables());
        }

        // Zero-length array argument: the JVM allocates the right-sized array itself.
        return locals.toArray(new LuaLocalDeclaration[0]);
    }

    @Override
    public boolean processDeclarations(@NotNull PsiScopeProcessor processor, @NotNull ResolveState state,
                                       PsiElement lastParent, @NotNull PsiElement place) {
        return LuaPsiUtils.processChildDeclarations(this, processor, state, lastParent, place);
    }

    /**
     * Returns the control-flow graph for this block, building and caching it on
     * first use. The cached value is invalidated with the containing file.
     */
    @Override
    public Instruction[] getControlFlow() {
        assert isValid();
        CachedValue<Instruction[]> controlFlow = getUserData(CONTROL_FLOW);
        if (controlFlow == null) {
            // Benign race: concurrent callers may each build a cached value;
            // the last putUserData wins and both values are equivalent.
            controlFlow = CachedValuesManager.getManager(getProject()).createCachedValue(
                    () -> CachedValueProvider.Result.create(
                            new ControlFlowBuilder(getProject()).buildControlFlow(LuaBlockImpl.this),
                            getContainingFile()),
                    false);
            putUserData(CONTROL_FLOW, controlFlow);
        }
        return controlFlow.getValue();
    }

    @Override
    public LuaStatementElement addStatementBefore(@NotNull LuaStatementElement statement, LuaStatementElement anchor)
            throws IncorrectOperationException {
        return (LuaStatementElement) addBefore(statement, anchor);
    }

    /** Not supported for blocks; intentionally a no-op. */
    @Override
    public void removeVariable(LuaIdentifier variable) {
    }

    /** Not implemented; always returns {@code null}. TODO: implement when needed. */
    @Override
    public LuaDeclarationStatement addVariableDeclarationBefore(LuaDeclarationStatement declaration,
                                                               LuaStatementElement anchor)
            throws IncorrectOperationException {
        return null;
    }
}
apache-2.0
ravikumaran2015/ravikumaran201504
core/store/serializers/src/main/java/org/onosproject/store/serializers/KryoNamespaces.java
16897
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.serializers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.onlab.packet.ChassisId; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import org.onlab.packet.Ip6Address; import org.onlab.packet.Ip6Prefix; import org.onlab.packet.IpAddress; import org.onlab.packet.IpPrefix; import org.onlab.packet.MacAddress; import org.onlab.packet.VlanId; import org.onlab.util.KryoNamespace; import org.onosproject.app.ApplicationState; import org.onosproject.cluster.ControllerNode; import org.onosproject.cluster.DefaultControllerNode; import org.onosproject.cluster.Leadership; import org.onosproject.cluster.LeadershipEvent; import org.onosproject.cluster.NodeId; import org.onosproject.cluster.RoleInfo; import org.onosproject.core.DefaultApplication; import org.onosproject.core.DefaultApplicationId; import org.onosproject.core.DefaultGroupId; import org.onosproject.core.Version; import org.onosproject.mastership.MastershipTerm; import org.onosproject.net.ConnectPoint; import org.onosproject.net.DefaultAnnotations; import org.onosproject.net.DefaultDevice; import org.onosproject.net.DefaultEdgeLink; import org.onosproject.net.DefaultLink; import org.onosproject.net.DefaultPath; import org.onosproject.net.DefaultPort; import 
org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Element; import org.onosproject.net.HostId; import org.onosproject.net.HostLocation; import org.onosproject.net.Link; import org.onosproject.net.LinkKey; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.device.DefaultDeviceDescription; import org.onosproject.net.device.DefaultPortDescription; import org.onosproject.net.flow.CompletedBatchOperation; import org.onosproject.net.flow.DefaultFlowEntry; import org.onosproject.net.flow.DefaultFlowRule; import org.onosproject.net.flow.DefaultTrafficSelector; import org.onosproject.net.flow.DefaultTrafficTreatment; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowId; import org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.FlowRuleBatchEntry; import org.onosproject.net.flow.FlowRuleBatchEvent; import org.onosproject.net.flow.FlowRuleBatchOperation; import org.onosproject.net.flow.FlowRuleBatchRequest; import org.onosproject.net.flow.StoredFlowEntry; import org.onosproject.net.flow.criteria.Criteria; import org.onosproject.net.flow.criteria.Criterion; import org.onosproject.net.flow.instructions.Instructions; import org.onosproject.net.flow.instructions.L0ModificationInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction; import org.onosproject.net.host.DefaultHostDescription; import org.onosproject.net.host.HostDescription; import org.onosproject.net.intent.ConnectivityIntent; import org.onosproject.net.intent.FlowRuleIntent; import org.onosproject.net.intent.HostToHostIntent; import org.onosproject.net.intent.Intent; import org.onosproject.net.intent.IntentId; import org.onosproject.net.intent.IntentOperation; import org.onosproject.net.intent.IntentState; import org.onosproject.net.intent.Key; import 
org.onosproject.net.intent.LinkCollectionIntent; import org.onosproject.net.intent.MplsIntent; import org.onosproject.net.intent.MplsPathIntent; import org.onosproject.net.intent.MultiPointToSinglePointIntent; import org.onosproject.net.intent.OpticalConnectivityIntent; import org.onosproject.net.intent.OpticalPathIntent; import org.onosproject.net.intent.PathIntent; import org.onosproject.net.intent.PointToPointIntent; import org.onosproject.net.intent.SinglePointToMultiPointIntent; import org.onosproject.net.intent.constraint.AnnotationConstraint; import org.onosproject.net.intent.constraint.BandwidthConstraint; import org.onosproject.net.intent.constraint.BooleanConstraint; import org.onosproject.net.intent.constraint.LambdaConstraint; import org.onosproject.net.intent.constraint.LatencyConstraint; import org.onosproject.net.intent.constraint.LinkTypeConstraint; import org.onosproject.net.intent.constraint.ObstacleConstraint; import org.onosproject.net.intent.constraint.WaypointConstraint; import org.onosproject.net.link.DefaultLinkDescription; import org.onosproject.net.packet.DefaultOutboundPacket; import org.onosproject.net.packet.DefaultPacketRequest; import org.onosproject.net.packet.PacketPriority; import org.onosproject.net.provider.ProviderId; import org.onosproject.net.resource.Bandwidth; import org.onosproject.net.resource.BandwidthResourceAllocation; import org.onosproject.net.resource.BandwidthResourceRequest; import org.onosproject.net.resource.DefaultLinkResourceAllocations; import org.onosproject.net.resource.DefaultLinkResourceRequest; import org.onosproject.net.resource.Lambda; import org.onosproject.net.resource.LambdaResourceAllocation; import org.onosproject.net.resource.LambdaResourceRequest; import org.onosproject.net.resource.LinkResourceRequest; import org.onosproject.net.resource.MplsLabel; import org.onosproject.net.resource.MplsLabelResourceAllocation; import org.onosproject.net.resource.MplsLabelResourceRequest; import 
org.onosproject.store.Timestamp;
import org.onosproject.store.service.Versioned;

import java.net.URI;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Optional;

/**
 * Pre-built {@link KryoNamespace} definitions used for serialization across the
 * distributed stores.
 *
 * NOTE(review): the order of .register(...) calls assigns sequential Kryo type
 * ids, so registrations must not be reordered or removed without considering
 * wire compatibility between nodes — presumably all cluster members run the
 * same registration order; confirm before editing.
 */
public final class KryoNamespaces {

    // Basic JDK and Guava collection types.
    public static final KryoNamespace BASIC = KryoNamespace.newBuilder()
            .nextId(KryoNamespace.FLOATING_ID)
            .register(byte[].class)
            // Guava immutable collections need custom serializers; the concrete
            // runtime classes (e.g. singleton vs. regular) are registered explicitly.
            .register(new ImmutableListSerializer(),
                      ImmutableList.class,
                      ImmutableList.of(1).getClass(),
                      ImmutableList.of(1, 2).getClass())
            .register(new ImmutableSetSerializer(),
                      ImmutableSet.class,
                      ImmutableSet.of().getClass(),
                      ImmutableSet.of(1).getClass(),
                      ImmutableSet.of(1, 2).getClass())
            .register(new ImmutableMapSerializer(),
                      ImmutableMap.class,
                      ImmutableMap.of().getClass(),
                      ImmutableMap.of("a", 1).getClass(),
                      ImmutableMap.of("R", 2, "D", 2).getClass())
            .register(HashMap.class)
            .register(ArrayList.class,
                      LinkedList.class,
                      HashSet.class
            )
            .register(new ArraysAsListSerializer(), Arrays.asList().getClass())
            .register(Collections.singletonList(1).getClass())
            .register(Duration.class)
            .register(Collections.emptySet().getClass())
            .register(Optional.class)
            .register(Collections.emptyList().getClass())
            .register(Collections.unmodifiableSet(Collections.emptySet()).getClass())
            .build();

    /**
     * KryoNamespace which can serialize ON.lab misc classes.
     */
    public static final KryoNamespace MISC = KryoNamespace.newBuilder()
            .nextId(KryoNamespace.FLOATING_ID)
            .register(new IpPrefixSerializer(), IpPrefix.class)
            .register(new Ip4PrefixSerializer(), Ip4Prefix.class)
            .register(new Ip6PrefixSerializer(), Ip6Prefix.class)
            .register(new IpAddressSerializer(), IpAddress.class)
            .register(new Ip4AddressSerializer(), Ip4Address.class)
            .register(new Ip6AddressSerializer(), Ip6Address.class)
            .register(new MacAddressSerializer(), MacAddress.class)
            .register(VlanId.class)
            .build();

    /**
     * Kryo registration Id for user custom registration.
     */
    public static final int BEGIN_USER_CUSTOM_ID = 300;

    // TODO: Populate other classes
    /**
     * KryoNamespace which can serialize API bundle classes.
     * Composed of BASIC and MISC (at fixed id offsets) plus the API types below.
     */
    public static final KryoNamespace API = KryoNamespace.newBuilder()
            .nextId(KryoNamespace.INITIAL_ID)
            .register(BASIC)
            // Fixed id offsets reserve room for growth in the included namespaces.
            .nextId(KryoNamespace.INITIAL_ID + 30)
            .register(MISC)
            .nextId(KryoNamespace.INITIAL_ID + 30 + 10)
            .register(
                    Version.class,
                    ControllerNode.State.class,
                    ApplicationState.class,
                    DefaultApplication.class,
                    Device.Type.class,
                    Port.Type.class,
                    ChassisId.class,
                    DefaultAnnotations.class,
                    DefaultControllerNode.class,
                    DefaultDevice.class,
                    DefaultDeviceDescription.class,
                    DefaultLinkDescription.class,
                    Port.class,
                    DefaultPortDescription.class,
                    Element.class,
                    Link.Type.class,
                    Link.State.class,
                    Timestamp.class,
                    Leadership.class,
                    LeadershipEvent.class,
                    LeadershipEvent.Type.class,
                    HostId.class,
                    HostDescription.class,
                    DefaultHostDescription.class,
                    DefaultFlowEntry.class,
                    StoredFlowEntry.class,
                    FlowRule.Type.class,
                    DefaultFlowRule.class,
                    // NOTE(review): DefaultFlowEntry.class appears twice in this list
                    // (also above); the duplicate consumes an id slot — verify intent.
                    DefaultFlowEntry.class,
                    DefaultPacketRequest.class,
                    PacketPriority.class,
                    FlowEntry.FlowEntryState.class,
                    FlowId.class,
                    DefaultTrafficSelector.class,
                    Criteria.PortCriterion.class,
                    Criteria.MetadataCriterion.class,
                    Criteria.EthCriterion.class,
                    Criteria.EthTypeCriterion.class,
                    Criteria.VlanIdCriterion.class,
                    Criteria.VlanPcpCriterion.class,
                    Criteria.IPDscpCriterion.class,
                    Criteria.IPEcnCriterion.class,
                    Criteria.IPProtocolCriterion.class,
                    Criteria.IPCriterion.class,
                    Criteria.TcpPortCriterion.class,
                    Criteria.UdpPortCriterion.class,
                    Criteria.SctpPortCriterion.class,
                    Criteria.IcmpTypeCriterion.class,
                    Criteria.IcmpCodeCriterion.class,
                    Criteria.IPv6FlowLabelCriterion.class,
                    Criteria.Icmpv6TypeCriterion.class,
                    Criteria.Icmpv6CodeCriterion.class,
                    Criteria.IPv6NDTargetAddressCriterion.class,
                    Criteria.IPv6NDLinkLayerAddressCriterion.class,
                    Criteria.MplsCriterion.class,
                    Criteria.IPv6ExthdrFlagsCriterion.class,
                    Criteria.LambdaCriterion.class,
                    Criteria.OpticalSignalTypeCriterion.class,
                    Criterion.class,
                    Criterion.Type.class,
                    DefaultTrafficTreatment.class,
                    Instructions.DropInstruction.class,
                    Instructions.OutputInstruction.class,
                    Instructions.GroupInstruction.class,
                    L0ModificationInstruction.class,
                    L0ModificationInstruction.L0SubType.class,
                    L0ModificationInstruction.ModLambdaInstruction.class,
                    L2ModificationInstruction.class,
                    L2ModificationInstruction.L2SubType.class,
                    L2ModificationInstruction.ModEtherInstruction.class,
                    L2ModificationInstruction.PushHeaderInstructions.class,
                    L2ModificationInstruction.ModVlanIdInstruction.class,
                    L2ModificationInstruction.ModVlanPcpInstruction.class,
                    L2ModificationInstruction.ModMplsLabelInstruction.class,
                    L2ModificationInstruction.ModMplsTtlInstruction.class,
                    L3ModificationInstruction.class,
                    L3ModificationInstruction.L3SubType.class,
                    L3ModificationInstruction.ModIPInstruction.class,
                    L3ModificationInstruction.ModIPv6FlowLabelInstruction.class,
                    L3ModificationInstruction.ModTtlInstruction.class,
                    RoleInfo.class,
                    FlowRuleBatchEvent.class,
                    FlowRuleBatchEvent.Type.class,
                    FlowRuleBatchRequest.class,
                    FlowRuleBatchOperation.class,
                    CompletedBatchOperation.class,
                    FlowRuleBatchEntry.class,
                    FlowRuleBatchEntry.FlowRuleOperation.class,
                    IntentId.class,
                    IntentState.class,
                    //Key.class, is abstract
                    Key.of(1L, new DefaultApplicationId(0, "bar")).getClass(), //LongKey.class
                    Key.of("foo", new DefaultApplicationId(0, "bar")).getClass(), //StringKey.class
                    Intent.class,
                    ConnectivityIntent.class,
                    PathIntent.class,
                    DefaultPath.class,
                    DefaultEdgeLink.class,
                    HostToHostIntent.class,
                    PointToPointIntent.class,
                    MultiPointToSinglePointIntent.class,
                    SinglePointToMultiPointIntent.class,
                    FlowRuleIntent.class,
                    LinkCollectionIntent.class,
                    OpticalConnectivityIntent.class,
                    OpticalPathIntent.class,
                    LinkResourceRequest.class,
                    DefaultLinkResourceRequest.class,
                    BandwidthResourceRequest.class,
                    LambdaResourceRequest.class,
                    Lambda.class,
                    Bandwidth.class,
                    DefaultLinkResourceAllocations.class,
                    BandwidthResourceAllocation.class,
                    LambdaResourceAllocation.class,
                    // Constraints
                    LambdaConstraint.class,
                    BandwidthConstraint.class,
                    LinkTypeConstraint.class,
                    LatencyConstraint.class,
                    WaypointConstraint.class,
                    ObstacleConstraint.class,
                    AnnotationConstraint.class,
                    BooleanConstraint.class,
                    IntentOperation.class
            )
            // Types with hand-written serializers.
            .register(new DefaultApplicationIdSerializer(), DefaultApplicationId.class)
            .register(new URISerializer(), URI.class)
            .register(new NodeIdSerializer(), NodeId.class)
            .register(new ProviderIdSerializer(), ProviderId.class)
            .register(new DeviceIdSerializer(), DeviceId.class)
            .register(new PortNumberSerializer(), PortNumber.class)
            .register(new DefaultPortSerializer(), DefaultPort.class)
            .register(new LinkKeySerializer(), LinkKey.class)
            .register(new ConnectPointSerializer(), ConnectPoint.class)
            .register(new DefaultLinkSerializer(), DefaultLink.class)
            .register(new MastershipTermSerializer(), MastershipTerm.class)
            .register(new HostLocationSerializer(), HostLocation.class)
            .register(new DefaultOutboundPacketSerializer(), DefaultOutboundPacket.class)
            .register(Versioned.class)
            .register(DefaultGroupId.class)
            .register(
                    MplsIntent.class,
                    MplsPathIntent.class,
                    MplsLabelResourceAllocation.class,
                    MplsLabelResourceRequest.class,
                    MplsLabel.class,
                    org.onlab.packet.MplsLabel.class,
                    org.onlab.packet.MPLS.class
            )
            .build();

    // not to be instantiated
    private KryoNamespaces() {}
}
apache-2.0
psiegman/ehcachetag
ehcachetag/src/test/java/nl/siegmann/ehcachetag/util/SecondTestBean.java
330
package nl.siegmann.ehcachetag.util; public class SecondTestBean { private String color; private String size; public String getColor() { return color; } public void setColor(String color) { this.color = color; } public String getSize() { return size; } public void setSize(String size) { this.size = size; } }
apache-2.0
googleapis/java-bigtable-hbase
bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/adapters/filters/FuzzyRowFilterAdapter.java
4093
/* * Copyright 2015 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.bigtable.hbase.adapters.filters; import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS; import com.google.api.core.InternalApi; import com.google.cloud.bigtable.data.v2.models.Filters.Filter; import com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter; import com.google.cloud.bigtable.hbase.adapters.read.ReaderExpressionHelper; import com.google.cloud.bigtable.hbase.adapters.read.ReaderExpressionHelper.QuoteMetaOutputStream; import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; import java.io.IOException; import java.lang.reflect.Field; import java.util.List; import org.apache.hadoop.hbase.filter.FuzzyRowFilter; import org.apache.hadoop.hbase.util.Pair; /** * An adapter for {@link org.apache.hadoop.hbase.filter.FuzzyRowFilter}. * * <p>For internal use only - public for technical reasons. 
*/ @InternalApi("For internal usage only") public class FuzzyRowFilterAdapter extends TypedFilterAdapterBase<FuzzyRowFilter> { private static Field FUZZY_KEY_DATA_FIELD; private static Exception FUZZY_KEY_DATA_FIELD_EXCEPTION; static { try { FUZZY_KEY_DATA_FIELD = FuzzyRowFilter.class.getDeclaredField("fuzzyKeysData"); FUZZY_KEY_DATA_FIELD.setAccessible(true); } catch (NoSuchFieldException | SecurityException e) { FUZZY_KEY_DATA_FIELD_EXCEPTION = e; } } /** {@inheritDoc} */ @Override public Filter adapt(FilterAdapterContext context, FuzzyRowFilter filter) throws IOException { List<Pair<byte[], byte[]>> pairs = extractFuzzyRowFilterPairs(filter); if (pairs.isEmpty()) { return FILTERS.pass(); } InterleaveFilter interleave = FILTERS.interleave(); for (Pair<byte[], byte[]> pair : pairs) { Preconditions.checkArgument( pair.getFirst().length == pair.getSecond().length, "Fuzzy info and match mask must have the same length"); interleave.filter(createSingleRowFilter(pair.getFirst(), pair.getSecond())); } return interleave; } private static Filter createSingleRowFilter(byte[] key, byte[] mask) throws IOException { ByteString.Output output = ByteString.newOutput(key.length * 2); QuoteMetaOutputStream quotingStream = new QuoteMetaOutputStream(output); for (int i = 0; i < mask.length; i++) { if (mask[i] == -1) { quotingStream.write(key[i]); } else { // Write unquoted to match any byte at this position: output.write(ReaderExpressionHelper.ANY_BYTE_BYTES); } } // match any trailing bytes output.write(ReaderExpressionHelper.ALL_BYTE_BYTES); quotingStream.close(); return FILTERS.key().regex(output.toByteString()); } @SuppressWarnings("unchecked") static List<Pair<byte[], byte[]>> extractFuzzyRowFilterPairs(FuzzyRowFilter filter) throws IOException { // TODO: Change FuzzyRowFilter to expose fuzzyKeysData. 
if (FUZZY_KEY_DATA_FIELD_EXCEPTION != null) { throw new IOException("Could not read the contents of the FuzzyRowFilter"); } try { return (List<Pair<byte[], byte[]>>) FUZZY_KEY_DATA_FIELD.get(filter); } catch (IllegalArgumentException | IllegalAccessException e) { throw new IOException("Could not read the contents of the FuzzyRowFilter", e); } } /** {@inheritDoc} */ @Override public FilterSupportStatus isFilterSupported( FilterAdapterContext context, FuzzyRowFilter filter) { return FilterSupportStatus.SUPPORTED; } }
apache-2.0
ty1er/incubator-asterixdb
hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
8900
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.storage.am.lsm.common.impls; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.storage.am.common.api.IMetadataPageManager; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentId; import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent; import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType; import org.apache.hyracks.storage.am.lsm.common.util.ComponentUtils; import org.apache.hyracks.storage.am.lsm.common.util.LSMComponentIdUtils; import org.apache.hyracks.storage.common.MultiComparator; public abstract class AbstractLSMDiskComponent extends AbstractLSMComponent implements ILSMDiskComponent { private static final Logger LOGGER = Logger.getLogger(AbstractLSMDiskComponent.class.getName()); private final DiskComponentMetadata metadata; // a variable cache of componentId stored in metadata. // since componentId is immutable, we do not want to read from metadata every time the componentId // is requested. 
private ILSMComponentId componentId; public AbstractLSMDiskComponent(AbstractLSMIndex lsmIndex, IMetadataPageManager mdPageManager, ILSMComponentFilter filter) { super(lsmIndex, filter); state = ComponentState.READABLE_UNWRITABLE; metadata = new DiskComponentMetadata(mdPageManager); } @Override public boolean threadEnter(LSMOperationType opType, boolean isMutableComponent) { if (state == ComponentState.INACTIVE) { throw new IllegalStateException("Trying to enter an inactive disk component"); } switch (opType) { case FORCE_MODIFICATION: case MODIFICATION: case REPLICATE: case SEARCH: case DISK_COMPONENT_SCAN: readerCount++; break; case MERGE: if (state == ComponentState.READABLE_MERGING) { // This should never happen unless there are two concurrent merges that were scheduled // concurrently and they have interleaving components to be merged. // This should be handled properly by the merge policy, but we guard against that here anyway. return false; } state = ComponentState.READABLE_MERGING; readerCount++; break; default: throw new UnsupportedOperationException("Unsupported operation " + opType); } return true; } @Override public void threadExit(LSMOperationType opType, boolean failedOperation, boolean isMutableComponent) throws HyracksDataException { switch (opType) { case MERGE: // In case two merge operations were scheduled to merge an overlapping set of components, // the second merge will fail and it must reset those components back to their previous state. 
if (failedOperation) { state = ComponentState.READABLE_UNWRITABLE; } // Fallthrough case FORCE_MODIFICATION: case MODIFICATION: case REPLICATE: case SEARCH: case DISK_COMPONENT_SCAN: readerCount--; if (readerCount == 0 && state == ComponentState.READABLE_MERGING) { state = ComponentState.INACTIVE; } break; default: throw new UnsupportedOperationException("Unsupported operation " + opType); } if (readerCount <= -1) { throw new IllegalStateException("Invalid LSM disk component readerCount: " + readerCount); } } @Override public DiskComponentMetadata getMetadata() { return metadata; } @Override public ILSMComponentId getId() throws HyracksDataException { if (componentId != null) { return componentId; } synchronized (this) { if (componentId == null) { componentId = LSMComponentIdUtils.readFrom(metadata); } } if (componentId.missing()) { // For normal datasets, componentId shouldn't be missing, since otherwise it'll be a bug. // However, we cannot throw an exception here to be compatible with legacy datasets. // In this case, the disk component would always get a garbage Id [-1, -1], which makes the // component Id-based optimization useless but still correct. 
LOGGER.warning("Component Id not found from disk component metadata"); } return componentId; } /** * Mark the component as valid * * @param persist * whether the call should force data to disk before returning * @throws HyracksDataException */ @Override public void markAsValid(boolean persist) throws HyracksDataException { ComponentUtils.markAsValid(getMetadataHolder(), persist); if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Marked as valid component with id: " + getId()); } } @Override public void activate(boolean createNewComponent) throws HyracksDataException { if (createNewComponent) { getIndex().create(); } getIndex().activate(); if (getLSMComponentFilter() != null && !createNewComponent) { getLsmIndex().getFilterManager().readFilter(getLSMComponentFilter(), getMetadataHolder()); } } @Override public void deactivateAndDestroy() throws HyracksDataException { getIndex().deactivate(); getIndex().destroy(); } @Override public void destroy() throws HyracksDataException { getIndex().destroy(); } @Override public void deactivate() throws HyracksDataException { getIndex().deactivate(); } @Override public void deactivateAndPurge() throws HyracksDataException { getIndex().deactivate(); getIndex().purge(); } @Override public void validate() throws HyracksDataException { getIndex().validate(); } @Override public IChainedComponentBulkLoader createFilterBulkLoader() throws HyracksDataException { return new FilterBulkLoader(getLSMComponentFilter(), getMetadataHolder(), getLsmIndex().getFilterManager(), getLsmIndex().getTreeFields(), getLsmIndex().getFilterFields(), MultiComparator.create(getLSMComponentFilter().getFilterCmpFactories())); } @Override public IChainedComponentBulkLoader createIndexBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex) throws HyracksDataException { return new LSMIndexBulkLoader( getIndex().createBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex)); } @Override public 
ChainedLSMDiskComponentBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex, boolean withFilter, boolean cleanupEmptyComponent) throws HyracksDataException { ChainedLSMDiskComponentBulkLoader chainedBulkLoader = new ChainedLSMDiskComponentBulkLoader(this, cleanupEmptyComponent); if (withFilter && getLsmIndex().getFilterFields() != null) { chainedBulkLoader.addBulkLoader(createFilterBulkLoader()); } chainedBulkLoader .addBulkLoader(createIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex)); return chainedBulkLoader; } @Override public String toString() { return "{\"class\":" + getClass().getSimpleName() + "\", \"index\":" + getIndex().toString() + "}"; } }
apache-2.0
Centril/sleepfighter
application/sleepfighter/src/main/java/se/toxbee/sleepfighter/preference/WeatherPreferences.java
1449
/* * Copyright 2014 toxbee.se * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package se.toxbee.sleepfighter.preference; import se.toxbee.sleepfighter.utils.prefs.PreferenceNode; /** * {@link WeatherPreferences} contains info about weather. * * @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad. * @version 1.0 * @since Dec 15, 2013 */ public class WeatherPreferences extends AppPreferenceNode { protected WeatherPreferences( PreferenceNode b ) { super( b, "weather" ); } /** * Used to temporarily store the weather. Some seconds before the app starts, the weather is fetched. * This preference is used to temporarily store the weather info. * * @param weather */ public void setTemp( String weather ) { p.setString( "temp", weather ); } /** * Returns the weather. * * @see #setTemp(String) * @return the weather. */ public String getWeather() { return p.getString( "temp", null ); } }
apache-2.0
googleads/googleads-java-lib
modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201809/cm/BiddingErrors.java
5527
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * BiddingErrors.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */
// NOTE(review): generated code — do not hand-edit; regenerate from the WSDL instead.

package com.google.api.ads.adwords.axis.v201809.cm;


/**
 * Represents error codes for bidding strategy entities.
 */
public class BiddingErrors  extends com.google.api.ads.adwords.axis.v201809.cm.ApiError  implements java.io.Serializable {
    /* The error reason represented by an enum. */
    private com.google.api.ads.adwords.axis.v201809.cm.BiddingErrorsReason reason;

    // No-arg constructor required by the Axis bean (de)serializers.
    public BiddingErrors() {
    }

    // Full constructor: forwards the common ApiError fields to the superclass
    // and keeps only {@code reason} locally.
    public BiddingErrors(
           java.lang.String fieldPath,
           com.google.api.ads.adwords.axis.v201809.cm.FieldPathElement[] fieldPathElements,
           java.lang.String trigger,
           java.lang.String errorString,
           java.lang.String apiErrorType,
           com.google.api.ads.adwords.axis.v201809.cm.BiddingErrorsReason reason) {
        super(
            fieldPath,
            fieldPathElements,
            trigger,
            errorString,
            apiErrorType);
        this.reason = reason;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("apiErrorType", getApiErrorType())
            .add("errorString", getErrorString())
            .add("fieldPath", getFieldPath())
            .add("fieldPathElements", getFieldPathElements())
            .add("reason", getReason())
            .add("trigger", getTrigger())
            .toString();
    }

    /**
     * Gets the reason value for this BiddingErrors.
     *
     * @return reason   * The error reason represented by an enum.
     */
    public com.google.api.ads.adwords.axis.v201809.cm.BiddingErrorsReason getReason() {
        return reason;
    }


    /**
     * Sets the reason value for this BiddingErrors.
     *
     * @param reason   * The error reason represented by an enum.
     */
    public void setReason(com.google.api.ads.adwords.axis.v201809.cm.BiddingErrorsReason reason) {
        this.reason = reason;
    }

    // Cycle guard used by the generated equals(): holds the object currently being
    // compared so mutually-referencing beans do not recurse forever.
    private java.lang.Object __equalsCalc = null;

    // Generated equality: superclass fields plus {@code reason}. Synchronized so the
    // per-instance __equalsCalc guard is not clobbered by a concurrent call on the
    // same instance.
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof BiddingErrors)) return false;
        BiddingErrors other = (BiddingErrors) obj;
        // NOTE(review): unreachable — a null obj already failed the instanceof test above.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already comparing against some object: equal only if it is the same one.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) && 
            ((this.reason==null && other.getReason()==null) || 
             (this.reason!=null &&
              this.reason.equals(other.getReason())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for hashCode(), analogous to __equalsCalc: while a computation is in
    // progress, re-entrant calls contribute 0 instead of recursing.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getReason() != null) {
            _hashCode += getReason().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps this bean and its {@code reason} element onto the
    // AdWords cm/v201809 XML namespace for Axis (de)serialization.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(BiddingErrors.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "BiddingErrors"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("reason");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "reason"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "BiddingErrors.Reason"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
googleads/googleads-java-lib
modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201809/o/MoneyAttribute.java
4988
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * MoneyAttribute.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */
// NOTE(review): generated code — do not hand-edit; regenerate from the WSDL instead.

package com.google.api.ads.adwords.axis.v201809.o;


/**
 * {@link Attribute} type that contains a {@link Money} value.
 */
public class MoneyAttribute  extends com.google.api.ads.adwords.axis.v201809.o.Attribute  implements java.io.Serializable {
    /* {@link Money} value contained by this {@link Attribute}. */
    private com.google.api.ads.adwords.axis.v201809.cm.Money value;

    // No-arg constructor required by the Axis bean (de)serializers.
    public MoneyAttribute() {
    }

    // Full constructor: forwards the attribute type to the superclass and keeps
    // the {@link Money} payload locally.
    public MoneyAttribute(
           java.lang.String attributeType,
           com.google.api.ads.adwords.axis.v201809.cm.Money value) {
        super(
            attributeType);
        this.value = value;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("attributeType", getAttributeType())
            .add("value", getValue())
            .toString();
    }

    /**
     * Gets the value value for this MoneyAttribute.
     *
     * @return value   * {@link Money} value contained by this {@link Attribute}.
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Money getValue() {
        return value;
    }


    /**
     * Sets the value value for this MoneyAttribute.
     *
     * @param value   * {@link Money} value contained by this {@link Attribute}.
     */
    public void setValue(com.google.api.ads.adwords.axis.v201809.cm.Money value) {
        this.value = value;
    }

    // Cycle guard used by the generated equals(): holds the object currently being
    // compared so mutually-referencing beans do not recurse forever.
    private java.lang.Object __equalsCalc = null;

    // Generated equality: superclass fields plus {@code value}. Synchronized so the
    // per-instance __equalsCalc guard is not clobbered by a concurrent call on the
    // same instance.
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof MoneyAttribute)) return false;
        MoneyAttribute other = (MoneyAttribute) obj;
        // NOTE(review): unreachable — a null obj already failed the instanceof test above.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already comparing against some object: equal only if it is the same one.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) && 
            ((this.value==null && other.getValue()==null) || 
             (this.value!=null &&
              this.value.equals(other.getValue())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for hashCode(), analogous to __equalsCalc: while a computation is in
    // progress, re-entrant calls contribute 0 instead of recursing.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getValue() != null) {
            _hashCode += getValue().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps this bean onto the AdWords o/v201809 XML namespace, with the
    // {@code value} element typed as cm/v201809 Money, for Axis (de)serialization.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(MoneyAttribute.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "MoneyAttribute"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("value");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "value"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Money"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
PennState/directory-fortress-core-1
src/main/java/org/apache/directory/fortress/core/ldap/LdapDataProvider.java
51475
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.fortress.core.ldap; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.apache.commons.lang.StringUtils; import org.apache.directory.api.ldap.extras.controls.ppolicy.PasswordPolicy; import org.apache.directory.api.ldap.extras.controls.ppolicy.PasswordPolicyImpl; import org.apache.directory.api.ldap.extras.controls.ppolicy_impl.PasswordPolicyDecorator; import org.apache.directory.api.ldap.model.constants.SchemaConstants; import org.apache.directory.api.ldap.model.cursor.CursorException; import org.apache.directory.api.ldap.model.cursor.SearchCursor; import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.entry.DefaultAttribute; import org.apache.directory.api.ldap.model.entry.DefaultModification; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.entry.Modification; import org.apache.directory.api.ldap.model.entry.ModificationOperation; import 
org.apache.directory.api.ldap.model.entry.Value; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException; import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException; import org.apache.directory.api.ldap.model.exception.LdapOperationErrorException; import org.apache.directory.api.ldap.model.message.BindRequest; import org.apache.directory.api.ldap.model.message.BindRequestImpl; import org.apache.directory.api.ldap.model.message.BindResponse; import org.apache.directory.api.ldap.model.message.CompareRequest; import org.apache.directory.api.ldap.model.message.CompareRequestImpl; import org.apache.directory.api.ldap.model.message.CompareResponse; import org.apache.directory.api.ldap.model.message.Control; import org.apache.directory.api.ldap.model.message.Response; import org.apache.directory.api.ldap.model.message.ResultCodeEnum; import org.apache.directory.api.ldap.model.message.SearchRequest; import org.apache.directory.api.ldap.model.message.SearchRequestImpl; import org.apache.directory.api.ldap.model.message.SearchScope; import org.apache.directory.api.ldap.model.message.controls.ProxiedAuthz; import org.apache.directory.api.ldap.model.message.controls.ProxiedAuthzImpl; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.fortress.core.GlobalIds; import org.apache.directory.fortress.core.model.Constraint; import org.apache.directory.fortress.core.model.ConstraintUtil; import org.apache.directory.fortress.core.model.FortEntity; import org.apache.directory.fortress.core.model.Hier; import org.apache.directory.fortress.core.model.Relationship; import org.apache.directory.fortress.core.util.Config; import org.apache.directory.fortress.core.util.LdapUtil; import org.apache.directory.ldap.client.api.LdapConnection; /** * Abstract class contains methods to perform low-level entity to ldap persistence. 
These methods are called by the * Fortress DAO's, i.e. {@link org.apache.directory.fortress.core.impl.UserDAO}. {@link org.apache.directory.fortress.core.impl.RoleDAO}, * {@link org.apache.directory.fortress.core.impl.PermDAO}, .... * These are low-level data utilities, very little if any data validations are performed here. * <p> * This class is thread safe. * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public abstract class LdapDataProvider { // Logging private static final String CLS_NM = LdapDataProvider.class.getName(); private static final int MAX_DEPTH = 100; private static final LdapCounters COUNTERS = new LdapCounters(); private static final PasswordPolicy PP_REQ_CTRL = new PasswordPolicyImpl(); /** * Given a contextId and a fortress param name return the LDAP dn. * * @param contextId is to determine what sub-tree to use. * @param root contains the fortress parameter name that corresponds with a particular LDAP container. * @return String contains the dn to use for operation. */ protected static String getRootDn( String contextId, String root ) { String szDn = Config.getInstance().getProperty( root ); // The contextId must not be null, or "HOME" or "null" if ( StringUtils.isNotEmpty( contextId ) && !contextId.equalsIgnoreCase( GlobalIds.NULL ) && !contextId .equals( GlobalIds.HOME ) ) { int idx = szDn.indexOf( Config.getInstance().getProperty( GlobalIds.SUFFIX ) ); if ( idx > 0 ) { // Found. The DN is ,ou=<contextId>, StringBuilder dn = new StringBuilder(); dn.append( szDn.substring( 0, idx - 1 ) ).append( "," ).append( SchemaConstants.OU_AT ).append( "=" ) .append( contextId ).append( "," ).append( szDn.substring( idx ) ); return dn.toString(); } else { return ""; } } else { return szDn; } } /** * Given a contextId return the LDAP dn that includes the suffix. * * @param contextId is to determine what sub-tree to use. * @return String contains the dn to use for operation. 
*/ protected String getRootDn( String contextId ) { StringBuilder dn = new StringBuilder(); if ( StringUtils.isNotEmpty( contextId ) && !contextId.equalsIgnoreCase( GlobalIds.NULL ) && !contextId .equals( GlobalIds.HOME ) ) { dn.append( SchemaConstants.OU_AT ).append( "=" ).append( contextId ).append( "," + "" ).append( Config.getInstance().getProperty( GlobalIds.SUFFIX ) ); } else { dn.append( Config.getInstance().getProperty( GlobalIds.SUFFIX ) ); } return dn.toString(); } /** * Read the ldap record from specified location. * * @param connection handle to ldap connection. * @param dn contains ldap distinguished name. * @param attrs array contains array names to pull back. * @return ldap entry. * @throws LdapException in the event system error occurs. */ protected Entry read( LdapConnection connection, String dn, String[] attrs ) throws LdapException { COUNTERS.incrementRead(); return connection.lookup( dn, attrs ); } /** * Read the ldap record from specified location. * * @param connection handle to ldap connection. * @param dn contains ldap distinguished name. * @param attrs array contains array names to pull back. * @return ldap entry. * @throws LdapException in the event system error occurs. */ protected Entry read( LdapConnection connection, Dn dn, String[] attrs ) throws LdapException { COUNTERS.incrementRead(); return connection.lookup( dn, attrs ); } /** * Read the ldap record from specified location with user assertion. * * @param connection handle to ldap connection. * @param dn contains ldap distinguished name. * @param attrs array contains array names to pull back. , * PoolMgr.ConnType.USER * @param userDn string value represents the identity of user on who's behalf the request was initiated. The * value will be stored in openldap auditsearch record AuthZID's attribute. * @return ldap entry. * @throws LdapException in the event system error occurs. * @throws UnsupportedEncodingException for search control errors. 
*/ protected Entry read( LdapConnection connection, String dn, String[] attrs, String userDn ) throws LdapException { COUNTERS.incrementRead(); return connection.lookup( dn, attrs ); } /** * Add a new ldap entry to the directory. Do not add audit context. * * @param connection handle to ldap connection. * @param entry contains data to add.. * @throws LdapException in the event system error occurs. */ protected void add( LdapConnection connection, Entry entry ) throws LdapException { COUNTERS.incrementAdd(); connection.add( entry ); } /** * Add a new ldap entry to the directory. Add audit context. * * @param connection handle to ldap connection. * @param entry contains data to add.. * @param entity contains audit context. * @throws LdapException in the event system error occurs. */ protected void add( LdapConnection connection, Entry entry, FortEntity entity ) throws LdapException { COUNTERS.incrementAdd(); if ( !Config.getInstance().isAuditDisabled() && ( entity != null ) && ( entity.getAdminSession() != null ) ) { if ( StringUtils.isNotEmpty( entity.getAdminSession().getInternalUserId() ) ) { entry.add( GlobalIds.FT_MODIFIER, entity.getAdminSession().getInternalUserId() ); } if ( StringUtils.isNotEmpty( entity.getModCode() ) ) { entry.add( GlobalIds.FT_MODIFIER_CODE, entity.getModCode() ); } if ( StringUtils.isNotEmpty( entity.getModId() ) ) { entry.add( GlobalIds.FT_MODIFIER_ID, entity.getModId() ); } } connection.add( entry ); } /** * Update exiting ldap entry to the directory. Do not add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry. * @param mods contains data to modify. * @throws LdapException in the event system error occurs. */ protected void modify( LdapConnection connection, String dn, List<Modification> mods ) throws LdapException { COUNTERS.incrementMod(); connection.modify( dn, mods.toArray( new Modification[]{} ) ); } /** * Update exiting ldap entry to the directory. 
Do not add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry. * @param mods contains data to modify. * @throws LdapException in the event system error occurs. */ protected void modify( LdapConnection connection, Dn dn, List<Modification> mods ) throws LdapException { COUNTERS.incrementMod(); connection.modify( dn, mods.toArray( new Modification[] {} ) ); } /** * Update exiting ldap entry to the directory. Add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry. * @param mods contains data to modify. * @param entity contains audit context. * @throws LdapException in the event system error occurs. */ protected void modify( LdapConnection connection, String dn, List<Modification> mods, FortEntity entity ) throws LdapException { COUNTERS.incrementMod(); audit( mods, entity ); connection.modify( dn, mods.toArray( new Modification[] {} ) ); } /** * Update exiting ldap entry to the directory. Add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry. * @param mods contains data to modify. * @param entity contains audit context. * @throws LdapException in the event system error occurs. */ protected void modify( LdapConnection connection, Dn dn, List<Modification> mods, FortEntity entity ) throws LdapException { COUNTERS.incrementMod(); audit( mods, entity ); connection.modify( dn, mods.toArray( new Modification[] {} ) ); } /** * Delete exiting ldap entry from the directory. Do not add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry targeted for removal.. * @throws LdapException in the event system error occurs. */ protected void delete( LdapConnection connection, String dn ) throws LdapException { COUNTERS.incrementDelete(); connection.delete( dn ); } /** * Delete exiting ldap entry from the directory. Add audit context. 
This method will call modify prior to * delete which will * force corresponding audit record to be written to slapd access log. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry targeted for removal.. * @param entity contains audit context. * @throws LdapException in the event system error occurs. */ protected void delete( LdapConnection connection, String dn, FortEntity entity ) throws LdapException { COUNTERS.incrementDelete(); List<Modification> mods = new ArrayList<Modification>(); audit( mods, entity ); if ( mods.size() > 0 ) { modify( connection, dn, mods ); } connection.delete( dn ); } /** * Delete exiting ldap entry from the directory. Add audit context. This method will call modify prior to * delete which will * force corresponding audit record to be written to slapd access log. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry targeted for removal.. * @param entity contains audit context. * @throws LdapException in the event system error occurs. */ protected void delete( LdapConnection connection, Dn dn, FortEntity entity ) throws LdapException { COUNTERS.incrementDelete(); List<Modification> mods = new ArrayList<Modification>(); audit( mods, entity ); if ( mods.size() > 0 ) { modify( connection, dn, mods ); } connection.delete( dn ); } /** * Delete exiting ldap entry and all descendants from the directory. Do not add audit context. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry targeted for removal.. * @throws LdapException in the event system error occurs. * @throws IOException * @throws CursorException */ protected void deleteRecursive( LdapConnection connection, String dn ) throws LdapException, CursorException { int recursiveCount = 0; deleteRecursive( dn, connection, recursiveCount ); } /** * Delete exiting ldap entry and all descendants from the directory. Add audit context. 
This method will call * modify prior to delete which will * force corresponding audit record to be written to slapd access log. * * @param connection handle to ldap connection. * @param dn contains distinguished node of entry targeted for removal.. * @param entity contains audit context. * @throws LdapException in the event system error occurs. * @throws CursorException */ protected void deleteRecursive( LdapConnection connection, String dn, FortEntity entity ) throws LdapException, CursorException { List<Modification> mods = new ArrayList<Modification>(); audit( mods, entity ); if ( mods.size() > 0 ) { modify( connection, dn, mods ); } deleteRecursive( connection, dn ); } /** * Used to recursively remove all nodes up to record pointed to by dn attribute. * * @param dn contains distinguished node of entry targeted for removal.. * @param connection handle to ldap connection. * @param recursiveCount keeps track of how many iterations have been performed. * @throws LdapException in the event system error occurs. * @throws CursorException */ private void deleteRecursive( String dn, LdapConnection connection, int recursiveCount ) throws LdapException, CursorException { String method = "deleteRecursive"; // Sanity check - only allow max tree depth of 100 if ( recursiveCount++ > MAX_DEPTH ) { // too deep inside of a recursive sequence; String error = "." + method + " dn [" + dn + "] depth error in recursive"; throw new LdapOperationErrorException( error ); } String theDN; // Find child nodes SearchCursor cursor = search( connection, dn, SearchScope.ONELEVEL, "(objectclass=*)", SchemaConstants.NO_ATTRIBUTE_ARRAY, false, 0 ); // Iterate over all entries under this entry while ( cursor.next() ) { try { // Next directory entry Entry entry = cursor.getEntry(); theDN = entry.getDn().getName(); // continue down: deleteRecursive( theDN, connection, recursiveCount ); recursiveCount--; } catch ( LdapException le ) { // cannot continue; String error = "." 
+ method + " dn [" + dn + "] caught LdapException=" + le.getMessage(); throw new LdapException( error ); } } // delete the node: COUNTERS.incrementDelete(); delete( connection, dn ); } /** * Add the audit context variables to the modfication set. * * @param mods used to update ldap attributes. * @param entity contains audit context. */ private void audit( List<Modification> mods, FortEntity entity ) { if ( !Config.getInstance().isAuditDisabled() && ( entity != null ) && ( entity.getAdminSession() != null ) ) { if ( StringUtils.isNotEmpty( entity.getAdminSession().getInternalUserId() ) ) { Modification modification = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, GlobalIds.FT_MODIFIER, entity.getAdminSession().getInternalUserId() ); mods.add( modification ); } if ( StringUtils.isNotEmpty( entity.getModCode() ) ) { Modification modification = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, GlobalIds.FT_MODIFIER_CODE, entity.getModCode() ); mods.add( modification ); } if ( StringUtils.isNotEmpty( entity.getModId() ) ) { Modification modification = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, GlobalIds.FT_MODIFIER_ID, entity.getModId() ); mods.add( modification ); } } } /** * Perform normal ldap search accepting default batch size. * * @param connection is LdapConnection object used for all communication with host. * @param baseDn contains address of distinguished name to begin ldap search * @param scope indicates depth of search starting at basedn. 0 (base dn), * 1 (one level down) or 2 (infinite) are valid values. * @param filter contains the search criteria * @param attrs is the requested list of attritubutes to return from directory search. * @param attrsOnly if true pull back attribute names only. * @return result set containing ldap entries returned from directory. * @throws LdapException thrown in the event of error in ldap client or server code. 
 */
protected SearchCursor search( LdapConnection connection, String baseDn, SearchScope scope, String filter,
    String[] attrs, boolean attrsOnly ) throws LdapException
{
    // Track the operation for the DAO statistics counters.
    COUNTERS.incrementSearch();

    SearchRequest searchRequest = new SearchRequestImpl();
    searchRequest.setBase( new Dn( baseDn ) );
    searchRequest.setScope( scope );
    searchRequest.setFilter( filter );
    searchRequest.setTypesOnly( attrsOnly );
    searchRequest.addAttributes( attrs );

    return connection.search( searchRequest );
}


/**
 * Perform normal ldap search specifying default batch size and max entries to return.
 *
 * @param connection is LdapConnection object used for all communication with host.
 * @param baseDn     contains address of distinguished name to begin ldap search
 * @param scope      indicates depth of search starting at basedn.  0 (base dn),
 *                   1 (one level down) or 2 (infinite) are valid values.
 * @param filter     contains the search criteria
 * @param attrs      is the requested list of attributes to return from directory search.
 * @param attrsOnly  if true pull back attribute names only.
 * @param maxEntries specifies the maximum number of entries to return in this search query.
 * @return result set containing ldap entries returned from directory.
 * @throws LdapException thrown in the event of error in ldap client or server code.
 */
protected SearchCursor search( LdapConnection connection, String baseDn, SearchScope scope, String filter,
    String[] attrs, boolean attrsOnly, int maxEntries ) throws LdapException
{
    // Track the operation for the DAO statistics counters.
    COUNTERS.incrementSearch();

    SearchRequest searchRequest = new SearchRequestImpl();
    searchRequest.setBase( new Dn( baseDn ) );
    searchRequest.setFilter( filter );
    searchRequest.setScope( scope );
    // Cap the result set size server-side:
    searchRequest.setSizeLimit( maxEntries );
    searchRequest.setTypesOnly( attrsOnly );
    searchRequest.addAttributes( attrs );

    return connection.search( searchRequest );
}


/**
 * This method will search the directory and return at most one record.  If more than one record is found
 * an ldap exception will be thrown.
* * @param connection is LdapConnection object used for all communication with host. * @param baseDn contains address of distinguished name to begin ldap search * @param scope indicates depth of search starting at basedn. 0 (base dn), * 1 (one level down) or 2 (infinite) are valid values. * @param filter contains the search criteria * @param attrs is the requested list of attritubutes to return from directory search. * @param attrsOnly if true pull back attribute names only. * @return entry containing target ldap node. * @throws LdapException thrown in the event of error in ldap client or server code. * @throws CursorException If we weren't able to fetch an element from the search result */ protected Entry searchNode( LdapConnection connection, String baseDn, SearchScope scope, String filter, String[] attrs, boolean attrsOnly ) throws LdapException, CursorException { SearchRequest searchRequest = new SearchRequestImpl(); searchRequest.setBase( new Dn( baseDn ) ); searchRequest.setFilter( filter ); searchRequest.setScope( scope ); searchRequest.setTypesOnly( attrsOnly ); searchRequest.addAttributes( attrs ); SearchCursor result = connection.search( searchRequest ); Entry entry = result.getEntry(); if ( result.next() ) { throw new LdapException( "searchNode failed to return unique record for LDAP search of base DN [" + baseDn + "] filter [" + filter + "]" ); } return entry; } /** * This search method uses OpenLDAP Proxy Authorization Control to assert arbitrary user identity onto connection. * * @param connection is LdapConnection object used for all communication with host. * @param baseDn contains address of distinguished name to begin ldap search * @param scope indicates depth of search starting at basedn. 0 (base dn), * 1 (one level down) or 2 (infinite) are valid values. * @param filter contains the search criteria * @param attrs is the requested list of attritubutes to return from directory search. * @param attrsOnly if true pull back attribute names only. 
 * @param userDn     string value represents the identity of user on who's behalf the request was initiated.  The
 *                   value will be stored in openldap auditsearch record AuthZID's attribute.
 * @return entry containing target ldap node.
 * @throws LdapException   thrown in the event of error in ldap client or server code.
 * @throws CursorException If we weren't able to fetch an element from the search result
 */
protected Entry searchNode( LdapConnection connection, String baseDn, SearchScope scope, String filter,
    String[] attrs, boolean attrsOnly, String userDn ) throws LdapException, CursorException
{
    COUNTERS.incrementSearch();

    // NOTE(review): userDn is accepted but never used — no ProxiedAuthz control is added to
    // the request, contrary to the javadoc's proxy-authorization claim (compare with
    // compareNode below, which does attach the control). Confirm whether this is intentional.
    SearchRequest searchRequest = new SearchRequestImpl();
    searchRequest.setBase( new Dn( baseDn ) );
    searchRequest.setFilter( filter );
    searchRequest.setScope( scope );
    searchRequest.setTypesOnly( attrsOnly );
    searchRequest.addAttributes( attrs );

    SearchCursor result = connection.search( searchRequest );

    // NOTE(review): getEntry() precedes any next() call — verify cursor positioning against
    // the LDAP API cursor contract (same pattern as the overload above).
    Entry entry = result.getEntry();

    // Enforce the at-most-one-record contract:
    if ( result.next() )
    {
        throw new LdapException( "searchNode failed to return unique record for LDAP search of base DN [" +
            baseDn + "] filter [" + filter + "]" );
    }

    return entry;
}


/**
 * This method uses the compare ldap func to assert audit record into the directory server's configured audit
 * logger.
 *
 * This is for one reason - to force the ldap server to maintain an audit trail on checkAccess api.
 *
 * Use proxy authz control (RFC4370) to assert the caller's id onto the record.
 *
 * @param connection is LdapConnection object used for all communication with host.
 * @param dn         contains address of distinguished name to begin ldap search
 * @param userDn     dn for user node
 * @param attribute  attribute used for compare
 * @return true if compare operation succeeds
 * @throws LdapException                thrown in the event of error in ldap client or server code.
 * @throws UnsupportedEncodingException in the event the server cannot perform the operation.
 */
protected boolean compareNode( LdapConnection connection, String dn, String userDn,
    Attribute attribute ) throws LdapException, UnsupportedEncodingException
{
    COUNTERS.incrementCompare();

    CompareRequest compareRequest = new CompareRequestImpl();
    compareRequest.setName( new Dn( dn ) );
    compareRequest.setAttributeId( attribute.getId() );
    compareRequest.setAssertionValue( attribute.getString() );

    // Assert the end user's dn onto the reqest using proxy authZ control so openldap can log who the user was (for authZ audit trail)
    ProxiedAuthz proxiedAuthzControl = new ProxiedAuthzImpl();
    proxiedAuthzControl.setAuthzId( "dn: " + userDn );
    compareRequest.addControl( proxiedAuthzControl );

    CompareResponse response = connection.compare( compareRequest );
    return response.getLdapResult().getResultCode() == ResultCodeEnum.SUCCESS;
}


/**
 * Method wraps ldap client to return multi-occurring attribute values by name within a given entry and returns
 * as a list of strings.
 *
 * @param entry         contains the target ldap entry.
 * @param attributeName name of ldap attribute to retrieve.
 * @return List of type string containing attribute values; empty list when {@code entry} is null.
 *         NOTE(review): returns null (not an empty list) when the entry exists but lacks the
 *         attribute — inconsistent with the null-entry case; callers appear to depend on it.
 */
protected List<String> getAttributes( Entry entry, String attributeName )
{
    List<String> attrValues = new ArrayList<>();

    if ( entry != null )
    {
        Attribute attr = entry.get( attributeName );

        if ( attr != null )
        {
            // Collect every value of the multi-occurring attribute:
            for ( Value<?> value : attr )
            {
                attrValues.add( value.getString() );
            }
        }
        else
        {
            return null;
        }
    }

    return attrValues;
}


/**
 * Return the image stored on the entry.
 *
 * @param entry         contains the image target.
 * @param attributeName to be retrieved.
 * @return byte array containing image, or null when the attribute is absent.
 * @throws LdapInvalidAttributeValueException contains the system error.
 */
protected byte[] getPhoto( Entry entry, String attributeName ) throws LdapInvalidAttributeValueException
{
    byte[] photo = null;
    Attribute attr = entry.get( attributeName );

    if ( attr != null )
    {
        photo = attr.getBytes();
    }

    return photo;
}


/**
 * Method wraps ldap client to return multi-occurring attribute values by name within a given entry and returns
 * as a set of strings.
 *
 * @param entry         contains the target ldap entry.
 * @param attributeName name of ldap attribute to retrieve.
 * @return Set of type string containing attribute values; empty when entry is null or attribute absent.
 */
protected Set<String> getAttributeSet( Entry entry, String attributeName )
{
    // create Set with case insensitive comparator:
    Set<String> attrValues = new TreeSet<>( String.CASE_INSENSITIVE_ORDER );

    if ( entry != null && entry.containsAttribute( attributeName ) )
    {
        for ( Value<?> value : entry.get( attributeName ) )
        {
            attrValues.add( value.getString() );
        }
    }

    return attrValues;
}


/**
 * Method wraps ldap client to return attribute value by name within a given entry and returns as a string.
 *
 * @param entry         contains the target ldap entry.
 * @param attributeName name of ldap attribute to retrieve.
 * @return value contained in a string variable; null when the entry or the attribute is absent.
 * @throws LdapInvalidAttributeValueException When we weren't able to get the attribute from the entry
 */
protected String getAttribute( Entry entry, String attributeName ) throws LdapInvalidAttributeValueException
{
    if ( entry != null )
    {
        Attribute attr = entry.get( attributeName );

        if ( attr != null )
        {
            return attr.getString();
        }
        else
        {
            return null;
        }
    }
    else
    {
        return null;
    }
}


/**
 * Method will retrieve the relative distinguished name from a distinguished name variable.
 *
 * @param dn contains ldap distinguished name.
 * @return rDn as string.
 */
protected String getRdn( String dn )
{
    try
    {
        return new Dn( dn ).getRdn().getName();
    }
    catch ( LdapInvalidDnException lide )
    {
        // Malformed DN is treated as "no RDN" rather than an error:
        return null;
    }
}


/**
 * Method will retrieve the relative distinguished name's value from a distinguished name variable.
 *
 * @param dn contains ldap distinguished name.
 * @return rDn value as string, or null when the dn cannot be parsed.
 */
protected String getRdnValue( String dn )
{
    try
    {
        return new Dn( dn ).getRdn().getNormValue();
    }
    catch ( LdapInvalidDnException lide )
    {
        // Malformed DN is treated as "no value" rather than an error:
        return null;
    }
}


/**
 * Create multi-occurring ldap attribute given array of strings and attribute name.
 *
 * @param name   contains attribute name to create.
 * @param values array of string that contains attribute values.
 * @return Attribute containing multi-occurring attribute set.
 * @throws LdapException in the event of ldap client error.
 */
protected Attribute createAttributes( String name, String values[] ) throws LdapException
{
    return new DefaultAttribute( name, values );
}


/**
 * Convert constraint from raw ldap format to application entity.
 *
 * @param le         ldap entry containing constraint.
 * @param ftDateTime reference to {@link org.apache.directory.fortress.core.model.Constraint} containing formatted data.
 * @throws LdapInvalidAttributeValueException when we weren't able to retrieve the attribute from the entry
 */
protected void unloadTemporal( Entry le, Constraint ftDateTime ) throws LdapInvalidAttributeValueException
{
    String szRawData = getAttribute( le, GlobalIds.CONSTRAINT );

    if ( szRawData != null && szRawData.length() > 0 )
    {
        ConstraintUtil.setConstraint( szRawData, ftDateTime );
    }
}


/**
 * Given an ldap attribute name and a list of attribute values, construct an ldap attribute set to be added to directory.
 *
 * @param list     list of type string containing attribute values to load into attribute set.
 * @param entry    contains ldap attribute set targeted for adding.
 * @param attrName name of ldap attribute being added.
 * @throws LdapException If we weren't able to add the attributes into the entry
 */
protected void loadAttrs( List<String> list, Entry entry, String attrName ) throws LdapException
{
    // No-op on null/empty input so callers can pass optional value lists unconditionally.
    if ( list != null && list.size() > 0 )
    {
        entry.add( attrName, list.toArray( new String[] {} ) );
    }
}


/**
 * Given an ldap attribute name and a list of attribute values, construct an ldap modification set to be updated
 * in directory.
 *
 * @param list     list of type string containing attribute values to load into modification set.
 * @param mods     contains ldap modification set targeted for updating.
 * @param attrName name of ldap attribute being modified.
 */
protected void loadAttrs( List<String> list, List<Modification> mods, String attrName )
{
    if ( ( list != null ) && ( list.size() > 0 ) )
    {
        // REPLACE semantics: existing values of attrName are overwritten wholesale.
        mods.add( new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrName,
            list.toArray( new String[] {} ) ) );
    }
}


/**
 * Given a collection of {@link org.apache.directory.fortress.core.model.Relationship}s, convert to raw data name-value format and
 * load into ldap modification set in preparation for ldap modify.
 *
 * @param list     contains List of type {@link org.apache.directory.fortress.core.model.Relationship} targeted for updating in ldap.
 * @param mods     ldap modification set containing parent-child relationships in raw ldap format.
 * @param attrName contains the name of the ldap attribute to be updated.
 * @param op       specifies type of mod: {@link org.apache.directory.fortress.core.model.Hier.Op#ADD},
 *                 {@link org.apache.directory.fortress.core.model.Hier.Op#MOD}, {@link org.apache.directory.fortress.core.model.Hier.Op#REM}.
 */
protected void loadRelationshipAttrs( List<Relationship> list, List<Modification> mods, String attrName,
    Hier.Op op )
{
    if ( list != null )
    {
        Attribute attr;

        for ( Relationship rel : list )
        {
            // This LDAP attr is stored as a name-value pair separated by a ':'.
            attr = new DefaultAttribute( attrName, rel.getChild() + GlobalIds.PROP_SEP + rel.getParent() );

            // Map the hierarchy operation onto the corresponding LDAP modification type:
            switch ( op )
            {
                case ADD:
                    mods.add( new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ) );
                    break;

                case MOD:
                    mods.add( new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attr ) );
                    break;

                case REM:
                    mods.add( new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attr ) );
                    break;
            }
        }
    }
}


/**
 * Given an ldap attribute name and a set of attribute values, construct an ldap modification set to be updated
 * in directory.
 *
 * @param values   set of type string containing attribute values to load into modification set.
 * @param mods     contains ldap modification set targeted for updating.
 * @param attrName name of ldap attribute being updated.
 */
protected void loadAttrs( Set<String> values, List<Modification> mods, String attrName )
{
    if ( ( values != null ) && ( values.size() > 0 ) )
    {
        // REPLACE semantics: existing values of attrName are overwritten wholesale.
        mods.add( new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrName,
            values.toArray( new String[] {} ) ) );
    }
}


/**
 * Given an ldap attribute name and a set of attribute values, construct an ldap attribute set to be added to
 * directory.
 *
 * @param values   set of type string containing attribute values to load into attribute set.
 * @param entry    contains ldap entry to pull attrs from.
 * @param attrName name of ldap attribute being added.
 * @throws LdapException If we weren't able to add the values into the entry
 */
protected void loadAttrs( Set<String> values, Entry entry, String attrName ) throws LdapException
{
    if ( ( values != null ) && ( values.size() > 0 ) )
    {
        entry.add( attrName, values.toArray( new String[] {} ) );
    }
}


/**
 * Given a collection of {@link java.util.Properties}, convert to raw data name-value format and load into ldap
 * modification set in preparation for ldap modify.
 *
 * @param props    contains {@link java.util.Properties} targeted for updating in ldap.
 * @param mods     ldap modification set containing name-value pairs in raw ldap format.
 * @param attrName contains the name of the ldap attribute to be updated.
 * @param replace  boolean variable, if set to true use {@link ModificationOperation#REPLACE_ATTRIBUTE} else {@link
 *                 ModificationOperation#ADD_ATTRIBUTE}.
 */
protected void loadProperties( Properties props, List<Modification> mods, String attrName, boolean replace )
{
    // Delegate to the separator-aware overload using the default ':' separator.
    loadProperties( props, mods, attrName, replace, GlobalIds.PROP_SEP );
}


/**
 * Given a collection of {@link java.util.Properties}, convert to raw data name-value format and load into ldap
 * modification set in preparation for ldap modify.
 *
 * @param props     contains {@link java.util.Properties} targeted for updating in ldap.
 * @param mods      ldap modification set containing name-value pairs in raw ldap format.
 * @param attrName  contains the name of the ldap attribute to be updated.
 * @param replace   boolean variable, if set to true use {@link ModificationOperation#REPLACE_ATTRIBUTE} else {@link
 *                  ModificationOperation#ADD_ATTRIBUTE}.
 * @param separator contains the char value used to separate name and value in ldap raw format.
 */
protected void loadProperties( Properties props, List<Modification> mods, String attrName, boolean replace,
    char separator )
{
    if ( props != null && props.size() > 0 )
    {
        // Clear existing values first when replace semantics were requested:
        if ( replace )
        {
            mods.add( new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrName ) );
        }

        for ( Enumeration<?> e = props.propertyNames(); e.hasMoreElements(); )
        {
            String key = ( String ) e.nextElement();
            String val = props.getProperty( key );
            // This LDAP attr is stored as a name-value pair separated by a ':'.
            mods.add( new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attrName,
                key + separator + val ) );
        }
    }
}


/**
 * Given a collection of {@link java.util.Properties}, convert to raw data name-value format and load into ldap
 * modification set in preparation for ldap modify.
 *
 * @param props    contains {@link java.util.Properties} targeted for removal from ldap.
 * @param mods     ldap modification set containing name-value pairs in raw ldap format to be removed.
 * @param attrName contains the name of the ldap attribute to be removed.
 */
protected void removeProperties( Properties props, List<Modification> mods, String attrName )
{
    if ( props != null && props.size() > 0 )
    {
        for ( Enumeration<?> e = props.propertyNames(); e.hasMoreElements(); )
        {
            String key = ( String ) e.nextElement();
            String val = props.getProperty( key );
            // This LDAP attr is stored as a name-value pair separated by a ':'.
            mods.add( new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrName,
                key + GlobalIds.PROP_SEP + val ) );
        }
    }
}


/**
 * Given a collection of {@link java.util.Properties}, convert to raw data name-value format and load into ldap
 * modification set in preparation for ldap add.
 *
 * @param props    contains {@link java.util.Properties} targeted for adding to ldap.
 * @param entry    contains ldap entry to pull attrs from.
 * @param attrName contains the name of the ldap attribute to be added.
 * @throws LdapException If we weren't able to add the properies into the entry
 */
protected void loadProperties( Properties props, Entry entry, String attrName ) throws LdapException
{
    if ( ( props != null ) && ( props.size() > 0 ) )
    {
        Attribute attr = new DefaultAttribute( attrName );

        for ( Enumeration<?> e = props.propertyNames(); e.hasMoreElements(); )
        {
            // This LDAP attr is stored as a name-value pair separated by a ':'.
            String key = ( String ) e.nextElement();
            String val = props.getProperty( key );
            String prop = key + GlobalIds.PROP_SEP + val;
            attr.add( prop );
        }

        // Only attach the attribute when at least one value was collected:
        if ( attr.size() != 0 )
        {
            entry.add( attr );
        }
    }
}


/**
 * Given a collection of {@link java.util.Properties}, convert to raw data name-value format and load into ldap modification set in preparation for ldap add.
 *
 * @param props     contains {@link java.util.Properties} targeted for adding to ldap.
* @param entry contains ldap entry to push attrs into. * @param attrName contains the name of the ldap attribute to be added. * @param separator contains the char value used to separate name and value in ldap raw format. * @throws LdapException If we weren't able to add the properies into the entry */ protected void loadProperties( Properties props, Entry entry, String attrName, char separator ) throws LdapException { if ( ( props != null ) && ( props.size() > 0 ) ) { Attribute attr = null; for ( Enumeration<?> e = props.propertyNames(); e.hasMoreElements(); ) { // This LDAP attr is stored as a name-value pair separated by a ':'. String key = ( String ) e.nextElement(); String val = props.getProperty( key ); String prop = key + separator + val; if ( attr == null ) { attr = new DefaultAttribute( attrName ); } else { attr.add( prop ); } } if ( attr != null ) { entry.add( attr ); } } } /** * Encode some text so that it can be used in a LDAP filter. * * @param value The value to encode * @param validLen The maximum accepted length of the value. * @return String containing encoded data. * @throws LdapException If the value is longer than the maximum value */ protected String encodeSafeText( String value, int validLen ) throws LdapException { if ( StringUtils.isNotEmpty( value ) ) { int length = value.length(); if ( length > validLen ) { String error = "encodeSafeText value [" + value + "] invalid length [" + length + "]"; throw new LdapException( error ); } if ( LdapUtil.getInstance().isLdapfilterSizeFound() ) { value = escapeLDAPSearchFilter( value ); } } return value; } /** * Get Password Policy Response Control from LDAP client. * * @param resp contains reference to LDAP pw policy response. * @return PasswordPolicy response control. 
 */
protected PasswordPolicy getPwdRespCtrl( Response resp )
{
    Control control = resp.getControls().get( PP_REQ_CTRL.getOid() );

    if ( control == null )
    {
        // Server did not return a pw-policy control on this response:
        return null;
    }

    return ( ( PasswordPolicyDecorator ) control ).getDecorated();
}


/**
 * Calls the PoolMgr to perform an LDAP bind for a user/password combination.  This function is valid
 * if and only if the user entity is a member of the USERS data set.
 *
 * @param connection connection to ldap server.
 * @param szUserDn   contains the LDAP dn to the user entry in String format.
 * @param password   contains the password in clear text.
 * @return bindResponse contains the result of the operation.
 * @throws LdapException in the event of LDAP error.
 */
protected BindResponse bind( LdapConnection connection, String szUserDn, String password ) throws LdapException
{
    COUNTERS.incrementBind();

    Dn userDn = new Dn( szUserDn );
    BindRequest bindReq = new BindRequestImpl();
    bindReq.setDn( userDn );
    bindReq.setCredentials( password );
    // Request the password policy response control so policy state can be inspected by callers:
    bindReq.addControl( PP_REQ_CTRL );

    return connection.bind( bindReq );
}


/**
 * Calls the PoolMgr to close the Admin LDAP connection.
 *
 * @param connection handle to ldap connection object.
 */
public void closeAdminConnection( LdapConnection connection )
{
    LdapConnectionProvider.getInstance().closeAdminConnection(connection);
}


/**
 * Calls the PoolMgr to close the Log LDAP connection.
 *
 * @param connection handle to ldap connection object.
 */
protected void closeLogConnection( LdapConnection connection )
{
    LdapConnectionProvider.getInstance().closeLogConnection(connection);
}


/**
 * Calls the PoolMgr to close the User LDAP connection.
 *
 * @param connection handle to ldap connection object.
 */
protected void closeUserConnection( LdapConnection connection )
{
    LdapConnectionProvider.getInstance().closeUserConnection(connection);
}


/**
 * Calls the PoolMgr to get an Admin connection to the LDAP server.
 *
 * @return ldap connection.
 * @throws LdapException If we had an issue getting an LDAP connection
 */
public LdapConnection getAdminConnection() throws LdapException
{
    return LdapConnectionProvider.getInstance().getAdminConnection();
}


/**
 * Calls the PoolMgr to get an Log connection to the LDAP server.
 *
 * @return ldap connection.
 * @throws LdapException If we had an issue getting an LDAP connection
 */
protected LdapConnection getLogConnection() throws LdapException
{
    return LdapConnectionProvider.getInstance().getLogConnection();
}


/**
 * Calls the PoolMgr to get an User connection to the LDAP server.
 *
 * @return ldap connection.
 * @throws LdapException If we had an issue getting an LDAP connection
 */
protected LdapConnection getUserConnection() throws LdapException
{
    return LdapConnectionProvider.getInstance().getUserConnection();
}


/**
 * Return to call reference to dao counter object with running totals for ldap operations add, mod, delete, search, etc.
 *
 * @return {@link LdapCounters} contains long values of atomic ldap operations for current running process.
 */
public static LdapCounters getLdapCounters()
{
    return COUNTERS;
}


/**
 * Perform encoding on supplied input string for certain unsafe ascii characters.  These chars may be unsafe
 * because ldap reserves some characters as operands.  Safe encoding safeguards from malicious scripting input errors
 * that are possible if data filtering did not get performed before being passed into dao layer.
 *
 * @param filter contains the data to filter.
 * @return possibly modified input string for matched characters.
 */
protected String escapeLDAPSearchFilter( String filter )
{
    StringBuilder sb = new StringBuilder();
    int filterLen = filter.length();

    for ( int i = 0; i < filterLen; i++ )
    {
        boolean found = false;
        char curChar = filter.charAt( i );
        int j = 0;

        // Scan the meta-char table for a match; the early break on '>' implies the table is
        // expected to be sorted ascending — NOTE(review): confirm LdapUtil keeps it sorted.
        for ( ; j < GlobalIds.LDAP_FILTER_SIZE; j++ )
        {
            if ( LdapUtil.getInstance().getLdapMetaChars()[j] > curChar )
            {
                break;
            }
            else if ( curChar == LdapUtil.getInstance().getLdapMetaChars()[j] )
            {
                // Meta character: emit backslash followed by its configured replacement value.
                sb.append( "\\" );
                sb.append( LdapUtil.getInstance().getLdapReplVals()[j] );
                found = true;
                break;
            }
        }

        if ( !found )
        {
            // Ordinary character — pass through unchanged.
            sb.append( curChar );
        }
    }

    return sb.toString();
}


/**
 * Closes all the ldap connection pools.
 */
public static void closeAllConnectionPools(){
    LdapConnectionProvider.getInstance().closeAllConnectionPools();
}
}
apache-2.0
KleeGroup/vertigo-studio
src/main/java/io/vertigo/studio/plugins/reporting/domain/metrics/fields/FieldsMetricEngine.java
1578
/**
 * vertigo - simple java starter
 *
 * Copyright (C) 2013-2017, KleeGroup, direction.technique@kleegroup.com (http://www.kleegroup.com)
 * KleeGroup, Centre d'affaire la Boursidiere - BP 159 - 92357 Le Plessis Robinson Cedex - France
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.vertigo.studio.plugins.reporting.domain.metrics.fields;

import io.vertigo.dynamo.domain.metamodel.DtDefinition;
import io.vertigo.lang.Assertion;
import io.vertigo.studio.impl.reporting.ReportMetricEngine;
import io.vertigo.studio.reporting.ReportMetric;
import io.vertigo.studio.reporting.ReportMetricBuilder;

/**
 * Counts the number of fields of a DtDefinition and reports it as a metric.
 *
 * @author pchretien
 */
public final class FieldsMetricEngine implements ReportMetricEngine<DtDefinition> {
	/** {@inheritDoc} */
	@Override
	public ReportMetric execute(final DtDefinition dtDefinition) {
		Assertion.checkNotNull(dtDefinition);
		//-----
		// The metric value is simply the field count of the definition.
		final int size = dtDefinition.getFields().size();
		return new ReportMetricBuilder()
				.withTitle("Nombre de champs")
				.withValue(size)
				.build();
	}
}
apache-2.0
TinghuanWang/source
src/cn/jsprun/utils/Common.java
107844
package cn.jsprun.utils; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URLDecoder; import java.net.URLEncoder; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.ParseException; import java.text.SimpleDateFormat; import java.text.StringCharacterIterator; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.TimeZone; import java.util.TreeMap; import java.util.Map.Entry; import javax.mail.internet.MimeUtility; import javax.servlet.ServletContext; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.net.ftp.FTPClient; import org.apache.oro.text.regex.MalformedPatternException; import org.apache.oro.text.regex.MatchResult; import org.apache.oro.text.regex.Perl5Compiler; import org.apache.oro.text.regex.Perl5Matcher; import org.apache.struts.Globals; import org.apache.struts.upload.FormFile; import org.apache.struts.util.MessageResources; import cn.jsprun.domain.Members; import cn.jsprun.domain.Usergroups; import cn.jsprun.foreg.service.PostOperating; import cn.jsprun.service.DataBaseService; import cn.jsprun.service.MemberService; public final class Common { public final static String SIGNSTRING = "\t3938-3187-414023-22164\t"; public static final 
String[] THREAD_COLORS={"", "red", "orange", "yellow", "green", "cyan", "blue", "purple", "gray", "white"}; public static final String[] COLOR_OPTIONS = { "Black", "Sienna", "DarkOliveGreen","DarkGreen", "DarkSlateBlue", "Navy", "Indigo","DarkSlateGray", "DarkRed", "DarkOrange", "Olive", "Green","Teal", "Blue", "SlateGray", "DimGray", "Red", "SandyBrown","YellowGreen", "SeaGreen", "MediumTurquoise", "RoyalBlue","Purple", "Gray", "Magenta", "Orange", "Yellow", "Lime","Cyan", "DeepSkyBlue", "DarkOrchid", "Silver", "Pink", "Wheat","LemonChiffon", "PaleGreen", "PaleTurquoise", "LightBlue","Plum", "White" }; private static final char[] PREG_CHARS={'.', '\\' ,'+' ,'*', '?', '[', '^' ,']' ,'$', '(' ,')', '{', '}', '=' ,'!', '<','>', '|',':'}; private static final String RAND_CHARS = "0123456789abcdefghigklmnopqrstuvtxyzABCDEFGHIGKLMNOPQRSTUVWXYZ"; private static final String CHARSET_NAME="GBK"; private static Random random = new Random(); private static DataBaseService dataBaseService=(DataBaseService) BeanFactory.getBean("dataBaseService"); private static MemberService memberService =((MemberService) BeanFactory.getBean("memberService")); private static PostOperating postOperating = (PostOperating)BeanFactory.getBean("postOperating"); private static DataParse dataParse=((DataParse)BeanFactory.getBean("dataParse")); private static FtpUtils ftputil=((FtpUtils)BeanFactory.getBean("ftputils")); private static final Map<String,String[]> timeZoneIDs=new LinkedHashMap<String,String[]>(32); static{ timeZoneIDs.put("-12", new String[]{"GMT-12:00","(GMT -12:00) Eniwetok, Kwajalein"}); timeZoneIDs.put("-11", new String[]{"GMT-11:00","(GMT -11:00) Midway Island, Samoa"}); timeZoneIDs.put("-10", new String[]{"GMT-10:00","(GMT -10:00) Hawaii"}); timeZoneIDs.put("-9", new String[]{"GMT-09:00","(GMT -09:00) Alaska"}); timeZoneIDs.put("-8", new String[]{"GMT-08:00","(GMT -08:00) Pacific Time (US &amp; Canada), Tijuana"}); timeZoneIDs.put("-7", new String[]{"GMT-07:00","(GMT -07:00) 
Mountain Time (US &amp; Canada), Arizona"}); timeZoneIDs.put("-6", new String[]{"GMT-06:00","(GMT -06:00) Central Time (US &amp; Canada), Mexico City"}); timeZoneIDs.put("-5", new String[]{"GMT-05:00","(GMT -05:00) Eastern Time (US &amp; Canada), Bogota, Lima, Quito"}); timeZoneIDs.put("-4", new String[]{"GMT-04:00","(GMT -04:00) Atlantic Time (Canada), Caracas, La Paz"}); timeZoneIDs.put("-3.5", new String[]{"GMT-03:30","(GMT -03:30) Newfoundland"}); timeZoneIDs.put("-3", new String[]{"GMT-03:00","(GMT -03:00) Brassila, Buenos Aires, Georgetown, Falkland Is"}); timeZoneIDs.put("-2", new String[]{"GMT-02:00","(GMT -02:00) Mid-Atlantic, Ascension Is., St. Helena"}); timeZoneIDs.put("-1", new String[]{"GMT-01:00","(GMT -01:00) Azores, Cape Verde Islands"}); timeZoneIDs.put("0", new String[]{"GMT","(GMT) Casablanca, Dublin, Edinburgh, London, Lisbon, Monrovia"}); timeZoneIDs.put("1", new String[]{"GMT+01:00","(GMT +01:00) Amsterdam, Berlin, Brussels, Madrid, Paris, Rome"}); timeZoneIDs.put("2", new String[]{"GMT+02:00","(GMT +02:00) Cairo, Helsinki, Kaliningrad, South Africa"}); timeZoneIDs.put("3", new String[]{"GMT+03:00","(GMT +03:00) Baghdad, Riyadh, Moscow, Nairobi"}); timeZoneIDs.put("3.5", new String[]{"GMT+03:30","(GMT +03:30) Tehran"}); timeZoneIDs.put("4", new String[]{"GMT+04:00","(GMT +04:00) Abu Dhabi, Baku, Muscat, Tbilisi"}); timeZoneIDs.put("4.5", new String[]{"GMT+04:30","(GMT +04:30) Kabul"}); timeZoneIDs.put("5", new String[]{"GMT+05:00","(GMT +05:00) Ekaterinburg, Islamabad, Karachi, Tashkent"}); timeZoneIDs.put("5.5", new String[]{"GMT+05:30","(GMT +05:30) Bombay, Calcutta, Madras, New Delhi"}); timeZoneIDs.put("5.75", new String[]{"GMT+05:45","(GMT +05:45) Katmandu"}); timeZoneIDs.put("6", new String[]{"GMT+06:00","(GMT +06:00) Almaty, Colombo, Dhaka, Novosibirsk"}); timeZoneIDs.put("6.5", new String[]{"GMT+06:30","(GMT +06:30) Rangoon"}); timeZoneIDs.put("7", new String[]{"GMT+07:00","(GMT +07:00) Bangkok, Hanoi, Jakarta"}); timeZoneIDs.put("8", 
new String[]{"GMT+08:00","(GMT +08:00) Beijing, Hong Kong, Perth, Singapore, Taipei"}); timeZoneIDs.put("9", new String[]{"GMT+09:00","(GMT +09:00) Osaka, Sapporo, Seoul, Tokyo, Yakutsk"}); timeZoneIDs.put("9.5", new String[]{"GMT+09:30","(GMT +09:30) Adelaide, Darwin"}); timeZoneIDs.put("10", new String[]{"GMT+10:00","(GMT +10:00) Canberra, Guam, Melbourne, Sydney, Vladivostok"}); timeZoneIDs.put("11", new String[]{"GMT+11:00","(GMT +11:00) Magadan, New Caledonia, Solomon Islands"}); timeZoneIDs.put("12", new String[]{"GMT+12:00","(GMT +12:00) Auckland, Wellington, Fiji, Marshall Island"}); } public static int rand(int max){ return random.nextInt(max+1); } public static int rand(int min, int max){ if(min<max){ if(min>0){ return rand(max-min)+min; }else{ return rand(max); } }else{ return min; } } public static String getRandStr(int length, boolean isOnlyNum) { int size=isOnlyNum?10:62; StringBuffer hash = new StringBuffer(length); for (int i = 0; i < length; i++) { hash.append(RAND_CHARS.charAt(random.nextInt(size))); } return hash.toString(); } @SuppressWarnings("unchecked") public static boolean empty(Object obj) { if (obj == null) { return true; } else if (obj instanceof String && (obj.equals("") || obj.equals("0"))) { return true; } else if (obj instanceof Number && ((Number) obj).doubleValue() == 0) { return true; } else if (obj instanceof Boolean && !((Boolean) obj)) { return true; } else if (obj instanceof Collection && ((Collection) obj).isEmpty()) { return true; } else if (obj instanceof Map && ((Map) obj).isEmpty()) { return true; } else if (obj instanceof Object[] && ((Object[]) obj).length == 0) { return true; } return false; } private static List<String> periodscheck(String banperiods,byte disableperiodctrl,String timeoffset){ if(disableperiodctrl==0&&banperiods!=null&&!banperiods.equals("")){ float now=Float.valueOf(gmdate("HH.mm", Common.time(), timeoffset)); String[] periods=banperiods.split("\r\n"); for (String period : periods) { 
period=period.trim().replaceAll(":", "."); String[] periodTime=period.split("-"); if(periodTime!=null&&periodTime.length>=2) { float periodbegin=Float.valueOf(periodTime[0]); float periodend=Float.valueOf(periodTime[1]); if((periodbegin > periodend && (now >= periodbegin || now < periodend)) || (periodbegin < periodend && now >= periodbegin && now < periodend)) { List<String> rList = new ArrayList<String>(2); rList.add("period_nopermission"); rList.add(banperiods.replaceAll("\r\n", ",")); return rList; } } } } return null; } public static String periodscheck(String banperiods,byte disableperiodctrl,int timestamp,String timeoffset,MessageResources mr,Locale locale) { if(disableperiodctrl==0&&banperiods.length()>0) { float now=Float.valueOf(Common.gmdate("HH.mm", timestamp, timeoffset)); String[] periods=banperiods.split("\r\n"); for (String period : periods) { period=period.trim().replaceAll(":", "."); String[] periodTime=period.split("-"); if(periodTime.length>1){ float periodbegin=Float.valueOf(periodTime[0]); float periodend=Float.valueOf(periodTime[1]); if((periodbegin > periodend && (now >= periodbegin || now < periodend)) || (periodbegin < periodend && now >= periodbegin && now < periodend)) { return mr.getMessage(locale, "period_nopermission",banperiods.replaceAll("\r\n", ",")); } } } } return null; } public static boolean datecheck(String ymd){ return datecheck(ymd,"-"); } public static boolean datecheck(String ymd,String sep){ if(ymd!=null&&ymd.length()!=0){ if(ymd.matches("^\\d{1,4}"+sep+"\\d{1,2}"+sep+"\\d{1,2}$")){ String[] dates=ymd.split(sep); return checkdate(Integer.valueOf(dates[0]),Integer.valueOf(dates[1]),Integer.valueOf(dates[2])); } } return false; } public static boolean checkdate(int year,int month,int day){ if(year<1||year>9999||month<1||month>12||day<1){ return false; } Calendar calendar = Calendar.getInstance(); calendar.set(year,month-1,1); int maxDay = calendar.getActualMaximum(Calendar.DAY_OF_MONTH); if(day>maxDay){ return false; } 
return true; } public static String dateformat(String ymd){ return dateformat(ymd,"yyyy-MM-dd"); } public static String dateformat(String ymd,String formattype){ SimpleDateFormat simpleDateFormat = getSimpleDateFormat(formattype, ForumInit.settings.get("timeoffset")); try { return simpleDateFormat.format(simpleDateFormat.parse(ymd)); } catch (ParseException e) { e.printStackTrace(); return ""; } } public static boolean isEmail(String email) { return email!=null&&email.length() > 6&& email.matches("^[\\w\\-\\.]+@[\\w\\-\\.]+(\\.\\w+)+$"); } public static boolean forum(Map<String, String> forum,String hideprivate,short groupid,int lastvisit, String extgroupids,Map<String, Map<String, String>> lastposts,SimpleDateFormat sdf) { String viewperm = forum.get("viewperm"); if ("".equals(viewperm) || !"".equals(viewperm)&& forumperm(viewperm,groupid,extgroupids)|| !Common.empty(forum.get("allowview"))) { forum.put("permission", "2"); } else if (hideprivate!=null&&hideprivate.equals("0")) { forum.put("permission", "1"); } else { return false; } String icon=forum.get("icon"); if (!"".equals(icon)) { if (forum.get("icon").indexOf(",")>=0) { String[] flash = forum.get("icon").split(","); if(flash.length==3){ forum.put("icon","<a href=\"forumdisplay.jsp?fid="+ forum.get("fid")+ "\">"+"<embed style=\"margin-right: 10px\" src=\""+ flash[0].trim()+ "\" width=\""+ flash[1].trim()+ "\" height=\""+ flash[2].trim()+ "\" type=\"application/x-shockwave-flash\" align=\"left\"></embed></a>"); } } else { forum.put("icon","<a href=\"forumdisplay.jsp?fid="+ forum.get("fid")+ "\">" + "<img style=\"margin-right: 10px\" src=\""+ icon+ "\" align=\"left\" border=\"0\" /></a>"); } } Map<String, String> lastpost = new HashMap<String, String>(); int dateline=0; String lastpoststr = forum.get("lastpost").trim(); if (lastpoststr.length()>0) { String[] obj = lastpoststr.split("\t"); dateline=Integer.parseInt(obj[2]); lastpost.put("tid", obj[0]); lastpost.put("subject", obj[1]); lastpost.put("dateline", 
Common.gmdate(sdf, dateline)); if(obj.length>3){ lastpost.put("author", obj[3]); }else{ lastpost.put("author", ""); } }else{ lastpost.put("tid", "0"); lastpost.put("subject", ""); lastpost.put("dateline", "0"); lastpost.put("author", ""); } forum.put("folder", lastvisit < dateline ? " class='new'" : ""); if (Integer.valueOf(lastpost.get("tid")) > 0) { String author=lastpost.get("author"); if (!"".equals(author)) { lastpost.put("author", "<a href=\"space.jsp?username="+ Common.encode(author) + "\">" + Common.cutstr(author, 11, null) + "</a>"); } lastposts.put(forum.get("fid"), lastpost); } else { lastposts.put(forum.get("fid"), null); } lastpost=null; forum.put("moderators", moddisplay(forum.get("moderators"),"flat",false)); String subforums=forum.get("subforums"); if (subforums!= null&& !subforums.equals("")) { forum.put("subforums", subforums); } return true; } public static String moddisplay(String moderator, String type, Boolean inherit) { StringBuffer modlist = new StringBuffer(); if ("selectbox".equals(type)) { if (moderator != null && !"".equals(moderator)) { String[] moderators = moderator.split("\t"); for (String obj : moderators) { modlist.append("<li><a href=\"space.jsp?username=" + Common.encode(obj)+ "\">" + (inherit ? "<strong>" + obj + "</strong>" : obj) + "</a></li>"); } } return modlist.toString(); } else { if (moderator != null && !"".equals(moderator)) { String[] moderators = moderator.split("\t"); for (String obj : moderators) { modlist.append("<a class=\"notabs\" href=\"space.jsp?username="+Common.encode(obj) + "\">"+ (inherit ? 
"<strong>" + obj + "</strong>" : obj)+ "</a>, "); } } int length=modlist.length(); return length>=2?modlist.substring(0,length-2):""; } } public static boolean forumperm(String permstr,short groupid,String extgroupid) { StringBuffer groupidarray = new StringBuffer(); groupidarray.append(groupid); if (extgroupid != null && extgroupid.length() > 0) { String[] extgroupids = extgroupid.split("\t"); for (String obj : extgroupids) { if(obj.length()>0){ groupidarray.append("|" + obj); } } } return permstr.matches(".*(^|\t)(" + groupidarray + ")(\t|$).*"); } public static boolean matches(String content, String regex) { boolean flag=false; try { flag =new Perl5Matcher().contains(content, new Perl5Compiler().compile(regex)); } catch (MalformedPatternException e) { e.printStackTrace(); } return flag; } public static boolean ismoderator(String ismodertars, Members member) { if (member == null) { return false; } if (member.getAdminid() == 1 || member.getAdminid() == 2 || !Common.empty(ismodertars)) { return true; } return false; } public static boolean ismoderator(short fid, Members member) { if (member == null) { return false; } if (member.getAdminid() == 1 || member.getAdminid() == 2) { return true; } List<Map<String, String>> modertar = dataBaseService.executeQuery("select m.uid from jrun_moderators m where m.uid='"+ member.getUid() + "' AND m.fid=" + fid); boolean flag=false; if (modertar != null && modertar.size() > 0) { flag= true; } modertar=null; return flag; } @SuppressWarnings("unchecked") public static Map<String,String> forumformulaperm(String formulaperm,Members member,boolean ismoderator,Map<Integer,Map<String,String>> extcredits,MessageResources mr,Locale locale) { if(ismoderator){ return null; } Map<Integer,String> formula=dataParse.characterParse(formulaperm, true); if(formula.size()<2){ return null; } String formulamessage=formula.get(0); if(!formulamessage.trim().equals("")) { if(member!=null){ List<Map<String,String>> 
members=dataBaseService.executeQuery("SELECT uid FROM jrun_members WHERE uid="+member.getUid()+" AND ("+formulamessage+")"); if(members!=null&&members.size()>0){ return null; } } String usermsg=formula.get(1); String[] usermsgs=usermsg.split("\\$_DSESSION"); List<String> usermsgList=new ArrayList<String>(); if(usermsgs!=null) { for (String obj : usermsgs) { obj=obj.trim(); if(!obj.equals("")){ usermsgList.add(obj.substring(obj.indexOf("'")+1,obj.lastIndexOf("'"))); } } } if(usermsgList!=null) { usermsg=""; if(member!=null){ Map<String,Object> usermsgValue=getValues(member,usermsgList,new HashMap<String, Object>()); for (String obj : usermsgList) { Object value=usermsgValue.get(obj); if(value!=null) { usermsg+=obj+" = "+value+"&nbsp;&nbsp;&nbsp;"; }else{ usermsg +=obj+" = 0&nbsp;&nbsp;&nbsp;"; } } }else{ for (String obj : usermsgList) { usermsg +=obj+" = 0&nbsp;&nbsp;&nbsp;"; } } } Map<String,String> replaces=new TreeMap<String, String>(); replaces.put("digestposts", "&nbsp;"+mr.getMessage(locale, "digestposts")+"&nbsp;"); replaces.put("posts", "&nbsp;"+mr.getMessage(locale, "posts")+"&nbsp;"); replaces.put("oltime", "&nbsp;"+mr.getMessage(locale, "a_setting_creditsformula_oltime")+"&nbsp;"); replaces.put("pageviews", "&nbsp;"+mr.getMessage(locale, "pageviews")+"&nbsp;"); replaces.put("or", "&nbsp;&nbsp;"+mr.getMessage(locale, "or")+"&nbsp;&nbsp;"); replaces.put("and", "&nbsp;&nbsp;"+mr.getMessage(locale, "and")+"&nbsp;&nbsp;"); if(extcredits!=null&&extcredits.size()>0){ for (Integer i = 1; i <= 8; i++) { Map<String,String> extcredit=extcredits.get(i); if(extcredit!=null){ replaces.put("extcredits"+i, extcredit.get("title")); }else{ replaces.put("extcredits"+i, mr.getMessage(locale, "a_setting_creditsformula_extcredits")+i); } } } Set<Entry<String,String>> dsessions=replaces.entrySet(); for (Entry<String,String> temp : dsessions) { String dsession = temp.getKey(); String values = temp.getValue(); formulamessage=formulamessage.replaceAll(dsession, values); 
usermsg=usermsg.replaceAll(dsession, values); } Map<String,String> messages=new HashMap<String, String>(); messages.put("formulamessage", formulamessage); messages.put("usermsg", usermsg); return messages; } return null; } public static Object setValues(Object bean, String fieldName, String value) { try { Field field = bean.getClass().getDeclaredField(fieldName); StringBuffer setMethod = new StringBuffer(); setMethod.append("set"); setMethod.append(fieldName.substring(0, 1).toUpperCase()); setMethod.append(fieldName.substring(1, fieldName.length())); Method method = bean.getClass().getMethod(setMethod+"",field.getType()); method.invoke(bean, convert(value, field.getType())); } catch (Exception e) { e.printStackTrace(); } return bean; } public static Object setValues(Object bean,HttpServletRequest request) { try { Field[] fields = bean.getClass().getDeclaredFields(); String paraName = ""; String paraValue = ""; String setMethod = ""; for (int i = 0; i < fields.length; i++) { paraName = fields[i].getName(); paraValue = request.getParameter(paraName); if (paraValue != null && !"".equals(paraValue)) { setMethod = "set"+paraName.substring(0,1).toUpperCase()+paraName.substring(1,paraName.length()); Method method = bean.getClass().getMethod(setMethod,fields[i].getType()); method.invoke(bean, convert(paraValue,fields[i].getType())); } } } catch (IllegalAccessException e) { e.printStackTrace(); } catch (NoSuchMethodException e){ e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } return bean; } @SuppressWarnings("unchecked") public static Object convert(String source, Class type) { String typeName = type.getName(); Object target = null; if (typeName.equals("java.lang.String")) { target = source; } else if (typeName.equals("java.lang.Integer")) { target = Common.intval(source); } else if (typeName.equals("java.lang.Short")) { target = (short)Math.min(32767, Common.intval(source)); } 
else if (typeName.equals("java.lang.Byte")) { target = (byte)Math.min(127, Common.intval(source)); } else if (typeName.equals("java.lang.Long")) { target = Long.parseLong(source); } return target; } public static Object getValues(Object bean, String fieldName) { Object paraValue = null; try { Method method = bean.getClass().getMethod("get"+fieldName.substring(0, 1).toUpperCase()+fieldName.substring(1, fieldName.length())); paraValue = method.invoke(bean); } catch (Exception e) { e.printStackTrace(); } return paraValue; } private static Map<String, Object> getValues(Object bean, List<String> fields, Map<String, Object> fieldsMap) { try { Field[] beanFields = bean.getClass().getDeclaredFields(); if (fieldsMap == null) { fieldsMap = new HashMap<String, Object>(); } int fieldLength = fields.size(); String paraName =null; String getMethod=null; for (int i = 0; i < fieldLength; i++) { paraName = fields.get(i); Method method = null; Object paraValue = null; int beanFieldLength = beanFields.length; for (int j = 0; j < beanFieldLength; j++) { if (paraName.equals(beanFields[j].getName())) { getMethod = "get"+paraName.substring(0, 1).toUpperCase()+paraName.substring(1, paraName.length()); method = bean.getClass().getMethod(getMethod); paraValue = method.invoke(bean, new Object[0]); break; } } if (method != null) { if (paraValue instanceof Short) { paraValue = String.valueOf(paraValue); } fieldsMap.put(paraName, paraValue); } } } catch (Exception e) { e.printStackTrace(); } return fieldsMap; } public static List<String> getStr(String content, String regex) { List<String> strList = new ArrayList<String>(); try { Perl5Matcher patternMatcher=new Perl5Matcher(); if (patternMatcher.contains(content, new Perl5Compiler().compile(regex))) { MatchResult result = patternMatcher.getMatch(); for (int i = 0; i < result.groups(); i++) { strList.add(result.group(i)); } result=null; } } catch (MalformedPatternException e) { e.printStackTrace(); } return strList; } public static String 
checkpost(String subject, String message,Map<String, String> settings, Map<String, String> admingroups,MessageResources mr,Locale locale) { if (Common.strlen(subject) > 80) { return mr.getMessage(locale, "post_subject_toolong_com"); } int disablepostctrl = admingroups!=null?Common.toDigit(admingroups.get("disablepostctrl")):0; if (disablepostctrl==0) { int maxpostsize = Common.toDigit(settings.get("maxpostsize")); int minpostsize = Common.toDigit(settings.get("minpostsize")); if (maxpostsize > 0 && Common.strlen(message) > maxpostsize) { return mr.getMessage(locale, "post_message_toolong",maxpostsize); } else if (minpostsize > 0&& (Common.strlen(message.replaceAll("\\[quote\\].+?\\[quote\\]", "")) < minpostsize)) { return mr.getMessage(locale, "post_message_tooshort",minpostsize); } } return null; } public static boolean isNum(String value) { boolean flag = true; if (value != null) { int length = value.length(); for (int i = 0; i < length; i++) { if ((i == 0 && value.charAt(0) == '-')|| Character.isDigit(value.charAt(i))) { } else { flag = false; break; } } } return flag; } public static int dataToInteger(String ymd,String timeoffset) { return dataToInteger(ymd,"yyyy-MM-dd HH:mm",timeoffset); } public static int dataToInteger(String ymd,String pattern,String timeoffset) { SimpleDateFormat format = getSimpleDateFormat(pattern, timeoffset); try { if(ymd==null||ymd.equals("")){ return 0; } Date ndate = format.parse(ymd); if (format.format(ndate).equals(ymd)) { return (int)(ndate.getTime()/1000); } else { return 0; } } catch (Exception e) { return -1; } } public static void procThread(Map<String,String> thread,double ppp){ int replies = Integer.valueOf(thread.get("replies")); int views = Integer.valueOf(thread.get("views")); int special = Integer.valueOf(thread.get("special")); if (replies>views) { thread.put("views", thread.get("replies")); } double postsnum = special > 0 ? 
replies : replies + 1; if (postsnum > ppp) { StringBuffer pagelinks = new StringBuffer(); int topicpages = (int)Math.ceil(postsnum/ppp); for (int i = 1; i<=6&&i <= topicpages; i++) { pagelinks.append("<a href=\"viewthread.jsp?tid="+ thread.get("tid") + "&page=" + i + "\" target=\"_blank\">" + i + "</a> "); } if (topicpages > 6) { pagelinks.append(" .. <a href=\"viewthread.jsp?tid="+ thread.get("tid") + "&page=" + topicpages + "\" target=\"_blank\">"+ topicpages + "</a> "); } thread.put("multipage", " &nbsp; " + pagelinks); } procThread(thread); } public static void procThread(Map<String,String> thread){ int highlight=Integer.valueOf(thread.get("highlight")); if(highlight>0){ StringBuffer style=new StringBuffer(); style.append(" style=\""); if(highlight>=40){ style.append("font-weight: bold;"); highlight=highlight%40; } if(highlight>=20){ style.append("font-style: italic;"); highlight=highlight%20; } if(highlight>=10){ style.append("text-decoration: underline;"); highlight=highlight%10; } if(highlight>0){ style.append("color: "+THREAD_COLORS[highlight]); } style.append("\""); thread.put("highlight",style.toString()); } else{ thread.put("highlight", ""); } } @SuppressWarnings("unchecked") public static boolean isshowsuccess(HttpSession session,String keyword){ Map<String,Object> msgforward = (Map<String,Object>)session.getServletContext().getAttribute("msgforward"); byte quick = msgforward==null?(byte)0:Byte.valueOf(msgforward.get("quick").toString()); if(quick==1){ Map successmessages = (Map)(msgforward==null?null:msgforward.get("messages")); if(successmessages != null){ Iterator<Entry> it = successmessages.entrySet().iterator(); while(it.hasNext()){ Entry temp = it.next(); String value = (String)temp.getValue(); if(keyword.equals(value)){ return true; } } } } return false; } public static void requestforward(HttpServletResponse response,String referer){ try { response.sendRedirect(referer); } catch (IOException e) { e.printStackTrace(); } } public static String 
strip_tags(String content) { return content==null?"":content.replaceAll("<[\\s\\S]*?>", ""); } public static String htmlspecialchars(String string){ return htmlspecialchars(string, 1); } public static String htmlspecialchars(String text,int quotestyle){ StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); while(character != StringCharacterIterator.DONE){ switch (character) { case '&': sb.append("&amp;"); break; case '<': sb.append("&lt;"); break; case '>': sb.append("&gt;"); break; case '"': if(quotestyle==1||quotestyle==2){ sb.append("&quot;"); }else{ sb.append(character); } break; case '\'': if(quotestyle==2){ sb.append("&#039;"); }else{ sb.append(character); } break; default: sb.append(character); break; } character = iterator.next(); } return sb.toString(); } @SuppressWarnings("unchecked") public static void setExtcredits(HttpServletRequest request) { Map<String, String> settings = ForumInit.settings; request.setAttribute("extcredits", dataParse.characterParse(settings.get("extcredits"),true)); } public static void sessionExists(HttpServletRequest request,HttpServletResponse response,String sid,int uid,Map<String,String> settings) { boolean sessionexists = false; String seccode = null; int lastolupdate = 0; int spageviews=0; List<Map<String,String>> sessionlist = dataBaseService.executeQuery("select uid,seccode,pageviews,lastolupdate from jrun_sessions where sid='"+sid+"'"); if (sessionlist == null || sessionlist.size()<=0) { seccode = Common.getRandStr(6, true); } else { Map<String,String> session = sessionlist.get(0);sessionlist=null; int s_uid=Integer.parseInt(session.get("uid")); if(s_uid > 0){ if(s_uid!=uid){ int jsprun_uid=toDigit(CookieUtil.getCookie(request, "uid", true,settings)); HttpSession httpSession=request.getSession(); String jsprun_userss = null; short groupid = 7; byte adminid = 0; if (jsprun_uid>0) { Members member = 
memberService.findMemberById(jsprun_uid); if (member != null) { String validateAuth = Md5Token.getInstance().getLongToken(member.getPassword() + "\t"+ member.getSecques() + "\t"+ member.getUid()); if (validateAuth.equals(CookieUtil.getCookie(request, "auth",true,settings))) { jsprun_uid = member.getUid(); jsprun_userss = member.getUsername(); groupid = member.getGroupid(); adminid = member.getAdminid(); httpSession.setAttribute("user", member); Common.setDateformat(httpSession, settings); httpSession.setAttribute("jsprun_pw", member.getPassword()); } } } else { CookieUtil.setCookie(request, response, "uid", String.valueOf(jsprun_uid), 604800, true,settings); } httpSession.setAttribute("jsprun_uid", jsprun_userss!=null?jsprun_uid:0); httpSession.setAttribute("jsprun_userss", jsprun_userss!=null?jsprun_userss:""); httpSession.setAttribute("jsprun_groupid", groupid); httpSession.setAttribute("jsprun_adminid", adminid); httpSession.setAttribute("formhash", Common.getRandStr(8, false)); } seccode = session.get("seccode"); }else{ if(s_uid!=uid){ CookieUtil.clearCookies(request, response,settings); } seccode = Common.getRandStr(6, true); } sessionexists = true; lastolupdate = Integer.parseInt(session.get("lastolupdate")); spageviews=Integer.parseInt(session.get("pageviews")); session=null; } request.setAttribute("seccode", Integer.parseInt(seccode)); request.setAttribute("sessionexists", sessionexists); request.setAttribute("lastolupdate", lastolupdate); request.setAttribute("spageviews", spageviews); } public static void setDateformat(HttpSession session,Map<String,String> settings){ Members member=(Members)session.getAttribute("user"); String timeoffset=null; String dateformat = null; Byte timeformat = null; if (member != null) { if (member.getDateformat() > 0) { String[] userdateformat = settings.get("userdateformat").split("\r\n"); if(userdateformat.length>=member.getDateformat()){ dateformat = userdateformat[member.getDateformat()-1]; } } if (member.getTimeformat() > 
0) { timeformat=member.getTimeformat(); } if (!member.getTimeoffset().equals("9999")) { timeoffset = member.getTimeoffset(); } } dateformat = dateformat == null ? settings.get("dateformat"): dateformat; timeformat = timeformat == null ? Byte.parseByte(settings.get("timeformat")):timeformat; timeoffset = timeoffset == null ? settings.get("timeoffset"): timeoffset; session.setAttribute("dateformat", dateformat); session.setAttribute("timeformat",timeformat==1 ? "hh:mm a" : "HH:mm"); session.setAttribute("timeoffset", timeoffset); } public static boolean censoruser(String content,String censoruser) { if(content!=null&&content.length()>0){ String censorexp = censoruser.replaceAll("\\*", ".*"); censorexp = censorexp.replaceAll("(\r\n|\r|\n)", "|"); censorexp = censorexp.replaceAll("\\s", ""); censorexp = "^(" + censorexp + ")"; String guestexp = "\\xA1\\xA1|^Guest|^\\xD3\\xCE\\xBF\\xCD|\\xB9\\x43\\xAB\\xC8"; if (Common.matches(content, "^\\s*$|^c:\\con\\con$|[%,\\\\'\\*\"\\s\\t\\<\\>\\&]|"+ guestexp)|| (censoruser.length()>0 && Common.matches(content, censorexp))) { return true; } } return false; } public static boolean allowAccessBbs(HttpServletRequest request,HttpServletResponse response,HttpSession session,Map<String,String> settings,String accessPath) { Short groupid = (Short) session.getAttribute("jsprun_groupid"); Byte adminid = (Byte) session.getAttribute("jsprun_adminid"); List<Map<String,String>> usergroups = dataBaseService.executeQuery("select allowvisit,disableperiodctrl from jrun_usergroups as u where groupid="+groupid); if(usergroups!=null&&usergroups.size()>0){ int timestamp = Common.time(); boolean tempB = false; Map<String,String> usermap = usergroups.get(0); byte allowvisit = Byte.valueOf(usermap.get("allowvisit")); byte disableperiodctrl = Byte.valueOf(usermap.get("disableperiodctrl")); usergroups = null; usermap = null; String ipaccess=settings.get("ipaccess"); String ipban_expiration = settings.get("ipban_expiration"); int ipban_expiration_int = 
Common.toDigit(ipban_expiration); if(ipban_expiration_int>0&&ipban_expiration_int<timestamp){ ServletContext context = request.getSession().getServletContext(); ForumInit.setSettings(context, null); } String ipbanned = settings.get("ipbanned"); if (!allowAccess(request,ipaccess, allowvisit,ipbanned)) { session.setAttribute("jsprun_uid", 0); session.setAttribute("jsprun_userss", ""); session.setAttribute("jsprun_groupid", (short) 7); session.setAttribute("jsprun_adminid", (byte) 0); request.setAttribute("propertyKey", true); request.setAttribute("resultInfo","user_banned"); try { request.getRequestDispatcher("/showmessage.jsp").forward(request,response); return true; } catch (Exception e) { e.printStackTrace(); } } else if (!(accessPath.equals("logging.jsp")|| accessPath.equals("wap.jsp") || adminid == 1)) { String message = null; String arg0_forKey = null; int bbclosed = Integer.valueOf(settings.get("bbclosed")); if (bbclosed > 0) { session.setAttribute("jsprun_uid", 0); session.setAttribute("jsprun_userss", ""); session.setAttribute("jsprun_pw", ""); session.removeAttribute("user"); session.setAttribute("jsprun_groupid", (short) 7); session.setAttribute("jsprun_adminid", (byte) 0); message = settings.get("closedreason"); if(message == null || message.equals("")){ tempB = true; message = "board_closed"; arg0_forKey = settings.get("adminemail"); } } else { tempB = true; List<String> tempL = Common.periodscheck(settings.get("visitbanperiods"), disableperiodctrl, settings.get("timeoffset")); if(tempL != null && tempL.size() == 2){ message = tempL.get(0); arg0_forKey = tempL.get(1); } } if (message != null) { if(tempB){ request.setAttribute("propertyKey", true); request.setAttribute("arg0_forKey", arg0_forKey); request.setAttribute("show_message", message); }else{ request.setAttribute("show_message", message); } try { request.getRequestDispatcher("/showmessage.jsp?action=nopermission").forward(request, response); return true; } catch (Exception e) { 
e.printStackTrace(); } } } } return false; } public static boolean allowAccess(HttpServletRequest request,String ipaccess, Byte allowVisit,String ipbanned) { String currentPage = (String) request.getAttribute("CURSCRIPT"); String action = request.getParameter("action"); String memberIP = Common.get_onlineip(request); boolean allowAccess = true; ipaccess = ipaccess == null ? "" : ipaccess.trim(); if (allowVisit != null&& allowVisit == 0&& !(currentPage.equals("member.jsp") && (action != null && (action.equals("groupexpiry") || action.equals("activate"))))) { return false; } if (!ipaccess.equals("")) { String[] ipaccesses = ipaccess.split("(\r|\n)"); boolean sign = false; for (String ipaccesse:ipaccesses) { if (memberIP.startsWith(ipaccesse)) { sign = true; break; } } if(!sign){ return false; } } if(!ipbanned.equals("")){ return !memberIP.matches(ipbanned); } return allowAccess; } public static void setForums(List<Map<String,String>> forumList){ if(forumList!=null){ List<Map<String,String>> groups = new ArrayList<Map<String,String>>(); List<Map<String,String>> forums = new ArrayList<Map<String,String>>(); List<Map<String,String>> subs = new ArrayList<Map<String,String>>(); List<Map<String,String>> removeList = new ArrayList<Map<String,String>>(); for(Map<String,String> forum1:forumList){ if("group".equals(forum1.get("type"))){ boolean isExit=false; for(Map<String,String> forum2:forumList){ if("forum".equals(forum2.get("type")) && forum1.get("fid").equals(forum2.get("fup"))){ isExit=true; break; } } if(!isExit){ removeList.add(forum1); } } } forumList.removeAll(removeList); for(Map<String,String> forum:forumList){ String type=forum.get("type"); if("group".equals(type)){ groups.add(forum); }else if("forum".equals(type)){ forums.add(forum); }else if("sub".equals(type)){ subs.add(forum); } } forumList.clear(); for(Map<String,String> group:groups){ forumList.add(group); for(Map<String,String> forum:forums){ if(group.get("fid").equals(forum.get("fup"))){ 
forumList.add(forum); for(Map<String,String> sub:subs){ if(forum.get("fid").equals(sub.get("fup"))){ forumList.add(sub); } } } } } } } public static String forumselect(boolean groupselectable,boolean tableformat,short groupid,String extgroupid,String fid){ List<Map<String,String>> forumList=dataBaseService.executeQuery("SELECT f.fid, f.type, f.name, f.fup, ff.viewperm, ff.formulaperm FROM jrun_forums f LEFT JOIN jrun_forumfields ff ON ff.fid=f.fid WHERE f.status=1 ORDER BY f.type, f.displayorder"); setForums(forumList); StringBuffer forumlist=new StringBuffer(); if(forumList!=null){ Map<String,Boolean> visible=new HashMap<String,Boolean>(); if(tableformat){ forumlist.append("<dl><dd><ul>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append("</ul></dd></dl><dl><dt><a href='index.jsp?gid="+forum.get("fid")+"'>"+name+"</a></dt><dd><ul>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li"+(forum.get("fid").equals(fid) ? " class='current'" : "")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li class='sub"+(forum.get("fid").equals(fid) ? 
" current'" : "'")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); } } forumlist.append("</ul></dd></dl>"); }else{ forumlist.append("<optgroup label='&nbsp;'>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append(groupselectable ? "<option value='"+forum.get("fid")+"'>"+name+"</option>" : "</optgroup><optgroup label='"+name+"'>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(forum.get("fid").equals(fid) ? " selected" : "")+">&nbsp; &gt; "+name+"</option>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(forum.get("fid").equals(fid) ? 
" selected" : "")+">&nbsp; &nbsp; &nbsp; &gt; "+name+"</option>"); } } forumlist.append("</optgroup>"); } visible=null; } forumList=null; return forumlist.toString().replace(tableformat?"<dl><dd><ul></ul></dd></dl>":"<optgroup label='&nbsp;'></optgroup>", ""); } public static String forumselect(List<Map<String,String>> forumList,boolean groupselectable,boolean tableformat,short groupid,String extgroupid,String fid){ StringBuffer forumlist=new StringBuffer(); if(forumList!=null){ Map<String,Boolean> visible=new HashMap<String,Boolean>(); if(tableformat){ forumlist.append("<dl><dd><ul>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append("</ul></dd></dl><dl><dt><a href='index.jsp?gid="+forum.get("fid")+"'>"+name+"</a></dt><dd><ul>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li"+(forum.get("fid").equals(fid) ? " class='current'" : "")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li class='sub"+(forum.get("fid").equals(fid) ? " current'" : "'")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); } } forumlist.append("</ul></dd></dl>"); }else{ forumlist.append("<optgroup label='&nbsp;'>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append(groupselectable ? 
"<option value='"+forum.get("fid")+"'>"+name+"</option>" : "</optgroup><optgroup label='"+name+"'>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(forum.get("fid").equals(fid) ? " selected" : "")+">&nbsp; &gt; "+name+"</option>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(forum.get("fid").equals(fid) ? " selected" : "")+">&nbsp; &nbsp; &nbsp; &gt; "+name+"</option>"); } } forumlist.append("</optgroup>"); } visible=null; } forumList=null; return forumlist.toString().replace(tableformat?"<dl><dd><ul></ul></dd></dl>":"<optgroup label='&nbsp;'></optgroup>", ""); } public static String forumselect(List<Map<String,String>> forumList,short groupid,String extgroupid,String fid){ StringBuffer forumlist=new StringBuffer(); if(forumList!=null){ Map<String,Boolean> visible=new HashMap<String,Boolean>(); forumlist.append("<dl><dd><ul>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append("</ul></dd></dl><dl><dt><a href='index.jsp?gid="+forum.get("fid")+"'>"+forum.get("name")+"</a></dt><dd><ul>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li"+(forum.get("fid").equals(fid) ? 
" class='current'" : "")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+forum.get("name")+"</a></li>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li class='sub"+(forum.get("fid").equals(fid) ? " current'" : "'")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+forum.get("name")+"</a></li>"); } } forumlist.append("</ul></dd></dl>"); visible=null; } forumList=null; return forumlist.toString().replace("<dl><dd><ul></ul></dd></dl>", ""); } public static String showForumWithSelected(boolean groupselectable,boolean tableformat,Short groupid,String extgroupid,List<String> selectFidList){ List<Map<String,String>> forumList=dataBaseService.executeQuery("SELECT f.fid, f.type, f.name, f.fup, ff.viewperm, ff.formulaperm, a.uid FROM jrun_forums f LEFT JOIN jrun_forumfields ff ON ff.fid=f.fid LEFT JOIN jrun_access a ON a.fid=f.fid AND a.allowview=1 WHERE f.status=1 ORDER BY f.type, f.displayorder"); setForums(forumList); StringBuffer forumlist=new StringBuffer(); if(forumList!=null){ Map<String,Boolean> visible=new HashMap<String,Boolean>(); if(tableformat){ forumlist.append("<dl><dd><ul>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append("</ul></dd></dl><dl><dt><a href='index.jsp?gid="+forum.get("fid")+"'>"+name+"</a></dt><dd><ul>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li"+(selectFidList.contains(forum.get("fid")) ? 
" class='current'" : "")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<li class='sub"+(selectFidList.contains(forum.get("fid")) ? " current'" : "'")+"><a href=\"forumdisplay.jsp?fid="+forum.get("fid")+"\">"+name+"</a></li>"); } } forumlist.append("</ul></dd></dl>"); }else{ forumlist.append("<optgroup label='&nbsp;'>"); for(Map<String,String> forum:forumList){ String type=forum.get("type"); String name=Common.strip_tags(forum.get("name")); String viewperm=forum.get("viewperm"); if("group".equals(type)){ forumlist.append(groupselectable ? "<option value='"+forum.get("fid")+"'>"+name+"</option>" : "</optgroup><optgroup label='"+name+"'>"); visible.put(forum.get("fid"), true); }else if("forum".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(selectFidList.contains(forum.get("fid")) ? " selected" : "")+">&nbsp; &gt; "+name+"</option>"); visible.put(forum.get("fid"), true); }else if("sub".equals(type)&&visible.get(forum.get("fup"))!=null && ("".equals(viewperm) || (!"".equals(viewperm) && forumperm(viewperm,groupid,extgroupid)))){ forumlist.append("<option value='"+forum.get("fid")+"' "+(selectFidList.contains(forum.get("fid")) ? 
" selected" : "")+">&nbsp; &nbsp; &nbsp; &gt; "+name+"</option>"); } } forumlist.append("</optgroup>"); } visible=null; } forumList=null; return forumlist.toString().replace(tableformat?"<dl><dd><ul></ul></dd></dl>":"<optgroup label='&nbsp;'></optgroup>", ""); } public static void visitedforums(HttpServletRequest request,HttpServletResponse response,int visitedforumcount,String visited,Map<String,String> settings){ int count=0; String split="D"; String visitedfid = CookieUtil.getCookie(request, "visitedfid", true, settings); String[] visitedfids=null; if(visitedfid!=null){ visitedfid=Base64.decode(visitedfid,JspRunConfig.CHARSET); visitedfids=visitedfid.split(split); } StringBuffer fidSB=new StringBuffer(); fidSB.append(visited); StringBuffer visitedforums=new StringBuffer(); if(visitedfids!=null){ for(String fid:visitedfids){ if(!visited.equals(fid)){ fidSB.append(split); fidSB.append(fid); String[] fids=fid.split("=>"); visitedforums.append("<option value="+fids[0]+">"+fids[1]+"</option>"); if(++count>=visitedforumcount){ break; } } } } String value = Base64.encode(fidSB.toString(),JspRunConfig.CHARSET); if(!value.equals(visitedfid)){ CookieUtil.setCookie(request, response, "visitedfid", value, 2592000, true, settings); } if(visitedforums.length()>0){ request.setAttribute("visitedforums",visitedforums.toString()); } } public static String implode(String[] data, String separator){ if(data==null){ return ""; } StringBuffer out=new StringBuffer(); int length=data.length; String o; for(int i=0;i<length;i++){ o=data[i].trim(); if(o.length()>0){ if(i>0){ out.append(separator); } out.append(o); } } return out.toString(); } public static String implodeids(String[] ids){ String id=implode(ids, ","); return id.length()>0?id:"0"; } @SuppressWarnings("unchecked") public static String implode(Object data, String separator) { if (data == null) { return ""; } StringBuffer out = new StringBuffer(); if (data instanceof Object[]) { boolean flag = false; for (Object obj : 
(Object[]) data) { if (flag) { out.append(separator); } else { flag = true; } out.append(obj); } } else if (data instanceof Map) { Map temp = (Map) data; Set<Object> keys = temp.keySet(); boolean flag = false; for (Object key : keys) { if (flag) { out.append(separator); } else { flag = true; } out.append(temp.get(key)); } } else if (data instanceof Collection) { boolean flag = false; for (Object obj : (Collection) data) { if (flag) { out.append(separator); } else { flag = true; } out.append(obj); } } else { return data.toString(); } return out.toString(); } public static String sImplode(Object ids) { return "'" + implode(ids, "','") + "'"; } public static String number_format(double number,String format) { return new DecimalFormat(format).format(number); } public static String number_format(double number) { return number_format(number, 0); } public static String number_format(double number,int decimals,char... separators) { DecimalFormat df = new DecimalFormat(); if(separators!=null){ DecimalFormatSymbols dfs=df.getDecimalFormatSymbols(); int length=separators.length; switch (length) { case 2: dfs.setGroupingSeparator(separators[1]); case 1: dfs.setDecimalSeparator(separators[0]); } df.setDecimalFormatSymbols(dfs); } df.setMaximumFractionDigits(decimals); return df.format(number); } public static String sizeFormat(long dataSize){ if(dataSize>=1073741824){ return ((double) Math.round(dataSize / 1073741824d * 100) / 100)+ " GB"; }else if(dataSize>=1048576){ return ((double) Math.round(dataSize / 1048576d * 100) / 100)+ " MB"; }else if(dataSize>=1024){ return ((double) Math.round(dataSize / 1024d * 100) / 100)+ " KB"; }else{ return dataSize+ " Bytes"; } } public static String ajax_decode(String s) { try { return URLDecoder.decode(s,"UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } return s; } public static String encode(String s) { try { return URLEncoder.encode(s,JspRunConfig.CHARSET); } catch (UnsupportedEncodingException e) { 
e.printStackTrace(); } return s; } public static String decode(String s) { try { return URLDecoder.decode(s,JspRunConfig.CHARSET); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } return s; } public static String[] getgroupid(Members member,String creditsformula,Map<String,String> usermap,Usergroups usergroup){ if(creditsformula!=null && !creditsformula.equals("")){ String[] result = new String[6]; if(member!=null){ postOperating.setCredits(creditsformula,member,null); if(usergroup.getType().equals("member") && !(member.getCredits()>=usergroup.getCreditshigher() && member.getCredits()<=usergroup.getCreditslower())){ List<Map<String,String>> grouplist = dataBaseService.executeQuery("SELECT groupid,grouptitle,color,stars,groupavatar FROM jrun_usergroups WHERE type='member' AND "+member.getCredits()+">=creditshigher AND "+member.getCredits()+"<creditslower LIMIT 1"); if(grouplist!=null && grouplist.size()>0){ member.setGroupid(Short.valueOf(grouplist.get(0).get("groupid"))); result[0] = grouplist.get(0).get("groupid"); result[1] = grouplist.get(0).get("grouptitle"); result[2] = member.getCredits()+""; result[3] = grouplist.get(0).get("color"); result[4] = grouplist.get(0).get("stars"); result[5] = grouplist.get(0).get("groupavatar"); } } memberService.modifyMember(member); if(result[0]==null){ result[0] = usergroup.getGroupid()+""; result[1] = usergroup.getGrouptitle()+""; result[2] = member.getCredits()+""; result[3] = usergroup.getColor(); result[4] = usergroup.getStars()+""; result[5] = usergroup.getGroupavatar(); } }else{ String credits = postOperating.setCreditsbyMap(creditsformula, usermap, null); if(credits!=null){ if("member".equals(usermap.get("type")) && !(Integer.valueOf(credits)>=Integer.valueOf(usermap.get("creditshigher")) && Integer.valueOf(credits)<=Integer.valueOf(usermap.get("creditslower")))){ List<Map<String,String>> grouplist = dataBaseService.executeQuery("SELECT groupid,grouptitle,color,stars,groupavatar FROM jrun_usergroups 
WHERE type='member' AND "+credits+">=creditshigher AND "+credits+"<creditslower LIMIT 1"); if(grouplist!=null && grouplist.size()>0){ result[0] = grouplist.get(0).get("groupid"); result[1] = grouplist.get(0).get("grouptitle"); result[3] = grouplist.get(0).get("color"); result[4] = grouplist.get(0).get("stars"); result[5] = grouplist.get(0).get("groupavatar"); } grouplist = null; } if(!usermap.get("credits").equals(credits)){ result[2] = credits; String sql = ""; if(result[0]!=null){ sql = ",groupid="+result[0]; } dataBaseService.runQuery("update jrun_members set credits = "+credits+sql+" where uid="+usermap.get("uid"),true); } } if(result[0]==null){ result[0] = usermap.get("groupid"); result[1] = usermap.get("grouptitle"); result[3] = usermap.get("color"); result[4] = usermap.get("stars"); result[5] = usermap.get("groupavatar"); } if(result[2]==null){ result[2] = usermap.get("credits"); } } return result; } return null; } public static void updatepostcredits(String operator, int uid,Map<Integer, Integer> creditsarray, int timestamp,int count,String creditsformula) { StringBuffer creditsadd = new StringBuffer(); Set<Entry<Integer, Integer>> keys = creditsarray.entrySet(); for (Entry<Integer, Integer> temp : keys) { Integer key = temp.getKey(); creditsadd.append(", extcredits" + key + "= extcredits" + key+ operator + temp.getValue()+"*"+count); } dataBaseService.runQuery("UPDATE jrun_members SET posts=posts+('"+ operator + count + "') " + ", lastpost='" + timestamp + "'" + " " + creditsadd+ ",credits="+creditsformula+" WHERE uid =" + uid,true); creditsadd=null; } public static void updatepostcredits(String operator, int uid,Map<Integer, Integer> creditsarray, int timestamp) { StringBuffer creditsadd = new StringBuffer(); Set<Entry<Integer, Integer>> keys = creditsarray.entrySet(); for (Entry<Integer, Integer> temp : keys) { Integer key = temp.getKey(); creditsadd.append(", extcredits" + key + "= extcredits" + key+ operator + temp.getValue()); } 
dataBaseService.runQuery("UPDATE jrun_members SET posts=posts+('"+ operator + 1 + "') " + ", lastpost='" + timestamp + "'" + " " + creditsadd+ " WHERE uid =" + uid,true); creditsadd=null; } public static void updatepostcredits(String operator, int uid,int count,Map<Integer, Integer> creditsarray) { StringBuffer creditsadd = new StringBuffer(); Set<Entry<Integer, Integer>> keys = creditsarray.entrySet(); for (Entry<Integer, Integer> temp : keys) { Integer key = temp.getKey(); creditsadd.append(", extcredits" + key + "= extcredits" + key+ operator + temp.getValue()+"*"+count); } if(creditsadd.length()>0){ dataBaseService.runQuery("UPDATE jrun_members SET " + creditsadd.substring(1)+ " WHERE uid =" + uid,true); } creditsadd=null; } public static void updatepostcredits(int uid,String creditsformula) { dataBaseService.runQuery("UPDATE jrun_members SET credits="+creditsformula+" WHERE uid =" + uid,true); } public static void updateMember(HttpSession session,int uid) { if(uid>0){ session.setAttribute("user", memberService.findMemberById(uid)); } } @SuppressWarnings("unchecked") public static void calcGroup(HttpSession session,HttpServletRequest request,HttpServletResponse response,Map<String,String> settings){ Members member = (Members) session.getAttribute("user"); int timestamp = (Integer)request.getAttribute("timestamp"); if (member != null) { List<Map<String,String>> meminfolist = dataBaseService.executeQuery("select mm.groupterms,m.extgroupids,m.credits,m.extcredits1,m.extcredits2,m.extcredits3,m.extcredits4,m.extcredits5,m.extcredits6,m.extcredits7,m.extcredits8,m.groupid,m.adminid,m.newpm,u.type from jrun_members as m left join jrun_memberfields as mm on m.uid=mm.uid left join jrun_usergroups as u on m.groupid=u.groupid where m.uid="+member.getUid()); if(meminfolist!=null && meminfolist.size()>0){ Map<String,String> membermap = meminfolist.get(0); session.setAttribute("jsprun_groupid", Short.valueOf(membermap.get("groupid"))); 
member.setGroupid(Short.valueOf(membermap.get("groupid"))); member.setAdminid(Byte.valueOf(membermap.get("adminid"))); member.setNewpm(Byte.valueOf(membermap.get("newpm"))); member.setExtgroupids(membermap.get("extgroupids")); member.setCredits(Integer.valueOf(membermap.get("credits"))); member.setExtcredits1(Integer.valueOf(membermap.get("extcredits1"))); member.setExtcredits2(Integer.valueOf(membermap.get("extcredits2"))); member.setExtcredits3(Integer.valueOf(membermap.get("extcredits3"))); member.setExtcredits4(Integer.valueOf(membermap.get("extcredits4"))); member.setExtcredits5(Integer.valueOf(membermap.get("extcredits5"))); member.setExtcredits6(Integer.valueOf(membermap.get("extcredits6"))); member.setExtcredits7(Integer.valueOf(membermap.get("extcredits7"))); member.setExtcredits8(Integer.valueOf(membermap.get("extcredits8"))); session.setAttribute("user",member); String groupterm = membermap.get("groupterms"); Map grouptermMap = dataParse.characterParse(groupterm,false); if(grouptermMap!=null && !grouptermMap.keySet().isEmpty()){ Map extMap = (Map) grouptermMap.get("ext"); if (extMap != null) { if (!extMap.keySet().isEmpty()) { String extgroupids = membermap.get("extgroupids"); Map extids = new HashMap(); if (!extgroupids.equals("")) { String extdids[] = extgroupids.split("\t"); if (extdids != null && extdids.length > 0) { for (int i = 0; i < extdids.length; i++) { extids.put(extdids[i], "yes"); } } } Iterator<Entry> it = extMap.entrySet().iterator(); List keylist = new ArrayList(); while (it.hasNext()) { Entry temp = it.next(); Object key = temp.getKey(); int time = (Integer) temp.getValue(); if (time < timestamp) { keylist.add(key); if (extids != null && extids.get(key) != null) { extids.remove(key); } } } if(keylist.size()>0){ for(int j=0;j<keylist.size();j++){ extMap.remove(keylist.get(j)); } } grouptermMap.put("ext", extMap); keylist = null; StringBuffer extidp = new StringBuffer(); if (extids != null) { Iterator its = extids.keySet().iterator(); 
while (its.hasNext()) { Object key = its.next(); if(!key.toString().equals("")){ extidp.append(key + "\t"); } } } if(!member.getExtgroupids().equals(extidp.toString())){ member.setExtgroupids(extidp.toString()); dataBaseService.runQuery("update jrun_members set extgroupids = '"+extidp.toString().trim()+"' where uid="+member.getUid(),true); } }else{ grouptermMap.remove("ext"); } } Map mapMap = (Map) grouptermMap.get("main"); if (mapMap != null&&mapMap.size()>0) { byte adminid = (mapMap.get("adminid")==null?(byte)0:Byte.valueOf(mapMap.get("adminid").toString())); short groupid = (mapMap.get("groupid")==null?(short)10:Short.valueOf(mapMap.get("groupid").toString())); int time = 0; if(mapMap.get("time")!=null){ time = (Integer)mapMap.get("time"); } if (time>0) { if (time < timestamp) { member.setAdminid(adminid); member.setGroupid(groupid); member.setGroupexpiry(0); member.setCredits(Common.toDigit(membermap.get("credits"))); dataBaseService.runQuery("update jrun_members set adminid = "+adminid+",groupid="+groupid+",groupexpiry=0 where uid="+member.getUid(),true); session.setAttribute("user", member); session.setAttribute("jsprun_adminid", member.getAdminid()); session.setAttribute("jsprun_groupid", member.getGroupid()); if(mapMap.get("bktime")!=null){ mapMap.put("time", mapMap.get("bktime")); mapMap.remove("bktime"); mapMap.put("groupid", mapMap.get("bkgroupid")); mapMap.remove("bkgroupid"); mapMap.put("adminid", mapMap.get("bkadminid")); mapMap.remove("bkadminid"); grouptermMap.put("main", mapMap); }else{ grouptermMap.remove("main"); } } } } mapMap=null; if(grouptermMap.keySet().isEmpty()){ dataBaseService.runQuery("update jrun_memberfields set groupterms = '' where uid="+member.getUid(),true); }else{ String pmdf = dataParse.combinationChar(grouptermMap); if(!pmdf.equals(groupterm)){ dataBaseService.runQuery("update jrun_memberfields set groupterms = '"+pmdf+"' where uid="+member.getUid(),true); } } } List<Map<String,String>> usergrouplist = 
dataBaseService.executeQuery("select type from jrun_usergroups where groupid="+member.getGroupid()); if (usergrouplist==null || usergrouplist.size()<=0||usergrouplist.get(0).get("type").equals("member")) { List<Map<String,String>> membergrouplist = dataBaseService.executeQuery("select groupid from jrun_usergroups where type='member' and "+ membermap.get("credits")+" >= creditshigher and creditslower > "+ membermap.get("credits")); if (membergrouplist!=null && membergrouplist.size()>0 && !membergrouplist.get(0).get("groupid").equals(member.getGroupid()+"")) { member.setGroupid((short)Common.toDigit(membergrouplist.get(0).get("groupid"))); member.setAdminid(Byte.valueOf("0")); dataBaseService.runQuery("update jrun_members set adminid=0,groupid="+membergrouplist.get(0).get("groupid")+" where uid="+member.getUid(),true); } membergrouplist = null; } session.setAttribute("jsprun_adminid", member.getAdminid()); session.setAttribute("jsprun_groupid", member.getGroupid()); session.setAttribute("user", member); membermap = null; }else{ CookieUtil.clearCookies(request, response, settings); } } } public static boolean uploadFile(FormFile src,String targetpath){ InputStream is = null; OutputStream os = null; boolean flag = false; try { is = new BufferedInputStream(src.getInputStream(),4096); os = new BufferedOutputStream(new FileOutputStream(targetpath),4096); int count = 0; byte[] buffer = new byte[4096]; while((count = is.read(buffer))>0){ os.write(buffer,0,count); } flag = true; buffer = null; } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); }finally{ if(os != null){ try { os.close(); } catch (IOException e) { e.printStackTrace(); } } if(is != null){ try { is.close(); } catch (IOException e) { e.printStackTrace(); } } } is = null;src=null; return flag; } public static void updatemodworks(Map<String, String> settings, int uid,int timestamp, String modaction, int posts) { String today = gmdate("yyyy-MM-dd", timestamp, 
settings.get("timeoffset")); if ("1".equals(settings.get("modworkstatus")) && !"".equals(modaction) && posts > 0) { List<Map<String, String>> modworks = dataBaseService.executeQuery("SELECT * FROM jrun_modworks WHERE uid='"+ uid + "' AND modaction='" + modaction+ "' AND dateline='" + today + "'"); if (modworks != null && modworks.size() > 0) { dataBaseService.runQuery("UPDATE jrun_modworks SET count=count+1, posts=posts+'" + posts+ "' WHERE uid='" + uid + "' AND modaction='"+ modaction + "' AND dateline='" + today + "'",true); } else { dataBaseService.runQuery("INSERT INTO jrun_modworks (uid, modaction, dateline, count, posts) VALUES ('"+ uid + "', '" + modaction + "', '" + today + "', 1, '"+ posts + "')",true); } } } public static void updatemodlog(Members member, int timestamp, String tid,String action, Integer expiration, int status, boolean iscron) { int uid = iscron ? member.getUid() : 0; String username = iscron ? "0" : member.getUsername(); expiration = expiration != null ? expiration : 0; StringBuffer data = new StringBuffer(); String comma = ""; String[] tids = tid.split(","); if (tids != null && tids.length > 0) { for (String obj : tids) { data.append(comma + " ('" + obj + "', '" + uid + "', '" + username+ "', '" + timestamp + "', '" + action + "', '"+ expiration + "', '1', '0')"); comma = ","; } } if (data.length()>0) { dataBaseService.runQuery("INSERT INTO jrun_threadsmod (tid, uid, username, dateline, action, expiration, status, magicid) VALUES "+ data,true); } } @SuppressWarnings({ "unchecked", "deprecation" }) public static void updatesession(HttpServletRequest request,Map<String, String> settings) { if(request.getAttribute("sessionupdated")==null){ HttpSession httpSession = request.getSession(); int seccode = (Integer) request.getAttribute("seccode"); boolean sessionexists = (Boolean) request.getAttribute("sessionexists"); String sid = (String) httpSession.getAttribute("jsprun_sid"); String onlineip = Common.get_onlineip(request); int jsprun_uid = 
(Integer) httpSession.getAttribute("jsprun_uid"); String jsprun_userss=(String) httpSession.getAttribute("jsprun_userss"); if(jsprun_userss==null){ jsprun_userss=""; } jsprun_userss = Common.addslashes(jsprun_userss); Members member = (Members) httpSession.getAttribute("user"); int timestamp=(Integer)(request.getAttribute("timestamp")); int lastactivity = 0; int pvfrequence = Common.toDigit(settings.get("pvfrequence")); int spageviews = (Integer) request.getAttribute("spageviews"); int lastolupdate = (Integer) request.getAttribute("lastolupdate"); int oltimespan = Common.toDigit(settings.get("oltimespan")); int onlinehold = Common.toDigit(settings.get("onlinehold")); String styleid = (String)httpSession.getAttribute("styleid"); short groupid = (Short) httpSession.getAttribute("jsprun_groupid"); styleid = Common.toDigit(styleid)>0? styleid : settings.get("styleid"); byte invisible = 0; int jsprun_action =Common.intval((String)request.getAttribute("jsprun_action")); Short fid = (Short) request.getAttribute("fid"); Integer tid = (Integer) request.getAttribute("tid"); fid = fid != null ? fid : 0; tid = tid != null ? tid : 0; if (sessionexists&&member != null) { lastactivity = member.getLastactivity(); invisible = member.getInvisible(); } if (oltimespan > 0&& jsprun_uid > 0 && lastactivity > 0 && ((timestamp - (lastolupdate > 0 ? 
lastolupdate : lastactivity)) > (oltimespan * 60))) { lastolupdate = timestamp; Map<String,String> result=dataBaseService.runQuery("UPDATE jrun_onlinetime SET total=total+'"+ oltimespan + "', thismonth=thismonth+'" + oltimespan + "', lastupdate='" + timestamp + "' WHERE uid='"+ jsprun_uid + "'"); int affectedRows = intval(result.get("ok")); if(affectedRows==0){ dataBaseService.runQuery("INSERT INTO jrun_onlinetime (uid, thismonth, total, lastupdate) VALUES ('"+ jsprun_uid + "', '" + oltimespan + "', '" + oltimespan + "', '" + timestamp + "')",true); } } if (sessionexists) { String pageviewsadd = null; if (pvfrequence > 0 && jsprun_uid>0) { if (spageviews >= pvfrequence) { pageviewsadd = ", pageviews=\'0\'"; dataBaseService.runQuery("UPDATE jrun_members SET pageviews=pageviews+'" + spageviews+ "' WHERE uid='" + jsprun_uid + "'",true); member.setPageviews(member.getPageviews()+spageviews); httpSession.setAttribute("user",member); } else { pageviewsadd = ", pageviews=pageviews+1"; } } else { pageviewsadd = ""; } dataBaseService.runQuery("UPDATE jrun_sessions SET uid='" + jsprun_uid+ "', username='" + jsprun_userss + "', groupid='"+ groupid + "', styleid='" + styleid + "', invisible='" + invisible + "', action='" + jsprun_action+ "', lastactivity='" + timestamp + "', lastolupdate='"+ lastolupdate + "', seccode='" + seccode + "', fid='"+ fid + "', tid='" + tid + "' "+ pageviewsadd + " WHERE sid='" + sid + "'",true); } else { String[] ips = onlineip.split("\\."); dataBaseService.runQuery("DELETE FROM jrun_sessions WHERE sid='" + sid+ "' OR lastactivity<" + (timestamp - onlinehold)+ " OR ('" + jsprun_uid + "'<>'0' AND uid='" + jsprun_uid+ "') OR (uid='0' AND lastactivity>" + (timestamp - 60) + " AND ip1='" + ips[0] + "' AND ip2='"+ ips[1] + "' AND ip3='" + ips[2] + "' AND ip4='" + ips[3]+ "')",true); dataBaseService.runQuery("REPLACE INTO jrun_sessions (sid, ip1, ip2, ip3, ip4, uid, username, groupid, styleid, invisible, action, lastactivity, lastolupdate, seccode, fid, 
tid) VALUES ('"+ sid + "', '" + ips[0] + "', '" + ips[1] + "', '" + ips[2]+ "', '" + ips[3] + "', '" + jsprun_uid + "', '"+ jsprun_userss + "', '" + groupid + "', '" + styleid+ "', '" + invisible + "', '" + jsprun_action + "', '"+ timestamp + "', '" + lastolupdate + "', '" + seccode+ "', '" + fid + "', '" + tid + "')",true); if (jsprun_uid > 0 && (timestamp - lastactivity) > 21600) { String oltimeadd = null; if (oltimespan > 0 && timestamp - lastactivity > 86400) { List<Map<String, String>> total = dataBaseService.executeQuery("SELECT total FROM jrun_onlinetime WHERE uid='" + jsprun_uid + "'"); int size = (total != null && total.size() > 0 ? Integer.valueOf(total.get(0).get("total")) : 0); total=null; short oltime=Double.valueOf(Math.round((double) size / 60d)).shortValue(); oltimeadd = ", oltime="+oltime; member.setOltime(oltime); }else{ oltimeadd = ""; } dataBaseService.runQuery("UPDATE jrun_members SET lastip='" + onlineip+ "', lastvisit=lastactivity, lastactivity='"+ timestamp + "' " + oltimeadd + " WHERE uid='"+ jsprun_uid+"'",true); member.setLastip(onlineip); member.setLastvisit(member.getLastactivity()); member.setLastactivity(timestamp); httpSession.setAttribute("user",member); } } request.setAttribute("sessionupdated", true); } } public static String quescrypt(int questionid, String answer) { Md5Token md5 = Md5Token.getInstance(); return (questionid > 0 && !answer.equals("") ? 
md5.getLongToken(answer + md5.getLongToken(String.valueOf(questionid))).substring(16, 24) : ""); } public static void updateforumcount(String fid,MessageResources mr,Locale locale){ List<Map<String,String>> countlist = dataBaseService.executeQuery("select count(*) as threadcount,sum(t.replies)+count(*) as replycount from jrun_threads t,jrun_forums f where f.fid="+fid+" and t.fid=f.fid and t.displayorder>=0"); Map<String,String> countmap = countlist.get(0); String replycount = countmap.get("replycount")==null?"0":countmap.get("replycount"); String threadcount = countmap.get("threadcount")==null?"0":countmap.get("threadcount"); countlist = dataBaseService.executeQuery("SELECT tid, subject, author, lastpost, lastposter FROM jrun_threads WHERE fid='"+fid+"' AND displayorder>='0' ORDER BY lastpost DESC LIMIT 1"); if(countlist==null||countlist.size()<=0){ dataBaseService.runQuery("update jrun_forums set posts='"+replycount+"',threads='"+threadcount+"',lastpost='' where fid="+fid,true); return; } countmap = countlist.get(0); String subject = Common.cutstr(Common.addslashes(countmap.get("subject").replaceAll("\t", " ")), 40, null); String lastposter = countmap.get("author").equals("")?mr.getMessage(locale, "anonymous"):Common.addslashes(countmap.get("lastposter")); dataBaseService.runQuery("update jrun_forums set posts='"+replycount+"',threads='"+threadcount+"',lastpost='"+countmap.get("tid")+"\t"+subject+"\t"+countmap.get("lastpost")+"\t"+lastposter+"' where fid="+fid,true); countmap = null; } public static void setChecked(HttpServletRequest request,String variable,int size,int value){ for(int i=0;i<size;i++){ request.setAttribute(variable+i, (value&(int)Math.pow(2, i))>0?"checked":""); } } public static boolean ipaccess(String ip, String accessips){ return Common.matches(ip, "^("+accessips.replaceAll("(\r\n|\r|\n)", "|")+")"); } public static String cutstr(String text, int length){ return cutstr(text, length," ..."); } public static String cutstr(String text, int 
length,String dot){ int strBLen = strlen(text); if( strBLen <= length ){ return text; } int temp = 0; StringBuffer sb = new StringBuffer(length); char[] ch = text.toCharArray(); for ( char c : ch ) { sb.append( c ); if ( c > 256 ) { temp += 2; } else { temp += 1; } if (temp >= length) { if( dot != null) { sb.append( dot ); } break; } } return sb.toString(); } public static int strlen(String text){ if(text==null||text.length()==0){ return 0; } int length=0; try { length=text.getBytes(CHARSET_NAME).length; } catch (UnsupportedEncodingException e) { e.printStackTrace(); } return length; } public static String nl2br(String text){ if(text==null||text.length()==0){ return text; } StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); while(character != StringCharacterIterator.DONE){ switch (character) { case '\r': sb.append("<br/>"); sb.append(character); character = iterator.next(); if(character=='\n'){ character = iterator.next(); } break; case '\n': sb.append("<br/>"); sb.append(character); character = iterator.next(); if(character=='\r'){ sb.append(character); character = iterator.next(); } break; default: sb.append(character); character = iterator.next(); break; } } return sb.toString(); } public static void stats(HttpServletRequest request){ boolean sessionexists = (Boolean)request.getAttribute("sessionexists"); HttpSession session = request.getSession(); String jsprun_user = (String)session.getAttribute("jsprun_userss"); Map<String,String> visitor = new HashMap<String, String>(); String user_agent = request.getHeader("User-Agent"); visitor.put("agent", user_agent); Calendar calendar = Common.getCalendar(ForumInit.settings.get("timeoffset")); int nowMonth = calendar.get(Calendar.MONTH)+1; visitor.put("month", calendar.get(Calendar.YEAR)+(nowMonth>9?nowMonth+"":"0"+nowMonth)); visitor.put("week", calendar.get(Calendar.DAY_OF_WEEK)-1+""); int nowHour = 
calendar.get(Calendar.HOUR_OF_DAY); visitor.put("hour", (nowHour>9?"":"0")+nowHour); String visitorsadd = ""; int updatedrows = 4; if(!sessionexists){ if(user_agent != null){ user_agent = user_agent.toLowerCase(); if(user_agent.indexOf("netscape")>=0){ visitor.put("browser", "Netscape"); }else if(user_agent.indexOf("lynx")>=0){ visitor.put("browser", "Lynx"); }else if(user_agent.indexOf("opera")>=0){ visitor.put("browser", "Opera"); }else if(user_agent.indexOf("konqueror")>=0){ visitor.put("browser", "Konqueror"); }else if(user_agent.indexOf("msie")>=0){ visitor.put("browser", "MSIE"); }else if(user_agent.startsWith("mozilla")){ visitor.put("browser", "Mozilla"); }else{ visitor.put("browser", "Other"); } if(user_agent.indexOf("win")>=0){ visitor.put("os", "Windows"); }else if(user_agent.indexOf("mac")>=0){ visitor.put("os", "Mac"); }else if(user_agent.indexOf("linux")>=0){ visitor.put("os", "Linux"); }else if(user_agent.indexOf("freebsd")>=0){ visitor.put("os", "FreeBSD"); }else if(user_agent.indexOf("sunos")>=0){ visitor.put("os", "SunOS"); }else if(user_agent.indexOf("os/2")>=0){ visitor.put("os", "OS/2"); }else if(user_agent.indexOf("aix")>=0){ visitor.put("os", "AIX"); }else if(user_agent.matches("(bot|crawl|spider)")){ visitor.put("os", "Spiders"); }else{ visitor.put("os", "Other"); } }else{ visitor.put("browser", "Other"); visitor.put("os", "Other"); } visitorsadd = "OR (type='browser' AND variable='"+visitor.get("browser")+"') OR (type='os' AND variable='"+visitor.get("os")+"')"+(jsprun_user!=null&&!jsprun_user.equals("") ? 
" OR (type='total' AND variable='members')" : " OR (type='total' AND variable='guests')"); updatedrows = 7; } Map<String,String> resultMap = dataBaseService.runQuery("UPDATE jrun_stats SET count=count+1 WHERE (type='total' AND variable='hits') "+visitorsadd+" OR (type='month' AND variable='"+visitor.get("month")+"') OR (type='week' AND variable='"+visitor.get("week")+"') OR (type='hour' AND variable='"+visitor.get("hour")+"')"); String updateCount = resultMap.get("ok"); if(updateCount!=null&&updatedrows>Integer.valueOf(updateCount)){ dataBaseService.execute("INSERT INTO jrun_stats (type, variable, count) " + "VALUES ('month', '"+visitor.get("month")+"', '1')"); } } public static String authcode(String string,String operation,String key,String charset){ int expiry=0; if(charset==null){ charset=JspRunConfig.CHARSET; } try{ Md5Token md5=Md5Token.getInstance(); int ckey_length = 4; long millisecond=System.currentTimeMillis(); int time=(int)(millisecond/1000); key = md5.getLongToken(key); String keya = md5.getLongToken(key.substring(0,16)); String keyb = md5.getLongToken(key.substring(16,32)); String keyc = ckey_length!=0?("DECODE".equals(operation)?string.substring(0,ckey_length):md5.getLongToken(String.valueOf(millisecond)).substring(32-ckey_length)):""; String cryptkey = keya + md5.getLongToken(keya+keyc); int key_length = cryptkey.length(); string="DECODE".equals(operation)?Base64.decode(string.substring(ckey_length),charset):(expiry>0 ? 
expiry+time : "0000000000") + md5.getLongToken(string+keyb).substring(0,16) + string; int string_length = string.length(); int range=256; int[] rndkey = new int[range]; for (int i = 0; i < range; i++) { rndkey[i] = cryptkey.charAt(i%key_length); } int tmp; int[] box = new int[range]; for(int i=0,j=0; i < range; i++){ j = (j + box[i] + rndkey[i]) % range; tmp = box[i]; box[i] = box[j]; box[j] = tmp; } StringBuffer result = new StringBuffer(string_length); for(int a=0,i=0,j=0; i < string_length;i++){ a = (a + 1) % range; j = (j + box[a]) % range; tmp = box[a]; box[a] = box[j]; box[j] = tmp; result.append((char)((int)string.charAt(i) ^ (box[(box[a] + box[j]) % range]))); } if("DECODE".equals(operation)){ int resulttime=Common.intval(result.substring(0,10)); if ((resulttime==0 || resulttime-time>0) && result.substring(10,26).equals(md5.getLongToken(result.substring(26)+keyb).substring(0,16))) { return result.substring(26); } else { return ""; } } else { return keyc + (Base64.encode(result.toString(), charset)).replaceAll("=", ""); } }catch(Exception e){ return ""; } } public static String addslashes(String text){ StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); while (character != StringCharacterIterator.DONE) { switch (character) { case '\'': case '"': case '\\': sb.append("\\"); default: sb.append(character); break; } character = iterator.next(); } return sb.toString(); } public static String mysqlEscapeString(String text){ StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); while(character != StringCharacterIterator.DONE){ switch (character) { case '"': sb.append("\\\""); break; case '\'': sb.append("\\\'"); break; case '\\': sb.append("\\\\"); break; case '\r': sb.append("\\r"); break; case '\n': sb.append("\\n"); break; default: 
sb.append(character); break; } character = iterator.next(); } return sb.toString(); } public static String stripslashes(String text){ StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); boolean flag=true; while(character != StringCharacterIterator.DONE){ if(character=='\\'&&flag){ flag=false; }else{ flag=true; sb.append(character); } character = iterator.next(); } return sb.toString(); } public static String pregQuote(String text,char... delimiter){ char stip='\\'; StringBuffer sb = new StringBuffer(text.length() * 2); StringCharacterIterator iterator = new StringCharacterIterator(text); char character = iterator.current(); boolean flag=false; while(character != StringCharacterIterator.DONE){ flag=false; for (char c : PREG_CHARS) { if(character==c){ flag=true; break; } } if(!flag&&delimiter!=null){ for (char d : delimiter) { if(character==d){ flag=true; break; } } } if(flag){ sb.append(stip); } sb.append(character); character = iterator.next(); } return sb.toString(); } public static String get_onlineip(HttpServletRequest request) { String ip = request.getHeader("x-forwarded-for"); if(Common.empty(ip) || "unknown".equalsIgnoreCase(ip)) { ip = request.getHeader("X-Real-IP"); } if(Common.empty(ip) || "unknown".equalsIgnoreCase(ip)) { ip = request.getRemoteAddr(); } List<String> ips = Common.getStr(ip, "[\\d\\.]{7,15}"); return ips.size()>0?ips.get(0):"0.0.0.0"; } public static boolean in_array(Object[] source, Object ext){ return in_array(source, ext, false); } public static boolean in_array(Object[] source, Object ext, boolean strict){ if(source==null||ext==null){ return false; } for(Object s : source){ if(s.toString().equals(ext.toString())){ if(strict){ if((s.getClass().getName().equals(ext.getClass().getName()))){ return true; } }else{ return true; } } } return false; } public static void dunlink(String fileName,Byte havethumb,Byte remote,String 
filePath){ if(remote>0){ FTPClient fc = ftputil.getFTPClient(); String message = ftputil.connectToServer(fc); if(!message.equals("")){ ftputil.closeFtpConnect(fc); return; } ftputil.dftp_delete(fileName,fc); if(havethumb>0){ String str = fileName.substring(fileName.lastIndexOf(".")); ftputil.dftp_delete(fileName+".thumb"+str,fc); } ftputil.closeFtpConnect(fc); }else{ File file=new File(filePath+"/"+fileName); if(file.exists()){ file.delete(); } if(havethumb>0){ String str = fileName.substring(fileName.lastIndexOf(".")); file=new File(filePath+"/"+fileName+".thumb"+str); if(file.exists()){ file.delete(); } } } } @SuppressWarnings("unchecked") public static void setFtpValue(String ftp,String authorkey){ if(ftputil.isEmpty()){ Map<String,String> ftpmap = dataParse.characterParse(ftp, false); if(ftpmap.get("on").equals("1")){ ftputil.setFtpValues(ftpmap.get("host"),ftpmap.get("username"),authcode(ftpmap.get("password"),"DECODE",Md5Token.getInstance().getLongToken(authorkey),"utf-8"),ftpmap.get("attachdir"), toDigit(ftpmap.get("port")),ftpmap.get("ssl"), 0,ftpmap.get("pasv")); } ftpmap = null; } } @SuppressWarnings("unchecked") public static void include(HttpServletRequest request,HttpServletResponse response,HttpServlet servlet,String value,String defvalue){ File file=null; try { file=new File(JspRunConfig.realPath+value); if(file.exists()){ request.getRequestDispatcher(value).include(request, response); }else if(defvalue!=null){ request.getRequestDispatcher(defvalue).include(request, response); } } catch (Exception e) { e.printStackTrace(); }finally{ file=null; } } public static void include(HttpServletRequest request,HttpServletResponse response,String value,String cachename){ File file=null; try { file = new File(JspRunConfig.realPath+value); if(!file.exists()){ Cache.updateCache(cachename); } request.getRequestDispatcher(value).include(request, response); } catch (Exception e) { e.printStackTrace(); }finally{ file=null; } } @SuppressWarnings("unchecked") public 
static String typeselect(short fid,int curtypeid,int special,int modelid,String onchange,Map threadtypes){ if(onchange==null){ onchange="onchange=\"ajaxget('post.jsp?action=threadtypes&typeid='+this.options[this.selectedIndex].value+'&fid="+fid+"&rand='+Math.random(), 'threadtypes', 'threadtypeswait')\""; } if(threadtypes!=null&&threadtypes.size()>0){ Map<Integer,String> types=(Map<Integer,String>)threadtypes.get("types"); Map<Integer,String> specials=(Map<Integer,String>)threadtypes.get("special"); Map<Integer,String> modelids=(Map<Integer,String>)threadtypes.get("modelid"); StringBuffer html=new StringBuffer("<select name=\"typeid\" "+(special==0 ? onchange : "")+"><option value=\"0\">&nbsp;</option>"); Set<Integer> typeids=types.keySet(); for(Integer typeid:typeids){ boolean isspecial=specials!=null && "1".equals(specials.get(typeid)); if((special==0 ||specials!=null&&!isspecial)&&(modelid==0||String.valueOf(modelid).equals(modelids.get(typeid)))) { html.append("<option value=\""+typeid+"\" "+(curtypeid == typeid ? "selected=\"selected\"" : "")+" "+(isspecial ? "class=\"special\"" : "")+">"+Common.strip_tags(types.get(typeid))+"</option>"); } } html.append("</select><span id=\"threadtypeswait\"></span>"+(special == -1 ? 
"<input type=\"hidden\" name=\"typeid\" value=\""+curtypeid+"\" />" : "")); return html.toString(); }else{ return ""; } } public static void sendpm(String toid,String subject,String message,String fromid,String from,int timestamp){ subject=addslashes(subject); message=addslashes(message); String[] toids =toid.split(","); String[] pmids=new String[toids.length]; int i=0; for (String uid : toids) { if(uid.length()>0){ int pid=dataBaseService.insert("INSERT INTO jrun_pms (msgfrom, msgfromid, msgtoid, folder, new, subject, dateline, message) VALUES ('"+from+"', '"+fromid+"', '"+uid+"', 'inbox', '1', '"+subject+"', '"+timestamp+"', '"+message+"')",true); if(pid>0){ pmids[i]=uid; i++; } } } String uids=implodeids(pmids); if(uids.length()>0){ dataBaseService.runQuery("UPDATE jrun_members SET newpm='1' WHERE uid IN ("+toid+")"); } } public static Map<String,Integer> getMultiInfo(int num,int perpage,int curpage){ Map<String,Integer> multiInfo=new HashMap<String,Integer>(); int start_limit=0; if(num>perpage){ start_limit=(curpage - 1) * perpage; if(start_limit>=num){ int k = num%perpage; curpage=k>0?(num/perpage)+1:(num/perpage); start_limit=(curpage - 1) * perpage; }else if(start_limit<0){ curpage=1; start_limit=0; } }else{ curpage=1; } multiInfo.put("curpage", curpage); multiInfo.put("start_limit", start_limit); return multiInfo; } @SuppressWarnings("unchecked") public static Map<String,Object> multi(int num,int perpage,int curpage,String mpurl,int maxpages,int page,boolean autogoto,boolean simple,String ajaxtarget) { Map<String,Object> multi=new HashMap<String,Object>(); int realpages=1; if(num>perpage){ ajaxtarget=ajaxtarget!=null?" ajaxtarget=\""+Common.htmlspecialchars(ajaxtarget)+"\" ":""; mpurl+=mpurl.indexOf("?")>=0?"&amp;":"?"; int offset=2; realpages=(int)Math.ceil((float)num/(float)perpage); int pages = maxpages>0 && maxpages < realpages ? 
maxpages : realpages; int from=0; int to=0; if(page>pages){ from=1; to=pages; }else{ from=curpage-offset; to=from+page-1; if(from<1){ to=curpage+1-from; from=1; if(to-from<page){ to=page; } }else if(to>pages){ from=pages-page+1; to=pages; } } StringBuffer multipage=new StringBuffer(); multipage.append((curpage - offset > 1 && pages > page ? "<a href=\""+mpurl+"page=1\" class=\"first\""+ajaxtarget+">1 ...</a>" : "")); multipage.append((curpage > 1 && !simple ? "<a href=\""+mpurl+"page="+(curpage - 1)+"\" class=\"prev\""+ajaxtarget+">&lsaquo;&lsaquo;</a>" : "")); for (int i = from; i <= to; i++) { multipage.append(i == curpage ? "<strong>"+i+"</strong>":"<a href=\""+mpurl+"page="+i+(ajaxtarget.length()>0 && i == pages && autogoto ? "#" : "")+"\""+ajaxtarget+">"+i+"</a>"); } multipage.append((curpage < pages && !simple ? "<a href=\""+mpurl+"page="+(curpage + 1)+"\" class=\"next\""+ajaxtarget+">&rsaquo;&rsaquo;</a>" : "")); multipage.append(to < pages ? "<a href=\""+mpurl+"page="+pages+"\" class=\"last\""+ajaxtarget+">... "+realpages+"</a>" : ""); multipage.append(!simple && pages > page && ajaxtarget.length()==0 ? "<kbd><input type=\"text\" name=\"custompage\" size=\"3\" onkeydown=\"if(event.keyCode==13) {window.location=\'"+mpurl+"page=\'+this.value; return false;}\" /></kbd>" : ""); multi.put("multipage", "<div class=\"pages\">"+(!simple ? 
"<em>&nbsp;"+num+"&nbsp;</em>" : "")+multipage+"</div>"); } multi.put("maxpage",realpages); return multi; } public static int time(){ return (int)(System.currentTimeMillis()/1000); } public static Map<String,String[]> getTimeZoneIDs(){ return timeZoneIDs; } public static SimpleDateFormat getSimpleDateFormat(String format,String timeoffset){ String[] timeZoneID=timeZoneIDs.get(timeoffset); if(timeZoneID==null){ timeZoneID=timeZoneIDs.get(ForumInit.settings.get("timeoffset")); } SimpleDateFormat sdf = new SimpleDateFormat(format,Locale.ENGLISH); sdf.setTimeZone(TimeZone.getTimeZone(timeZoneID[0])); return sdf; } public static Calendar getCalendar(String timeoffset){ String[] timeZoneID=timeZoneIDs.get(timeoffset); if(timeZoneID==null){ timeZoneID=timeZoneIDs.get(ForumInit.settings.get("timeoffset")); } return Calendar.getInstance(TimeZone.getTimeZone(timeZoneID[0])); } public static String gmdate(SimpleDateFormat sdf,int timestamp){ return sdf.format(timestamp*1000l); } public static String gmdate(String format,int timestamp,String timeoffset){ return getSimpleDateFormat(format,timeoffset).format(timestamp*1000l); } @SuppressWarnings({ "unchecked", "static-access" }) public static Map advertisement(String range) { Map advs=new HashMap(); int timestamp = Common.time(); List<Map<String,String>> advertisements=dataBaseService.executeQuery("SELECT advid,type,targets,parameters,code FROM jrun_advertisements WHERE available=1 AND starttime<='"+timestamp+"' and (endtime ='0' or endtime >='"+timestamp+"') ORDER BY displayorder"); if(advertisements!=null&&advertisements.size()>0) { Map<String,String> itemsMap=new HashMap<String, String>(); Map<String,Map<String,String>> typesMap=new HashMap<String, Map<String,String>>(); for (Map<String, String> adv : advertisements) { String type=adv.get("type"); String advid=adv.get("advid"); String code=adv.get("code").replaceAll("\r\n", " "); code=code.replace("\\", "\\\\"); Map<String,String> parameters=new HashMap<String,String>(); 
if("footerbanner".equals(type)||"thread".equals(type)) { parameters=dataParse.characterParse(adv.get("parameters"), false); type+=(parameters.get("position")!=null&&parameters.get("position").matches("^(2|3)$")?parameters.get("position"):"1"); } adv.put("targets", (adv.get("targets").equals("")||adv.get("targets").equals("all"))?(type.equals("text")?"forum":(type.length()>6&&type.substring(0,6).equals("thread")?"forum":"all")):adv.get("targets")); String[] targets=adv.get("targets").split("\t"); if(targets!=null&&targets.length>0) { for (String target : targets) { target=("0".equals(target)?"index":("all".equals(target)||"index".equals(target)||"forumdisplay".equals(target)||"viewthread".equals(target)||"register".equals(target)||"redirect".equals(target)||"archiver".equals(target)?target:("forum".equals(target)?"forum_all":"forum_"+target))); if((("forumdisplay".equals(range)&&!("thread".equals(adv.get("type"))||"interthread".equals(adv.get("type"))))||"viewthread".equals(range))&&(target.length()>6&&target.substring(0,6).equals("forum_"))) { if("thread".equals(adv.get("type"))) { String displayorder=parameters.get("displayorder"); String []displayorders=displayorder!=null&&!displayorder.trim().equals("")?displayorder.split("\t"):new String[]{"0"}; for (String postcount : displayorders) { postcount=postcount.trim(); Map<String,String> targetMap=typesMap.get(type+"_"+postcount); if(targetMap==null) { targetMap=new HashMap<String, String>(); } targetMap.put(target, targetMap.get(target)!=null?targetMap.get(target)+","+advid:advid); typesMap.put(type+"_"+postcount, targetMap); } } else{ Map<String,String> targetMap=typesMap.get(type); if(targetMap==null) { targetMap=new HashMap<String, String>(); } targetMap.put(target, targetMap.get(target)!=null?targetMap.get(target)+","+advid:advid); typesMap.put(type, targetMap); } if(adv.get("type").equals("float")){ itemsMap.put(advid, addFloathForFloatAdv(code, (String)dataParse.characterParse(adv.get("parameters"), 
false).get("floath"))); }else{ itemsMap.put(advid, code); } } else if("all".equals(range)&&("all".equals(target)||"redirect".equals(target))) { Map targetMap=(Map)advs.get(target); if(targetMap==null) { targetMap=new HashMap(); } Map<String,Map<String,String>> typeMap=(Map<String,Map<String,String>>)targetMap.get("type"); Map<String,String> itemMap=(Map<String,String>)targetMap.get("items"); if(typeMap==null) { typeMap= new HashMap<String,Map<String,String>>(); itemMap= new HashMap<String,String>(); } Map<String,String> typeitems=typeMap.get(type); if(typeitems==null) { typeitems=new HashMap<String, String>(); } typeitems.put("all", typeitems.get("all")!=null?typeitems.get("all")+","+advid:advid); typeMap.put(type, typeitems); if(adv.get("type").equals("float")){ itemMap.put(advid, addFloathForFloatAdv(code, (String)dataParse.characterParse(adv.get("parameters"), false).get("floath"))); }else{ itemMap.put(advid, code); } if(typeMap.size()>0&&itemMap.size()>0) { targetMap.put("type", typeMap); targetMap.put("items", itemMap); } if(targetMap.size()>0) { advs.put(target, targetMap); } typeMap=null; }else if("index".equals(range)&&"intercat".equals(type)) { parameters=dataParse.characterParse(adv.get("parameters"), false); String position=parameters.get("position"); if(position==null||position.equals("")) { position="0"; } String[] positions=position.trim().split(","); Map<String,String> positionMap=(Map<String,String>)typesMap.get(type); if(positionMap==null) { positionMap=new HashMap<String,String>(); } for (String obj : positions) { positionMap.put(obj.trim(), positionMap.get(obj.trim())!=null?positionMap.get(obj.trim())+","+advid:advid); if(adv.get("type").equals("float")){ itemsMap.put(advid, addFloathForFloatAdv(code, (String)dataParse.characterParse(adv.get("parameters"), false).get("floath"))); }else{ itemsMap.put(advid, code); } } typesMap.put(type, positionMap); } else if(target.equals(range)||("index".equals(range)&&"forum_all".equals(target))) { 
Map<String,String> advtypeMap=(Map<String,String>)typesMap.get(type); if(advtypeMap==null) { advtypeMap=new HashMap<String,String>(); } advtypeMap.put("0",advtypeMap.get("0")!=null?advtypeMap.get("0")+","+advid:advid); if(adv.get("type").equals("float")){ itemsMap.put(advid, addFloathForFloatAdv(code, (String)dataParse.characterParse(adv.get("parameters"), false).get("floath"))); }else{ itemsMap.put(advid,code); } typesMap.put(type, advtypeMap); } } } if(itemsMap.size()>0&&typesMap.size()>0) { advs.put("items", itemsMap); advs.put("type", typesMap); } } itemsMap=null; typesMap=null; } advertisements=null; return advs; } private static String addFloathForFloatAdv(String code,String floath){ Map<String,String> tempM = new HashMap<String, String>(); tempM.put("code", code); tempM.put("floath", floath); return dataParse.combinationChar(tempM); } public static Locale getUserLocale(HttpServletRequest request) { HttpSession session = request.getSession(); Locale userLocale = (Locale) session.getAttribute(Globals.LOCALE_KEY); if (userLocale == null) { userLocale = request.getLocale(); } return userLocale; } public static MessageResources getMessageResources(HttpServletRequest request){ HttpSession session = request.getSession(); return (MessageResources)session.getServletContext().getAttribute(Globals.MESSAGES_KEY); } public static int range(int value,int max,int min){ return Math.min(max, Math.max(value, min)); } public static int intval(String s){ return intval(s,10); } public static int toDigit(String s){ return Math.max(intval(s,10), 0); } public static int intval(String s,int radix){ if (s == null||s.length()==0){ return 0; } if(radix==0){ radix=10; }else if (radix < Character.MIN_RADIX) { return 0; }else if (radix > Character.MAX_RADIX) { return 0; } int result = 0; int i = 0, max = s.length(); int limit; int multmin; int digit; boolean negative = false; if (s.charAt(0) == '-') { negative = true; limit = Integer.MIN_VALUE; i++; } else { limit = -Integer.MAX_VALUE; } 
if (i < max) { digit = Character.digit(s.charAt(i++),radix); if (digit < 0) { return 0; }else{ result = -digit; } } multmin = limit / radix; while (i < max) { digit = Character.digit(s.charAt(i++),radix); if (digit < 0) { break; } if (result < multmin) { result = limit; break; } result *= radix; if (result < limit + digit) { result = limit; break; } result -= digit; } if (negative) { if (i > 1) { return result; } else { return 0; } } else { return -result; } } public static boolean isFounder(Map<String,String> settings,Members user) { if(user.getAdminid()!=1){ return false; } String founders=settings.get("forumfounders"); if(Common.empty(founders)){ return true; } founders=founders.replaceAll(" ", ""); if((","+founders+",").contains(","+user.getUid()+",")){ return true; } return false; } public static void promotion(HttpServletRequest request,HttpServletResponse response,Map<String,String>settings,String fromuid,String fromuser,int jsprun_uid,String jsprun_user,Map creditspolicys){ int fuid = 0; if(!Common.empty(fromuid)) { fuid = Common.intval(fromuid); fromuser = ""; } if(jsprun_uid==0 || !(fuid == jsprun_uid || fromuser.equals(jsprun_user))) { if(!Common.empty(creditspolicys.get("promotion_visit"))) { String onlineip = Common.get_onlineip(request); dataBaseService.runQuery("REPLACE INTO jrun_promotions (ip, uid, username)VALUES ('"+onlineip+"', '"+fuid+"', '"+fromuser+"')"); } if(!Common.empty(creditspolicys.get("promotion_register"))) { if(!empty(fromuser) && empty(fromuid)) { String promotion=CookieUtil.getCookie(request, "promotion", true, settings); if(empty(promotion)) { List<Map<String,String>> list = dataBaseService.executeQuery("SELECT uid FROM jrun_members WHERE username='"+fromuser+"'"); fuid = list!=null&&list.size()>0?Common.intval(list.get(0).get("uid")):0; } else { fuid = Common.intval(promotion); } } if(fuid>0) { CookieUtil.setCookie(request, response, "promotion", fuid+"", 1800, true, settings); } } } } public static Object[] getCacheInfo(String 
tid,String cacheThreadDir){ String tidMD5=Md5Token.getInstance().getLongToken(tid).substring(3); String fullDir=cacheThreadDir+"/"+tidMD5.charAt(0)+"/"+tidMD5.charAt(1)+"/"+tidMD5.charAt(2)+"/"; String fileName=fullDir+tid+".htm"; int fileModified=0; File file = new File(fileName); if(file.exists()){ long lastModified = file.lastModified(); if(lastModified>0){ fileModified=(int)(lastModified/1000); } }else{ File fullFile = new File(fullDir); if(!fullFile.isDirectory()){ for (int i=0; i<3; i++) { cacheThreadDir+="/"+tidMD5.charAt(i); File cacheThreadFile = new File(cacheThreadDir); if(!cacheThreadFile.isDirectory()){ cacheThreadFile.mkdir(); } } } } Object[] cache = new Object[2]; cache[0]=fileModified; cache[1]=fileName; return cache; } public static String encodeText(HttpServletRequest request, String text){ String user_agent = request.getHeader("User-Agent"); if(user_agent==null){ user_agent=""; } try { if (user_agent.indexOf("MSIE")>=0) { text = new String(text.getBytes("GBK"),"ISO8859-1"); }else if (user_agent.indexOf("Firefox")>=0) { text = MimeUtility.encodeText(text, "UTF-8", "B"); } else { text=Common.encode(text); } } catch (UnsupportedEncodingException e) { e.printStackTrace(); } return text; } public static void setResponseHeader(HttpServletResponse response){ setResponseHeader(response, "text/html"); } public static void setResponseHeader(HttpServletResponse response,String type){ response.setContentType(type+"; charset="+JspRunConfig.CHARSET); response.setHeader("Cache-Control", "no-store"); response.setHeader("Program", "no-cache"); response.setDateHeader("Expirse", 0); } public static void writeMessage(HttpServletResponse response,String message,boolean iserror){ Common.setResponseHeader(response,"application/xml"); String content = "<?xml version=\"1.0\" encoding=\""+JspRunConfig.CHARSET+"\"?><root><![CDATA["; if(iserror){ message = message+" <script type=\"text/javascript\" reload=\"1\">function ajaxerror() { 
alert('"+message+"');}ajaxerror();</script>"; } message = message.replaceAll("([\\x01-\\x09\\x0b-\\x0c\\x0e-\\x1f])+", ""); message = message.replaceAll("]]>", "]]&gt"); content = content+message+"]]></root>"; try { response.getWriter().write(content); } catch (IOException e) { } } @SuppressWarnings("unchecked") public static String formHash(HttpServletRequest request) { Md5Token md5 = Md5Token.getInstance(); HttpSession session = request.getSession(); Object jsprun_userss = session.getAttribute("jsprun_userss"); Object jsprun_uid = session.getAttribute("jsprun_uid"); Object jsprun_pw = session.getAttribute("jsprun_pw"); String jsprun_auth_key = md5.getLongToken(ForumInit.settings.get("authkey")+request.getHeader("User-Agent")); String timestamp = request.getAttribute("timestamp").toString(); Boolean in_admincp = (Boolean)request.getAttribute("in_admincp"); char split = '|'; StringBuffer temp = new StringBuffer(); temp.append(timestamp.substring(0, timestamp.length() - 7)); temp.append(split); temp.append(jsprun_userss); temp.append(split); temp.append(jsprun_uid); temp.append(split); temp.append(jsprun_pw); temp.append(split); temp.append(jsprun_auth_key); if (in_admincp != null) { temp.append(split); temp.append("Only For JspRun! Admin Control Panel"); } return md5.getLongToken(temp.toString()).substring(8, 16); } public static String clearLineBreaksFI(String str){ char[] strArray=str.toCharArray(); char[] toArray=new char[strArray.length]; int j=0; boolean lastIsSpace=false; char appendChar=0; for (int i = 0; i < strArray.length; i++) { char c=strArray[i]; if(c!='\r'&&c!='\n'){ appendChar=c; lastIsSpace=(c==' '||c=='\t'); }else if(!lastIsSpace){ appendChar=' '; lastIsSpace=true; }else{ appendChar=0; } if(appendChar!=0){ toArray[j++]=appendChar; } } char[] dest=new char[j]; System.arraycopy(toArray, 0, dest, 0, j); return new String(dest); } }
apache-2.0
selrahal/droolsjbpm-integration
kie-spring/src/test/java/org/kie/spring/jbpm/LocalEntityManagerFactorySpringTest.java
5319
package org.kie.spring.jbpm; import java.util.List; import javax.naming.InitialContext; import javax.transaction.UserTransaction; import bitronix.tm.resource.jdbc.PoolingDataSource; import org.jbpm.process.audit.AuditLogService; import org.jbpm.process.audit.JPAAuditLogService; import org.jbpm.process.audit.ProcessInstanceLog; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kie.api.runtime.Environment; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.manager.RuntimeEngine; import org.kie.api.runtime.manager.RuntimeManager; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.task.TaskService; import org.kie.api.task.model.TaskSummary; import org.kie.internal.runtime.manager.InternalRuntimeManager; import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.AbstractPlatformTransactionManager; import org.springframework.transaction.support.DefaultTransactionDefinition; import static junit.framework.Assert.*; public class LocalEntityManagerFactorySpringTest extends AbstractJbpmSpringTest { @Test public void testSpringWithJTAAndEMF() throws Exception{ context = new ClassPathXmlApplicationContext("jbpm/local-emf/local-emf-spring.xml"); AbstractPlatformTransactionManager aptm = (AbstractPlatformTransactionManager) context.getBean( "jbpmTxManager" ); RuntimeManager manager = (RuntimeManager) context.getBean("runtimeManager"); RuntimeEngine engine = manager.getRuntimeEngine(ProcessInstanceIdContext.get()); KieSession ksession = engine.getKieSession(); TaskService taskService = engine.getTaskService(); int ksessionId = ksession.getId(); ProcessInstance processInstance = ksession.startProcess("com.sample.bpmn.hello"); System.out.println("Process started"); manager.disposeRuntimeEngine(engine); engine = 
manager.getRuntimeEngine(ProcessInstanceIdContext.get(processInstance.getId())); ksession = engine.getKieSession(); taskService = engine.getTaskService(); assertEquals(ksessionId, ksession.getId()); AuditLogService logService = (AuditLogService) context.getBean("logService"); DefaultTransactionDefinition def = new DefaultTransactionDefinition(); TransactionStatus status = aptm.getTransaction(def); ProcessInstanceLog log = logService.findProcessInstance(processInstance.getId()); aptm.commit(status); assertNotNull(log); List<TaskSummary> tasks = taskService.getTasksAssignedAsPotentialOwner("john", "en-UK"); System.out.println("Found " + tasks.size() + " task(s) for user 'john'"); assertEquals(1, tasks.size()); long taskId = tasks.get(0).getId(); taskService.start(taskId, "john"); taskService.complete(taskId, "john", null); tasks = taskService.getTasksAssignedAsPotentialOwner("mary", "en-UK"); System.out.println("Found " + tasks.size() + " task(s) for user 'mary'"); assertEquals(1, tasks.size()); taskId = tasks.get(0).getId(); taskService.start(taskId, "mary"); taskService.complete(taskId, "mary", null); processInstance = ksession.getProcessInstance(processInstance.getId()); assertNull(processInstance); System.out.println("Process instance completed"); } @Test public void testSpringWithJTAAndEMFwithRollback() throws Exception{ context = new ClassPathXmlApplicationContext("jbpm/local-emf/local-emf-spring.xml"); AbstractPlatformTransactionManager aptm = (AbstractPlatformTransactionManager) context.getBean( "jbpmTxManager" ); RuntimeManager manager = (RuntimeManager) context.getBean("runtimeManager"); RuntimeEngine engine = manager.getRuntimeEngine(ProcessInstanceIdContext.get()); KieSession ksession = engine.getKieSession(); TaskService taskService = engine.getTaskService(); AuditLogService logService = (AuditLogService) context.getBean("logService"); DefaultTransactionDefinition def = new DefaultTransactionDefinition(); TransactionStatus status = 
aptm.getTransaction(def); ProcessInstance processInstance = ksession.startProcess("com.sample.bpmn.hello"); long processInstanceId = processInstance.getId(); aptm.rollback(status); processInstance = ksession.getProcessInstance(processInstanceId); if (processInstance == null) { System.out.println("Process instance rolled back"); } else { throw new IllegalArgumentException("Process instance not rolled back"); } List<TaskSummary> tasks = taskService.getTasksAssignedAsPotentialOwner("john", "en-UK"); System.out.println("Found " + tasks.size() + " task(s) for user 'john'"); assertEquals(0, tasks.size()); def = new DefaultTransactionDefinition(); status = aptm.getTransaction(def); ProcessInstanceLog log = logService.findProcessInstance(processInstanceId); aptm.commit(status); assertNull(log); } }
apache-2.0
resty-gwt/resty-gwt
restygwt/src/main/java/org/fusesource/restygwt/rebind/util/AnnotationCopyUtil.java
5303
/**
 * Copyright (C) 2009-2011 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.restygwt.rebind.util;

import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.Method;

import javax.ws.rs.Path;

/**
 * A utility class that renders an annotation instance back into its Java
 * source form (e.g. {@code @javax.ws.rs.Path(value = "/users")}), so it can
 * be copied into generated code.
 *
 * @author <a href="mailto:bogdan.mustiata@gmail.com">Bogdan Mustiata</a>
 */
public class AnnotationCopyUtil {

    private AnnotationCopyUtil() {
        // static utility class; not meant to be instantiated
    }

    /**
     * Returns the source representation of {@code annotation}, including its
     * attribute list when the annotation type declares attributes.
     */
    public static String getAnnotationAsString(Annotation annotation) {
        StringBuilder result = encodeAnnotationName(annotation);

        if (hasAnnotationAttributes(annotation)) {
            encodeAnnotationAttributes(annotation, result);
        }

        return result.toString();
    }

    /** Encodes the {@code @fully.qualified.Name} prefix. */
    private static StringBuilder encodeAnnotationName(Annotation annotation) {
        return new StringBuilder("@").append(annotation.annotationType().getCanonicalName());
    }

    private static boolean hasAnnotationAttributes(Annotation annotation) {
        return annotation.annotationType().getDeclaredMethods().length != 0;
    }

    /** Appends {@code (attr1 = v1, attr2 = v2, ...)} for all encodable attributes. */
    private static void encodeAnnotationAttributes(Annotation annotation, StringBuilder result) {
        result.append("(");
        OnceFirstIterator<String> comma = new OnceFirstIterator<String>("", ", ");
        for (Method method : annotation.annotationType().getDeclaredMethods()) {
            Object value = readAnnotationAttribute(annotation, method);
            String encodedValue = encodeAnnotationValue(value);

            // null means "skip this element" (e.g. an empty array). Check it
            // BEFORE any post-processing: the previous code called replaceAll()
            // first and could trip over a NullPointerException.
            if (encodedValue == null) {
                continue;
            }

            // Strip regex expressions from Path annotation value, e.g.
            // "{id: [0-9]+}" -> "{id}", since GWT cannot evaluate them.
            if (Path.class == annotation.annotationType()) {
                encodedValue = encodedValue.replaceAll("\\{\\s*(\\S+)\\s*:\\s*[^{}]+\\}", "{$1}");
            }

            result.append(comma.next()).append(method.getName()).append(" = ").append(encodedValue);
        }
        result.append(")");
    }

    /** Reads one attribute value reflectively, wrapping any failure with context. */
    private static Object readAnnotationAttribute(Annotation annotation, Method annotationAttribute) {
        try {
            return annotationAttribute.invoke(annotation);
        } catch (Exception e) {
            throw new IllegalArgumentException(
                    "Unable to read attribute " + annotationAttribute + " from " + annotation, e);
        }
    }

    /**
     * Returns the string representation of {@code value} or {@code null}, if
     * the element should not be added (currently only for empty arrays).
     *
     * @param value the attribute value; must not be {@code null}
     * @throws IllegalArgumentException for unsupported value types
     */
    private static String encodeAnnotationValue(Object value) {
        // Values of annotation elements must not be "null"
        if (null != value) {
            if (value instanceof String) {
                return readStringValue(value);
            } else if (value instanceof Number) {
                return readNumberValue(value);
            } else if (value.getClass().isArray()) {
                // workaround for ClassCastException: [Ljava.lang.Object; cannot be cast to [I
                // ignore empty arrays, because it becomes Object[]
                if (Array.getLength(value) > 0) {
                    return readArrayValue(value);
                }
                return null;
            } else if (value instanceof Annotation) {
                return getAnnotationAsString((Annotation) value);
            } else if (value instanceof Boolean) {
                return readBooleanValue((Boolean) value);
            } else if (value instanceof Class) {
                return readClassValue((Class) value);
            } else if (value instanceof Enum) {
                // enum constants are legal annotation values; previously they
                // fell through to the exception below
                return readEnumValue((Enum<?>) value);
            }
        }

        throw new IllegalArgumentException("Unsupported value for encodeAnnotationValue: " + value);
    }

    private static String readBooleanValue(Boolean value) {
        return Boolean.toString(value);
    }

    /** Encodes an array attribute as {@code {v1, v2, ...}}. */
    private static String readArrayValue(Object value) {
        StringBuilder result = new StringBuilder();
        OnceFirstIterator<String> comma = new OnceFirstIterator<String>("", ", ");

        result.append("{");
        for (int i = 0; i < Array.getLength(value); i++) {
            Object arrayValue = Array.get(value, i);
            result.append(comma.next()).append(encodeAnnotationValue(arrayValue));
        }
        result.append("}");

        return result.toString();
    }

    private static String readNumberValue(Object value) {
        return value.toString();
    }

    /**
     * Encodes a String as a valid Java string literal. Backslash must be
     * escaped FIRST, otherwise a value such as {@code {id: \d+}} would produce
     * an invalid escape sequence in the generated source.
     */
    private static String readStringValue(Object value) {
        return "\"" + value.toString()
                .replace("\\", "\\\\")
                .replace("\"", "\\\"")
                .replace("\n", "\\n")
                .replace("\r", "\\r") + "\"";
    }

    private static String readClassValue(Class value) {
        return value.getCanonicalName() + ".class";
    }

    private static String readEnumValue(Enum<?> value) {
        // use getDeclaringClass(): getClass() may be an anonymous subclass
        // when the enum constant has a body
        return value.getDeclaringClass().getCanonicalName() + "." + value.name();
    }
}
apache-2.0
googleapis/java-cloudbuild
proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/VolumeOrBuilder.java
2299
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/devtools/cloudbuild/v1/cloudbuild.proto
// NOTE(review): generated accessor contract for the Volume message; change
// cloudbuild.proto and regenerate rather than editing this file by hand.

package com.google.cloudbuild.v1;

public interface VolumeOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.devtools.cloudbuild.v1.Volume)
    com.google.protobuf.MessageOrBuilder {

  /**
   *
   *
   * <pre>
   * Name of the volume to mount.
   * Volume names must be unique per build step and must be valid names for
   * Docker volumes. Each named volume must be used by at least two build steps.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The name.
   */
  java.lang.String getName();
  /**
   *
   *
   * <pre>
   * Name of the volume to mount.
   * Volume names must be unique per build step and must be valid names for
   * Docker volumes. Each named volume must be used by at least two build steps.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The bytes for name.
   */
  com.google.protobuf.ByteString getNameBytes();

  /**
   *
   *
   * <pre>
   * Path at which to mount the volume.
   * Paths must be absolute and cannot conflict with other volume paths on the
   * same build step or with certain reserved volume paths.
   * </pre>
   *
   * <code>string path = 2;</code>
   *
   * @return The path.
   */
  java.lang.String getPath();
  /**
   *
   *
   * <pre>
   * Path at which to mount the volume.
   * Paths must be absolute and cannot conflict with other volume paths on the
   * same build step or with certain reserved volume paths.
   * </pre>
   *
   * <code>string path = 2;</code>
   *
   * @return The bytes for path.
   */
  com.google.protobuf.ByteString getPathBytes();
}
apache-2.0
bramstein/closure-compiler-inline
test/com/google/javascript/jscomp/ControlFlowAnalysisTest.java
59952
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.collect.Lists;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import junit.framework.TestCase;

import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Tests {@link ControlFlowAnalysis}.
 *
 */
public class ControlFlowAnalysisTest extends TestCase {

  /**
   * Given an input in JavaScript, test if the control flow analysis
   * creates the proper control flow graph by comparing the expected
   * Dot file output.
   *
   * @param input Input JavaScript.
   * @param expected Expected Graphviz Dot file.
   */
  private void testCfg(String input, String expected) {
    // Functions are traversed by default.
    testCfg(input, expected, true);
  }

  /**
   * Gets all the edges of the graph.
   */
  private static List<DiGraphEdge<Node, Branch>> getAllEdges(
      ControlFlowGraph<Node> cfg) {
    List<DiGraphEdge<Node, Branch>> edges = Lists.newArrayList();
    for (DiGraphNode<Node, Branch> n : cfg.getDirectedGraphNodes()) {
      for (DiGraphEdge<Node, Branch> e : cfg.getOutEdges(n.getValue())) {
        edges.add(e);
      }
    }
    return edges;
  }

  /**
   * Gets all the control flow edges from some node with the first token to
   * some node with the second token.
   */
  private static List<DiGraphEdge<Node, Branch>> getAllEdges(
      ControlFlowGraph<Node> cfg, int startToken, int endToken) {
    List<DiGraphEdge<Node, Branch>> edges = getAllEdges(cfg);
    Iterator<DiGraphEdge<Node, Branch>> it = edges.iterator();
    while (it.hasNext()) {
      DiGraphEdge<Node, Branch> edge = it.next();
      Node startNode = edge.getSource().getValue();
      Node endNode = edge.getDestination().getValue();
      // Drop edges whose endpoints have no AST node (e.g. the implicit
      // return) or whose token types do not match.
      if (startNode == null || endNode == null
          || startNode.getType() != startToken
          || endNode.getType() != endToken) {
        it.remove();
      }
    }
    return edges;
  }

  /**
   * Gets all the control flow edges of the given type from some node with the
   * first token to some node with the second token.
   */
  private static List<DiGraphEdge<Node, Branch>> getAllEdges(
      ControlFlowGraph<Node> cfg, int startToken, int endToken, Branch type) {
    List<DiGraphEdge<Node, Branch>> edges =
        getAllEdges(cfg, startToken, endToken);
    Iterator<DiGraphEdge<Node, Branch>> it = edges.iterator();
    while (it.hasNext()) {
      if (type != it.next().getValue()) {
        it.remove();
      }
    }
    return edges;
  }

  // Returns true if maybeDescendent is a (strict) descendant of n in the AST.
  private static boolean isAncestor(Node n, Node maybeDescendent) {
    for (Node current = n.getFirstChild(); current != null;
        current = current.getNext()) {
      if (current == maybeDescendent
          || isAncestor(current, maybeDescendent)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Gets all the control flow edges of the given type from some node with
   * the first token to some node with the second token.
   * This edge must flow from a parent to one of its descendants.
   */
  private static List<DiGraphEdge<Node, Branch>> getAllDownEdges(
      ControlFlowGraph<Node> cfg, int startToken, int endToken, Branch type) {
    List<DiGraphEdge<Node, Branch>> edges =
        getAllEdges(cfg, startToken, endToken, type);
    Iterator<DiGraphEdge<Node, Branch>> it = edges.iterator();
    while (it.hasNext()) {
      DiGraphEdge<Node, Branch> edge = it.next();
      Node source = edge.getSource().getValue();
      Node dest = edge.getDestination().getValue();
      if (!isAncestor(source, dest)) {
        it.remove();
      }
    }
    return edges;
  }

  /**
   * Asserts that there exists NO control flow edge from some node with the
   * first token to some node with the second token.
   * (The previous javadoc incorrectly claimed the edge must exist.)
   */
  private static void assertNoEdge(ControlFlowGraph<Node> cfg, int startToken,
      int endToken) {
    assertEquals(0, getAllEdges(cfg, startToken, endToken).size());
  }

  /**
   * Assert that there exists a control flow edge of the given type
   * from some node with the first token to some node with the second token.
   * This edge must flow from a parent to one of its descendants.
   */
  private static void assertDownEdge(ControlFlowGraph<Node> cfg,
      int startToken, int endToken, Branch type) {
    assertTrue("No down edge found",
        0 != getAllDownEdges(cfg, startToken, endToken, type).size());
  }

  /**
   * Assert that there exists a control flow edge of the given type
   * from some node with the first token to some node with the second token.
   * This edge must flow from a node to one of its ancestors.
   */
  private static void assertUpEdge(ControlFlowGraph<Node> cfg, int startToken,
      int endToken, Branch type) {
    // Note the swapped arguments: an up edge is a down edge in reverse.
    assertTrue("No up edge found",
        0 != getAllDownEdges(cfg, endToken, startToken, type).size());
  }

  /**
   * Assert that there exists a control flow edge of the given type
   * from some node with the first token to some node with the second token.
   * This edge must flow between two nodes that are not in the same subtree.
   */
  private static void assertCrossEdge(ControlFlowGraph<Node> cfg,
      int startToken, int endToken, Branch type) {
    int numDownEdges = getAllDownEdges(cfg, startToken, endToken, type).size();
    int numUpEdges = getAllDownEdges(cfg, endToken, startToken, type).size();
    int numEdges = getAllEdges(cfg, startToken, endToken, type).size();
    // Any edge that is neither a down edge nor an up edge is a cross edge.
    assertTrue("No cross edges found", numDownEdges + numUpEdges < numEdges);
  }

  /**
   * Assert that there exists a control flow edge
   * from some node with the first token to the return node.
   */
  private static void assertReturnEdge(ControlFlowGraph<Node> cfg,
      int startToken) {
    List<DiGraphEdge<Node, Branch>> edges = getAllEdges(cfg);
    for (DiGraphEdge<Node, Branch> edge : edges) {
      Node source = edge.getSource().getValue();
      DiGraphNode<Node, Branch> dest = edge.getDestination();
      if (source.getType() == startToken
          && cfg.isImplicitReturn(dest)) {
        return;
      }
    }

    fail("No return edge found");
  }

  /**
   * Assert that there exists no control flow edge
   * from some node with the first token to the return node.
   * NOTE(review): only the first edge whose source matches is inspected;
   * subsequent matching edges are not checked -- confirm this is intended.
   */
  private static void assertNoReturnEdge(ControlFlowGraph<Node> cfg,
      int startToken) {
    List<DiGraphEdge<Node, Branch>> edges = getAllEdges(cfg);
    for (DiGraphEdge<Node, Branch> edge : edges) {
      Node source = edge.getSource().getValue();
      DiGraphNode<Node, Branch> dest = edge.getDestination();
      if (source.getType() == startToken) {
        assertTrue("Token " + startToken + " should not have an out going"
            + " edge to the implicit return", !cfg.isImplicitReturn(dest));
        return;
      }
    }
  }

  /**
   * Given an input in JavaScript, get a control flow graph for it.
   *
   * @param input Input JavaScript.
   * @param runSynBlockPass whether to run {@link CreateSyntheticBlocks}
   *     (START/END markers) before the analysis.
   */
  private ControlFlowGraph<Node> createCfg(String input,
      boolean runSynBlockPass) {
    Compiler compiler = new Compiler();
    ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, true, true);

    Node root = compiler.parseSyntheticCode("cfgtest", input);
    if (runSynBlockPass) {
      CreateSyntheticBlocks pass = new CreateSyntheticBlocks(
          compiler, "START", "END");
      pass.process(null, root);
    }
    cfa.process(null, root);
    return cfa.getCfg();
  }

  // Convenience overload: no synthetic-block pass.
  private ControlFlowGraph<Node> createCfg(String input) {
    return createCfg(input, false);
  }

  /**
   * Given an input in JavaScript, test if the control flow analysis
   * creates the proper control flow graph by comparing the expected
   * Dot file output.
   *
   * @param input Input JavaScript.
   * @param expected Expected Graphviz Dot file.
   * @param shouldTraverseFunctions Whether to traverse functions when
   *    constructing the CFG (true by default). Passed in to the
   *    constructor of {@link ControlFlowAnalysis}.
   */
  private void testCfg(String input, String expected,
      boolean shouldTraverseFunctions) {
    Compiler compiler = new Compiler();
    ControlFlowAnalysis cfa =
        new ControlFlowAnalysis(compiler, shouldTraverseFunctions, true);

    Node root = compiler.parseSyntheticCode("cfgtest", input);
    cfa.process(null, root);
    ControlFlowGraph<Node> cfg = cfa.getCfg();
    try {
      assertEquals(expected, DotFormatter.toDot(root, cfg));
    } catch (java.io.IOException e) {
      fail("Tests failed with IOExceptions");
    }
  }

  // Straight-line code: each statement flows unconditionally into the next.
  public void testSimpleStatements() {
    String src = "var a; a = a; a = a";
    ControlFlowGraph<Node> cfg = createCfg(src);

    assertDownEdge(cfg, Token.SCRIPT, Token.VAR, Branch.UNCOND);
    assertCrossEdge(cfg, Token.VAR, Token.EXPR_RESULT, Branch.UNCOND);
    assertCrossEdge(cfg, Token.EXPR_RESULT, Token.EXPR_RESULT, Branch.UNCOND);
  }

  // Test a simple IF control flow.
public void testSimpleIf() { String src = "var x; if (x) { x() } else { x() };"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.SCRIPT, Token.VAR, Branch.UNCOND); assertCrossEdge(cfg, Token.VAR, Token.IF, Branch.UNCOND); assertDownEdge(cfg, Token.IF, Token.BLOCK, Branch.ON_TRUE); assertDownEdge(cfg, Token.BLOCK, Token.EXPR_RESULT, Branch.UNCOND); assertNoEdge(cfg, Token.EXPR_RESULT, Token.CALL); assertDownEdge(cfg, Token.IF, Token.BLOCK, Branch.ON_FALSE); assertReturnEdge(cfg, Token.EMPTY); } public void testBreakingBlock() { // BUG #1382217 String src = "X: { while(1) { break } }"; ControlFlowGraph<Node> cfg = createCfg(src); assertUpEdge(cfg, Token.BREAK, Token.BLOCK, Branch.UNCOND); } public void testBreakingTryBlock() { String src = "a: try { break a; } finally {} if(x) {}"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.BREAK, Token.IF, Branch.UNCOND); src = "a: try {} finally {break a;} if(x) {}"; cfg = createCfg(src); assertCrossEdge(cfg, Token.BREAK, Token.IF, Branch.UNCOND); src = "a: try {} catch(e) {break a;} if(x) {}"; cfg = createCfg(src); assertCrossEdge(cfg, Token.BREAK, Token.IF, Branch.UNCOND); } public void testWithStatement() { String src = "var x, y; with(x) { y() }"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.WITH, Token.BLOCK, Branch.UNCOND); assertNoEdge(cfg, Token.WITH, Token.NAME); assertNoEdge(cfg, Token.NAME, Token.BLOCK); assertDownEdge(cfg, Token.BLOCK, Token.EXPR_RESULT, Branch.UNCOND); assertReturnEdge(cfg, Token.EXPR_RESULT); } // Test a simple WHILE control flow with BREAKs. 
public void testSimpleWhile() { String src = "var x; while (x) { x(); if (x) { break; } x() }"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.WHILE, Token.BLOCK, Branch.ON_TRUE); assertDownEdge(cfg, Token.BLOCK, Token.EXPR_RESULT, Branch.UNCOND); assertDownEdge(cfg, Token.IF, Token.BLOCK, Branch.ON_TRUE); assertReturnEdge(cfg, Token.BREAK); } public void testSimpleSwitch() { String src = "var x; switch(x){ case(1): x(); case('x'): x(); break" + "; default: x();}"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.VAR, Token.SWITCH, Branch.UNCOND); assertNoEdge(cfg, Token.SWITCH, Token.NAME); // Transfer between cases and default. assertDownEdge(cfg, Token.SWITCH, Token.CASE, Branch.UNCOND); assertCrossEdge(cfg, Token.CASE, Token.CASE, Branch.ON_FALSE); assertCrossEdge(cfg, Token.CASE, Token.DEFAULT_CASE, Branch.ON_FALSE); // Within each case. assertDownEdge(cfg, Token.CASE, Token.BLOCK, Branch.ON_TRUE); assertDownEdge(cfg, Token.BLOCK, Token.EXPR_RESULT, Branch.UNCOND); assertNoEdge(cfg, Token.EXPR_RESULT, Token.CALL); assertNoEdge(cfg, Token.CALL, Token.NAME); } public void testSimpleNoDefault() { String src = "var x; switch(x){ case(1): break; } x();"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.CASE, Token.EXPR_RESULT, Branch.ON_FALSE); } public void testSwitchDefaultFirst() { // DEFAULT appears first. But it is should evaluated last. String src = "var x; switch(x){ default: break; case 1: break; }"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.SWITCH, Token.CASE, Branch.UNCOND); assertCrossEdge(cfg, Token.CASE, Token.DEFAULT_CASE, Branch.ON_FALSE); } public void testSwitchDefaultInMiddle() { // DEFAULT appears in the middle. But it is should evaluated last. 
String src = "var x; switch(x){ case 1: break; default: break; " + "case 2: break; }"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.SWITCH, Token.CASE, Branch.UNCOND); assertCrossEdge(cfg, Token.CASE, Token.CASE, Branch.ON_FALSE); assertCrossEdge(cfg, Token.CASE, Token.DEFAULT_CASE, Branch.ON_FALSE); } public void testSwitchEmpty() { // DEFAULT appears first. But it is should evaluated last. String src = "var x; switch(x){}; x()"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.SWITCH, Token.EMPTY, Branch.UNCOND); assertCrossEdge(cfg, Token.EMPTY, Token.EXPR_RESULT, Branch.UNCOND); } public void testReturnThrowingException() { String src = "function f() {try { return a(); } catch (e) {e()}}"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.RETURN, Token.BLOCK, Branch.ON_EX); assertDownEdge(cfg, Token.BLOCK, Token.CATCH, Branch.UNCOND); } // Test a simple FOR loop. public void testSimpleFor() { String src = "var a; for (var x = 0; x < 100; x++) { a(); }"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"VAR\"];\n" + " node1 -> node3 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 [label=\"FOR\"];\n" + " node0 -> node4 [weight=1];\n" + " node4 -> node3 [weight=1];\n" + " node5 [label=\"NAME\"];\n" + " node3 -> node5 [weight=1];\n" + " node6 [label=\"NUMBER\"];\n" + " node5 -> node6 [weight=1];\n" + " node3 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 [label=\"LT\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"NAME\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"NUMBER\"];\n" + " node7 -> node9 [weight=1];\n" + " node10 [label=\"INC\"];\n" + " node4 -> node10 [weight=1];\n" + " 
node11 [label=\"NAME\"];\n" + " node10 -> node11 [weight=1];\n" + " node10 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 [label=\"BLOCK\"];\n" + " node4 -> node12 [weight=1];\n" + " node13 [label=\"EXPR_RESULT\"];\n" + " node12 -> node13 [weight=1];\n" + " node14 [label=\"CALL\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"NAME\"];\n" + " node14 -> node15 [weight=1];\n" + " node13 -> node10 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 -> node13 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> RETURN " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node12 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testSimpleForWithContinue() { String src = "var a; for (var x = 0; x < 100; x++) {a();continue;a()}"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"VAR\"];\n" + " node1 -> node3 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 [label=\"FOR\"];\n" + " node0 -> node4 [weight=1];\n" + " node4 -> node3 [weight=1];\n" + " node5 [label=\"NAME\"];\n" + " node3 -> node5 [weight=1];\n" + " node6 [label=\"NUMBER\"];\n" + " node5 -> node6 [weight=1];\n" + " node3 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 [label=\"LT\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"NAME\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"NUMBER\"];\n" + " node7 -> node9 [weight=1];\n" + " node10 [label=\"INC\"];\n" + " node4 -> 
node10 [weight=1];\n" + " node11 [label=\"NAME\"];\n" + " node10 -> node11 [weight=1];\n" + " node10 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 [label=\"BLOCK\"];\n" + " node4 -> node12 [weight=1];\n" + " node13 [label=\"EXPR_RESULT\"];\n" + " node12 -> node13 [weight=1];\n" + " node14 [label=\"CALL\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"NAME\"];\n" + " node14 -> node15 [weight=1];\n" + " node16 [label=\"CONTINUE\"];\n" + " node13 -> node16 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 -> node16 [weight=1];\n" + " node16 -> node10 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node17 [label=\"EXPR_RESULT\"];\n" + " node12 -> node17 [weight=1];\n" + " node18 [label=\"CALL\"];\n" + " node17 -> node18 [weight=1];\n" + " node19 [label=\"NAME\"];\n" + " node18 -> node19 [weight=1];\n" + " node17 -> node10 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 -> node13 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> RETURN " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node12 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testNestedFor() { // This is tricky as the inner FOR branches to "x++" ON_FALSE. 
String src = "var a,b;a();for(var x=0;x<100;x++){for(var y=0;y<100;y++){" + "continue;b();}}"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"NAME\"];\n" + " node1 -> node3 [weight=1];\n" + " node4 [label=\"EXPR_RESULT\"];\n" + " node1 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node4 [weight=1];\n" + " node5 [label=\"CALL\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"NAME\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"VAR\"];\n" + " node4 -> node7 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 [label=\"FOR\"];\n" + " node0 -> node8 [weight=1];\n" + " node8 -> node7 [weight=1];\n" + " node9 [label=\"NAME\"];\n" + " node7 -> node9 [weight=1];\n" + " node10 [label=\"NUMBER\"];\n" + " node9 -> node10 [weight=1];\n" + " node7 -> node8 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node11 [label=\"LT\"];\n" + " node8 -> node11 [weight=1];\n" + " node12 [label=\"NAME\"];\n" + " node11 -> node12 [weight=1];\n" + " node13 [label=\"NUMBER\"];\n" + " node11 -> node13 [weight=1];\n" + " node14 [label=\"INC\"];\n" + " node8 -> node14 [weight=1];\n" + " node15 [label=\"NAME\"];\n" + " node14 -> node15 [weight=1];\n" + " node14 -> node8 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node16 [label=\"BLOCK\"];\n" + " node8 -> node16 [weight=1];\n" + " node17 [label=\"FOR\"];\n" + " node16 -> node17 [weight=1];\n" + " node18 [label=\"VAR\"];\n" + " node17 -> node18 [weight=1];\n" + " node19 [label=\"NAME\"];\n" + " node18 -> node19 [weight=1];\n" + " node20 [label=\"NUMBER\"];\n" + " node19 -> node20 [weight=1];\n" + " node18 -> node17 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, 
color=\"red\"];\n" + " node21 [label=\"LT\"];\n" + " node17 -> node21 [weight=1];\n" + " node22 [label=\"NAME\"];\n" + " node21 -> node22 [weight=1];\n" + " node23 [label=\"NUMBER\"];\n" + " node21 -> node23 [weight=1];\n" + " node24 [label=\"INC\"];\n" + " node17 -> node24 [weight=1];\n" + " node25 [label=\"NAME\"];\n" + " node24 -> node25 [weight=1];\n" + " node24 -> node17 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node26 [label=\"BLOCK\"];\n" + " node17 -> node26 [weight=1];\n" + " node27 [label=\"CONTINUE\"];\n" + " node26 -> node27 [weight=1];\n" + " node27 -> node24 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node28 [label=\"EXPR_RESULT\"];\n" + " node26 -> node28 [weight=1];\n" + " node29 [label=\"CALL\"];\n" + " node28 -> node29 [weight=1];\n" + " node30 [label=\"NAME\"];\n" + " node29 -> node30 [weight=1];\n" + " node28 -> node24 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node26 -> node27 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node17 -> node14 " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node17 -> node26 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node16 -> node18 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 -> RETURN " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 -> node16 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testNestedDoWithBreak() { // The BREAK branches to a() with UNCOND. 
String src = "var a;do{do{break}while(a);do{a()}while(a)}while(a);"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"BLOCK\"];\n" + " node1 -> node3 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 [label=\"DO\"];\n" + " node0 -> node4 [weight=1];\n" + " node4 -> node3 [weight=1];\n" + " node5 [label=\"DO\"];\n" + " node3 -> node5 [weight=1];\n" + " node6 [label=\"BLOCK\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"BREAK\"];\n" + " node6 -> node7 [weight=1];\n" + " node8 [label=\"BLOCK\"];\n" + " node7 -> node8 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node6 -> node7 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 [label=\"NAME\"];\n" + " node5 -> node9 [weight=1];\n" + " node5 -> node6 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node5 -> node8 " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node10 [label=\"DO\"];\n" + " node3 -> node10 [weight=1];\n" + " node10 -> node8 [weight=1];\n" + " node11 [label=\"EXPR_RESULT\"];\n" + " node8 -> node11 [weight=1];\n" + " node12 [label=\"CALL\"];\n" + " node11 -> node12 [weight=1];\n" + " node13 [label=\"NAME\"];\n" + " node12 -> node13 [weight=1];\n" + " node11 -> node10 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 -> node11 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node14 [label=\"NAME\"];\n" + " node10 -> node14 [weight=1];\n" + " node10 -> node4 " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node10 -> node8 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node6 " + 
"[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node15 [label=\"NAME\"];\n" + " node4 -> node15 [weight=1];\n" + " node4 -> RETURN " + "[label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node3 " + "[label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testForIn() { String src = "var a,b;for(a in b){a()};"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"NAME\"];\n" + " node1 -> node3 [weight=1];\n" + " node4 [label=\"NAME\"];\n" + " node1 -> node4 [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node5 [label=\"FOR\"];\n" + " node0 -> node5 [weight=1];\n" + " node6 [label=\"NAME\"];\n" + " node5 -> node6 [weight=1];\n" + " node5 -> node4 [weight=1];\n" + " node4 -> node5 [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 [label=\"BLOCK\"];\n" + " node5 -> node7 [weight=1];\n" + " node8 [label=\"EXPR_RESULT\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"CALL\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"NAME\"];\n" + " node9 -> node10 [weight=1];\n" + " node8 -> node5 [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 -> node8 [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node11 [label=\"EMPTY\"];\n" + " node5 -> node11 [label=\"ON_FALSE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node5 -> node7 [label=\"ON_TRUE\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node11 [weight=1];\n" + " node11 -> RETURN [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + 
" node0 -> node1 [label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testThrow() { String src = "function f() { throw 1; f() }"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"FUNCTION\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"PARAM_LIST\"];\n" + " node1 -> node3 [weight=1];\n" + " node4 [label=\"BLOCK\"];\n" + " node1 -> node4 [weight=1];\n" + " node5 [label=\"THROW\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"NUMBER\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"EXPR_RESULT\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"CALL\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"NAME\"];\n" + " node8 -> node9 [weight=1];\n" + " node7 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node5 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } // Test a simple FUNCTION. 
public void testSimpleFunction() { String src = "function f() { f() } f()"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"FUNCTION\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"PARAM_LIST\"];\n" + " node1 -> node3 [weight=1];\n" + " node4 [label=\"BLOCK\"];\n" + " node1 -> node4 [weight=1];\n" + " node5 [label=\"EXPR_RESULT\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"CALL\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"NAME\"];\n" + " node6 -> node7 [weight=1];\n" + " node5 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node5 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node4 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 [label=\"EXPR_RESULT\"];\n" + " node0 -> node8 [weight=1];\n" + " node9 [label=\"CALL\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"NAME\"];\n" + " node9 -> node10 [weight=1];\n" + " node8 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node8 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testSimpleCatch() { String src = "try{ throw x; x(); x['stuff']; x.x; x} catch (e) { e() }"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"TRY\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"BLOCK\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"THROW\"];\n" + " node2 -> node3 [weight=1];\n" + " node4 [label=\"NAME\"];\n" + " node3 -> node4 [weight=1];\n" + " node5 [label=\"BLOCK\"];\n" + " node3 -> node5 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node6 
[label=\"EXPR_RESULT\"];\n" + " node2 -> node6 [weight=1];\n" + " node7 [label=\"CALL\"];\n" + " node6 -> node7 [weight=1];\n" + " node8 [label=\"NAME\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"EXPR_RESULT\"];\n" + " node6 -> node5 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node6 -> node9 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node9 [weight=1];\n" + " node10 [label=\"GETELEM\"];\n" + " node9 -> node10 [weight=1];\n" + " node11 [label=\"NAME\"];\n" + " node10 -> node11 [weight=1];\n" + " node12 [label=\"STRING\"];\n" + " node10 -> node12 [weight=1];\n" + " node13 [label=\"EXPR_RESULT\"];\n" + " node9 -> node13 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 -> node5 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node13 [weight=1];\n" + " node14 [label=\"GETPROP\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"NAME\"];\n" + " node14 -> node15 [weight=1];\n" + " node16 [label=\"STRING\"];\n" + " node14 -> node16 [weight=1];\n" + " node17 [label=\"EXPR_RESULT\"];\n" + " node13 -> node17 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node13 -> node5 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node17 [weight=1];\n" + " node18 [label=\"NAME\"];\n" + " node17 -> node18 [weight=1];\n" + " node17 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node3 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node5 [weight=1];\n" + " node19 [label=\"CATCH\"];\n" + " node5 -> node19 [weight=1];\n" + " node20 [label=\"NAME\"];\n" + " node19 -> node20 [weight=1];\n" + " node21 [label=\"BLOCK\"];\n" + " node19 -> node21 [weight=1];\n" + " node22 [label=\"EXPR_RESULT\"];\n" + " node21 -> node22 [weight=1];\n" + " node23 [label=\"CALL\"];\n" + 
" node22 -> node23 [weight=1];\n" + " node24 [label=\"NAME\"];\n" + " node23 -> node24 [weight=1];\n" + " node22 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node21 -> node22 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node19 -> node21 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node5 -> node19 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node2 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testFunctionWithinTry() { // Make sure we don't search for the handler outside of the function. String src = "try { function f() {throw 1;} } catch (e) { }"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"TRY\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"BLOCK\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"FUNCTION\"];\n" + " node2 -> node3 [weight=1];\n" + " node4 [label=\"NAME\"];\n" + " node3 -> node4 [weight=1];\n" + " node5 [label=\"PARAM_LIST\"];\n" + " node3 -> node5 [weight=1];\n" + " node6 [label=\"BLOCK\"];\n" + " node3 -> node6 [weight=1];\n" + " node7 [label=\"THROW\"];\n" + " node6 -> node7 [weight=1];\n" + " node8 [label=\"NUMBER\"];\n" + " node7 -> node8 [weight=1];\n" + " node6 -> node7 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node6 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 [label=\"BLOCK\"];\n" + " node1 -> node9 [weight=1];\n" + " node10 [label=\"CATCH\"];\n" + " node9 -> node10 [weight=1];\n" + " node11 [label=\"NAME\"];\n" + " node10 -> node11 
[weight=1];\n" + " node12 [label=\"BLOCK\"];\n" + " node10 -> node12 [weight=1];\n" + " node12 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node10 -> node12 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 -> node10 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node2 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testNestedCatch() { // Make sure we are going to the right handler. String src = "try{try{throw 1;}catch(e){throw 2}}catch(f){}"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"TRY\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"BLOCK\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"TRY\"];\n" + " node2 -> node3 [weight=1];\n" + " node4 [label=\"BLOCK\"];\n" + " node3 -> node4 [weight=1];\n" + " node5 [label=\"THROW\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"NUMBER\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"BLOCK\"];\n" + " node5 -> node7 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 -> node5 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node7 [weight=1];\n" + " node8 [label=\"CATCH\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"NAME\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"BLOCK\"];\n" + " node8 -> node10 [weight=1];\n" + " node11 [label=\"THROW\"];\n" + " node10 -> node11 [weight=1];\n" + " node12 [label=\"NUMBER\"];\n" + " node11 -> node12 [weight=1];\n" + " node13 [label=\"BLOCK\"];\n" + " node11 -> node13 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node10 -> node11 
[label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 -> node10 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 -> node8 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node4 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node3 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node13 [weight=1];\n" + " node14 [label=\"CATCH\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"NAME\"];\n" + " node14 -> node15 [weight=1];\n" + " node16 [label=\"BLOCK\"];\n" + " node14 -> node16 [weight=1];\n" + " node16 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node14 -> node16 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node13 -> node14 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node2 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testSimpleFinally() { String src = "try{var x; foo()}finally{}"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.TRY, Token.BLOCK, Branch.UNCOND); assertDownEdge(cfg, Token.BLOCK, Token.VAR, Branch.UNCOND); // VAR to FINALLY. assertCrossEdge(cfg, Token.EXPR_RESULT, Token.BLOCK, Branch.UNCOND); // No CATCH to FINALLY. assertNoEdge(cfg, Token.BLOCK, Token.BLOCK); } public void testSimpleCatchFinally() { // Make sure we are going to the right handler. 
String src = "try{ if(a){throw 1}else{a} } catch(e){a}finally{a}"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"TRY\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"BLOCK\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"IF\"];\n" + " node2 -> node3 [weight=1];\n" + " node4 [label=\"NAME\"];\n" + " node3 -> node4 [weight=1];\n" + " node5 [label=\"BLOCK\"];\n" + " node3 -> node5 [weight=1];\n" + " node6 [label=\"THROW\"];\n" + " node5 -> node6 [weight=1];\n" + " node7 [label=\"NUMBER\"];\n" + " node6 -> node7 [weight=1];\n" + " node8 [label=\"BLOCK\"];\n" + " node6 -> node8 [label=\"ON_EX\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node5 -> node6 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 [label=\"BLOCK\"];\n" + " node3 -> node9 [weight=1];\n" + " node10 [label=\"EXPR_RESULT\"];\n" + " node9 -> node10 [weight=1];\n" + " node11 [label=\"NAME\"];\n" + " node10 -> node11 [weight=1];\n" + " node12 [label=\"BLOCK\"];\n" + " node10 -> node12 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node9 -> node10 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node5 [label=\"ON_TRUE\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node3 -> node9 [label=\"ON_FALSE\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node2 -> node3 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node8 [weight=1];\n" + " node13 [label=\"CATCH\"];\n" + " node8 -> node13 [weight=1];\n" + " node14 [label=\"NAME\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"BLOCK\"];\n" + " node13 -> node15 [weight=1];\n" + " node16 [label=\"EXPR_RESULT\"];\n" + " node15 -> node16 [weight=1];\n" + " node17 [label=\"NAME\"];\n" + " node16 -> node17 [weight=1];\n" + " node16 -> node12 [label=\"UNCOND\", " 
+ "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node15 -> node16 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node13 -> node15 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node8 -> node13 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node12 [weight=1];\n" + " node18 [label=\"EXPR_RESULT\"];\n" + " node12 -> node18 [weight=1];\n" + " node19 [label=\"NAME\"];\n" + " node18 -> node19 [weight=1];\n" + " node18 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node12 -> node18 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node1 -> node2 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testComplicatedFinally2() { // Now the most nasty case..... String src = "while(1){try{" + "if(a){a;continue;}else if(b){b;break;} else if(c) throw 1; else a}" + "catch(e){}finally{c()}bar}foo"; ControlFlowGraph<Node> cfg = createCfg(src); // Focus only on the ON_EX edges. assertCrossEdge(cfg, Token.CONTINUE, Token.BLOCK, Branch.UNCOND); assertCrossEdge(cfg, Token.BREAK, Token.BLOCK, Branch.UNCOND); assertCrossEdge(cfg, Token.THROW, Token.BLOCK, Branch.ON_EX); } public void testDeepNestedBreakwithFinally() { String src = "X:while(1){try{while(2){try{var a;break X;}" + "finally{}}}finally{}}"; ControlFlowGraph<Node> cfg = createCfg(src); assertDownEdge(cfg, Token.WHILE, Token.BLOCK, Branch.ON_TRUE); assertDownEdge(cfg, Token.BLOCK, Token.TRY, Branch.UNCOND); assertDownEdge(cfg, Token.BLOCK, Token.VAR, Branch.UNCOND); // BREAK to FINALLY. assertCrossEdge(cfg, Token.BREAK, Token.BLOCK, Branch.UNCOND); // FINALLY to FINALLY. 
assertCrossEdge(cfg, Token.BLOCK, Token.BLOCK, Branch.UNCOND); assertCrossEdge(cfg, Token.WHILE, Token.BLOCK, Branch.ON_FALSE); assertReturnEdge(cfg, Token.BLOCK); } public void testDeepNestedFinally() { String src = "try{try{try{throw 1}" + "finally{1;var a}}finally{2;if(a);}}finally{3;a()}"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.THROW, Token.BLOCK, Branch.ON_EX); assertCrossEdge(cfg, Token.VAR, Token.BLOCK, Branch.UNCOND); assertCrossEdge(cfg, Token.IF, Token.BLOCK, Branch.UNCOND); } public void testReturn() { String src = "function f() { return; }"; ControlFlowGraph<Node> cfg = createCfg(src); assertReturnEdge(cfg, Token.RETURN); } public void testReturnInFinally() { String src = "function f(x){ try{} finally {return x;} }"; ControlFlowGraph<Node> cfg = createCfg(src); assertReturnEdge(cfg, Token.RETURN); } public void testReturnInFinally2() { String src = "function f(x){" + " try{ try{}finally{var dummy; return x;} } finally {} }"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.VAR, Token.RETURN, Branch.UNCOND); assertCrossEdge(cfg, Token.RETURN, Token.BLOCK, Branch.UNCOND); assertReturnEdge(cfg, Token.BLOCK); assertNoReturnEdge(cfg, Token.RETURN); } public void testReturnInTry() { String src = "function f(x){ try{x; return x()} finally {} var y;}"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.EXPR_RESULT, Token.RETURN, Branch.UNCOND); assertCrossEdge(cfg, Token.RETURN, Token.BLOCK, Branch.UNCOND); assertCrossEdge(cfg, Token.BLOCK, Token.VAR, Branch.UNCOND); assertReturnEdge(cfg, Token.VAR); assertReturnEdge(cfg, Token.BLOCK); assertNoReturnEdge(cfg, Token.RETURN); } public void testOptionNotToTraverseFunctions() { String src = "var x = 1; function f() { x = null; }"; String expectedWhenNotTraversingFunctions = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " 
node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"NUMBER\"];\n" + " node2 -> node3 [weight=1];\n" + " node1 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 [label=\"FUNCTION\"];\n" + " node0 -> node4 [weight=1];\n" + " node5 [label=\"NAME\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"PARAM_LIST\"];\n" + " node4 -> node6 [weight=1];\n" + " node7 [label=\"BLOCK\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"EXPR_RESULT\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"ASSIGN\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"NAME\"];\n" + " node9 -> node10 [weight=1];\n" + " node11 [label=\"NULL\"];\n" + " node9 -> node11 [weight=1];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"VAR\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"NAME\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"NUMBER\"];\n" + " node2 -> node3 [weight=1];\n" + " node1 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node4 [label=\"FUNCTION\"];\n" + " node0 -> node4 [weight=1];\n" + " node5 [label=\"NAME\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"PARAM_LIST\"];\n" + " node4 -> node6 [weight=1];\n" + " node7 [label=\"BLOCK\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"EXPR_RESULT\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"ASSIGN\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"NAME\"];\n" + " node9 -> node10 [weight=1];\n" + " node11 [label=\"NULL\"];\n" + " node9 -> node11 [weight=1];\n" + " node8 -> RETURN " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node7 -> node8 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, 
color=\"red\"];\n" + " node4 -> node7 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 " + "[label=\"UNCOND\", fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); testCfg(src, expectedWhenNotTraversingFunctions, false); } public void testInstanceOf() { String src = "try { x instanceof 'x' } catch (e) { }"; ControlFlowGraph<Node> cfg = createCfg(src, true); assertCrossEdge(cfg, Token.EXPR_RESULT, Token.BLOCK, Branch.ON_EX); } public void testSynBlock() { String src = "START(); var x; END(); var y;"; ControlFlowGraph<Node> cfg = createCfg(src, true); assertCrossEdge(cfg, Token.BLOCK, Token.EXPR_RESULT, Branch.SYN_BLOCK); } public void testPartialTraversalOfScope() { Compiler compiler = new Compiler(); ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, true, true); Node script1 = compiler.parseSyntheticCode("cfgtest", "var foo;"); Node script2 = compiler.parseSyntheticCode("cfgtest2", "var bar;"); Node root = new Node(Token.BLOCK, script1, script2); cfa.process(null, script1); ControlFlowGraph<Node> cfg = cfa.getCfg(); assertNotNull(cfg.getNode(script1)); assertNull(cfg.getNode(script2)); } public void testForLoopOrder() { assertNodeOrder( createCfg("for (var i = 0; i < 5; i++) { var x = 3; } if (true) {}"), Lists.newArrayList( Token.SCRIPT, Token.VAR, Token.FOR, Token.BLOCK, Token.VAR, Token.INC /* i++ */, Token.IF, Token.BLOCK)); } public void testLabelledForInLoopOrder() { assertNodeOrder( createCfg("var i = 0; var y = {}; " + "label: for (var x in y) { " + " if (x) { break label; } else { i++ } x(); }"), Lists.newArrayList( Token.SCRIPT, Token.VAR, Token.VAR, Token.NAME, Token.FOR, Token.BLOCK, Token.IF, Token.BLOCK, Token.BREAK, Token.BLOCK, Token.EXPR_RESULT, Token.EXPR_RESULT)); } public void testLocalFunctionOrder() { ControlFlowGraph<Node> cfg = createCfg("function f() { while (x) { x++; } } var x = 3;"); assertNodeOrder( cfg, Lists.newArrayList( Token.SCRIPT, Token.VAR, 
Token.FUNCTION, Token.BLOCK, Token.WHILE, Token.BLOCK, Token.EXPR_RESULT)); } public void testDoWhileOrder() { assertNodeOrder( createCfg("do { var x = 3; } while (true); void x;"), Lists.newArrayList( Token.SCRIPT, Token.BLOCK, Token.VAR, Token.DO, Token.EXPR_RESULT)); } public void testBreakInFinally1() { String src = "f = function() {\n" + " var action;\n" + " a: {\n" + " var proto = null;\n" + " try {\n" + " proto = new Proto\n" + " } finally {\n" + " action = proto;\n" + " break a\n" + // Remove this... " }\n" + " }\n" + " alert(action)\n" + // but not this. "};"; String expected = "digraph AST {\n" + " node [color=lightblue2, style=filled];\n" + " node0 [label=\"SCRIPT\"];\n" + " node1 [label=\"EXPR_RESULT\"];\n" + " node0 -> node1 [weight=1];\n" + " node2 [label=\"ASSIGN\"];\n" + " node1 -> node2 [weight=1];\n" + " node3 [label=\"NAME\"];\n" + " node2 -> node3 [weight=1];\n" + " node4 [label=\"FUNCTION\"];\n" + " node2 -> node4 [weight=1];\n" + " node5 [label=\"NAME\"];\n" + " node4 -> node5 [weight=1];\n" + " node6 [label=\"PARAM_LIST\"];\n" + " node4 -> node6 [weight=1];\n" + " node7 [label=\"BLOCK\"];\n" + " node4 -> node7 [weight=1];\n" + " node8 [label=\"VAR\"];\n" + " node7 -> node8 [weight=1];\n" + " node9 [label=\"NAME\"];\n" + " node8 -> node9 [weight=1];\n" + " node10 [label=\"LABEL\"];\n" + " node7 -> node10 [weight=1];\n" + " node11 [label=\"LABEL_NAME\"];\n" + " node10 -> node11 [weight=1];\n" + " node12 [label=\"BLOCK\"];\n" + " node10 -> node12 [weight=1];\n" + " node13 [label=\"VAR\"];\n" + " node12 -> node13 [weight=1];\n" + " node14 [label=\"NAME\"];\n" + " node13 -> node14 [weight=1];\n" + " node15 [label=\"NULL\"];\n" + " node14 -> node15 [weight=1];\n" + " node16 [label=\"TRY\"];\n" + " node12 -> node16 [weight=1];\n" + " node17 [label=\"BLOCK\"];\n" + " node16 -> node17 [weight=1];\n" + " node18 [label=\"EXPR_RESULT\"];\n" + " node17 -> node18 [weight=1];\n" + " node19 [label=\"ASSIGN\"];\n" + " node18 -> node19 [weight=1];\n" + " 
node20 [label=\"NAME\"];\n" + " node19 -> node20 [weight=1];\n" + " node21 [label=\"NEW\"];\n" + " node19 -> node21 [weight=1];\n" + " node22 [label=\"NAME\"];\n" + " node21 -> node22 [weight=1];\n" + " node23 [label=\"BLOCK\"];\n" + " node16 -> node23 [weight=1];\n" + " node24 [label=\"BLOCK\"];\n" + " node16 -> node24 [weight=1];\n" + " node25 [label=\"EXPR_RESULT\"];\n" + " node24 -> node25 [weight=1];\n" + " node26 [label=\"ASSIGN\"];\n" + " node25 -> node26 [weight=1];\n" + " node27 [label=\"NAME\"];\n" + " node26 -> node27 [weight=1];\n" + " node28 [label=\"NAME\"];\n" + " node26 -> node28 [weight=1];\n" + " node29 [label=\"BREAK\"];\n" + " node24 -> node29 [weight=1];\n" + " node30 [label=\"LABEL_NAME\"];\n" + " node29 -> node30 [weight=1];\n" + " node31 [label=\"EXPR_RESULT\"];\n" + " node7 -> node31 [weight=1];\n" + " node32 [label=\"CALL\"];\n" + " node31 -> node32 [weight=1];\n" + " node33 [label=\"NAME\"];\n" + " node32 -> node33 [weight=1];\n" + " node34 [label=\"NAME\"];\n" + " node32 -> node34 [weight=1];\n" + " node1 -> RETURN [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + " node0 -> node1 [label=\"UNCOND\", " + "fontcolor=\"red\", weight=0.01, color=\"red\"];\n" + "}\n"; testCfg(src, expected); } public void testBreakInFinally2() { String src = "var action;\n" + "a: {\n" + " var proto = null;\n" + " try {\n" + " proto = new Proto\n" + " } finally {\n" + " action = proto;\n" + " break a\n" + " }\n" + "}\n" + "alert(action)\n"; ControlFlowGraph<Node> cfg = createCfg(src); assertCrossEdge(cfg, Token.BREAK, Token.EXPR_RESULT, Branch.UNCOND); assertNoEdge(cfg, Token.BREAK, Token.BLOCK); } /** * Asserts the priority order of CFG nodes. * * Checks that the node type of the highest-priority node matches the * first element of the list, the type of the second node matches the * second element of the list, and so on. * * @param cfg The control flow graph. * @param nodeTypes The expected node types, in order. 
*/ private void assertNodeOrder(ControlFlowGraph<Node> cfg, List<Integer> nodeTypes) { List<DiGraphNode<Node, Branch>> cfgNodes = Lists.newArrayList(cfg.getDirectedGraphNodes()); Collections.sort(cfgNodes, cfg.getOptionalNodeComparator(true)); // IMPLICIT RETURN must always be last. Node implicitReturn = cfgNodes.remove(cfgNodes.size() - 1).getValue(); assertNull(implicitReturn == null ? "null" : implicitReturn.toStringTree(), implicitReturn); assertEquals("Wrong number of CFG nodes", nodeTypes.size(), cfgNodes.size()); for (int i = 0; i < cfgNodes.size(); i++) { int expectedType = nodeTypes.get(i); int actualType = cfgNodes.get(i).getValue().getType(); assertEquals( "node type mismatch at " + i + ".\n" + "found : " + Token.name(actualType) + "\n" + "required: " + Token.name(expectedType) + "\n", expectedType, actualType); } } }
apache-2.0
lucaswerkmeister/ceylon.language
runtime/com/redhat/ceylon/compiler/java/runtime/metamodel/FreeClassOrInterface.java
24984
package com.redhat.ceylon.compiler.java.runtime.metamodel; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import ceylon.language.Anything; import ceylon.language.Empty; import ceylon.language.Sequential; import ceylon.language.empty_; import ceylon.language.meta.declaration.OpenType; import ceylon.language.meta.model.ClassOrInterface; import ceylon.language.meta.model.Member; import com.redhat.ceylon.compiler.java.Util; import com.redhat.ceylon.compiler.java.language.ObjectArray.ObjectArrayIterable; import com.redhat.ceylon.compiler.java.metadata.Ceylon; import com.redhat.ceylon.compiler.java.metadata.Ignore; import com.redhat.ceylon.compiler.java.metadata.Name; import com.redhat.ceylon.compiler.java.metadata.Sequenced; import com.redhat.ceylon.compiler.java.metadata.TypeInfo; import com.redhat.ceylon.compiler.java.metadata.TypeParameter; import com.redhat.ceylon.compiler.java.metadata.TypeParameters; import com.redhat.ceylon.compiler.java.metadata.Variance; import com.redhat.ceylon.compiler.java.runtime.model.TypeDescriptor; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.ProducedReference; import com.redhat.ceylon.compiler.typechecker.model.ProducedType; import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration; @Ceylon(major = 7) @com.redhat.ceylon.compiler.java.metadata.Class public abstract class FreeClassOrInterface extends FreeNestableDeclaration implements ceylon.language.meta.declaration.ClassOrInterfaceDeclaration, AnnotationBearing { @Ignore public static final TypeDescriptor $TypeDescriptor$ = TypeDescriptor.klass(FreeClassOrInterface.class); @Ignore private static final TypeDescriptor $FunctionTypeDescriptor = TypeDescriptor.klass(ceylon.language.meta.declaration.FunctionDeclaration.class, Anything.$TypeDescriptor$, Empty.$TypeDescriptor$); @Ignore private static 
final TypeDescriptor $AttributeTypeDescriptor = TypeDescriptor.klass(ceylon.language.meta.declaration.ValueDeclaration.class, Anything.$TypeDescriptor$); @Ignore private static final TypeDescriptor $ClassOrInterfaceTypeDescriptor = TypeDescriptor.klass(ceylon.language.meta.declaration.ClassOrInterfaceDeclaration.class, Anything.$TypeDescriptor$); private volatile boolean initialised = false; private ceylon.language.meta.declaration.OpenClassType superclass; private Sequential<ceylon.language.meta.declaration.OpenInterfaceType> interfaces; private Sequential<? extends ceylon.language.meta.declaration.TypeParameter> typeParameters; private List<ceylon.language.meta.declaration.NestableDeclaration> declaredDeclarations; private List<ceylon.language.meta.declaration.NestableDeclaration> declarations; private Sequential<? extends ceylon.language.meta.declaration.OpenType> caseTypes; public FreeClassOrInterface(com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface declaration) { super(declaration); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void init(){ com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface declaration = (com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) this.declaration; ProducedType superType = declaration.getExtendedType(); if(superType != null) this.superclass = (ceylon.language.meta.declaration.OpenClassType) Metamodel.getMetamodel(superType); List<ProducedType> satisfiedTypes = declaration.getSatisfiedTypes(); ceylon.language.meta.declaration.OpenInterfaceType[] interfaces = new ceylon.language.meta.declaration.OpenInterfaceType[satisfiedTypes.size()]; int i=0; for(ProducedType pt : satisfiedTypes){ interfaces[i++] = (ceylon.language.meta.declaration.OpenInterfaceType) Metamodel.getMetamodel(pt); } this.interfaces = Util.sequentialWrapper(ceylon.language.meta.declaration.OpenInterfaceType.$TypeDescriptor$, interfaces); if(declaration.getCaseTypes() != null) this.caseTypes = 
Metamodel.getMetamodelSequential(declaration.getCaseTypes()); else this.caseTypes = (Sequential<? extends ceylon.language.meta.declaration.OpenType>)(Sequential)empty_.get_(); this.typeParameters = Metamodel.getTypeParameters(declaration); List<com.redhat.ceylon.compiler.typechecker.model.Declaration> memberModelDeclarations = declaration.getMembers(); this.declaredDeclarations = new LinkedList<ceylon.language.meta.declaration.NestableDeclaration>(); for(com.redhat.ceylon.compiler.typechecker.model.Declaration memberModelDeclaration : memberModelDeclarations){ if(isSupportedType(memberModelDeclaration)) declaredDeclarations.add(Metamodel.getOrCreateMetamodel(memberModelDeclaration)); } Collection<com.redhat.ceylon.compiler.typechecker.model.Declaration> inheritedModelDeclarations = collectMembers(declaration); this.declarations = new LinkedList<ceylon.language.meta.declaration.NestableDeclaration>(); for(com.redhat.ceylon.compiler.typechecker.model.Declaration memberModelDeclaration : inheritedModelDeclarations){ if(isSupportedType(memberModelDeclaration)) declarations.add(Metamodel.getOrCreateMetamodel(memberModelDeclaration)); } } private boolean isSupportedType(Declaration memberModelDeclaration) { return memberModelDeclaration instanceof com.redhat.ceylon.compiler.typechecker.model.Value || (memberModelDeclaration instanceof com.redhat.ceylon.compiler.typechecker.model.Method && !((com.redhat.ceylon.compiler.typechecker.model.Method)memberModelDeclaration).isAbstraction()) || memberModelDeclaration instanceof com.redhat.ceylon.compiler.typechecker.model.TypeAlias || memberModelDeclaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface || (memberModelDeclaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class && !((com.redhat.ceylon.compiler.typechecker.model.Class)memberModelDeclaration).isAbstraction()); } private Collection<com.redhat.ceylon.compiler.typechecker.model.Declaration> 
collectMembers(com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration base){ Map<String, com.redhat.ceylon.compiler.typechecker.model.Declaration> byName = new HashMap<String, com.redhat.ceylon.compiler.typechecker.model.Declaration>(); collectMembers(base, byName); return byName.values(); } private void collectMembers(com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration base, Map<String, Declaration> byName) { for(com.redhat.ceylon.compiler.typechecker.model.Declaration decl : base.getMembers()){ if(decl.isShared() && com.redhat.ceylon.compiler.typechecker.model.Util.isResolvable(decl)){ Declaration otherDeclaration = byName.get(decl.getName()); if(otherDeclaration == null || decl.refines(otherDeclaration)) byName.put(decl.getName(), decl); } } if(base.getExtendedTypeDeclaration() != null) collectMembers(base.getExtendedTypeDeclaration(), byName); for(com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration st : base.getSatisfiedTypeDeclarations()){ collectMembers(st, byName); } } protected final void checkInit(){ if(!initialised){ synchronized(Metamodel.getLock()){ if(!initialised){ init(); initialised = true; } } } } @Override @TypeInfo("ceylon.language::Sequential<Kind>") @TypeParameters(@TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration")) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration> Sequential<? extends Kind> memberDeclarations(@Ignore TypeDescriptor $reifiedKind) { Predicates.Predicate<?> predicate = Predicates.isDeclarationOfKind($reifiedKind); return filteredMembers($reifiedKind, predicate); } @Override @TypeInfo("ceylon.language::Sequential<Kind>") @TypeParameters(@TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration")) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration> Sequential<? 
extends Kind> declaredMemberDeclarations(@Ignore TypeDescriptor $reifiedKind) { Predicates.Predicate<?> predicate = Predicates.isDeclarationOfKind($reifiedKind); return filteredDeclaredMembers($reifiedKind, predicate); } @Override @TypeInfo("ceylon.language::Sequential<Kind>") @TypeParameters({ @TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration"), @TypeParameter(value = "Annotation", satisfies = "ceylon.language::Annotation") }) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration, Annotation extends ceylon.language.Annotation> Sequential<? extends Kind> annotatedMemberDeclarations(@Ignore TypeDescriptor $reifiedKind, @Ignore TypeDescriptor $reifiedAnnotation) { Predicates.Predicate<?> predicate = Predicates.and( Predicates.isDeclarationOfKind($reifiedKind), Predicates.isDeclarationAnnotatedWith($reifiedAnnotation)); return filteredMembers($reifiedKind, predicate); } @Override @TypeInfo("ceylon.language::Sequential<Kind>") @TypeParameters({ @TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration"), @TypeParameter(value = "Annotation", satisfies = "ceylon.language::Annotation") }) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration, Annotation extends ceylon.language.Annotation> Sequential<? extends Kind> annotatedDeclaredMemberDeclarations(@Ignore TypeDescriptor $reifiedKind, @Ignore TypeDescriptor $reifiedAnnotation) { Predicates.Predicate<?> predicate = Predicates.and( Predicates.isDeclarationOfKind($reifiedKind), Predicates.isDeclarationAnnotatedWith($reifiedAnnotation)); return filteredDeclaredMembers($reifiedKind, predicate); } @SuppressWarnings({ "rawtypes", "unchecked" }) private <Kind> Sequential<? extends Kind> filteredMembers( @Ignore TypeDescriptor $reifiedKind, Predicates.Predicate predicate) { if (predicate == Predicates.false_()) { return (Sequential<? 
extends Kind>)empty_.get_(); } checkInit(); ArrayList<Kind> members = new ArrayList<Kind>(declarations.size()); for(ceylon.language.meta.declaration.NestableDeclaration decl : declarations){ if (predicate.accept(((FreeNestableDeclaration)decl).declaration)) { members.add((Kind) decl); } } java.lang.Object[] array = members.toArray(new java.lang.Object[0]); ObjectArrayIterable<Kind> iterable = new ObjectArrayIterable<Kind>($reifiedKind, (Kind[]) array); return (ceylon.language.Sequential) iterable.sequence(); } @SuppressWarnings({ "rawtypes", "unchecked" }) private <Kind> Sequential<? extends Kind> filteredDeclaredMembers( @Ignore TypeDescriptor $reifiedKind, Predicates.Predicate predicate) { if (predicate == Predicates.false_()) { return (Sequential<? extends Kind>)empty_.get_(); } checkInit(); ArrayList<Kind> members = new ArrayList<Kind>(declarations.size()); for(ceylon.language.meta.declaration.NestableDeclaration decl : declaredDeclarations){ if (predicate.accept(((FreeNestableDeclaration)decl).declaration)) { members.add((Kind) decl); } } java.lang.Object[] array = members.toArray(new java.lang.Object[0]); ObjectArrayIterable<Kind> iterable = new ObjectArrayIterable<Kind>($reifiedKind, (Kind[]) array); return (ceylon.language.Sequential) iterable.sequence(); } @SuppressWarnings({ "rawtypes", "unchecked" }) private <Kind> Kind filteredMember( @Ignore TypeDescriptor $reifiedKind, Predicates.Predicate predicate) { if (predicate == Predicates.false_()) { return null; } checkInit(); for(ceylon.language.meta.declaration.NestableDeclaration decl : declarations){ if (predicate.accept(((FreeNestableDeclaration)decl).declaration)) { return (Kind)decl; } } return null; } @SuppressWarnings({ "rawtypes", "unchecked" }) private <Kind> Kind filteredDeclaredMember( @Ignore TypeDescriptor $reifiedKind, Predicates.Predicate predicate) { if (predicate == Predicates.false_()) { return null; } checkInit(); for(ceylon.language.meta.declaration.NestableDeclaration decl : 
declaredDeclarations){ if (predicate.accept(((FreeNestableDeclaration)decl).declaration)) { return (Kind)decl; } } return null; } @Override @TypeInfo("Kind") @TypeParameters(@TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration")) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration> Kind getMemberDeclaration(@Ignore TypeDescriptor $reifiedKind, @Name("name") String name) { Predicates.Predicate<?> predicate = Predicates.and( Predicates.isDeclarationNamed(name), Predicates.isDeclarationOfKind($reifiedKind) ); return filteredMember($reifiedKind, predicate); } @Override @TypeInfo("Kind") @TypeParameters(@TypeParameter(value = "Kind", satisfies = "ceylon.language.meta.declaration::NestableDeclaration")) public <Kind extends ceylon.language.meta.declaration.NestableDeclaration> Kind getDeclaredMemberDeclaration(@Ignore TypeDescriptor $reifiedKind, @Name("name") String name) { Predicates.Predicate<?> predicate = Predicates.and( Predicates.isDeclarationNamed(name), Predicates.isDeclarationOfKind($reifiedKind) ); return filteredDeclaredMember($reifiedKind, predicate); } @Override @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.declaration::OpenInterfaceType>") public Sequential<? extends ceylon.language.meta.declaration.OpenInterfaceType> getSatisfiedTypes() { checkInit(); return interfaces; } @Override @TypeInfo("ceylon.language.meta.declaration::OpenClassType|ceylon.language::Null") public ceylon.language.meta.declaration.OpenClassType getExtendedType() { checkInit(); return superclass; } @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.declaration::OpenType>") @Override public ceylon.language.Sequential<? extends ceylon.language.meta.declaration.OpenType> getCaseTypes(){ checkInit(); return caseTypes; } @Override @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.declaration::TypeParameter>") public Sequential<? 
extends ceylon.language.meta.declaration.TypeParameter> getTypeParameterDeclarations() { checkInit(); return typeParameters; } @Override public boolean getIsAlias(){ return ((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)declaration).isAlias(); } @Override public OpenType getOpenType() { return Metamodel.getMetamodel(((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)declaration).getType()); } @Override @TypeInfo("ceylon.language.meta.declaration::TypeParameter|ceylon.language::Null") public ceylon.language.meta.declaration.TypeParameter getTypeParameterDeclaration(@Name("name") String name) { return Metamodel.findDeclarationByName(getTypeParameterDeclarations(), name); } @SuppressWarnings({ "unchecked", "rawtypes" }) @Ignore @Override public <Type> ceylon.language.meta.model.ClassOrInterface<Type> apply(@Ignore TypeDescriptor $reifiedType){ return apply($reifiedType, (Sequential)empty_.get_()); } @SuppressWarnings("unchecked") @Override @TypeInfo("ceylon.language.meta.model::ClassOrInterface<Type>") @TypeParameters({ @TypeParameter("Type"), }) public <Type extends Object> ceylon.language.meta.model.ClassOrInterface<Type> apply(@Ignore TypeDescriptor $reifiedType, @Name("typeArguments") @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.model::Type<ceylon.language::Anything>>") @Sequenced Sequential<? 
extends ceylon.language.meta.model.Type<?>> typeArguments){ if(!getToplevel()) throw new ceylon.language.meta.model.TypeApplicationException("Cannot apply a member declaration with no container type: use memberApply"); List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = Metamodel.getProducedTypes(typeArguments); Metamodel.checkTypeArguments(null, declaration, producedTypes); com.redhat.ceylon.compiler.typechecker.model.ProducedReference appliedType = declaration.getProducedReference(null, producedTypes); Metamodel.checkReifiedTypeArgument("apply", "ClassOrInterface<$1>", Variance.OUT, appliedType.getType(), $reifiedType); return (ClassOrInterface<Type>) Metamodel.getAppliedMetamodel(appliedType.getType()); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Ignore @Override public <Container, Type extends Object> java.lang.Object memberApply(TypeDescriptor $reifiedContainer, TypeDescriptor $reifiedType, ceylon.language.meta.model.Type<? extends Container> containerType){ return this.<Container, Type>memberApply($reifiedContainer, $reifiedType, containerType, (Sequential)empty_.get_()); } @SuppressWarnings("rawtypes") @TypeInfo("ceylon.language.meta.model::Member<Container,ceylon.language.meta.model::ClassOrInterface<Type>>&ceylon.language.meta.model::ClassOrInterface<Type>") @TypeParameters({ @TypeParameter("Container"), @TypeParameter("Type"), }) @Override public <Container, Type extends Object> java.lang.Object memberApply( @Ignore TypeDescriptor $reifiedContainer, @Ignore TypeDescriptor $reifiedType, @Name("containerType") ceylon.language.meta.model.Type<? extends Container> containerType, @Name("typeArguments") @Sequenced Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments){ if(getToplevel()) throw new ceylon.language.meta.model.TypeApplicationException("Cannot apply a toplevel declaration to a container type: use apply"); ceylon.language.meta.model.Member<? 
extends Container, ceylon.language.meta.model.ClassOrInterface<?>> member = getAppliedClassOrInterface(null, null, typeArguments, containerType); // This is all very ugly but we're trying to make it cheaper and friendlier than just checking the full type and showing // implementation types to the user, such as AppliedMemberClass TypeDescriptor actualReifiedContainer; if(member instanceof AppliedMemberClass) actualReifiedContainer = ((AppliedMemberClass)member).$reifiedContainer; else actualReifiedContainer = ((AppliedMemberInterface)member).$reifiedContainer; ProducedType actualType = Metamodel.getModel((ceylon.language.meta.model.Type<?>) member); Metamodel.checkReifiedTypeArgument("memberApply", "Member<$1,ClassOrInterface<$2>>&ClassOrInterface<$2>", Variance.IN, Metamodel.getProducedType(actualReifiedContainer), $reifiedContainer, Variance.OUT, actualType, $reifiedType); return member; } @SuppressWarnings("unchecked") <Container, Kind extends ceylon.language.meta.model.ClassOrInterface<? extends Object>> ceylon.language.meta.model.Member<Container, Kind> getAppliedClassOrInterface(@Ignore TypeDescriptor $reifiedContainer, @Ignore TypeDescriptor $reifiedKind, Sequential<? 
extends ceylon.language.meta.model.Type<?>> types, ceylon.language.meta.model.Type<Container> container){ List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = Metamodel.getProducedTypes(types); ProducedType qualifyingType = Metamodel.getModel(container); Metamodel.checkQualifyingType(qualifyingType, declaration); Metamodel.checkTypeArguments(qualifyingType, declaration, producedTypes); // find the proper qualifying type ProducedType memberQualifyingType = qualifyingType.getSupertype((TypeDeclaration) declaration.getContainer()); ProducedReference producedReference = declaration.getProducedReference(memberQualifyingType, producedTypes); final ProducedType appliedType = producedReference.getType(); return (Member<Container, Kind>) Metamodel.getAppliedMetamodel(appliedType); } @Override @Ignore public TypeDescriptor $getType$() { return $TypeDescriptor$; } FreeFunction findMethod(String name) { FreeNestableDeclaration decl = this.findDeclaration(null, name); return decl instanceof FreeFunction ? (FreeFunction)decl : null; } FreeFunction findDeclaredMethod(String name) { FreeNestableDeclaration decl = this.findDeclaredDeclaration(null, name); return decl instanceof FreeFunction ? (FreeFunction)decl : null; } FreeValue findValue(String name) { FreeNestableDeclaration decl = this.findDeclaration(null, name); return decl instanceof FreeValue ? (FreeValue)decl : null; } FreeValue findDeclaredValue(String name) { FreeNestableDeclaration decl = this.findDeclaredDeclaration(null, name); return decl instanceof FreeValue ? (FreeValue)decl : null; } FreeClassOrInterface findType(String name) { FreeNestableDeclaration decl = this.findDeclaration(null, name); return decl instanceof FreeClassOrInterface ? (FreeClassOrInterface)decl : null; } FreeClassOrInterface findDeclaredType(String name) { FreeNestableDeclaration decl = this.findDeclaredDeclaration(null, name); return decl instanceof FreeClassOrInterface ? 
(FreeClassOrInterface)decl : null; } <T extends FreeNestableDeclaration> T findDeclaration(@Ignore TypeDescriptor $reifiedT, String name) { checkInit(); return findDeclaration($reifiedT, name, declarations); } <T extends FreeNestableDeclaration> T findDeclaredDeclaration(@Ignore TypeDescriptor $reifiedT, String name) { checkInit(); return findDeclaration($reifiedT, name, declaredDeclarations); } @SuppressWarnings("unchecked") <T extends FreeNestableDeclaration> T findDeclaration(@Ignore TypeDescriptor $reifiedT, String name, List<ceylon.language.meta.declaration.NestableDeclaration> declarations) { for(ceylon.language.meta.declaration.NestableDeclaration decl : declarations){ // skip anonymous types which can't be looked up by name if(decl instanceof ceylon.language.meta.declaration.ClassDeclaration && ((ceylon.language.meta.declaration.ClassDeclaration) decl).getAnonymous()) continue; // in theory we can't have several members with the same name so no need to check the type // FIXME: interop and overloading if(decl.getName().equals(name)) return (T) decl; } return null; } @Override @Ignore public java.lang.annotation.Annotation[] $getJavaAnnotations$() { return Metamodel.getJavaClass(declaration).getAnnotations(); } }
apache-2.0
jexp/idea2
plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/impl/javaView/GroovyClassFinder.java
1600
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.lang.psi.impl.javaView; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElementFinder; import com.intellij.psi.search.GlobalSearchScope; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.psi.impl.GroovyPsiManager; /** * @author ven */ public class GroovyClassFinder extends PsiElementFinder { private final GroovyPsiManager myGroovyPsiManager; public GroovyClassFinder(final GroovyPsiManager groovyPsiManager) { myGroovyPsiManager = groovyPsiManager; } @Nullable public PsiClass findClass(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { return myGroovyPsiManager.getNamesCache().getClassByFQName(qualifiedName, scope); } @NotNull public PsiClass[] findClasses(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { return myGroovyPsiManager.getNamesCache().getClassesByFQName(qualifiedName, scope); } }
apache-2.0
lioutasb/CSCartApp
app/src/main/java/gr/plushost/prototypeapp/activities/FullScreenImageActivity.java
3003
package gr.plushost.prototypeapp.activities;

import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.widget.TextView;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.util.ArrayList;
import java.util.List;

import gr.plushost.prototypeapp.R;
import gr.plushost.prototypeapp.adapters.viewpagers.FullScreenImageAdapter;
import gr.plushost.prototypeapp.indicators.UnderlinePageIndicator;

/**
 * Full-screen swipeable image gallery. Receives a JSON-encoded list of image
 * URLs and a starting position via intent extras, shows them in a ViewPager
 * with an underline page indicator, and keeps a "photo X of N" counter in sync.
 *
 * Created by Billiout on 10/8/2014.
 */
public class FullScreenImageActivity extends AppCompatActivity {

    // Pager hosting the full-screen images.
    ViewPager viewPager;
    // Underline indicator bound to the pager (only when there is more than one page).
    UnderlinePageIndicator underlinePageIndicator;
    // Label showing the 1-based "photo X of N" counter.
    TextView photosRem;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_fullscreen_image);
        viewPager = (ViewPager) findViewById(R.id.pagerFull);
        underlinePageIndicator = (UnderlinePageIndicator) findViewById(R.id.indicatorimgFull);
        underlinePageIndicator.setFades(false);
        String jsonMyObject;
        // Image URLs arrive as a JSON-encoded string list in the launch intent.
        // assumes callers always supply the "img_urls" and "pos" extras — TODO confirm
        Bundle extras = getIntent().getExtras();
        jsonMyObject = extras.getString("img_urls");
        final ArrayList<String> messages = new Gson().fromJson(jsonMyObject, new TypeToken<List<String>>() {
        }.getType());
        int pos = extras.getInt("pos");
        FullScreenImageAdapter adapter = new FullScreenImageAdapter(this, messages);
        photosRem = (TextView) findViewById(R.id.photosSum);
        // Counter is displayed 1-based, hence "pos + 1".
        photosRem.setText(String.format(getResources().getString(R.string.full_image_promo_text_count_photos), pos + 1, messages.size()));
        viewPager.setAdapter(adapter);
        viewPager.setCurrentItem(pos);
        if (messages.size() > 1) {
            // The indicator and page-change tracking are only useful with multiple pages.
            underlinePageIndicator.setViewPager(viewPager, pos);
            underlinePageIndicator.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
                @Override
                public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
                }

                @Override
                public void onPageSelected(int position) {
                    // Keep the "photo X of N" label in sync with the visible page.
                    photosRem.setText(String.format(getResources().getString(R.string.full_image_promo_text_count_photos), position + 1, messages.size()));
                }

                @Override
                public void onPageScrollStateChanged(int state) {
                }
            });
        }
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // NOTE(review): re-inflating the layout here replaces the view tree, but
        // viewPager/underlinePageIndicator/photosRem still reference the old views
        // and the adapter is not re-attached — after rotation the pager state is
        // likely lost. Verify android:configChanges for this activity and rebind
        // the views if this path is actually taken.
        setContentView(R.layout.activity_fullscreen_image);
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        // Slide transition when leaving the gallery.
        overridePendingTransition(R.anim.slide_in_left, R.anim.slide_out_right);
    }
}
apache-2.0
faulerich/java_pft
rest-sample/src/test/java/ru/stqa/pft/rest/TestBase.java
2118
package ru.stqa.pft.rest;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import org.apache.http.client.fluent.Executor;
import org.apache.http.client.fluent.Request;
import org.testng.SkipException;

import java.io.IOException;
import java.util.Set;

/**
 * Base class for REST tests against the Bugify demo issue tracker.
 * Provides helpers to query an issue's state and to skip tests that
 * depend on issues which have not been fixed yet.
 *
 * Created by Bond on 12.07.2017.
 */
public class TestBase {

    /**
     * Returns true when the given issue is in any state other than "Closed".
     *
     * @param issueId tracker id of the issue to check
     * @throws IOException on any HTTP/transport failure
     */
    public boolean isIssueOpened(int issueId) throws IOException {
        // Null-safe comparison: an unknown/missing state is treated as still open.
        return !"Closed".equals(issueStatus(issueId));
    }

    /**
     * Skips the calling test (via TestNG's SkipException) while the given
     * issue has not been fixed.
     */
    public void skipIfNotFixed(int issueId) throws IOException {
        if (isIssueOpened(issueId)) {
            throw new SkipException("Ignored because of issue " + issueId + " was not fixed");
        }
    }

    /** Creates an HTTP executor pre-authenticated against the API server. */
    public Executor getExecutor() {
        // NOTE(review): hard-coded API key — consider moving it to configuration.
        return Executor.newInstance().auth("LSGjeU4yP1X493ud1hNniA==", "");
    }

    /**
     * Fetches the state name (e.g. "Closed") of the issue with the given id.
     *
     * @param id tracker id of the issue
     * @return the issue's state name as reported by the API
     * @throws IOException on any HTTP/transport failure
     */
    public String issueStatus(int id) throws IOException {
        // Request a single issue as JSON from the authenticated endpoint.
        String json = getExecutor()
                .execute(Request.Get("http://demo.bugify.com/api/issues/" + id + ".json"))
                .returnContent().asString();
        // Parse the response and extract the "issues" array.
        JsonElement parsed = new JsonParser().parse(json);
        JsonElement issues = parsed.getAsJsonObject().get("issues");
        // A single-issue request yields one element; take its state name.
        Set<Issue> issue = new Gson().fromJson(issues, new TypeToken<Set<Issue>>() {}.getType());
        return issue.iterator().next().getState_name();
    }
}
apache-2.0
peter-gergely-horvath/kylo
services/feed-manager-service/feed-manager-controller/src/main/java/com/thinkbiganalytics/feedmgr/rest/controller/NifiIntegrationRestController.java
21171
package com.thinkbiganalytics.feedmgr.rest.controller;

/*-
 * #%L
 * thinkbig-feed-manager-controller
 * %%
 * Copyright (C) 2017 ThinkBig Analytics
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.google.common.collect.ImmutableMap;
import com.thinkbiganalytics.discovery.schema.TableSchema;
import com.thinkbiganalytics.feedmgr.nifi.CleanupStaleFeedRevisions;
import com.thinkbiganalytics.feedmgr.nifi.DBCPConnectionPoolTableInfo;
import com.thinkbiganalytics.feedmgr.nifi.NifiConnectionService;
import com.thinkbiganalytics.feedmgr.nifi.PropertyExpressionResolver;
import com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl;
import com.thinkbiganalytics.feedmgr.service.template.FeedManagerTemplateService;
import com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient;
import com.thinkbiganalytics.nifi.rest.client.NiFiRestClient;
import com.thinkbiganalytics.nifi.rest.client.layout.AlignNiFiComponents;
import com.thinkbiganalytics.nifi.rest.client.layout.AlignProcessGroupComponents;
import com.thinkbiganalytics.nifi.rest.model.NiFiClusterSummary;
import com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform;
import com.thinkbiganalytics.nifi.rest.model.flow.NifiFlowDeserializer;
import com.thinkbiganalytics.nifi.rest.model.flow.NifiFlowProcessGroup;
import com.thinkbiganalytics.rest.model.RestResponseStatus;
import com.thinkbiganalytics.security.AccessController;
import com.thinkbiganalytics.spring.SpringEnvironmentProperties;

import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.web.api.dto.ControllerServiceDTO;
import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
import org.apache.nifi.web.api.dto.PortDTO;
import org.apache.nifi.web.api.dto.ProcessGroupDTO;
import org.apache.nifi.web.api.entity.ControllerServiceTypesEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.SwaggerDefinition;
import io.swagger.annotations.Tag;

/**
 * REST endpoints that expose Feed Manager's integration with NiFi: process-group layout and
 * cleanup, flow graph retrieval, controller-service lookup/metadata, database table browsing
 * through DBCP controller services, and NiFi cluster/availability status.
 *
 * <p>All write-style operations (align, cleanup) and flow reads check the
 * {@link FeedServicesAccessControl#ADMIN_FEEDS} permission before proceeding.</p>
 */
@Api(tags = "Feed Manager - NiFi", produces = "application/json")
@Path(NifiIntegrationRestController.BASE)
@Component
@SwaggerDefinition(tags = @Tag(name = "Feed Manager - NiFi", description = "integration with NiFi"))
public class NifiIntegrationRestController {

    private static final Logger log = LoggerFactory.getLogger(NifiIntegrationRestController.class);

    /**
     * Messages for the default locale
     */
    private static final ResourceBundle STRINGS = ResourceBundle.getBundle("com.thinkbiganalytics.feedmgr.rest.controller.NiFiIntegrationMessages");

    /** Base path for all endpoints in this controller. */
    public static final String BASE = "/v1/feedmgr/nifi";
    public static final String FLOWS = "/flows";
    public static final String REUSABLE_INPUT_PORTS = "/reusable-input-ports";

    @Inject
    DBCPConnectionPoolTableInfo dbcpConnectionPoolTableInfo;
    @Inject
    FeedManagerTemplateService feedManagerTemplateService;
    @Inject
    NiFiPropertyDescriptorTransform propertyDescriptorTransform;
    @Inject
    NifiConnectionService nifiConnectionService;

    /**
     * Legacy NiFi REST client
     */
    @Inject
    private LegacyNifiRestClient legacyNifiRestClient;

    /**
     * New NiFi REST client
     */
    @Inject
    private NiFiRestClient nifiRestClient;

    @Inject
    private SpringEnvironmentProperties environmentProperties;

    @Inject
    private AccessController accessController;

    /**
     * Auto-arranges the components of a process group on the NiFi canvas.
     *
     * @param processGroupId the process group to align, or the literal {@code "all"} to align every group in NiFi
     * @return a status object whose message summarizes how many groups were aligned
     */
    @GET
    @Path("/auto-align/{processGroupId}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Organizes the components of the specified process group.")
    @ApiResponses(
        @ApiResponse(code = 200, message = "The result of the operation.", response = RestResponseStatus.class)
    )
    public Response autoAlign(@PathParam("processGroupId") String processGroupId) {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);

        final String message;
        if ("all".equals(processGroupId)) {
            // Align every process group known to NiFi.
            AlignNiFiComponents alignNiFiComponents = new AlignNiFiComponents();
            alignNiFiComponents.setNiFiRestClient(legacyNifiRestClient.getNiFiRestClient());
            alignNiFiComponents.autoLayout();
            if (alignNiFiComponents.isAligned()) {
                message = "Aligned All of NiFi. " + alignNiFiComponents.getAlignedProcessGroups() + " process groups were aligned ";
            } else {
                message = "Alignment failed while attempting to align all of NiFi. " + alignNiFiComponents.getAlignedProcessGroups()
                          + " were successfully aligned. Please look at the logs for more information";
            }
        } else {
            // Align only the requested process group and its immediate children.
            AlignProcessGroupComponents alignProcessGroupComponents = new AlignProcessGroupComponents(legacyNifiRestClient.getNiFiRestClient(), processGroupId);
            ProcessGroupDTO alignedGroup = alignProcessGroupComponents.autoLayout();
            if (alignProcessGroupComponents.isAligned()) {
                message = "Aligned " + alignedGroup.getContents().getProcessGroups().size() + " process groups under " + alignedGroup.getName();
            } else {
                message = "Alignment failed for process group " + processGroupId + ". Please look at the logs for more information";
            }
        }
        // NOTE(review): failures are still reported via buildSuccess() (HTTP 200), preserving the
        // existing API contract; clients must inspect the message to detect alignment failures.
        RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(message).buildSuccess();
        return Response.ok(status).build();
    }

    /**
     * Deletes stale versioned child process groups (those whose name matches {@code .* - \d{13}})
     * beneath the given process group.
     *
     * @param processGroupId the parent process group to clean up
     * @return a status object reporting how many process groups were deleted
     */
    @GET
    @Path("/cleanup-versions/{processGroupId}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation(value = "Performs a cleanup of the specified process group.",
                  notes = "This method will list all of the child process groups and delete the ones where the name matches the regular expression: .* - \\d{13}")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the number of process groups deleted.", response = RestResponseStatus.class),
                      @ApiResponse(code = 500, message = "The process group is unavailable.", response = RestResponseStatus.class)
                  })
    public Response cleanupVersionedProcessGroups(@PathParam("processGroupId") String processGroupId) {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);

        CleanupStaleFeedRevisions cleanupStaleFeedRevisions = new CleanupStaleFeedRevisions(legacyNifiRestClient, processGroupId, propertyDescriptorTransform);
        cleanupStaleFeedRevisions.cleanup();
        String msg = "Cleaned up " + cleanupStaleFeedRevisions.getDeletedProcessGroups().size() + " Process Groups";
        RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(msg).buildSuccess();
        return Response.ok(status).build();
    }

    /**
     * Returns the feed flow graph for the given process group id, prepared for JSON serialization.
     */
    @GET
    @Path("/flow/{processGroupId}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets the flow of the specified process group.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the flow.", response = NifiFlowProcessGroup.class),
                      @ApiResponse(code = 500, message = "The process group is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getFlow(@PathParam("processGroupId") String processGroupId) {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);

        NifiFlowProcessGroup flow = legacyNifiRestClient.getFeedFlow(processGroupId);
        // Break object cycles / trim the graph so it can be serialized to JSON.
        NifiFlowDeserializer.prepareForSerialization(flow);
        return Response.ok(flow).build();
    }

    /**
     * Returns the feed flow graph addressed by a combined "category.feed" name.
     */
    @GET
    @Path("/flow/feed/{categoryAndFeedName}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets the flow of the specified feed.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the flow.", response = NifiFlowProcessGroup.class),
                      @ApiResponse(code = 500, message = "The process group is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getFlowForCategoryAndFeed(@PathParam("categoryAndFeedName") String categoryAndFeedName) {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);

        NifiFlowProcessGroup flow = legacyNifiRestClient.getFeedFlowForCategoryAndFeed(categoryAndFeedName);
        NifiFlowDeserializer.prepareForSerialization(flow);
        return Response.ok(flow).build();
    }

    /**
     * Walks the entire NiFi graph and returns every feed flow.
     */
    @GET
    @Path(FLOWS)
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets a list of all flows.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the flows.", response = NifiFlowProcessGroup.class, responseContainer = "List"),
                      @ApiResponse(code = 500, message = "NiFi is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getFlows() {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);

        List<NifiFlowProcessGroup> feedFlows = legacyNifiRestClient.getFeedFlows();
        if (feedFlows != null) {
            log.info("********************** getAllFlows ({})", feedFlows.size());
            // Fixed: drop the redundant .stream() and use a method reference instead of a lambda.
            feedFlows.forEach(NifiFlowDeserializer::prepareForSerialization);
        }
        return Response.ok(feedFlows).build();
    }

    /**
     * Returns all Kylo configuration properties whose keys start with the
     * {@link PropertyExpressionResolver#configPropertyPrefix} prefix ("config.").
     * Always returns a map, never {@code null}.
     */
    @GET
    @Path("/configuration/properties")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation(value = "Gets user properties for NiFi.",
                  notes = "These are the properties beginning with 'config.' in the application.properties file.")
    @ApiResponses(
        @ApiResponse(code = 200, message = "Returns the user properties.", response = Map.class)
    )
    public Response getFeeds() {
        Map<String, Object> properties = environmentProperties.getPropertiesStartingWith(PropertyExpressionResolver.configPropertyPrefix);
        if (properties == null) {
            properties = new HashMap<>();
        }
        return Response.ok(properties).build();
    }

    /**
     * Returns the input ports exposed by reusable templates.
     */
    @GET
    @Path(REUSABLE_INPUT_PORTS)
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets the input ports to reusable templates.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the input ports.", response = PortDTO.class, responseContainer = "Set"),
                      @ApiResponse(code = 500, message = "NiFi is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getReusableFeedInputPorts() {
        Set<PortDTO> ports = feedManagerTemplateService.getReusableFeedInputPorts();
        return Response.ok(ports).build();
    }

    /**
     * Finds controller services of the specified type.
     *
     * @param processGroupId the process group id, or "all"/"root" for the root group
     * @param type           the controller service type to match (including its subtypes)
     * @return the list of matching controller services
     * @throws NotFoundException   if the process group id is blank
     * @throws BadRequestException if the type is blank
     */
    @GET
    @Path("/controller-services/process-group/{processGroupId}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Finds controller services of the specified type.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the matching controller services.", response = ControllerServiceDTO.class, responseContainer = "Set"),
                      @ApiResponse(code = 400, message = "The type cannot be empty.", response = RestResponseStatus.class),
                      @ApiResponse(code = 404, message = "The process group cannot be found.", response = RestResponseStatus.class),
                      @ApiResponse(code = 500, message = "The process group is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getControllerServices(@Nonnull @PathParam("processGroupId") final String processGroupId,
                                          @Nullable @QueryParam("type") final String type) {
        // Verify parameters
        if (StringUtils.isBlank(processGroupId)) {
            throw new NotFoundException(STRINGS.getString("getControllerServices.missingProcessGroup"));
        }
        if (StringUtils.isBlank(type)) {
            throw new BadRequestException(STRINGS.getString("getControllerServices.missingType"));
        }

        // Determine allowed service types: the requested type plus every subtype NiFi reports for it.
        final Stream<String> subTypes = nifiRestClient.controllerServices().getTypes(type).stream().map(DocumentedTypeDTO::getType);
        final Set<String> allowedTypes = Stream.concat(Stream.of(type), subTypes).collect(Collectors.toSet());

        // Filter controller services
        final Set<ControllerServiceDTO> controllerServices =
            ("all".equalsIgnoreCase(processGroupId) || "root".equalsIgnoreCase(processGroupId))
            ? nifiRestClient.processGroups().getControllerServices("root")
            : nifiRestClient.processGroups().getControllerServices(processGroupId);
        final Set<ControllerServiceDTO> matchingControllerServices = controllerServices.stream()
            .filter(controllerService -> allowedTypes.contains(controllerService.getType()))
            .collect(Collectors.toSet());
        return Response.ok(matchingControllerServices).build();
    }

    /**
     * Returns every controller service known to NiFi, wrapped in a {@code {"controllerServices": [...]}} map.
     */
    @GET
    @Path("/controller-services")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets a list of available controller services.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the controller services.", response = ControllerServiceDTO.class, responseContainer = "Set"),
                      @ApiResponse(code = 500, message = "NiFi is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getServices() {
        final Set<ControllerServiceDTO> controllerServices = legacyNifiRestClient.getControllerServices();
        return Response.ok(ImmutableMap.of("controllerServices", controllerServices)).build();
    }

    /**
     * Returns the available controller service types.
     */
    @GET
    @Path("/controller-services/types")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets a list of the available controller service types.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the controller service types.", response = ControllerServiceTypesEntity.class),
                      @ApiResponse(code = 500, message = "NiFi is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getServiceTypes() {
        final ControllerServiceTypesEntity entity = new ControllerServiceTypesEntity();
        entity.setControllerServiceTypes(legacyNifiRestClient.getControllerServiceTypes());
        return Response.ok(entity).build();
    }

    /**
     * Lists table names reachable through the given DBCP controller service.
     *
     * @param serviceId   the controller service id
     * @param serviceName optional controller service name (used for lookup/logging)
     * @param schema      optional schema filter
     * @param tableName   optional table-name filter
     */
    @GET
    @Path("/controller-services/{serviceId}/tables")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation(value = "Gets a list of table names from the specified database.",
                  notes = "Connects to the database specified by the controller service using the password defined in Kylo's application.properties file.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the table names.", response = String.class, responseContainer = "List"),
                      @ApiResponse(code = 500, message = "Nifi or the database are unavailable.", response = RestResponseStatus.class)
                  })
    public Response getTableNames(@PathParam("serviceId") String serviceId,
                                  @QueryParam("serviceName") @DefaultValue("") String serviceName,
                                  @QueryParam("schema") String schema,
                                  @QueryParam("tableName") String tableName) {
        log.info("Query for Table Names against service: {}({})", serviceName, serviceId);
        List<String> tables = dbcpConnectionPoolTableInfo.getTableNamesForControllerService(serviceId, serviceName, schema, tableName);
        return Response.ok(tables).build();
    }

    /**
     * Describes the schema of a single table reachable through the given DBCP controller service.
     */
    @GET
    @Path("/controller-services/{serviceId}/tables/{tableName}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation(value = "Gets the schema of the specified table.",
                  notes = "Connects to the database specified by the controller service using the password defined in Kylo's application.properties file.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the table schema.", response = TableSchema.class),
                      @ApiResponse(code = 500, message = "Nifi or the database are unavailable.", response = RestResponseStatus.class)
                  })
    public Response describeTable(@PathParam("serviceId") String serviceId,
                                  @PathParam("tableName") String tableName,
                                  @QueryParam("serviceName") @DefaultValue("") String serviceName,
                                  @QueryParam("schema") String schema) {
        log.info("Describe Table {} against service: {}({})", tableName, serviceName, serviceId);
        TableSchema tableSchema = dbcpConnectionPoolTableInfo.describeTableForControllerService(serviceId, serviceName, schema, tableName);
        return Response.ok(tableSchema).build();
    }

    /**
     * Fetches a single controller service by id.
     *
     * <p>NOTE(review): lookup failures are reported as an HTTP 200 carrying an error status body
     * (not a 500 as the Swagger doc suggests); this is preserved for backward compatibility.</p>
     */
    @GET
    @Path("/controller-services/{serviceId}")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation(value = "Gets a controller service.",
                  notes = "returns a Nifi controller service object by the supplied identifier")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the controller service.", response = ControllerServiceDTO.class),
                      @ApiResponse(code = 500, message = "Unable to find the controller service", response = RestResponseStatus.class)
                  })
    public Response getControllerService(@PathParam("serviceId") String serviceId) {
        try {
            final ControllerServiceDTO controllerService = legacyNifiRestClient.getControllerService(null, serviceId);
            return Response.ok(controllerService).build();
        } catch (Exception e) {
            // Fixed: log the failure instead of silently swallowing the exception.
            log.error("Unable to find controller service for {}", serviceId, e);
            RestResponseStatus error = new RestResponseStatus.ResponseStatusBuilder().message("Unable to find controller service for " + serviceId).buildError();
            return Response.ok(error).build();
        }
    }

    /**
     * Gets the NiFi cluster status.
     *
     * @return the cluster summary
     */
    @GET
    @Path("/cluster/summary")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets the status of the NiFi cluster.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the cluster status.", response = NiFiClusterSummary.class),
                      @ApiResponse(code = 500, message = "NiFi is unavailable.", response = RestResponseStatus.class)
                  })
    public Response getClusterSummary() {
        final NiFiClusterSummary clusterSummary = nifiRestClient.clusterSummary();
        return Response.ok(clusterSummary).build();
    }

    /**
     * Checks to see if NiFi is up and running
     *
     * @return true if running, false if not
     */
    @GET
    @Path("/running")
    @Produces(MediaType.APPLICATION_JSON)
    @ApiOperation("Gets the status of the NiFi cluster.")
    @ApiResponses({
                      @ApiResponse(code = 200, message = "Returns the status of NiFi if its running or not"),
                      @ApiResponse(code = 500, message = "An error occurred accessing the NiFi status.", response = RestResponseStatus.class)
                  })
    public Response getRunning() {
        boolean isRunning = nifiConnectionService.isNiFiRunning();
        return Response.ok(isRunning).build();
    }
}
apache-2.0
tjkrell/MrGEO
mrgeo-vector/src/main/java/org/mrgeo/paint/AdditiveComposite.java
2054
/*
 * Copyright 2009-2014 DigitalGlobe, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */

package org.mrgeo.paint;

import java.awt.*;
import java.awt.image.ColorModel;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;

/**
 * A {@link Composite} that combines source and destination pixels by simple addition
 * of their band-0 sample values ({@code dst = src + dstIn}).
 *
 * <p>Only the first band is composed; any additional bands of the destination are left
 * untouched. No clamping is performed, so the result may exceed the destination
 * sample range for small integral transfer types.</p>
 *
 * @author jason.surratt
 */
public class AdditiveComposite implements Composite
{

  /**
   * Context that performs the per-pixel additive blend.
   *
   * <p>Fixed: declared {@code static} — the context never touches the enclosing
   * {@code AdditiveComposite} instance, so the implicit outer-instance reference of a
   * non-static inner class was unnecessary.</p>
   */
  private static final class AdditiveCompositeContext implements CompositeContext
  {
    /*
     * (non-Javadoc)
     *
     * @see java.awt.CompositeContext#compose(java.awt.image.Raster,
     * java.awt.image.Raster, java.awt.image.WritableRaster)
     */
    @Override
    public void compose(Raster src, Raster dstIn, WritableRaster dstOut)
    {
      // Iterate the destination's bounds; src/dstIn are assumed to cover the same area.
      int minX = dstOut.getMinX();
      int minY = dstOut.getMinY();
      int maxX = minX + dstOut.getWidth();
      int maxY = minY + dstOut.getHeight();
      for (int y = minY; y < maxY; y++)
      {
        for (int x = minX; x < maxX; x++)
        {
          // Band 0 only: write the sum of the two input samples.
          dstOut.setSample(x, y, 0, src.getSample(x, y, 0) + dstIn.getSample(x, y, 0));
        }
      }
    }

    /*
     * (non-Javadoc)
     *
     * @see java.awt.CompositeContext#dispose()
     */
    @Override
    public void dispose()
    {
      // No resources to release.
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see java.awt.Composite#createContext(java.awt.image.ColorModel,
   * java.awt.image.ColorModel, java.awt.RenderingHints)
   */
  @Override
  public CompositeContext createContext(ColorModel srcColorModel, ColorModel dstColorModel,
      RenderingHints hints)
  {
    // Context is stateless, so a fresh instance per request is cheap; the color
    // models and hints are ignored because the blend is a raw sample addition.
    return new AdditiveCompositeContext();
  }
}
apache-2.0
rhusar/radargun
core/src/main/java/org/radargun/stages/cache/background/PrivateLogLogic.java
10691
package org.radargun.stages.cache.background;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.radargun.Operation;
import org.radargun.stages.helpers.Range;
import org.radargun.traits.BasicOperations;

/**
 * This logic operates on {@link PrivateLogValue private log values} using only {@link BasicOperations},
 * specifically put, get and remove operations. Private log values are written to by single thread.
 *
 * @author Radim Vansa &lt;rvansa@redhat.com&gt;
 */
class PrivateLogLogic extends AbstractLogLogic<PrivateLogValue> {

   // Timestamps of the last writes into given values. As we can get stale read for some period,
   // we cannot overwrite the value again until we can be sure that we can safely read current
   // value of that entry. We have to keep the timestamps here, as we cannot reliably determine
   // the last write timestamp from cache (because any value read could be stale).
   // However, we still can't wait the timeout when we decide to overwrite an entry -
   // due to the birthday paradox, this happens very often. We have to find out ourselves whether
   // the read was stale or not.
   // Note that this does not cause any problems with SharedLogLogic since all updates there are
   // conditional; if stale value is read, the conditional operation will fail (stale read must
   // not happen during the condition verification).
   private final Map<Long, OperationTimestampPair> timestamps = new HashMap<>();

   // Keys modified during current transaction, should be recorded to timestamps when
   // the transaction is committed
   private final Collection<KeyOperationPair> txModifications = new ArrayList<>(Math.max(0, transactionSize));

   PrivateLogLogic(BackgroundOpsManager manager, Range range) {
      super(manager, range);
   }

   /**
    * Executes one logical operation (PUT or REMOVE) against the entry for {@code keyId},
    * maintaining the primary value under {@code keyId} and its backup under {@code ~keyId}.
    *
    * @return true if the operation was applied, false if it should be retried (e.g. stale read)
    */
   @Override
   protected boolean invokeLogic(long keyId) throws Exception {
      Operation operation = getOperation(operationTypeRandom);
      OperationTimestampPair prevOperation = timestamps.get(keyId);

      // first we have to get the value
      PrivateLogValue prevValue = checkedGetValue(keyId);
      PrivateLogValue backupValue = null;
      if (prevOperation != null) {
         if (prevValue == null || !prevValue.contains(prevOperation.operationId)) {
            // non-cleaned old value or stale read, try backup
            backupValue = checkedGetValue(~keyId);
            if (backupValue == null || !backupValue.contains(prevOperation.operationId)) {
               // definitely stale read
               log.trace("Detected stale read, keyId: " + keyId);
               waitForStaleRead(prevOperation.timestamp);
               return false;
            } else {
               // pretend that we haven't read it at all
               prevValue = null;
            }
         }
      }
      // now for modify operations, execute it
      if (prevValue == null || operation == BasicOperations.PUT) {
         PrivateLogValue nextValue;
         if (prevValue != null) {
            nextValue = getNextValue(prevValue);
         } else {
            // the value may have been removed, look for backup
            if (backupValue == null) {
               backupValue = checkedGetValue(~keyId);
            }
            if (backupValue == null) {
               nextValue = new PrivateLogValue(stressor.id, operationId);
            } else {
               nextValue = getNextValue(backupValue);
            }
         }
         if (nextValue == null) {
            return false;
         }
         checkedPutValue(keyId, nextValue);
         if (backupValue != null) {
            delayedRemoveValue(~keyId, backupValue);
         }
      } else if (operation == BasicOperations.REMOVE) {
         // "remove" = move the extended value to the backup slot, then schedule
         // removal of the primary slot
         PrivateLogValue nextValue = getNextValue(prevValue);
         if (nextValue == null) {
            return false;
         }
         checkedPutValue(~keyId, nextValue);
         delayedRemoveValue(keyId, prevValue);
      } else {
         // especially GETs are not allowed here, because these would break the deterministic order
         // - each operationId must be written somewhere
         throw new UnsupportedOperationException("Only PUT and REMOVE operations are allowed for this logic.");
      }
      if (transactionSize > 0) {
         // defer the timestamp update until the transaction commits (see afterCommit)
         txModifications.add(new KeyOperationPair(keyId, operationId));
      } else {
         long now = System.currentTimeMillis();
         timestamps.put(keyId, new OperationTimestampPair(operationId, now));
         log.tracef("Operation %d on %08X finished at %d", operationId, keyId, now);
      }
      return true;
   }

   /**
    * Reacts to a detected stale read: if stale reads are tolerated for a limited window
    * (writeApplyMaxDelay &gt; 0) and the last write is still within that window, back off;
    * otherwise report the stale read and request stressor termination.
    *
    * @param lastWriteTimestamp millisecond timestamp of the last write to the affected key
    */
   private void waitForStaleRead(long lastWriteTimestamp) throws InterruptedException {
      long writeApplyMaxDelay = manager.getLogLogicConfiguration().writeApplyMaxDelay;
      if (writeApplyMaxDelay > 0) {
         long now = System.currentTimeMillis();
         if (lastWriteTimestamp > now - writeApplyMaxDelay) {
            // Fixed: the format string has two specifiers (%08X and %d) but only one argument
            // was supplied; pass lastWriteTimestamp as well.
            // NOTE(review): keyId here is presumably a field inherited from AbstractLogLogic - confirm.
            log.debugf("Last write of %08X was at %d, waiting 5 seconds to evade stale reads", keyId, lastWriteTimestamp);
            Thread.sleep(5000);
         }
      } else {
         manager.getStressorRecordPool().reportStaleRead();
         stressor.requestTerminate();
      }
   }

   @Override
   protected void afterRollback() {
      super.afterRollback();
      // modifications from the rolled-back transaction must not be recorded
      txModifications.clear();
   }

   @Override
   protected void afterCommit() {
      super.afterCommit();
      // record the write timestamps of all keys touched by the committed transaction
      long now = System.currentTimeMillis();
      for (KeyOperationPair pair : txModifications) {
         timestamps.put(pair.keyId, new OperationTimestampPair(pair.operationId, now));
         log.tracef("Operation %d on %08X finished at %d", pair.operationId, pair.keyId, now);
      }
      txModifications.clear();
   }

   /**
    * Produces the successor of {@code prevValue} with the current operationId appended.
    * When the value has reached its maximum size, waits until the checkers have confirmed
    * the oldest operations and shifts them out; returns null if the stressor is
    * interrupted/terminated meanwhile.
    */
   private PrivateLogValue getNextValue(PrivateLogValue prevValue) throws InterruptedException, BreakTxRequest {
      if (prevValue.size() >= manager.getLogLogicConfiguration().getValueMaxSize()) {
         int checkedValues;
         // TODO some limit after which the stressor will terminate
         for (;;) {
            if (stressor.isInterrupted() || stressor.isTerminated()) {
               return null;
            }
            long minReadOperationId;
            try {
               minReadOperationId = getCheckedOperation(stressor.id, prevValue.getOperationId(0));
            } catch (StressorException e) {
               return null;
            }
            if (prevValue.getOperationId(0) <= minReadOperationId) {
               // count how many leading operations have been confirmed and can be discarded
               for (checkedValues = 1; checkedValues < prevValue.size() && prevValue.getOperationId(checkedValues) <= minReadOperationId; ++checkedValues) {
                  log.tracef("Discarding operation %d (minReadOperationId is %d)", prevValue.getOperationId(checkedValues), minReadOperationId);
               }
               break;
            } else {
               try {
                  Thread.sleep(100);
               } catch (InterruptedException e) {
                  Thread.currentThread().interrupt();
                  return null;
               }
            }
         }
         return prevValue.shift(checkedValues, operationId);
      } else {
         return prevValue.with(operationId);
      }
   }

   /**
    * GETs the value for {@code keyId}, registering request latency/errors in the stressor stats.
    * Returns null for entries scheduled for delayed removal. Throws IllegalStateException when
    * the stored value is not a PrivateLogValue.
    */
   private PrivateLogValue checkedGetValue(long keyId) throws Exception {
      DelayedRemove removed = delayedRemoves.get(keyId);
      if (removed != null) {
         return null;
      }
      Object prevValue;
      long startTime = System.nanoTime();
      try {
         prevValue = basicCache.get(keyGenerator.generateKey(keyId));
      } catch (Exception e) {
         stressor.stats.registerError(System.nanoTime() - startTime, BasicOperations.GET);
         throw e;
      }
      long endTime = System.nanoTime();
      if (prevValue != null && !(prevValue instanceof PrivateLogValue)) {
         stressor.stats.registerError(endTime - startTime, BasicOperations.GET);
         log.error("Value is not an instance of PrivateLogValue: " + prevValue);
         throw new IllegalStateException();
      } else {
         stressor.stats.registerRequest(endTime - startTime, prevValue == null ? GET_NULL : BasicOperations.GET);
         return (PrivateLogValue) prevValue;
      }
   }

   /**
    * Removes the value for {@code keyId}, verifying that the removed value equals
    * {@code expectedValue}; registers stats and throws IllegalStateException on mismatch.
    */
   @Override
   protected boolean checkedRemoveValue(long keyId, PrivateLogValue expectedValue) throws Exception {
      Object prevValue;
      long startTime = System.nanoTime();
      try {
         // Note: with Infinspan, the returned value is sometimes unreliable anyway
         prevValue = basicCache.getAndRemove(keyGenerator.generateKey(keyId));
      } catch (Exception e) {
         stressor.stats.registerError(System.nanoTime() - startTime, BasicOperations.REMOVE);
         throw e;
      }
      long endTime = System.nanoTime();
      boolean successful = false;
      if (prevValue != null) {
         if (!(prevValue instanceof PrivateLogValue)) {
            log.error("Value is not an instance of PrivateLogValue: " + prevValue);
         } else if (!prevValue.equals(expectedValue)) {
            log.error("Value is not the expected one: expected=" + expectedValue + ", found=" + prevValue);
         } else {
            successful = true;
         }
      } else if (expectedValue == null) {
         successful = true;
      } else {
         log.error("Expected to remove " + expectedValue + " but found " + prevValue);
      }
      if (successful) {
         stressor.stats.registerRequest(endTime - startTime, BasicOperations.REMOVE);
         return true;
      } else {
         stressor.stats.registerError(endTime - startTime, BasicOperations.REMOVE);
         throw new IllegalStateException();
      }
   }

   /**
    * PUTs {@code value} under {@code keyId}, registering request latency/errors in the stats.
    */
   private void checkedPutValue(long keyId, PrivateLogValue value) throws Exception {
      long startTime = System.nanoTime();
      try {
         basicCache.put(keyGenerator.generateKey(keyId), value);
      } catch (Exception e) {
         stressor.stats.registerError(System.nanoTime() - startTime, BasicOperations.PUT);
         throw e;
      }
      long endTime = System.nanoTime();
      stressor.stats.registerRequest(endTime - startTime, BasicOperations.PUT);
   }

   /** Immutable pair: which operation last wrote a value, and when. */
   protected static class OperationTimestampPair {
      public final long operationId;
      public final long timestamp;

      public OperationTimestampPair(long operationId, long timestamp) {
         this.operationId = operationId;
         this.timestamp = timestamp;
      }
   }

   /** Immutable pair: a key modified in a transaction and the operation that modified it. */
   protected static class KeyOperationPair {
      public final long keyId;
      public final long operationId;

      public KeyOperationPair(long keyId, long operationId) {
         this.keyId = keyId;
         this.operationId = operationId;
      }
   }
}
apache-2.0
K7Gt/java_pft
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/ContactAddressEmailTests.java
2685
package ru.stqa.pft.addressbook.tests;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;

import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Verifies that the address and e-mail values shown on the contact home page
 * match the values entered on the contact edit form.
 *
 * Created by Golem on 08.04.2017.
 */
public class ContactAddressEmailTests extends TestBase {

    /**
     * Ensures at least one contact exists before each test; creates a fully
     * populated test contact if the list is empty.
     */
    @BeforeMethod
    public void ensurePreconditions() {
        app.gotTo().homePage();
        if (app.contact().all().size() == 0) {
            app.gotTo().addNewPage();
            app.contact().create(new ContactData()
                                     .withContactName("testname")
                                     .withContactMiddleName("testmiddlename")
                                     .withContactLastName("testlastname")
                                     .withContactNickname("test")
                                     .withContactTitle("testtitle")
                                     .withContactCompany("testcompany")
                                     .withContactCompanyAddress("testaddressoftestcompany")
                                     .withContactHomePhone("111")
                                     .withContactMobilePhone("2222")
                                     .withContactWorkPhone("33333")
                                     .withContactEmail1("test1@gmail.com")
                                     .withContactEmail2("test2@gmail.com")
                                     .withContactEmail3("test3@gmail.com")
                                     .withContactHomepage("test.com"));
            app.gotTo().homePage();
        }
    }

    /**
     * The merged e-mail column on the home page must equal the three e-mail
     * fields from the edit form, joined with newlines.
     */
    @Test
    public void testContactEmail() {
        app.gotTo().homePage();
        ContactData contact = app.contact().all().iterator().next();
        ContactData contactInfoFormEditForm = app.contact().infoFromEditForm(contact);
        assertThat(contact.getAllEmails(), equalTo(mergeEmails(contactInfoFormEditForm)));
    }

    /**
     * The address shown on the home page must equal the edit-form address,
     * ignoring whitespace differences.
     */
    @Test
    public void testContactAddress() {
        app.gotTo().homePage();
        ContactData contact = app.contact().all().iterator().next();
        ContactData contactInfoFormEditForm = app.contact().infoFromEditForm(contact);
        assertThat(cleaned(contact.getContactCompanyAddress()),
                   equalTo(cleaned(contactInfoFormEditForm.getContactCompanyAddress())));
    }

    /**
     * Joins the three e-mail fields with '\n', skipping empty ones.
     * Fixed: use Stream.of instead of the redundant Arrays.asList(...).stream(),
     * and isEmpty() instead of equals("").
     */
    private String mergeEmails(ContactData contactInfoFormEditForm) {
        return Stream.of(contactInfoFormEditForm.getContactEmail1(),
                         contactInfoFormEditForm.getContactEmail2(),
                         contactInfoFormEditForm.getContactEmail3())
            .filter(s -> !s.isEmpty())
            .collect(Collectors.joining("\n"));
    }

    /**
     * Strips all whitespace from the string for comparison purposes.
     * Fixed: the second replaceAll("\n", "") was redundant - the \s class
     * already matches newlines.
     */
    public static String cleaned(String string) {
        return string.replaceAll("\\s", "");
    }
}
apache-2.0
saikrishna321/java-client
src/test/java/io/appium/java_client/touch/TouchOptionsTests.java
4126
package io.appium.java_client.touch;

import static io.appium.java_client.touch.FailsWithMatcher.failsWith;
import static io.appium.java_client.touch.LongPressOptions.longPressOptions;
import static io.appium.java_client.touch.TapOptions.tapOptions;
import static io.appium.java_client.touch.WaitOptions.waitOptions;
import static io.appium.java_client.touch.offset.ElementOption.element;
import static io.appium.java_client.touch.offset.PointOption.point;
import static java.time.Duration.ofMillis;
import static java.time.Duration.ofSeconds;
import static junit.framework.TestCase.fail;
import static org.hamcrest.CoreMatchers.everyItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isIn;

import io.appium.java_client.touch.offset.ElementOption;
import io.appium.java_client.touch.offset.PointOption;
import org.junit.Test;
import org.openqa.selenium.Point;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebElement;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Unit tests for the touch-action option builders: argument validation and the
 * parameter maps produced by {@code build()}.
 */
public class TouchOptionsTests {
    private static final WebElement DUMMY_ELEMENT = new DummyElement();

    /** Asserts that two option maps contain exactly the same entries. */
    private static void assertSameEntries(Map<String, Object> actual, Map<String, Object> expected) {
        assertThat(actual.entrySet(), everyItem(isIn(expected.entrySet())));
        assertThat(expected.entrySet(), everyItem(isIn(actual.entrySet())));
    }

    @Test(expected = IllegalArgumentException.class)
    public void invalidEmptyPointOptionsShouldFailOnBuild() {
        // Building a point option without coordinates must be rejected.
        new PointOption().build();
        fail("The exception throwing was expected");
    }

    @Test(expected = IllegalArgumentException.class)
    public void invalidEmptyElementOptionsShouldFailOnBuild() {
        // Building an element option without an element must be rejected.
        new ElementOption().build();
        fail("The exception throwing was expected");
    }

    @Test
    public void invalidOptionsArgumentsShouldFailOnAltering() {
        // Every mutation below passes an illegal argument and must throw.
        final List<Runnable> badCalls = new ArrayList<>();
        badCalls.add(() -> waitOptions(ofMillis(-1)));
        badCalls.add(() -> new ElementOption().withCoordinates(new Point(0, 0)).withElement(null));
        badCalls.add(() -> new WaitOptions().withDuration(null));
        badCalls.add(() -> tapOptions().withTapsCount(-1));
        badCalls.add(() -> longPressOptions().withDuration(null));
        badCalls.add(() -> longPressOptions().withDuration(ofMillis(-1)));
        badCalls.forEach(call -> assertThat(call, failsWith(RuntimeException.class)));
    }

    @Test
    public void longPressOptionsShouldBuildProperly() {
        final Map<String, Object> built = longPressOptions()
            .withElement(element(DUMMY_ELEMENT).withCoordinates(0, 0))
            .withDuration(ofMillis(1))
            .build();

        final Map<String, Object> wanted = new HashMap<>();
        wanted.put("element", ((RemoteWebElement) DUMMY_ELEMENT).getId());
        wanted.put("x", 0);
        wanted.put("y", 0);
        wanted.put("duration", 1L);

        assertSameEntries(built, wanted);
    }

    @Test
    public void tapOptionsShouldBuildProperly() {
        final Map<String, Object> built = tapOptions()
            .withPosition(point(new Point(0, 0)))
            .withTapsCount(2)
            .build();

        final Map<String, Object> wanted = new HashMap<>();
        wanted.put("x", 0);
        wanted.put("y", 0);
        wanted.put("count", 2);

        assertSameEntries(built, wanted);
    }

    @Test
    public void waitOptionsShouldBuildProperly() {
        final Map<String, Object> built = new WaitOptions()
            .withDuration(ofSeconds(1))
            .build();

        final Map<String, Object> wanted = new HashMap<>();
        wanted.put("ms", 1000L);

        assertSameEntries(built, wanted);
    }
}
apache-2.0
v1ta/DeepGreen
app/src/main/java/com/defritza/control/Chess.java
18301
package com.defritza.control;

import android.util.Log;
import com.defritza.model.Bishop;
import com.defritza.model.Board;
import com.defritza.model.Enpassant;
import com.defritza.model.King;
import com.defritza.model.Knight;
import com.defritza.model.Pawn;
import com.defritza.model.Piece;
import com.defritza.model.Player;
import com.defritza.model.Queen;
import com.defritza.model.Rook;
import com.defritza.util.Location;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.StringTokenizer;

/**
 * View
 *
 * Game controller: tracks whose turn it is, recomputes every piece's legal
 * moves after each ply, and detects check, checkmate, stalemate and draw
 * offers. Board geometry is 8x8 addressed via {@link Location}(i, j);
 * ownership is compared through the string returned by
 * {@code Piece.getOwner()} / {@code Player.toString()} ("White"/"Black").
 *
 * @author Joseph, Chris
 */
public class Chess implements ChessRules, Serializable {
    private static final long serialVersionUID = 1;

    // Ply counter: even => White to move, odd => Black to move.
    private int turns = 0;
    // Set by calculateMoves() when the respective king is attacked.
    private boolean whiteCheck = false;
    private boolean blackCheck = false;
    // NOTE(review): never set to false anywhere in this class, so play()
    // only terminates via break — confirm this is intentional.
    private boolean canMove = true;
    // Toggled when a player offers a draw ("draw?" suffix in parseInput).
    private boolean drawFlag = false;
    private Player playerWhite;
    private Player playerBlack;

    /**
     * Creates a controller for the two given players.
     *
     * @param playerWhite the player moving the white pieces
     * @param playerBlack the player moving the black pieces
     */
    public Chess(Player playerWhite, Player playerBlack) {
        this.playerWhite = playerWhite;
        this.playerBlack = playerBlack;
    }

    /** @return the player whose turn it currently is (White on even plies). */
    public Player playerTurn() {
        if (turns % 2 == 0) {
            return this.playerWhite;
        } else {
            return playerBlack;
        }
    }

    /** Advances the game by one ply. */
    public void turn() {
        turns++;
    }

    /** Rolls the ply counter back by one (used to undo a move). */
    public void undo() {
        turns--;
    }

    /** @return true if White's king was in check after the last recalculation. */
    public boolean whiteCheck() {
        return whiteCheck;
    }

    /** @return true if Black's king was in check after the last recalculation. */
    public boolean blackCheck() {
        return blackCheck;
    }

    /** @return true if a draw has been offered and not yet consumed. */
    public boolean drawFlag() {
        return this.drawFlag;
    }

    /** Flips the draw-offer flag. */
    public void toggleDrawFlag() {
        this.drawFlag = !this.drawFlag;
    }

    /**
     * Console-style game loop: alternately prompts White and Black, applies
     * a move, then re-evaluates check / checkmate / stalemate.
     *
     * NOTE(review): every movePiece call here passes Location(-1, -1) for
     * both the piece lookup and the destination — this looks like a stub
     * left over from a console version (input wiring removed); confirm it
     * is not reachable from the Android UI path.
     *
     * @param board       the live game board
     * @param playerWhite the white player
     * @param playerBlack the black player
     */
    public void play(Board board, Player playerWhite, Player playerBlack) {
        while (canMove) {
            if (turns % 2 == 0) {
                System.out.print("White's move: ");
            } else {
                System.out.print("Black's move: ");
            }
            if (turns % 2 == 0) {
                if (movePiece(board.getPiece(new Location(-1, -1)), board, new Location(-1, -1), playerWhite)) {
                    turns++;
                } else {
                    System.out.println("Illegal move, try again");
                    continue;
                }
            } else {
                if (movePiece(board.getPiece(new Location(-1, -1)), board, new Location(-1, -1), playerBlack)) {
                    turns++;
                } else {
                    System.out.println("Illegal move, try again");
                    continue;
                }
            }
            calculateMoves(board);
            if (blackCheck()) {
                if (checkMate(board, playerBlack)) {
                    System.out.println("Checkmate\n\nWhite wins\n");
                    break;
                }
                System.out.println("Check");
            } else if (whiteCheck()) {
                if (checkMate(board, playerWhite)) {
                    System.out.println("Checkmate\n\nBlack wins\n");
                    break;
                }
                System.out.println("Check\n");
            } else {
                if (turns % 2 == 0) {
                    if (stalemate(board, playerWhite)) {
                        System.out.println("\nStalemate\n\nDraw");
                        break;
                    }
                } else {
                    if (stalemate(board, playerBlack)) {
                        System.out.println("\nStalemate\n\nDraw");
                        break;
                    }
                }
            }
            if (drawFlag()) {
                // An un-answered draw offer expires at the end of the ply.
                toggleDrawFlag();
            }
        }
    }

    /**
     * Recomputes the valid-move list of every piece on the board and updates
     * the whiteCheck/blackCheck flags as a side effect. Also ages en-passant
     * "ghost" markers, removing them after one full turn.
     *
     * Movement is generated by walking each piece's unit vectors
     * ({@code getMoveSet()}) up to {@code getMoves()} squares; pawns get
     * a separate diagonal-capture set ({@code getSMoveSet()}) and kings at
     * their start square get a castling scan along their special vectors.
     *
     * NOTE(review): this method both computes moves and detects check;
     * callers (movePiece, checkMate, stalemate) rely on the flag side
     * effect, so the statement order here is load-bearing.
     *
     * @param board the board whose pieces are recalculated
     */
    public void calculateMoves(Board board) {
        whiteCheck = false;
        blackCheck = false;
        Piece[][] pieces = board.getBoard();
        int iOff;
        int jOff;
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                if (pieces[i][j] == null) {
                    continue;
                }
                Piece piece = pieces[i][j];
                if (piece instanceof Enpassant) {
                    // En-passant ghost: lives exactly one turn, then is removed.
                    if (piece.getTurns() == 1) {
                        board.nukeCell(piece.getPos());
                    } else {
                        piece.incrementTurn();
                    }
                    continue;
                }
                piece.resetValidMoves();
                int[][] vectors = piece.getMoveSet();
                //iterate through move-set unit vectors
                for (int index = 0; index < vectors.length; index++) {
                    int moveDist = piece.getMoves();
                    if (piece instanceof Pawn) {
                        if (piece.atStart()) {
                            // Pawn may advance two squares from its start rank.
                            moveDist++;
                        }
                    }
                    iOff = piece.getPos().getI();
                    jOff = piece.getPos().getJ();
                    //iterate max length of moves
                    for (int offset = 0; offset < moveDist; offset++) {
                        iOff += vectors[index][0];
                        jOff += vectors[index][1];
                        if ((iOff >= 0 && iOff <= 7) && (jOff >= 0 && jOff <= 7)) {
                            if (board.isEmpty(iOff, jOff)) {
                                piece.addValidMove(new Location(iOff, jOff));
                            } else if (!(piece.getOwner().equals(board.getPiece(
                                    new Location(iOff, jOff)).getOwner()))) {
                                if (piece instanceof Pawn) {
                                    // Pawns cannot capture straight ahead.
                                    break;
                                }
                                piece.addValidMove(new Location(iOff, jOff));
                                if (board.getPiece(new Location(iOff, jOff)) instanceof King
                                        && !(piece instanceof King)) {
                                    // Enemy king is attackable => that side is in check.
                                    if (board.getPiece(new Location(iOff, jOff))
                                            .getOwner().equals("White")) {
                                        whiteCheck = true;
                                    } else {
                                        blackCheck = true;
                                    }
                                }
                                break;
                            } else {
                                break; //friendly piece
                            }
                        }
                    }
                }
                if (piece instanceof Pawn) {
                    // Pawn diagonal captures (including en-passant ghosts).
                    int[][] sVectors = piece.getSMoveSet();
                    int sMoveDist = piece.getMoves();
                    for (int index = 0; index < sVectors.length; index++) {
                        iOff = piece.getPos().getI();
                        jOff = piece.getPos().getJ();
                        for (int offset = 0; offset < sMoveDist; offset++) {
                            iOff += sVectors[index][0];
                            jOff += sVectors[index][1];
                            if ((iOff >= 0 && iOff <= 7) && (jOff >= 0 && jOff <= 7)) {
                                if (!board.isEmpty(iOff, jOff) || board.isEnpassant(iOff, jOff)) {
                                    if (!(piece.getOwner().equals(board.getPiece(new Location(iOff, jOff)).getOwner()))) {
                                        //enemy piece, break (need to add checking mate checking)
                                        piece.addValidMove(new Location(iOff, jOff));
                                        if (board.getPiece(new Location(iOff, jOff)) instanceof King) {
                                            if (board.getPiece(new Location(iOff, jOff)).getOwner().equals("White")) {
                                                whiteCheck = true;
                                            } else {
                                                blackCheck = true;
                                            }
                                        }
                                        break;
                                    } else {
                                        //friendly piece
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
                if (piece.atStart()) {
                    if (piece instanceof King) {
                        // Castling scan: walk sideways from the unmoved king,
                        // looking for an unmoved friendly rook two or three
                        // squares away with empty squares between.
                        int[][] sVectors = piece.getSMoveSet();
                        int sMoveDist = piece.getMoves() + 1;
                        for (int index = 0; index < sVectors.length; index++) {
                            iOff = piece.getPos().getI();
                            jOff = piece.getPos().getJ();
                            for (int offset = 0; offset < sMoveDist; offset++) {
                                iOff += sVectors[index][0];
                                jOff += sVectors[index][1];
                                // NOTE(review): jOff bound is 6 (not 7) here,
                                // unlike the other scans — confirm intent.
                                if ((iOff >= 0 && iOff <= 7) && (jOff >= 0 && jOff <= 6)) {
                                    if (board.isEmpty(iOff, jOff)) {
                                        if (offset == 1) {
                                            //right
                                            if (!board.isEmpty(iOff, jOff + sVectors[index][1])) {
                                                if ((piece.getOwner().equals(board.getPiece(
                                                        new Location(iOff, jOff + sVectors[index][1])).getOwner()))
                                                        && (board.getPiece(new Location(iOff, jOff + sVectors[index][1])) instanceof Rook)
                                                        && (board.getPiece(new Location(iOff, jOff + sVectors[index][1])).atStart())) {
                                                    piece.addValidMove(new Location(iOff, jOff));
                                                }
                                            }
                                            //left
                                            if ((iOff >= 0 && iOff <= 7)
                                                    && ((jOff + 2 * sVectors[index][1]) >= 0
                                                    && (jOff + 2 * sVectors[index][1]) <= 7)) {
                                                if (!board.isEmpty(iOff, jOff + 2 * sVectors[index][1])) {
                                                    if ((piece.getOwner().equals(board.getPiece(new Location(iOff, jOff + 2 *
                                                            sVectors[index][1])).getOwner()))
                                                            && (board.getPiece(new Location(iOff, jOff + 2 * sVectors[index][1])) instanceof Rook)
                                                            && (board.getPiece(new Location(iOff, jOff + 2 * sVectors[index][1])).atStart())
                                                            && (board.isEmpty(iOff, jOff + sVectors[index][1]))) {
                                                        piece.addValidMove(new Location(iOff, jOff));
                                                        break;
                                                    }
                                                }
                                            }
                                        }
                                    } else {
                                        // Square occupied: castling path blocked.
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Attempts to move {@code piece} to {@code toMove} for {@code player}.
     * Validates ownership and membership in the piece's valid-move list,
     * simulates the move on a copied board to reject moves that leave the
     * mover's own king in check, then performs the move (handling normal
     * captures and en-passant captures via the ghost marker).
     *
     * @param piece  the piece being moved (may be null => rejected)
     * @param board  the live board, mutated on success
     * @param toMove destination square
     * @param player the player making the move; must own the piece
     * @return true if the move was legal and applied, false otherwise
     */
    public boolean movePiece(Piece piece, Board board, Location toMove, Player player) {
        if (piece == null) {
            return false;
        } else if (!(piece.getOwner().equals(player.toString()))) {
            return false;
        }
        // Copy the board and piece so the legality simulation below does not
        // disturb live state.
        Board tempB = new Board(board);
        Piece temp = null;
        if (piece instanceof Pawn) {
            temp = new Pawn(piece);
        } else if (piece instanceof Rook) {
            temp = new Rook(piece);
        } else if (piece instanceof Knight) {
            temp = new Knight(piece);
        } else if (piece instanceof Bishop) {
            temp = new Bishop(piece);
        } else if (piece instanceof Queen) {
            temp = new Queen(piece);
        } else {
            temp = new King(piece);
        }
        boolean validMoveFlag = false;
        ArrayList<Location> validMoves = piece.getValidMoves();
        if (validMoves == null) {
            return false; //piece has no valid moves
        }
        for (Location validMove : validMoves) {
            if (toMove.equals(validMove)) {
                validMoveFlag = true;
                break;
            }
        }
        if (!validMoveFlag) {
            return false;
        }
        if (tempB != null && temp != null) {
            // Simulate on the copy: a move leaving one's own king in check
            // is illegal. calculateMoves refreshes whiteCheck/blackCheck.
            tempB.updateBoard(temp, toMove);
            calculateMoves(tempB);
            if (player.toString().equals("White") && whiteCheck) {
                return false;
            } else if (blackCheck && player.toString().equals("Black")) {
                return false;
            }
        }
        //check for enemy piece
        if (!board.isEmpty(toMove.getI(), toMove.getJ()) || board.isEnpassant(toMove.getI(), toMove.getJ())) {
            if (board.getPiece(toMove) instanceof Enpassant && piece instanceof Pawn) {
                // En-passant: the captured pawn sits at the ghost's linked
                // square, not at the destination itself.
                Location temp1 = board.getPiece(toMove).getGhost();
                player.capturePiece(board.getPiece(temp1));
                board.getPiece(temp1).kill();
                board.updateBoard(piece, toMove);
                board.nukeCell(temp1);
                return true;
            } else {
                Log.d("IN MOVE PIECE", "PIECE: " + board.getPiece(toMove) + " LOCATION: " + toMove);
                player.capturePiece(board.getPiece(toMove));
                board.getPiece(toMove).kill();
                board.updateBoard(piece, toMove);
                return true;
            }
        } else {
            board.updateBoard(piece, toMove);
            return true;
        }
    }

    /**
     * Decides whether {@code player}'s king is checkmated: finds the king
     * (matched by its toString, presumably "wK"/"bK" — confirm against the
     * King model), prunes king moves covered by enemy pieces, tests whether
     * any friendly move lifts the check, and finally reports mate when the
     * king has no surviving moves.
     *
     * NOTE(review): this method mutates the king's valid-move list
     * (delMove) and swallows NullPointerException from the simulated
     * movePiece call — both acknowledged by the original comments.
     *
     * @param board  the current board
     * @param player the player whose king is tested
     * @return true if the player is checkmated
     */
    public boolean checkMate(Board board, Player player) {
        String target = "";
        if (player.toString().equals("White")) {
            target = "wK";
        } else {
            target = "bK";
        }
        Piece kingInCheck = null;
        //find king in check;
        outerloop: // lol jump statements
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                kingInCheck = board.getPiece(new Location(i, j));
                if (kingInCheck != null) {
                    if (kingInCheck.toString().equals(target)) {
                        break outerloop;
                    }
                }
            }
        }
        outerloop2: //lol jump statements again
        for (int i = 0; i < 8; i++) {//check if available king moves won't put the king in check
            for (int j = 0; j < 8; j++) {
                Piece piece = board.getPiece(new Location(i, j));
                if (piece == null) {
                    continue;
                } else {
                    assert kingInCheck != null;
                    if (piece.getOwner().equals(kingInCheck.getOwner())) {
                        continue; //if piece is identical, +1 on loop
                    }
                }
                ArrayList<Location> kingMoves = kingInCheck.getValidMoves();
                ArrayList<Location> pieceMoves = piece.getValidMoves();
                if (pieceMoves.size() == 0) {
                    continue; //if no valid moves on enemy piece, +1 on loop
                }
                if (kingMoves.size() == 0) {
                    break outerloop2;
                }
                outerloop3: // I swear I don't use jump statements this much!
                for (int x = 0; x < kingMoves.size(); x++) {
                    for (int z = 0; z < pieceMoves.size(); z++) {
                        if ((pieceMoves.get(z).getI() == kingMoves.get(x).getI())
                                && (pieceMoves.get(z).getJ() == kingMoves.get(x).getJ())) {
                            // Enemy covers this square: remove it from the
                            // king's options. NOTE(review): delMove shrinks
                            // kingMoves while x indexes it — the x >=
                            // size() guard below papers over that; verify.
                            kingInCheck.delMove(kingMoves.get(x));
                            if (x >= kingMoves.size()) {
                                break outerloop3;
                            }
                        }
                    }
                }
            }
        }
        for (int i = 0; i < 8; i++) {//check is friendly pieces can remove check
            for (int j = 0; j < 8; j++) {
                Piece piece = board.getPiece(new Location(i, j));
                if (piece == null || piece instanceof Enpassant) {
                    continue;
                } else {
                    assert kingInCheck != null;
                    if (!(piece.getOwner().equals(kingInCheck.getOwner()))) {
                        continue;
                    }
                }
                ArrayList<Location> moves = piece.getValidMoves();
                if (moves.size() == 0) {
                    continue;
                }
                for (int e = 0; e < moves.size(); e++) {
                    // Simulate each friendly move on a board copy; if any of
                    // them clears the check flag, it is not mate.
                    Piece temp = null;
                    if (piece instanceof Pawn) {
                        temp = new Pawn(piece);
                    } else if (piece instanceof Rook) {
                        temp = new Rook(piece);
                    } else if (piece instanceof Knight) {
                        temp = new Knight(piece);
                    } else if (piece instanceof Bishop) {
                        temp = new Bishop(piece);
                    } else if (piece instanceof Queen) {
                        temp = new Queen(piece);
                    } else {
                        temp = new King(piece);
                    }
                    Board tempBoard = new Board(board);
                    tempBoard.updateBoard(temp, moves.get(e));
                    calculateMoves(tempBoard);
                    if (player.toString().equals("White")) {
                        if (!whiteCheck) {
                            return false;
                        }
                    } else {
                        if (!blackCheck) {
                            return false;
                        }
                    }
                }
                Piece testKing = new King(kingInCheck);
                ArrayList<Location> kingMoves2 = testKing.getValidMoves();
                for (int r = 0; r < kingMoves2.size(); r++) {
                    Board tempBoard = new Board(board);
                    // NOTE(review): this local shadows the canMove field.
                    boolean canMove = false;
                    try {
                        canMove = movePiece(testKing, tempBoard, kingMoves2.get(r), player);
                    } catch (NullPointerException e) {
                        // Not proud of this.
                    }
                    if (!canMove) {
                        kingInCheck.delMove(kingMoves2.get(r));
                    }
                }
            }
        }
        assert kingInCheck != null;
        if (kingInCheck.getValidMoves().size() == 0) {
            return true;
        } else {
            System.out.println("king has moves");
            return false;
        }
    }

    /**
     * Parses a raw user command line into board locations.
     *
     * Accepted forms: two algebraic squares ("e2 e4", with or without a
     * space inside each square) optionally followed by "draw?"; the single
     * words "resign" or "draw". Algebraic files a-h map to columns 0-7
     * (char - 97) and ranks map to rows as 8 - rank.
     *
     * @param input the raw command line
     * @return the decoded locations — two squares (plus a (0,0) marker when
     *         "draw?" was appended), a single (0,0) for resign, a single
     *         (1,1) for draw — or null on malformed input
     */
    public ArrayList<Location> parseInput(String input) {
        ArrayList<String> userCommands = new ArrayList<>();
        ArrayList<Location> moves = new ArrayList<>();
        StringTokenizer tk = new StringTokenizer(input);
        for (int i = 0; tk.hasMoreTokens(); i++) {
            userCommands.add(tk.nextToken());
            if (userCommands.get(i).length() == 2) {
                if (!Character.isLetter(userCommands.get(i).charAt(1))) {
                    // Split a fused square like "e5" into "e" and "5".
                    userCommands.add(userCommands.get(i).substring(1));
                    userCommands.set(i, userCommands.get(i).substring(0, 1));
                    i++;
                }
            }
        }
        if (userCommands.size() == 4 || userCommands.size() == 5) {
            for (int i = 0; i < 4; i += 2) {
                if (!((int) userCommands.get(i).charAt(0) - 97 >= 0
                        && (int) userCommands.get(i).charAt(0) - 97 < 8)) {
                    System.err.println("Error: please enter a valid board location, i.e. \"e5\"");
                    return null;
                }
                moves.add(new Location(8 - Integer.parseInt(userCommands.get(i + 1)),
                        ((int) userCommands.get(i).charAt(0) - 97)));
            }
            if (userCommands.size() == 5) {
                if (!userCommands.get(4).equalsIgnoreCase("draw?")) {
                    System.err.println("Error: The only accepted command following board locations is \"draw?\"");
                    return null;
                } else {
                    moves.add(new Location(0, 0));
                }
            }
            return moves;
        } else if (userCommands.size() == 1) {
            if (userCommands.get(0).equals("resign")) {
                moves.add(new Location(0, 0));
                return moves;
            } else if (userCommands.get(0).equals("draw")) {
                moves.add(new Location(1, 1));
                return moves;
            }
        }
        System.err.println("Error: Improper input, please use <piece location> <location to move> [\"draw?\"]");
        return null;
    }

    /**
     * Detects stalemate for {@code player}: simulates every valid move of
     * every piece the player owns on a board copy; if none of them leaves
     * the player out of check (per the flags set by calculateMoves), the
     * player has no legal move and the position is a stalemate.
     *
     * @param board  the current board
     * @param player the player to test
     * @return true if the player has no legal move
     */
    public boolean stalemate(Board board, Player player) {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                Piece piece = board.getPiece(new Location(i, j));
                if (piece == null) {
                    continue;
                } else if (!(piece.getOwner().equals(player.toString()))) {
                    continue;
                }
                ArrayList<Location> moves = piece.getValidMoves();
                for (Location test : moves) {
                    Piece temp = null;
                    if (piece instanceof Pawn) {
                        temp = new Pawn(piece);
                    } else if (piece instanceof Rook) {
                        temp = new Rook(piece);
                    } else if (piece instanceof Knight) {
                        temp = new Knight(piece);
                    } else if (piece instanceof Bishop) {
                        temp = new Bishop(piece);
                    } else if (piece instanceof Queen) {
                        temp = new Queen(piece);
                    } else {
                        temp = new King(piece);
                    }
                    Board tempBoard = new Board(board);
                    tempBoard.updateBoard(temp, test);
                    calculateMoves(tempBoard);
                    if (player.toString().equals("White")) {
                        if (!whiteCheck) {
                            return false;
                        }
                    } else {
                        if (!blackCheck) {
                            return false;
                        }
                    }
                }
            }
        }
        return true;
    }
}
apache-2.0
benjchristensen/RxJava
src/main/java/io/reactivex/processors/SerializedProcessor.java
5584
/**
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */
package io.reactivex.processors;

import org.reactivestreams.*;

import io.reactivex.internal.util.*;
import io.reactivex.plugins.RxJavaPlugins;

/**
 * Serializes calls to the Subscriber methods.
 * <p>All other Publisher and Subject methods are thread-safe by design.
 *
 * <p>Implementation note: uses the emitter-loop pattern — the first caller
 * to find {@code emitting == false} becomes the emitter and delivers events
 * directly; concurrent callers enqueue their event as a NotificationLite
 * object into {@code queue} and return, and the emitter drains the queue in
 * {@link #emitLoop()} until it is empty. All shared-state transitions happen
 * inside {@code synchronized (this)} blocks.
 *
 * @param <T> the item value type
 */
/* public */ final class SerializedProcessor<T> extends FlowableProcessor<T> {
    /** The actual subscriber to serialize Subscriber calls to. */
    final FlowableProcessor<T> actual;
    /** Indicates an emission is going on, guarded by this. */
    boolean emitting;
    /** If not null, it holds the missed NotificationLite events. */
    AppendOnlyLinkedArrayList<Object> queue;
    /** Indicates a terminal event has been received and all further events will be dropped. */
    volatile boolean done;

    /**
     * Constructor that wraps an actual subject.
     * @param actual the subject wrapped
     */
    SerializedProcessor(final FlowableProcessor<T> actual) {
        this.actual = actual;
    }

    // Subscription requests are simply forwarded to the wrapped processor.
    @Override
    protected void subscribeActual(Subscriber<? super T> s) {
        actual.subscribe(s);
    }

    /**
     * Forwards the Subscription to the wrapped processor, serialized against
     * the other onXXX calls; cancels it instead if a terminal event has
     * already been received.
     */
    @Override
    public void onSubscribe(Subscription s) {
        boolean cancel;
        if (!done) {
            synchronized (this) {
                if (done) {
                    cancel = true;
                } else {
                    if (emitting) {
                        // Another thread is emitting: enqueue and let it deliver.
                        AppendOnlyLinkedArrayList<Object> q = queue;
                        if (q == null) {
                            q = new AppendOnlyLinkedArrayList<Object>(4);
                            queue = q;
                        }
                        q.add(NotificationLite.subscription(s));
                        return;
                    }
                    // This thread becomes the emitter.
                    emitting = true;
                    cancel = false;
                }
            }
        } else {
            cancel = true;
        }
        if (cancel) {
            s.cancel();
        } else {
            actual.onSubscribe(s);
            emitLoop();
        }
    }

    /** Delivers an item, serialized; dropped after a terminal event. */
    @Override
    public void onNext(T t) {
        if (done) {
            return;
        }
        synchronized (this) {
            if (done) {
                return;
            }
            if (emitting) {
                // Another thread is emitting: enqueue for it to drain.
                AppendOnlyLinkedArrayList<Object> q = queue;
                if (q == null) {
                    q = new AppendOnlyLinkedArrayList<Object>(4);
                    queue = q;
                }
                q.add(NotificationLite.next(t));
                return;
            }
            emitting = true;
        }
        actual.onNext(t);
        emitLoop();
    }

    /**
     * Delivers an error, serialized. If another thread is emitting, the
     * error is placed at the FRONT of the queue (setFirst) so it is seen
     * before any still-queued items. A second terminal event is routed to
     * RxJavaPlugins.onError instead of being delivered.
     */
    @Override
    public void onError(Throwable t) {
        if (done) {
            RxJavaPlugins.onError(t);
            return;
        }
        boolean reportError;
        synchronized (this) {
            if (done) {
                reportError = true;
            } else {
                done = true;
                if (emitting) {
                    AppendOnlyLinkedArrayList<Object> q = queue;
                    if (q == null) {
                        q = new AppendOnlyLinkedArrayList<Object>(4);
                        queue = q;
                    }
                    // Error cuts ahead of queued onNext values.
                    q.setFirst(NotificationLite.error(t));
                    return;
                }
                reportError = false;
                emitting = true;
            }
        }
        if (reportError) {
            RxJavaPlugins.onError(t);
            return;
        }
        actual.onError(t);
    }

    /** Delivers completion, serialized; subsequent events are dropped. */
    @Override
    public void onComplete() {
        if (done) {
            return;
        }
        synchronized (this) {
            if (done) {
                return;
            }
            done = true;
            if (emitting) {
                AppendOnlyLinkedArrayList<Object> q = queue;
                if (q == null) {
                    q = new AppendOnlyLinkedArrayList<Object>(4);
                    queue = q;
                }
                q.add(NotificationLite.complete());
                return;
            }
            emitting = true;
        }
        actual.onComplete();
    }

    /** Loops until all notifications in the queue has been processed. */
    void emitLoop() {
        for (;;) {
            AppendOnlyLinkedArrayList<Object> q;
            synchronized (this) {
                q = queue;
                if (q == null) {
                    // Nothing left: release the emitter role while holding
                    // the lock so no event can slip in unnoticed.
                    emitting = false;
                    return;
                }
                queue = null;
            }
            // Drain outside the lock; accept() replays the notifications
            // into the wrapped processor.
            q.accept(actual);
        }
    }

    @Override
    public boolean hasSubscribers() {
        return actual.hasSubscribers();
    }

    @Override
    public boolean hasThrowable() {
        return actual.hasThrowable();
    }

    @Override
    public Throwable getThrowable() {
        return actual.getThrowable();
    }

    @Override
    public boolean hasComplete() {
        return actual.hasComplete();
    }
}
apache-2.0
azurvii/misc
gwtcrypto/com/googlecode/gwt/crypto/bouncycastle/crypto/Digest.java
1364
package com.googlecode.gwt.crypto.bouncycastle.crypto; /** * interface that a message digest conforms to. */ public interface Digest { /** * return the algorithm name * * @return the algorithm name */ public String getAlgorithmName(); /** * return the size, in bytes, of the digest produced by this message digest. * * @return the size, in bytes, of the digest produced by this message digest. */ public int getDigestSize(); /** * update the message digest with a single byte. * * @param in * the input byte to be entered. */ public void update(byte in); /** * update the message digest with a block of bytes. * * @param in * the byte array containing the data. * @param inOff * the offset into the byte array where the data starts. * @param len * the length of the data. */ public void update(byte[] in, int inOff, int len); /** * close the digest, producing the final digest value. The doFinal call leaves the digest reset. * * @param out * the array the digest is to be copied into. * @param outOff * the offset into the out array the digest is to start at. */ public int doFinal(byte[] out, int outOff); /** * reset the digest back to it's initial state. */ public void reset(); }
apache-2.0
googleapis/java-certificate-manager
proto-google-cloud-certificate-manager-v1/src/main/java/com/google/cloud/certificatemanager/v1/UpdateCertificateRequest.java
35932
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/certificatemanager/v1/certificate_manager.proto package com.google.cloud.certificatemanager.v1; /** * * * <pre> * Request for the `UpdateCertificate` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateRequest} */ public final class UpdateCertificateRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.UpdateCertificateRequest) UpdateCertificateRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCertificateRequest.newBuilder() to construct. 
private UpdateCertificateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCertificateRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCertificateRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateCertificateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.certificatemanager.v1.Certificate.Builder subBuilder = null; if (certificate_ != null) { subBuilder = certificate_.toBuilder(); } certificate_ = input.readMessage( com.google.cloud.certificatemanager.v1.Certificate.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(certificate_); certificate_ = subBuilder.buildPartial(); } break; } case 18: { com.google.protobuf.FieldMask.Builder subBuilder = null; if (updateMask_ != null) { subBuilder = updateMask_.toBuilder(); } updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(updateMask_); updateMask_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.class, com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.Builder.class); } public static final int CERTIFICATE_FIELD_NUMBER = 1; private com.google.cloud.certificatemanager.v1.Certificate certificate_; /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the certificate field is set. */ @java.lang.Override public boolean hasCertificate() { return certificate_ != null; } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The certificate. */ @java.lang.Override public com.google.cloud.certificatemanager.v1.Certificate getCertificate() { return certificate_ == null ? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance() : certificate_; } /** * * * <pre> * Required. A definition of the certificate to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.certificatemanager.v1.CertificateOrBuilder getCertificateOrBuilder() { return getCertificate(); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return updateMask_ != null; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return getUpdateMask(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (certificate_ != null) { output.writeMessage(1, getCertificate()); } if (updateMask_ != null) { output.writeMessage(2, getUpdateMask()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (certificate_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCertificate()); } if (updateMask_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateRequest)) { return super.equals(obj); } com.google.cloud.certificatemanager.v1.UpdateCertificateRequest other = (com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) obj; if (hasCertificate() != other.hasCertificate()) return false; if (hasCertificate()) { if (!getCertificate().equals(other.getCertificate())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCertificate()) { hash = (37 * hash) + CERTIFICATE_FIELD_NUMBER; hash = (53 * hash) + getCertificate().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.certificatemanager.v1.UpdateCertificateRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for the `UpdateCertificate` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.UpdateCertificateRequest) com.google.cloud.certificatemanager.v1.UpdateCertificateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.class, com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.Builder.class); } // Construct using com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if 
(certificateBuilder_ == null) { certificate_ = null; } else { certificate_ = null; certificateBuilder_ = null; } if (updateMaskBuilder_ == null) { updateMask_ = null; } else { updateMask_ = null; updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest getDefaultInstanceForType() { return com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest build() { com.google.cloud.certificatemanager.v1.UpdateCertificateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest buildPartial() { com.google.cloud.certificatemanager.v1.UpdateCertificateRequest result = new com.google.cloud.certificatemanager.v1.UpdateCertificateRequest(this); if (certificateBuilder_ == null) { result.certificate_ = certificate_; } else { result.certificate_ = certificateBuilder_.build(); } if (updateMaskBuilder_ == null) { result.updateMask_ = updateMask_; } else { result.updateMask_ = updateMaskBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) { return mergeFrom((com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.certificatemanager.v1.UpdateCertificateRequest other) { if (other == com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.getDefaultInstance()) return this; if (other.hasCertificate()) { mergeCertificate(other.getCertificate()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.certificatemanager.v1.Certificate certificate_; 
private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.Certificate, com.google.cloud.certificatemanager.v1.Certificate.Builder, com.google.cloud.certificatemanager.v1.CertificateOrBuilder> certificateBuilder_; /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the certificate field is set. */ public boolean hasCertificate() { return certificateBuilder_ != null || certificate_ != null; } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The certificate. */ public com.google.cloud.certificatemanager.v1.Certificate getCertificate() { if (certificateBuilder_ == null) { return certificate_ == null ? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance() : certificate_; } else { return certificateBuilder_.getMessage(); } } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCertificate(com.google.cloud.certificatemanager.v1.Certificate value) { if (certificateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } certificate_ = value; onChanged(); } else { certificateBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. A definition of the certificate to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCertificate( com.google.cloud.certificatemanager.v1.Certificate.Builder builderForValue) { if (certificateBuilder_ == null) { certificate_ = builderForValue.build(); onChanged(); } else { certificateBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCertificate(com.google.cloud.certificatemanager.v1.Certificate value) { if (certificateBuilder_ == null) { if (certificate_ != null) { certificate_ = com.google.cloud.certificatemanager.v1.Certificate.newBuilder(certificate_) .mergeFrom(value) .buildPartial(); } else { certificate_ = value; } onChanged(); } else { certificateBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCertificate() { if (certificateBuilder_ == null) { certificate_ = null; onChanged(); } else { certificate_ = null; certificateBuilder_ = null; } return this; } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.Certificate.Builder getCertificateBuilder() { onChanged(); return getCertificateFieldBuilder().getBuilder(); } /** * * * <pre> * Required. A definition of the certificate to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.CertificateOrBuilder getCertificateOrBuilder() { if (certificateBuilder_ != null) { return certificateBuilder_.getMessageOrBuilder(); } else { return certificate_ == null ? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance() : certificate_; } } /** * * * <pre> * Required. A definition of the certificate to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.Certificate, com.google.cloud.certificatemanager.v1.Certificate.Builder, com.google.cloud.certificatemanager.v1.CertificateOrBuilder> getCertificateFieldBuilder() { if (certificateBuilder_ == null) { certificateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.Certificate, com.google.cloud.certificatemanager.v1.Certificate.Builder, com.google.cloud.certificatemanager.v1.CertificateOrBuilder>( getCertificate(), getParentForChildren(), isClean()); certificate_ = null; } return certificateBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return updateMaskBuilder_ != null || updateMask_ != null; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; onChanged(); } else { updateMaskBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); onChanged(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. 
For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (updateMask_ != null) { updateMask_ = com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial(); } else { updateMask_ = value; } onChanged(); } else { updateMaskBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { if (updateMaskBuilder_ == null) { updateMask_ = null; onChanged(); } else { updateMask_ = null; updateMaskBuilder_ = null; } return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` definition, * see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.UpdateCertificateRequest) } // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.UpdateCertificateRequest) private static final com.google.cloud.certificatemanager.v1.UpdateCertificateRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.certificatemanager.v1.UpdateCertificateRequest(); } public static 
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCertificateRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCertificateRequest>() { @java.lang.Override public UpdateCertificateRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UpdateCertificateRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<UpdateCertificateRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCertificateRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
shiver-me-timbers/smt-waiting-parent
smt-waiting-test/smt-waiting-aspect-load-time/smt-waiting-aspect-load-time-test/src/test/java/shiver/me/timbers/waiting/ITLoadTimeAspectWaiterClass.java
401
package shiver.me.timbers.waiting; import org.junit.runner.RunWith; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Integration test that runs the inherited waiter-class test cases from
 * {@link AbstractITAspectWaiterClass} inside a Spring context built from
 * {@link WaiterConfiguration}; per the module it lives in, this variant
 * exercises the load-time-woven waiting aspect.
 *
 * <p>NOTE(review): {@code @SpringApplicationConfiguration} is the
 * pre-Spring-Boot-1.4 annotation; if the project upgrades Boot, this should
 * migrate to {@code @SpringBootTest} — confirm against the build's Boot version.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = WaiterConfiguration.class)
public class ITLoadTimeAspectWaiterClass extends AbstractITAspectWaiterClass {
}
apache-2.0
ctripcorp/x-pipe
redis/redis-keeper/src/test/java/com/ctrip/xpipe/redis/keeper/ratelimit/AllTests.java
413
package com.ctrip.xpipe.redis.keeper.ratelimit; import com.ctrip.xpipe.redis.keeper.impl.fakeredis.*; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses;

/**
 * JUnit 4 suite that groups the keeper rate-limit tests so they can be run
 * together from a single entry point.
 *
 * @author wenchao.meng
 *         <p>
 *         May 17, 2016 2:05:50 PM
 */
@RunWith(Suite.class)
@SuiteClasses({
        // Resolved via the impl.fakeredis wildcard import above.
        FakeRedisRdbDumpLong.class,
        RateLimitTest.class,
})
public class AllTests {
}
apache-2.0
lpicanco/grails
src/commons/grails/spring/WebBeanBuilder.java
1658
/* Copyright 2004-2005 Graeme Rocher * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package grails.spring; import org.codehaus.groovy.grails.commons.spring.RuntimeSpringConfiguration; import org.codehaus.groovy.grails.commons.spring.WebRuntimeSpringConfiguration; import org.springframework.context.ApplicationContext; /** * Extended version of the BeanBuilder class that provides support for constructing WebApplicationContext instances * * @author Graeme Rocher * @since 1.0 * <p/> * Created: Nov 23, 2007 */ public class WebBeanBuilder extends BeanBuilder { public WebBeanBuilder() { super(); } public WebBeanBuilder(ClassLoader classLoader) { super(classLoader); } public WebBeanBuilder(ApplicationContext parent) { super(parent); } public WebBeanBuilder(ApplicationContext parent, ClassLoader classLoader) { super(parent, classLoader); } protected RuntimeSpringConfiguration createRuntimeSpringConfiguration(ApplicationContext parent, ClassLoader classLoader) { return new WebRuntimeSpringConfiguration(parent, classLoader); } }
apache-2.0
Garbriel/AndroidExamples
src/io/android_tech/myexample/AdapterView/AdapterView_GridView_01_Activity.java
3374
package io.android_tech.myexample.AdapterView; import android.Manifest; import android.app.Activity; import android.app.ListActivity; import android.app.LoaderManager; import android.content.CursorLoader; import android.content.Loader; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.ContactsContract; import android.widget.GridView; import android.widget.Toast; import java.util.ArrayList; import io.android_tech.myexample.R; public class AdapterView_GridView_01_Activity extends Activity implements LoaderManager.LoaderCallbacks<Cursor> { private static final int PERMISSIONS_REQUEST_READ_CONTACTS = 100; private static final int viewList[] = {R.id.custom_list_title_main, R.id.custom_list_title_sub, R.id.custom_list_image}; private static final String[] CONTACT_PROJECTION = new String[]{ContactsContract.Contacts._ID, ContactsContract.Contacts.DISPLAY_NAME, ContactsContract.CommonDataKinds.Phone.NUMBER, ContactsContract.CommonDataKinds.Phone.CONTACT_ID}; AdapterView_ListView_CursorAdapter_02_Adapter mAdapter; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_adapterview_gridview_01); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && checkSelfPermission(Manifest.permission.READ_CONTACTS) != PackageManager.PERMISSION_GRANTED) { requestPermissions(new String[]{Manifest.permission.READ_CONTACTS}, PERMISSIONS_REQUEST_READ_CONTACTS); } else { startQuery(); } } private void startQuery() { mAdapter = new AdapterView_ListView_CursorAdapter_02_Adapter(this, R.layout.activity_adapterview_gridview_01_row, null, new String[]{ContactsContract.Contacts.DISPLAY_NAME, ContactsContract.CommonDataKinds.Phone.NUMBER, ContactsContract.CommonDataKinds.Phone.CONTACT_ID}, viewList, 0); GridView gridView = (GridView) findViewById(io.android_tech.myexample.R.id.gridView1); 
gridView.setAdapter(mAdapter); LoaderManager.LoaderCallbacks<Cursor> loaderCallbacks = this; LoaderManager loaderManager = getLoaderManager(); loaderManager.initLoader(0, null, loaderCallbacks); } @Override public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { if (requestCode == PERMISSIONS_REQUEST_READ_CONTACTS) { if (grantResults[0] == PackageManager.PERMISSION_GRANTED) { startQuery(); } else { Toast.makeText(this, "연락처에 대한 권한이 없어 실행할수 없습니다.", Toast.LENGTH_SHORT).show(); } } } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { return new CursorLoader(this, ContactsContract.CommonDataKinds.Phone.CONTENT_URI, CONTACT_PROJECTION, null, null, null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data) { mAdapter.swapCursor(data); } @Override public void onLoaderReset(Loader<Cursor> loader) { mAdapter.swapCursor(null); } }
apache-2.0
povphearom/AfinosSDK-Android
auth/src/main/java/com/afinos/skd/auth/ui/email/CheckEmailFragment.java
10291
package com.afinos.skd.auth.ui.email; import android.annotation.SuppressLint; import android.app.PendingIntent; import android.content.Intent; import android.content.IntentSender; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.RestrictTo; import android.support.design.widget.TextInputLayout; import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import com.afinos.skd.auth.R; import com.afinos.skd.auth.User; import com.afinos.skd.auth.provider.ProviderUtils; import com.afinos.skd.auth.ui.ExtraConstants; import com.afinos.skd.auth.ui.FlowParameters; import com.afinos.skd.auth.ui.FragmentBase; import com.afinos.skd.auth.ui.ImeHelper; import com.afinos.skd.auth.ui.email.fieldvalidators.EmailFieldValidator; import com.afinos.skd.auth.util.GoogleApiHelper; import com.google.android.gms.auth.api.Auth; import com.google.android.gms.auth.api.credentials.Credential; import com.google.android.gms.auth.api.credentials.CredentialPickerConfig; import com.google.android.gms.auth.api.credentials.HintRequest; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.Task; import com.google.firebase.auth.EmailAuthProvider; import com.google.firebase.auth.FirebaseAuth;

/**
 * Fragment that shows a form with an email field and checks for existing accounts with that email.
 * <p>
 * The entered email is looked up via {@link ProviderUtils#fetchTopProvider} and the result is
 * routed to exactly one of the three {@link CheckEmailListener} callbacks: new user, existing
 * email/password user, or existing IDP (federated) user.
 * <p>
 * Host Activities should implement {@link CheckEmailListener}.
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public class CheckEmailFragment extends FragmentBase
        implements View.OnClickListener, ImeHelper.DonePressedListener {

    /**
     * Interface to be implemented by Activities hosting this Fragment.
     */
    interface CheckEmailListener {

        /**
         * Email entered belongs to an existing email user.
         */
        void onExistingEmailUser(User user);

        /**
         * Email entered belongs to an existing IDP user.
         */
        void onExistingIdpUser(User user);

        /**
         * Email entered does not belong to an existing user.
         */
        void onNewUser(User user);
    }

    public static final String TAG = "CheckEmailFragment";

    // Request codes for the SmartLock hint picker and downstream sign-in flows.
    private static final int RC_HINT = 13;
    private static final int RC_WELCOME_BACK_IDP = 15;
    private static final int RC_SIGN_IN = 16;

    private EditText mEmailEditText;
    private TextInputLayout mEmailLayout;

    private EmailFieldValidator mEmailFieldValidator;
    private CheckEmailListener mListener;

    // Most recent credential returned by the SmartLock hint picker; used to
    // prefill name/photo in checkAccountExists when its email matches.
    private Credential mLastCredential;

    /**
     * Creates a new instance carrying the flow parameters and an optional
     * pre-filled email in its arguments bundle.
     */
    public static CheckEmailFragment newInstance(@NonNull FlowParameters flowParameters,
                                                 @Nullable String email) {
        CheckEmailFragment fragment = new CheckEmailFragment();
        Bundle args = new Bundle();
        args.putParcelable(ExtraConstants.EXTRA_FLOW_PARAMS, flowParameters);
        args.putString(ExtraConstants.EXTRA_EMAIL, email);
        fragment.setArguments(args);
        return fragment;
    }

    @SuppressLint("NewApi") // TODO remove once lint understands Build.VERSION_CODES.O
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater,
                             @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fui_check_email_layout, container, false);

        // Email field and validator
        mEmailLayout = v.findViewById(R.id.email_layout);
        mEmailEditText = v.findViewById(R.id.email);
        mEmailFieldValidator = new EmailFieldValidator(mEmailLayout);
        mEmailLayout.setOnClickListener(this);
        mEmailEditText.setOnClickListener(this);

        // Treat the keyboard "done" action like pressing the Next button.
        ImeHelper.setImeOnDoneListener(mEmailEditText, this);

        // On O+ the system autofill would conflict with the SmartLock hint
        // picker, so disable it when hints are enabled.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && getFlowParams().enableHints) {
            mEmailEditText.setImportantForAutofill(View.IMPORTANT_FOR_AUTOFILL_NO);
        }

        // "Next" button
        v.findViewById(R.id.button_next).setOnClickListener(this);
        return v;
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Set listener; the host activity is required to receive the results.
        if (!(getActivity() instanceof CheckEmailListener)) {
            throw new IllegalStateException("Activity must implement CheckEmailListener");
        }
        mListener = (CheckEmailListener) getActivity();

        // Skip auto-fill/auto-proceed after a configuration change.
        if (savedInstanceState != null) {
            return;
        }

        // Check for email
        String email = getArguments().getString(ExtraConstants.EXTRA_EMAIL);
        if (!TextUtils.isEmpty(email)) {
            // Use email passed in
            mEmailEditText.setText(email);
            validateAndProceed();
        } else if (getFlowParams().enableHints) {
            // Try SmartLock email autocomplete hint
            showEmailAutoCompleteHint();
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // NOTE(review): flag appears to signal that an instance already
        // existed (onActivityCreated skips auto-fill when state is restored);
        // confirm how HAS_EXISTING_INSTANCE is consumed elsewhere.
        outState.putBoolean(ExtraConstants.HAS_EXISTING_INSTANCE, true);
        super.onSaveInstanceState(outState);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        switch (requestCode) {
            case RC_HINT:
                if (data != null) {
                    mLastCredential = data.getParcelableExtra(Credential.EXTRA_KEY);
                    if (mLastCredential != null) {
                        // Get the email from the credential
                        mEmailEditText.setText(mLastCredential.getId());
                        // Attempt to proceed
                        validateAndProceed();
                    }
                }
                break;
            case RC_SIGN_IN:
            case RC_WELCOME_BACK_IDP:
                // Downstream flow finished; propagate its result to our host.
                finish(resultCode, data);
                break;
        }
    }

    /** Runs the email-format validator and, if it passes, starts the account lookup. */
    private void validateAndProceed() {
        String email = mEmailEditText.getText().toString();
        if (mEmailFieldValidator.validate(email)) {
            checkAccountExists(email);
        }
    }

    /**
     * Asynchronously determines which provider (if any) the email is registered
     * with and dispatches to the matching {@link CheckEmailListener} callback.
     * A progress dialog is shown for the duration of the lookup.
     */
    private void checkAccountExists(@NonNull final String email) {
        getDialogHolder().showLoadingDialog(R.string.fui_progress_dialog_checking_accounts);

        // Get name from SmartLock, if possible
        String name = null;
        Uri photoUri = null;
        if (mLastCredential != null && mLastCredential.getId().equals(email)) {
            name = mLastCredential.getName();
            photoUri = mLastCredential.getProfilePictureUri();
        }

        final String finalName = name;
        final Uri finalPhotoUri = photoUri;

        FirebaseAuth auth = getAuthHelper().getFirebaseAuth();
        ProviderUtils.fetchTopProvider(auth, email)
                .addOnSuccessListener(getActivity(), new OnSuccessListener<String>() {
                    @Override
                    public void onSuccess(String provider) {
                        if (provider == null) {
                            // No provider found: this email has no account yet.
                            mListener.onNewUser(new User.Builder(EmailAuthProvider.PROVIDER_ID, email)
                                    .setName(finalName)
                                    .setPhotoUri(finalPhotoUri)
                                    .build());
                        } else if (EmailAuthProvider.PROVIDER_ID.equalsIgnoreCase(provider)) {
                            mListener.onExistingEmailUser(
                                    new User.Builder(EmailAuthProvider.PROVIDER_ID, email).build());
                        } else {
                            mListener.onExistingIdpUser(new User.Builder(provider, email).build());
                        }
                    }
                })
                .addOnCompleteListener(
                        getActivity(),
                        new OnCompleteListener<String>() {
                            @Override
                            public void onComplete(@NonNull Task<String> task) {
                                // Runs for success AND failure: always dismiss the dialog.
                                getDialogHolder().dismissDialog();
                            }
                        });
    }

    /** Launches the SmartLock email hint picker; its result arrives in onActivityResult (RC_HINT). */
    private void showEmailAutoCompleteHint() {
        try {
            startIntentSenderForResult(getEmailHintIntent().getIntentSender(), RC_HINT);
        } catch (IntentSender.SendIntentException e) {
            Log.e(TAG, "Unable to start hint intent", e);
        }
    }

    /** Builds the pending intent for the Credentials API email hint picker. */
    private PendingIntent getEmailHintIntent() {
        GoogleApiClient client = new GoogleApiClient.Builder(getContext())
                .addApi(Auth.CREDENTIALS_API)
                .enableAutoManage(getActivity(), GoogleApiHelper.getSafeAutoManageId(),
                        new GoogleApiClient.OnConnectionFailedListener() {
                            @Override
                            public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
                                Log.e(TAG, "Client connection failed: " + connectionResult.getErrorMessage());
                            }
                        })
                .build();

        HintRequest hintRequest = new HintRequest.Builder()
                .setHintPickerConfig(new CredentialPickerConfig.Builder()
                        .setShowCancelButton(true)
                        .build())
                .setEmailAddressIdentifierSupported(true)
                .build();

        return Auth.CredentialsApi.getHintPickerIntent(client, hintRequest);
    }

    @Override
    public void onClick(View view) {
        int id = view.getId();
        if (id == R.id.button_next) {
            validateAndProceed();
        } else if (id == R.id.email_layout || id == R.id.email) {
            // Clear any previous validation error as soon as the user re-engages the field.
            mEmailLayout.setError(null);
        }
    }

    @Override
    public void onDonePressed() {
        validateAndProceed();
    }
}
apache-2.0
sourcepit/common-manifest
src/gen/emf/org/sourcepit/common/manifest/osgi/impl/ParameterImpl.java
12344
/*
 * Copyright 2014 Bernd Vogt and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sourcepit.common.manifest.osgi.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.sourcepit.common.manifest.impl.ParseableImpl;
import org.sourcepit.common.manifest.osgi.BundleManifestPackage;
import org.sourcepit.common.manifest.osgi.Parameter;
import org.sourcepit.common.manifest.osgi.ParameterType;
import org.sourcepit.common.manifest.osgi.Parameterized;
import org.sourcepit.common.manifest.parser.HeaderParser;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Parameter</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * EMF-generated model class (do not hand-edit methods marked {@code @generated};
 * regeneration will overwrite them). A {@code Parameter} is owned by a
 * {@link Parameterized} container; the container reference is maintained via the
 * EMF inverse-add/remove machinery below.
 * <p>
 * The following features are implemented:
 * <ul>
 * <li>{@link org.sourcepit.common.manifest.osgi.impl.ParameterImpl#getName <em>Name</em>}</li>
 * <li>{@link org.sourcepit.common.manifest.osgi.impl.ParameterImpl#isQuoted <em>Quoted</em>}</li>
 * <li>{@link org.sourcepit.common.manifest.osgi.impl.ParameterImpl#getType <em>Type</em>}</li>
 * <li>{@link org.sourcepit.common.manifest.osgi.impl.ParameterImpl#getParameterized <em>Parameterized</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class ParameterImpl extends ParseableImpl implements Parameter
{
   /**
    * The default value of the '{@link #getName() <em>Name</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #getName()
    * @generated
    * @ordered
    */
   protected static final String NAME_EDEFAULT = null;

   /**
    * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #getName()
    * @generated
    * @ordered
    */
   protected String name = NAME_EDEFAULT;

   /**
    * The default value of the '{@link #isQuoted() <em>Quoted</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #isQuoted()
    * @generated
    * @ordered
    */
   protected static final boolean QUOTED_EDEFAULT = false;

   /**
    * The cached value of the '{@link #isQuoted() <em>Quoted</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #isQuoted()
    * @generated
    * @ordered
    */
   protected boolean quoted = QUOTED_EDEFAULT;

   /**
    * The default value of the '{@link #getType() <em>Type</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #getType()
    * @generated
    * @ordered
    */
   protected static final ParameterType TYPE_EDEFAULT = ParameterType.ATTRIBUTE;

   /**
    * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @see #getType()
    * @generated
    * @ordered
    */
   protected ParameterType type = TYPE_EDEFAULT;

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   protected ParameterImpl()
   {
      super();
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   protected EClass eStaticClass()
   {
      return BundleManifestPackage.Literals.PARAMETER;
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public String getName()
   {
      return name;
   }

   /**
    * Sets the parameter name and fires a SET notification if adapters are attached.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public void setName(String newName)
   {
      String oldName = name;
      name = newName;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, BundleManifestPackage.PARAMETER__NAME, oldName, name));
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public boolean isQuoted()
   {
      return quoted;
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public void setQuoted(boolean newQuoted)
   {
      boolean oldQuoted = quoted;
      quoted = newQuoted;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, BundleManifestPackage.PARAMETER__QUOTED, oldQuoted,
            quoted));
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public ParameterType getType()
   {
      return type;
   }

   /**
    * Sets the parameter type; a {@code null} argument falls back to the default
    * ({@link ParameterType#ATTRIBUTE}) rather than storing {@code null}.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public void setType(ParameterType newType)
   {
      ParameterType oldType = type;
      type = newType == null ? TYPE_EDEFAULT : newType;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, BundleManifestPackage.PARAMETER__TYPE, oldType, type));
   }

   /**
    * Returns the owning {@link Parameterized} container, or {@code null} when this
    * parameter is not currently contained via the PARAMETERIZED feature.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public Parameterized getParameterized()
   {
      if (eContainerFeatureID() != BundleManifestPackage.PARAMETER__PARAMETERIZED)
         return null;
      return (Parameterized) eInternalContainer();
   }

   /**
    * Low-level container assignment; chains notifications without dispatching them.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public NotificationChain basicSetParameterized(Parameterized newParameterized, NotificationChain msgs)
   {
      msgs = eBasicSetContainer((InternalEObject) newParameterized, BundleManifestPackage.PARAMETER__PARAMETERIZED,
         msgs);
      return msgs;
   }

   /**
    * Moves this parameter into {@code newParameterized}, detaching it from any
    * previous container and keeping the inverse PARAMETERS list in sync.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   public void setParameterized(Parameterized newParameterized)
   {
      if (newParameterized != eInternalContainer()
         || (eContainerFeatureID() != BundleManifestPackage.PARAMETER__PARAMETERIZED && newParameterized != null))
      {
         // A container must never be (transitively) contained by this object.
         if (EcoreUtil.isAncestor(this, newParameterized))
            throw new IllegalArgumentException("Recursive containment not allowed for " + toString());
         NotificationChain msgs = null;
         if (eInternalContainer() != null)
            msgs = eBasicRemoveFromContainer(msgs);
         if (newParameterized != null)
            msgs = ((InternalEObject) newParameterized).eInverseAdd(this, BundleManifestPackage.PARAMETERIZED__PARAMETERS,
               Parameterized.class, msgs);
         msgs = basicSetParameterized(newParameterized, msgs);
         if (msgs != null)
            msgs.dispatch();
      }
      else if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, BundleManifestPackage.PARAMETER__PARAMETERIZED,
            newParameterized, newParameterized));
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            if (eInternalContainer() != null)
               msgs = eBasicRemoveFromContainer(msgs);
            return basicSetParameterized((Parameterized) otherEnd, msgs);
      }
      return super.eInverseAdd(otherEnd, featureID, msgs);
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            return basicSetParameterized(null, msgs);
      }
      return super.eInverseRemove(otherEnd, featureID, msgs);
   }

   /**
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public NotificationChain eBasicRemoveFromContainerFeature(NotificationChain msgs)
   {
      switch (eContainerFeatureID())
      {
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            return eInternalContainer().eInverseRemove(this, BundleManifestPackage.PARAMETERIZED__PARAMETERS,
               Parameterized.class, msgs);
      }
      return super.eBasicRemoveFromContainerFeature(msgs);
   }

   /**
    * Reflective feature accessor used by the EMF runtime.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public Object eGet(int featureID, boolean resolve, boolean coreType)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__NAME :
            return getName();
         case BundleManifestPackage.PARAMETER__QUOTED :
            return isQuoted();
         case BundleManifestPackage.PARAMETER__TYPE :
            return getType();
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            return getParameterized();
      }
      return super.eGet(featureID, resolve, coreType);
   }

   /**
    * Reflective feature mutator used by the EMF runtime.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public void eSet(int featureID, Object newValue)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__NAME :
            setName((String) newValue);
            return;
         case BundleManifestPackage.PARAMETER__QUOTED :
            setQuoted((Boolean) newValue);
            return;
         case BundleManifestPackage.PARAMETER__TYPE :
            setType((ParameterType) newValue);
            return;
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            setParameterized((Parameterized) newValue);
            return;
      }
      super.eSet(featureID, newValue);
   }

   /**
    * Resets a feature to its default value.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public void eUnset(int featureID)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__NAME :
            setName(NAME_EDEFAULT);
            return;
         case BundleManifestPackage.PARAMETER__QUOTED :
            setQuoted(QUOTED_EDEFAULT);
            return;
         case BundleManifestPackage.PARAMETER__TYPE :
            setType(TYPE_EDEFAULT);
            return;
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            setParameterized((Parameterized) null);
            return;
      }
      super.eUnset(featureID);
   }

   /**
    * Reports whether a feature currently differs from its default value.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated
    */
   @Override
   public boolean eIsSet(int featureID)
   {
      switch (featureID)
      {
         case BundleManifestPackage.PARAMETER__NAME :
            return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
         case BundleManifestPackage.PARAMETER__QUOTED :
            return quoted != QUOTED_EDEFAULT;
         case BundleManifestPackage.PARAMETER__TYPE :
            return type != TYPE_EDEFAULT;
         case BundleManifestPackage.PARAMETER__PARAMETERIZED :
            return getParameterized() != null;
      }
      return super.eIsSet(featureID);
   }

   /**
    * Renders this parameter via the manifest {@link HeaderParser} when possible;
    * otherwise falls back to the generated EMF debug representation.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    *
    * @generated NOT
    */
   @Override
   public String toString()
   {
      final String string = HeaderParser.INSTANCE.toString(this);
      if (string != null)
      {
         return string;
      }
      if (eIsProxy())
         return super.toString();
      StringBuffer result = new StringBuffer(super.toString());
      result.append(" (name: ");
      result.append(name);
      result.append(", value: ");
      // NOTE(review): 'value' is not declared anywhere in this class; presumably it is
      // a field inherited from ParseableImpl or another generated superclass — verify
      // that it resolves and holds the parameter's value.
      result.append(value);
      result.append(", quoted: ");
      result.append(quoted);
      result.append(", type: ");
      result.append(type);
      result.append(')');
      return result.toString();
   }

} // ParameterImpl
apache-2.0
VinodKumarS-Huawei/ietf96yang
utils/yangutils/plugin/src/main/java/org/onosproject/yangutils/parser/impl/listeners/FractionDigitsListener.java
4947
/* * Copyright 2016-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.yangutils.parser.impl.listeners; /* * Reference: RFC6020 and YANG ANTLR Grammar * * ABNF grammar as per RFC6020 * decimal64-specification = fraction-digits-stmt * * fraction-digits-stmt = fraction-digits-keyword sep * fraction-digits-arg-str stmtend * * fraction-digits-arg-str = < a string that matches the rule * fraction-digits-arg > * * fraction-digits-arg = ("1" ["0" / "1" / "2" / "3" / "4" / * "5" / "6" / "7" / "8"]) * / "2" / "3" / "4" / "5" / "6" / "7" / "8" / "9" * * ANTLR grammar rule * decimal64Specification : FRACTION_DIGITS_KEYWORD fraction STMTEND; * * fraction : string; */ import org.onosproject.yangutils.datamodel.YangDecimal64; import org.onosproject.yangutils.datamodel.YangRangeRestriction; import org.onosproject.yangutils.datamodel.YangType; import org.onosproject.yangutils.datamodel.utils.Parsable; import org.onosproject.yangutils.parser.antlrgencode.GeneratedYangParser; import org.onosproject.yangutils.parser.exceptions.ParserException; import org.onosproject.yangutils.parser.impl.TreeWalkListener; import static org.onosproject.yangutils.datamodel.utils.YangConstructType.FRACTION_DIGITS_DATA; import static org.onosproject.yangutils.parser.impl.parserutils.ListenerErrorLocation.ENTRY; import static org.onosproject.yangutils.parser.impl.parserutils.ListenerErrorMessageConstruction.constructListenerErrorMessage; import static 
org.onosproject.yangutils.parser.impl.parserutils.ListenerErrorType.INVALID_HOLDER; import static org.onosproject.yangutils.parser.impl.parserutils.ListenerErrorType.MISSING_HOLDER; import static org.onosproject.yangutils.parser.impl.parserutils.ListenerValidation.checkStackIsNotEmpty; /** * Represents listener based call back function corresponding to the "fraction-digits" * rule defined in ANTLR grammar file for corresponding ABNF rule in RFC 6020. */ public final class FractionDigitsListener { /** * Creates a new bit listener. */ private FractionDigitsListener() { } /** * It is called when parser enters grammar rule (fraction-digits), it perform * validations and updates the data model tree. * * @param listener listener's object * @param ctx context object of the grammar rule */ public static void processFractionDigitsEntry(TreeWalkListener listener, GeneratedYangParser.FractionDigitStatementContext ctx) { // Check for stack to be non empty. checkStackIsNotEmpty(listener, MISSING_HOLDER, FRACTION_DIGITS_DATA, ctx.fraction().getText(), ENTRY); int value = getValidFractionDigits(ctx); Parsable tmpNode = listener.getParsedDataStack().peek(); if (tmpNode instanceof YangType) { YangType<YangDecimal64<YangRangeRestriction>> typeNode = (YangType<YangDecimal64<YangRangeRestriction>>) tmpNode; YangDecimal64 decimal64Node = typeNode.getDataTypeExtendedInfo(); decimal64Node.setFractionDigit(value); } else { throw new ParserException(constructListenerErrorMessage(INVALID_HOLDER, FRACTION_DIGITS_DATA, ctx.fraction().getText(), ENTRY)); } } /** * Validate fraction digits. 
* * @param ctx context object of the grammar rule * @return validated fraction-digits */ public static int getValidFractionDigits(GeneratedYangParser.FractionDigitStatementContext ctx) { String value = ctx.fraction().getText().trim(); ParserException parserException; int fractionDigits = Integer.parseInt(value); if ((fractionDigits >= YangDecimal64.MIN_FRACTION_DIGITS_VALUE) && (fractionDigits <= YangDecimal64.MAX_FRACTION_DIGITS_VALUE)) { return fractionDigits; } else { parserException = new ParserException("YANG file error : fraction-digits value should be between 1 and 18."); parserException.setLine(ctx.getStart().getLine()); parserException.setCharPosition(ctx.getStart().getCharPositionInLine()); throw parserException; } } }
apache-2.0
Archinamon/persefone
src/main/java/mobi/anoda/archinamon/kernel/persefone/model/ISignalEvaluable.java
334
package mobi.anoda.archinamon.kernel.persefone.model; import com.google.common.collect.ImmutableList; import org.apache.http.NameValuePair; /** * @author: Archinamon * @project: FavorMe */ public interface ISignalEvaluable<Elem extends NameValuePair> { ISignalEvaluable packModel(); ImmutableList<Elem> getPackage(); }
apache-2.0
maxml/JULS
juls-parent/juls-persist/src/main/java/com/juls/persist/CartGoodDAOImpl.java
1468
package com.juls.persist; import java.util.List; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.Transaction; import com.juls.model.CartGood; import com.juls.model.CartGoodId; public class CartGoodDAOImpl implements IDAO<CartGood>{ private static SessionFactory sessionFactory = HibernateUtil.createSessionFactory(); public List<CartGood> getAll() { throw new UnsupportedOperationException(); } public CartGood getById(String id) { throw new UnsupportedOperationException(); } public CartGood getById(CartGoodId id) { Session session = sessionFactory.openSession(); Transaction transaction = session.beginTransaction(); CartGood cartGood = (CartGood) session.get(CartGood.class, id); transaction.commit(); session.close(); return cartGood; } public boolean insert(CartGood value) { throw new UnsupportedOperationException(); } public boolean update(CartGood value) { Session session = sessionFactory.openSession(); Transaction transaction = session.beginTransaction(); session.update(value); transaction.commit(); session.close(); return true; } public boolean delete(CartGood value) { Session session = sessionFactory.openSession(); Transaction transaction = session.beginTransaction(); session.delete(value); transaction.commit(); session.close(); return true; } }
apache-2.0
sdeleuze/reactor-core
reactor-core/src/test/java/reactor/core/publisher/FluxWindowPredicateTest.java
33565
/* * Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package reactor.core.publisher; import java.time.Duration; import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Predicate; import java.util.logging.Level; import org.assertj.core.api.Assertions; import org.junit.Assert; import org.junit.Test; import org.reactivestreams.Subscription; import reactor.core.CoreSubscriber; import reactor.core.Fuseable; import reactor.core.Scannable; import reactor.core.publisher.FluxBufferPredicate.Mode; import reactor.test.StepVerifier; import reactor.test.StepVerifierOptions; import reactor.test.publisher.FluxOperatorTest; import reactor.test.publisher.TestPublisher; import reactor.util.concurrent.Queues; import static org.assertj.core.api.Assertions.assertThat; public class FluxWindowPredicateTest extends FluxOperatorTest<String, Flux<String>> { @Override protected Scenario<String, Flux<String>> defaultScenarioOptions(Scenario<String, Flux<String>> defaultOptions) { return defaultOptions.shouldAssertPostTerminateState(false) .fusionMode(Fuseable.ASYNC) .fusionModeThreadBarrier(Fuseable.ANY) .prefetch(Queues.SMALL_BUFFER_SIZE); } @Override protected List<Scenario<String, Flux<String>>> scenarios_operatorSuccess() { return Arrays.asList( scenario(f -> f.windowUntil(t -> true, true, 1)) .prefetch(1) .receive(s -> s.buffer().subscribe(b -> Assert.fail()), s -> 
s.buffer().subscribe(b -> assertThat(b).containsExactly(item(0))), s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(1))), s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(2)))), scenario(f -> f.windowWhile(t -> true)) .receive(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(0), item(1), item(2)))), scenario(f -> f.windowUntil(t -> true)) .receive(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(0))), s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(1))), s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(2))), s -> s.buffer().subscribe(b -> assertThat(b).isEmpty())), // scenario(f -> f.windowUntil(t -> true, false, 1)) // .prefetch(1) // .verifier(step -> step.consumeNextWith(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(0)))) // .consumeNextWith(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(1)))) // .consumeNextWith(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(2)))) // .thenRequest(3) // .consumeNextWith(s -> s.buffer().subscribe(b -> assertThat(b).isEmpty())) // .verifyComplete()), scenario(f -> f.windowUntil(t -> false)) .receive(s -> s.buffer().subscribe(b -> assertThat(b).containsExactly(item(0), item(1), item(2)))) ); } @Override protected List<Scenario<String, Flux<String>>> scenarios_operatorError() { return Arrays.asList( scenario(f -> f.windowWhile(t -> { throw exception(); })) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))), scenario(f -> f.windowUntil(t -> { throw exception(); })) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))), scenario(f -> f.windowUntil(t -> { throw exception(); }, true)) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))) ); } @Override protected List<Scenario<String, Flux<String>>> scenarios_errorFromUpstreamFailure() { return Arrays.asList( scenario(f -> f.windowWhile(t -> 
true)) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))), scenario(f -> f.windowUntil(t -> true)) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))), scenario(f -> f.windowUntil(t -> true, true)) .receive(s -> s.buffer().subscribe(null, e -> assertThat(e).hasMessage("test"))) ); } @Test public void apiUntil() { StepVerifier.create(Flux.just("red", "green", "#", "orange", "blue", "#", "black", "white") .windowUntil(color -> color.equals("#")) .flatMap(Flux::materialize) .map(s -> s.isOnComplete() ? "WINDOW CLOSED" : s.get())) .expectNext("red", "green", "#", "WINDOW CLOSED") .expectNext("orange", "blue", "#", "WINDOW CLOSED") .expectNext("black", "white", "WINDOW CLOSED") .verifyComplete(); } @Test public void apiUntilCutAfter() { StepVerifier.create(Flux.just("red", "green", "#", "orange", "blue", "#", "black", "white") .windowUntil(color -> color.equals("#"), false) .flatMap(Flux::materialize) .map(s -> s.isOnComplete() ? "WINDOW CLOSED" : s.get())) .expectNext("red", "green", "#", "WINDOW CLOSED") .expectNext("orange", "blue", "#", "WINDOW CLOSED") .expectNext("black", "white", "WINDOW CLOSED") .verifyComplete(); } @Test public void apiUntilCutBefore() { StepVerifier.create(Flux.just("red", "green", "#", "orange", "blue", "#", "black", "white") .windowUntil(color -> color.equals("#"), true) .flatMap(Flux::materialize) .map(s -> s.isOnComplete() ? "WINDOW CLOSED" : s.get())) .expectNext("red", "green", "WINDOW CLOSED", "#") .expectNext("orange", "blue", "WINDOW CLOSED", "#") .expectNext("black", "white", "WINDOW CLOSED") .verifyComplete(); } @Test public void apiWhile() { StepVerifier.create(Flux.just("red", "green", "#", "orange", "blue", "#", "black", "white") .windowWhile(color -> !color.equals("#")) .flatMap(Flux::materialize) .map(s -> s.isOnComplete() ? 
"WINDOW CLOSED" : s.get())) .expectNext("red", "green", "WINDOW CLOSED") .expectNext("orange", "blue", "WINDOW CLOSED") .expectNext("black", "white", "WINDOW CLOSED") .verifyComplete(); } @Test public void normalUntil() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowUntil = new FluxWindowPredicate<>(sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.UNTIL); StepVerifier.create(windowUntil.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.next(3), Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onNext(5)) .expectNext(Signal.next(5)) .then(() -> sp1.onNext(6)) .expectNext(Signal.next(6), Signal.complete()) .then(() -> sp1.onNext(7)) .expectNext(Signal.next(7)) .then(() -> sp1.onNext(8)) .expectNext(Signal.next(8)) .then(sp1::onComplete) .expectNext(Signal.complete()) .verifyComplete(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void onCompletionBeforeLastBoundaryWindowEmitted() { Flux<Integer> source = Flux.just(1, 2); FluxWindowPredicate<Integer> windowUntil = new FluxWindowPredicate<>(source, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i >= 3, Mode.UNTIL); FluxWindowPredicate<Integer> windowUntilCutBefore = new FluxWindowPredicate<>(source, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i >= 3, Mode.UNTIL_CUT_BEFORE); FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>(source, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i < 3, Mode.WHILE); StepVerifier.create(windowUntil.flatMap(Flux::collectList)) .expectNext(Arrays.asList(1, 2)) .expectComplete() .verify(); StepVerifier.create(windowUntilCutBefore.flatMap(Flux::collectList)) .expectNext(Arrays.asList(1, 2)) .expectComplete() .verify(); 
StepVerifier.create(windowWhile.flatMap(Flux::collectList)) .expectNext(Arrays.asList(1, 2)) .expectComplete() .verify(); } @Test public void mainErrorUntilIsPropagatedToBothWindowAndMain() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowUntil = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.UNTIL); StepVerifier.create(windowUntil.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.next(3), Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onError(new RuntimeException("forced failure"))) //this is the error in the window: .expectNextMatches(signalErrorMessage("forced failure")) //this is the error in the main: .expectErrorMessage("forced failure") .verify(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void predicateErrorUntil() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowUntil = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> { if (i == 5) throw new IllegalStateException("predicate failure"); return i % 3 == 0; }, Mode.UNTIL); StepVerifier.create(windowUntil.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.next(3), Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onNext(5)) //error in the window: .expectNextMatches(signalErrorMessage("predicate failure")) .expectErrorMessage("predicate failure") .verify(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void normalUntilCutBefore() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); 
FluxWindowPredicate<Integer> windowUntilCutBefore = new FluxWindowPredicate<>(sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.UNTIL_CUT_BEFORE); StepVerifier.create(windowUntilCutBefore.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete(), Signal.next(3)) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onNext(5)) .expectNext(Signal.next(5)) .then(() -> sp1.onNext(6)) .expectNext(Signal.complete(), Signal.next(6)) .then(() -> sp1.onNext(7)) .expectNext(Signal.next(7)) .then(() -> sp1.onNext(8)) .expectNext(Signal.next(8)) .then(sp1::onComplete) .expectNext(Signal.complete()) .verifyComplete(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void mainErrorUntilCutBeforeIsPropagatedToBothWindowAndMain() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowUntilCutBefore = new FluxWindowPredicate<>(sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.UNTIL_CUT_BEFORE); StepVerifier.create(windowUntilCutBefore.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete()) .expectNext(Signal.next(3)) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onError(new RuntimeException("forced failure"))) //this is the error in the window: .expectNextMatches(signalErrorMessage("forced failure")) //this is the error in the main: .expectErrorMessage("forced failure") .verify(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void predicateErrorUntilCutBefore() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowUntilCutBefore = new 
FluxWindowPredicate<>(sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> { if (i == 5) throw new IllegalStateException("predicate failure"); return i % 3 == 0; }, Mode.UNTIL_CUT_BEFORE); StepVerifier.create(windowUntilCutBefore.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete(), Signal.next(3)) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onNext(5)) //error in the window: .expectNextMatches(signalErrorMessage("predicate failure")) .expectErrorMessage("predicate failure") .verify(); assertThat(sp1.hasDownstreams()).isFalse(); } private <T> Predicate<? super Signal<T>> signalErrorMessage(String expectedMessage) { return signal -> signal.isOnError() && signal.getThrowable() != null && expectedMessage.equals(signal.getThrowable().getMessage()); } @Test public void normalWhile() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 != 0, Mode.WHILE); StepVerifier.create(windowWhile.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.next(1)) .then(() -> sp1.onNext(2)) .expectNext(Signal.next(2)) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.next(4)) .then(() -> sp1.onNext(5)) .expectNext(Signal.next(5)) .then(() -> sp1.onNext(6)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(7)) .expectNext(Signal.next(7)) .then(() -> sp1.onNext(8)) .expectNext(Signal.next(8)) .then(sp1::onComplete) .expectNext(Signal.complete()) .verifyComplete(); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void normalWhileDoesntInitiallyMatch() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); 
FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.WHILE); StepVerifier.create(windowWhile.flatMap(Flux::materialize)) .expectSubscription() .expectNoEvent(Duration.ofMillis(10)) .then(() -> sp1.onNext(1)) //closes initial, open 2nd .expectNext(Signal.complete()) .then(() -> sp1.onNext(2)) //closes second, open 3rd .expectNext(Signal.complete()) .then(() -> sp1.onNext(3)) //emits 3 .expectNext(Signal.next(3)) .expectNoEvent(Duration.ofMillis(10)) .then(() -> sp1.onNext(4)) //closes 3rd, open 4th .expectNext(Signal.complete()) .then(() -> sp1.onNext(5)) //closes 4th, open 5th .expectNext(Signal.complete()) .then(() -> sp1.onNext(6)) //emits 6 .expectNext(Signal.next(6)) .expectNoEvent(Duration.ofMillis(10)) .then(() -> sp1.onNext(7)) //closes 5th, open 6th .expectNext(Signal.complete()) .then(() -> sp1.onNext(8)) //closes 6th, open 7th .expectNext(Signal.complete()) .then(() -> sp1.onNext(9)) //emits 9 .expectNext(Signal.next(9)) .expectNoEvent(Duration.ofMillis(10)) .then(sp1::onComplete) // completion triggers completion of the last window (7th) .expectNext(Signal.complete()) .expectComplete() .verify(Duration.ofSeconds(1)); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void normalWhileDoesntMatch() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i > 4, Mode.WHILE); StepVerifier.create(windowWhile.flatMap(Flux::materialize)) .expectSubscription() .expectNoEvent(Duration.ofMillis(10)) .then(() -> sp1.onNext(1)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(2)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.complete()) .expectNoEvent(Duration.ofMillis(10)) .then(() -> sp1.onNext(1)) 
.expectNext(Signal.complete()) .then(() -> sp1.onNext(2)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(3)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(4)) .expectNext(Signal.complete()) //closing window opened by 3 .expectNoEvent(Duration.ofMillis(10)) .then(sp1::onComplete) .expectNext(Signal.complete()) //closing window opened by 4 .expectComplete() .verify(Duration.ofSeconds(1)); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void mainErrorWhileIsPropagatedToBothWindowAndMain() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> i % 3 == 0, Mode.WHILE); StepVerifier.create(windowWhile.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(2)) .expectNext(Signal.complete()) .then(() -> sp1.onNext(3)) //at this point, new window, need another data to close it .then(() -> sp1.onNext(4)) .expectNext(Signal.next(3), Signal.complete()) .then(() -> sp1.onError(new RuntimeException("forced failure"))) //this is the error in the window: .expectNextMatches(signalErrorMessage("forced failure")) //this is the error in the main: .expectErrorMessage("forced failure") .verify(Duration.ofMillis(100)); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void whileStartingSeveralSeparatorsEachCreateEmptyWindow() { StepVerifier.create(Flux.just("#") .repeat(10) .concatWith(Flux.just("other", "value")) .windowWhile(s -> !s.equals("#")) .flatMap(Flux::count) ) .expectNext(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L) .expectNext(2L) .verifyComplete(); } @Test public void whileOnlySeparatorsGivesSequenceOfWindows() { StepVerifier.create(Flux.just("#") .repeat(10) .windowWhile(s -> !s.equals("#")) .flatMap(w -> w.count())) .expectNext(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L) .expectNext(0L) //"remainder" window .verifyComplete(); } 
@Test public void predicateErrorWhile() { DirectProcessor<Integer> sp1 = DirectProcessor.create(); FluxWindowPredicate<Integer> windowWhile = new FluxWindowPredicate<>( sp1, Queues.small(), Queues.unbounded(), Queues.SMALL_BUFFER_SIZE, i -> { if (i == 3) return true; if (i == 5) throw new IllegalStateException("predicate failure"); return false; }, Mode.WHILE); StepVerifier.create(windowWhile.flatMap(Flux::materialize)) .expectSubscription() .then(() -> sp1.onNext(1)) //empty window .expectNext(Signal.complete()) .then(() -> sp1.onNext(2)) //empty window .expectNext(Signal.complete()) .then(() -> sp1.onNext(3)) //window opens .expectNext(Signal.next(3)) .then(() -> sp1.onNext(4)) //previous window closes, new (empty) window .expectNext(Signal.complete()) .then(() -> sp1.onNext(5)) //fails, the empty window receives onError //error in the window: .expectNextMatches(signalErrorMessage("predicate failure")) .expectErrorMessage("predicate failure") .verify(Duration.ofMillis(100)); assertThat(sp1.hasDownstreams()).isFalse(); } @Test public void whileRequestOneByOne() { StepVerifier.create(Flux.just("red", "green", "#", "orange", "blue", "#", "black", "white") .hide() .windowWhile(color -> !color.equals("#")) .flatMap(w -> w, 1), 1) .expectNext("red") .thenRequest(1) .expectNext("green") .thenRequest(1) .expectNext("orange") .thenRequest(1) .expectNext("blue") .thenRequest(1) .expectNext("black") .thenRequest(1) .expectNext("white") .thenRequest(1) .verifyComplete(); } @Test public void mismatchAtBeginningUntil() { StepVerifier.create(Flux.just("#", "red", "green") .windowUntil(s -> s.equals("#")) .flatMap(Flux::materialize) .map(sig -> sig.isOnComplete() ? "END" : sig.get())) .expectNext("#", "END") .expectNext("red", "green", "END") .verifyComplete(); } @Test public void mismatchAtBeginningUntilCutBefore() { StepVerifier.create(Flux.just("#", "red", "green") .windowUntil(s -> s.equals("#"), true) .flatMap(Flux::materialize) .map(sig -> sig.isOnComplete() ? 
"END" : sig.get())) .expectNext("END") .expectNext("#", "red", "green", "END") .verifyComplete(); } @Test public void mismatchAtBeginningWhile() { StepVerifier.create(Flux.just("#", "red", "green") .windowWhile(s -> !s.equals("#")) .flatMap(Flux::materialize) .map(sig -> sig.isOnComplete() ? "END" : sig.get())) .expectNext("END") .expectNext("red", "green", "END") .verifyComplete(); } @Test public void innerCancellationCancelsMainSequence() { StepVerifier.create(Flux.just("red", "green", "#", "black", "white") .log() .windowWhile(s -> !s.equals("#")) .flatMap(w -> w.take(1))) .expectNext("red") .thenCancel() .verify(); } @Test public void prefetchIntegerMaxIsRequestUnboundedUntil() { TestPublisher<?> tp = TestPublisher.create(); tp.flux().windowUntil(s -> true, true, Integer.MAX_VALUE).subscribe(); tp.assertMinRequested(Long.MAX_VALUE); } @Test public void prefetchIntegerMaxIsRequestUnboundedWhile() { TestPublisher<?> tp = TestPublisher.create(); tp.flux().windowWhile(s -> true, Integer.MAX_VALUE).subscribe(); tp.assertMinRequested(Long.MAX_VALUE); } @Test public void manualRequestWindowUntilOverRequestingSourceByPrefetch() { AtomicLong req = new AtomicLong(); int prefetch = 4; Flux<Integer> source = Flux.range(1, 20) .doOnRequest(req::addAndGet) .log("source", Level.FINE) .hide(); StepVerifier.create(source.windowUntil(i -> i % 5 == 0, false, prefetch) .concatMap(w -> w, 1) .log("downstream", Level.FINE), 0) .thenRequest(2) .expectNext(1, 2) .thenRequest(6) .expectNext(3, 4, 5, 6, 7, 8) .expectNoEvent(Duration.ofMillis(100)) .thenCancel() .verify(); assertThat(req.get()).isEqualTo(8 + prefetch); } @Test public void manualRequestWindowWhileOverRequestingSourceByPrefetch() { AtomicLong req = new AtomicLong(); int prefetch = 4; Flux<Integer> source = Flux.range(1, 20) .doOnRequest(req::addAndGet) .log("source", Level.FINE) .hide(); StepVerifier.create(source.windowWhile(i -> i % 5 != 0, prefetch) .concatMap(w -> w.log("window", Level.FINE), 1) .log("downstream", 
Level.FINE), 0) .thenRequest(2) .expectNext(1, 2) .thenRequest(6) .expectNext(3, 4, 6, 7, 8, 9) .expectNoEvent(Duration.ofMillis(100)) .thenCancel() .verify(); assertThat(req.get()).isEqualTo(12 + prefetch); //9 forwarded elements, 2 // delimiters, 1 cancel and prefetch } // see https://github.com/reactor/reactor-core/issues/477 @Test public void windowWhileOneByOneStartingDelimiterReplenishes() { AtomicLong req = new AtomicLong(); Flux<String> source = Flux.just("#", "1A", "1B", "1C", "#", "2A", "2B", "2C", "2D", "#", "3A").hide(); StepVerifier.create( source .doOnRequest(req::addAndGet) .log("source", Level.FINE) .windowWhile(s -> !"#".equals(s), 2) .log("windowWhile", Level.FINE) .concatMap(w -> w.collectList() .log("window", Level.FINE) , 1) .log("downstream", Level.FINE) , StepVerifierOptions.create().checkUnderRequesting(false).initialRequest(1)) .expectNextMatches(List::isEmpty) .thenRequest(1) .assertNext(l -> assertThat(l).containsExactly("1A", "1B", "1C")) .thenRequest(1) .assertNext(l -> assertThat(l).containsExactly("2A", "2B", "2C", "2D")) .thenRequest(1) .assertNext(l -> assertThat(l).containsExactly("3A")) .expectComplete() .verify(Duration.ofSeconds(1)); assertThat(req.get()).isEqualTo(13); //11 elements + the prefetch } // see https://github.com/reactor/reactor-core/issues/477 @Test public void windowWhileUnboundedStartingDelimiterReplenishes() { AtomicLong req = new AtomicLong(); Flux<String> source = Flux.just("#", "1A", "1B", "1C", "#", "2A", "2B", "2C", "2D", "#", "3A").hide(); StepVerifier.create( source .doOnRequest(req::addAndGet) .log("source", Level.FINE) .windowWhile(s -> !"#".equals(s), 2) .log("windowWhile", Level.FINE) .concatMap(w -> w.collectList() .log("window", Level.FINE) , 1) .log("downstream", Level.FINE) ) .expectNextMatches(List::isEmpty) .assertNext(l -> assertThat(l).containsExactly("1A", "1B", "1C")) .assertNext(l -> assertThat(l).containsExactly("2A", "2B", "2C", "2D")) .assertNext(l -> assertThat(l).containsExactly("3A")) 
.expectComplete() .verify(Duration.ofSeconds(1)); assertThat(req.get()).isEqualTo(13); //11 elements + the prefetch } @Test public void windowUntilUnboundedStartingDelimiterReplenishes() { AtomicLong req = new AtomicLong(); Flux<String> source = Flux.just("#", "1A", "1B", "1C", "#", "2A", "2B", "2C", "2D", "#", "3A").hide(); StepVerifier.create( source .doOnRequest(req::addAndGet) .log("source", Level.FINE) .windowUntil(s -> "#".equals(s), false, 2) .log("windowUntil", Level.FINE) .concatMap(w -> w.collectList() .log("window", Level.FINE) , 1) .log("downstream", Level.FINE) ) .assertNext(l -> assertThat(l).containsExactly("#")) .assertNext(l -> assertThat(l).containsExactly("1A", "1B", "1C", "#")) .assertNext(l -> assertThat(l).containsExactly("2A", "2B", "2C", "2D", "#")) .assertNext(l -> assertThat(l).containsExactly("3A")) .expectComplete() .verify(Duration.ofSeconds(1)); assertThat(req.get()).isEqualTo(13); //11 elements + the prefetch } @Test public void scanMainSubscriber() { CoreSubscriber<Flux<Integer>> actual = new LambdaSubscriber<>(null, e -> {}, null, null); FluxWindowPredicate.WindowPredicateMain<Integer> test = new FluxWindowPredicate.WindowPredicateMain<>(actual, Queues.<Flux<Integer>>unbounded().get(), Queues.unbounded(), 123, i -> true, Mode.WHILE); Subscription parent = Operators.emptySubscription(); test.onSubscribe(parent); Assertions.assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent); Assertions.assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual); Assertions.assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(123); test.requested = 35; Assertions.assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(35); test.queue.offer(Flux.just(1).groupBy(i -> i).blockFirst()); Assertions.assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(1); Assertions.assertThat(test.scan(Scannable.Attr.ERROR)).isNull(); test.error = new IllegalStateException("boom"); 
Assertions.assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom"); Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse(); test.onComplete(); Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue(); Assertions.assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse(); test.cancel(); Assertions.assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue(); } @Test public void scanOtherSubscriber() { CoreSubscriber<Flux<Integer>> actual = new LambdaSubscriber<>(null, e -> {}, null, null); FluxWindowPredicate.WindowPredicateMain<Integer> main = new FluxWindowPredicate.WindowPredicateMain<>(actual, Queues.<Flux<Integer>>unbounded().get(), Queues.unbounded(), 123, i -> true, Mode.WHILE); FluxWindowPredicate.WindowFlux<Integer> test = new FluxWindowPredicate.WindowFlux<>( Queues.<Integer>unbounded().get(), main); Subscription parent = Operators.emptySubscription(); test.onSubscribe(parent); Assertions.assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(main); Assertions.assertThat(test.scan(Scannable.Attr.ACTUAL)).isNull(); // RS: TODO Need to make actual non-null test.requested = 35; Assertions.assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(35); test.queue.offer(27); Assertions.assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(1); Assertions.assertThat(test.scan(Scannable.Attr.ERROR)).isNull(); test.error = new IllegalStateException("boom"); Assertions.assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom"); Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse(); test.onComplete(); Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue(); Assertions.assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse(); test.cancel(); Assertions.assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue(); } }
apache-2.0
yauritux/venice-legacy
Venice/Venice-Interface-Model/src/main/java/com/gdn/venice/persistence/KpiPartyTarget.java
2708
package com.gdn.venice.persistence;

import java.io.Serializable;

import javax.persistence.*;

/**
 * JPA entity mapped to the {@code kpi_party_target} database table.
 *
 * <p>The surrogate primary key is generated from the shared {@code openjpaseq}
 * sequence table; {@code allocationSize=1} flushes the generator on every
 * insert.</p>
 */
@Entity
@Table(name="kpi_party_target")
public class KpiPartyTarget implements Serializable {

	private static final long serialVersionUID = 1L;

	/** Surrogate primary key, generated via the {@code openjpaseq} table. */
	@Id
	@GeneratedValue(strategy=GenerationType.TABLE, generator="kpi_party_target")
	@TableGenerator(name="kpi_party_target", table="openjpaseq",
			pkColumnName="id", valueColumnName="sequence_value",
			allocationSize=1) // flush every 1 insert
	@Column(name="kpi_party_target_id", unique=true, nullable=false)
	private Long kpiPartyTargetId;

	/** Numeric target value for this KPI entry. */
	@Column(name="kpi_target_value", nullable=false)
	private Integer kpiTargetValue;

	/** Bi-directional many-to-one association to KpiKeyPerformanceIndicator. */
	@ManyToOne
	@JoinColumn(name="kpi_id", nullable=false)
	private KpiKeyPerformanceIndicator kpiKeyPerformanceIndicator;

	/** Bi-directional many-to-one association to KpiPartySla. */
	@ManyToOne
	@JoinColumn(name="party_sla_id", nullable=false)
	private KpiPartySla kpiPartySla;

	/** Bi-directional many-to-one association to KpiTargetBaseline. */
	@ManyToOne
	@JoinColumn(name="target_baseline_id", nullable=false)
	private KpiTargetBaseline kpiTargetBaseline;

	public KpiPartyTarget() {
	}

	public Long getKpiPartyTargetId() {
		return kpiPartyTargetId;
	}

	public void setKpiPartyTargetId(Long kpiPartyTargetId) {
		this.kpiPartyTargetId = kpiPartyTargetId;
	}

	public Integer getKpiTargetValue() {
		return kpiTargetValue;
	}

	public void setKpiTargetValue(Integer kpiTargetValue) {
		this.kpiTargetValue = kpiTargetValue;
	}

	public KpiKeyPerformanceIndicator getKpiKeyPerformanceIndicator() {
		return kpiKeyPerformanceIndicator;
	}

	public void setKpiKeyPerformanceIndicator(KpiKeyPerformanceIndicator kpiKeyPerformanceIndicator) {
		this.kpiKeyPerformanceIndicator = kpiKeyPerformanceIndicator;
	}

	public KpiPartySla getKpiPartySla() {
		return kpiPartySla;
	}

	public void setKpiPartySla(KpiPartySla kpiPartySla) {
		this.kpiPartySla = kpiPartySla;
	}

	public KpiTargetBaseline getKpiTargetBaseline() {
		return kpiTargetBaseline;
	}

	public void setKpiTargetBaseline(KpiTargetBaseline kpiTargetBaseline) {
		this.kpiTargetBaseline = kpiTargetBaseline;
	}
}
apache-2.0
jasjisdo/spark-newsreel-recommender
metarecommender/src/test/java/de/dailab/newsreel/recommender/metarecommender/util/UpdateableTreeSetTest.java
1847
//package de.dailab.newsreel.recommender.metarecommender.util; // //import de.dailab.newsreel.recommender.common.util.UpdateableTreeSet; //import de.dailab.newsreel.recommender.metarecommender.rank.RecommenderRank; //import org.junit.Before; //import org.junit.Test; // //import java.util.HashMap; //import java.util.Map; //import java.util.SortedSet; // //import static org.junit.Assert.assertEquals; //import static org.junit.Assert.assertTrue; // ///** // * Created by domann on 21.12.15. // */ //public class UpdateableTreeSetTest { // // private static UpdateableTreeSet<RecommenderRank> ranking; // // @Before // public void setUp() throws Exception { // ranking = new UpdateableTreeSet<RecommenderRank>(); // ranking.add(new RecommenderRank("CF", 10)); // ranking.add(new RecommenderRank("IBF", 5)); // ranking.add(new RecommenderRank("MP", 13)); // } // // @Test // public void nameUnique() { // assertEquals(3, ranking.size()); // ranking.add(new RecommenderRank("CF", 9)); // assertEquals(3, ranking.size()); // } // // @Test // public void initialRanking() { // assertEquals("MP", ranking.first().getRecommenderName()); // assertEquals("IBF", ranking.last().getRecommenderName()); // } // // private boolean isSortOrderOk(SortedSet<RecommenderRank> ranking) { // // Check total sort order by comparing pairs of neighbours // RecommenderRank previousMedalCount = null; // for (RecommenderRank currentMedalCount : ranking) { // if (previousMedalCount != null && previousMedalCount.compareTo(currentMedalCount) > 0) // return false; // previousMedalCount = currentMedalCount; // } // return true; // } // // @Test // public void bulkUpdate() { // // } // //}
apache-2.0
ahxm/cloud
cloud-util/src/main/java/registry/util/SystemClock.java
2073
package registry.util; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * Created by anzhen on 2016/5/3. */ public class SystemClock { private static final SystemClock instance = new SystemClock(); private final long precision; private final AtomicLong now; private ScheduledExecutorService scheduledExecutorService; public static SystemClock getInstance() { return instance; } public SystemClock() { this(1L); } public long now() { return now.get(); } public long precision() { return precision; } public SystemClock(long precision) { this.precision = precision; now = new AtomicLong(System.currentTimeMillis()); scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r,"System_Clock"); thread.setDaemon(true); return thread; } }); scheduledExecutorService.scheduleAtFixedRate(new Timer(now),precision,precision, TimeUnit.MILLISECONDS); } protected class Timer implements Runnable{ private final AtomicLong now; public Timer(AtomicLong now) { this.now = now; } /** * When an object implementing interface <code>Runnable</code> is used * to create a thread, starting the thread causes the object's * <code>run</code> method to be called in that separately executing * thread. * <p/> * The general contract of the method <code>run</code> is that it may * take any action whatsoever. * * @see Thread#run() */ @Override public void run() { now.set(System.currentTimeMillis()); } } }
apache-2.0
liuzedong/learning
my-jdk8/src/main/java/com/dongdongxia/my/jdk8/uitl/SampleRB.java
654
package com.dongdongxia.my.jdk8.uitl; import java.util.ListResourceBundle; /** * * <P> * Description: 中文默认资源包 * </P> * * @ClassName: SampleRB * @author java_liudong@163.com 2017年11月15日 上午10:46:14 */ public class SampleRB extends ListResourceBundle { @Override protected Object[][] getContents() { Object[][] resources = new Object[3][2]; resources[0][0] = "title"; resources[0][1] = "我的程序"; resources[1][0] = "StopText"; resources[1][1] = "停止"; resources[2][0] = "StartText"; resources[2][1] = "开始"; return resources; } }
apache-2.0
nirima/jDocker
src/main/java/com/nirima/docker/api/MiscClient.java
6709
package com.nirima.docker.api;

import com.nirima.docker.client.model.Event;
import com.nirima.docker.client.model.IdResponse;
import com.nirima.docker.client.model.Info;
import com.nirima.docker.client.model.Version;

import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;

/**
 * Client bindings for the "Misc" section (2.3) of the Docker remote API.
 *
 * @author Nigel Magnay [nigel.magnay@gmail.com]
 */
public interface MiscClient {

    /**
     * {@code POST /build} — build an image from a Dockerfile.
     *
     * <p>The request body must be a tar archive (optionally compressed with
     * gzip, bzip2 or xz) containing a {@code Dockerfile} at its root; any
     * other files in the archive form the build context.</p>
     *
     * @param repositoryNameAndTag repository name (and optionally a tag) to
     *        apply to the resulting image on success ({@code t})
     * @param supressVerboseOutput suppress verbose build output ({@code q})
     * @param suppressCache do not use the cache when building ({@code nocache})
     * @param removeIntermediateContainers remove intermediate containers after
     *        a successful build ({@code rm})
     * @param alwaysRemoveIntermediateContainers always remove intermediate
     *        containers, even on failure ({@code forcerm})
     * @param content the tar archive stream
     * @return the raw build output stream (JSON progress lines)
     */
    @POST
    @Path("/build")
    @Consumes("application/tar")
    @Produces(MediaType.TEXT_PLAIN)
    InputStream build(@QueryParam("t") String repositoryNameAndTag,
                      @QueryParam("q") boolean supressVerboseOutput,
                      @QueryParam("nocache") boolean suppressCache,
                      @QueryParam("rm") boolean removeIntermediateContainers,
                      @QueryParam("forcerm") boolean alwaysRemoveIntermediateContainers,
                      InputStream content);

    /**
     * {@code POST /auth} — check the auth configuration (default
     * username/email). Status codes: 200/204 no error, 500 server error.
     */
    @POST
    @Path("/auth")
    Void auth();

    /**
     * {@code GET /info} — display system-wide information (container and
     * image counts, limits, forwarding flags, ...).
     */
    @GET
    @Path("/info")
    Info info();

    /**
     * {@code GET /version} — show the docker version information (version,
     * git commit, go version).
     */
    @GET
    @Path("/version")
    Version version();

    /**
     * {@code POST /commit} — create a new image from a container's changes.
     *
     * @param container source container id
     * @param commitMessage commit message ({@code m})
     * @param repository target repository ({@code repo})
     * @param tag target tag
     * @param author author, e.g. "John Hannibal Smith &lt;hannibal@a-team.com&gt;"
     * @param run config automatically applied when the image is run,
     *        e.g. {@code {"Cmd": ["cat", "/world"], "PortSpecs": ["22"]}}
     * @return the id of the newly created image
     */
    @POST
    @Path("/commit")
    IdResponse commit(@QueryParam("container") String container,
                      @QueryParam("m") String commitMessage,
                      @QueryParam("repo") String repository,
                      @QueryParam("tag") String tag,
                      @QueryParam("author") String author,
                      @QueryParam("run") String run);

    /**
     * {@code GET /events} — monitor Docker's events, either in real time via
     * streaming or via polling (using {@code since}/{@code until}).
     *
     * <p>Fix: the second parameter was previously bound to
     * {@code @QueryParam("since")}, so the upper polling bound was never sent
     * as {@code until} and instead clobbered {@code since}.</p>
     *
     * @param since timestamp used for polling (lower bound)
     * @param until timestamp used for polling (upper bound)
     * @return stream of events
     */
    @GET
    @Path("/events")
    Response events(@QueryParam("since") Long since,
                    @QueryParam("until") Long until);

    /**
     * {@code GET /images/(name)/list} — get a tarball containing all images
     * and metadata for the repository specified by {@code name}.
     *
     * @param name repository name
     * @return the binary tar stream as a byte array
     */
    @GET
    @Path("/images/{name}/list")
    byte[] getTarball(@PathParam("name") String name);

    /**
     * {@code POST /images/load} — load a set of images and tags into the
     * docker repository. The request body is the tarball.
     */
    @POST
    @Path("/images/load")
    Void postTarball(byte[] data);

    /**
     * {@code GET /_ping} — ping the docker server.
     *
     * @since 1.11
     */
    @GET
    @Path("/_ping")
    String ping();
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/dwarf4/encoding/DWARFEndianity.java
1739
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.util.bin.format.dwarf4.encoding; import ghidra.app.util.bin.format.dwarf4.DWARFUtil; /** * DWARF Endianity consts from www.dwarfstd.org/doc/DWARF4.pdf */ public final class DWARFEndianity { public static final int DW_END_default = 0x0; public static final int DW_END_big = 0x1; public static final int DW_END_little = 0x2; public static final int DW_END_lo_user = 0x40; public static final int DW_END_hi_user = 0xff; /** * Get the endianity given a DWARFEndianity value. * @param endian DWARFEndianity value to check * @param defaultisBigEndian true if by default is big endian and false otherwise * @return true if big endian and false if little endian * @throws IllegalArgumentException if an unknown endian value is given */ public static boolean getEndianity(long endian, boolean defaultisBigEndian) { switch ((int) endian) { case DW_END_default: return defaultisBigEndian; case DW_END_big: return true; case DW_END_little: return false; default: throw new IllegalArgumentException( "Unhandled endian type: " + DWARFUtil.toString(DWARFEndianity.class, endian)); } } }
apache-2.0
clovertrail/rocket
java/Azure-Storage-Test/src/main/java/net/local/test/AccessStorage.java
12126
package net.local.test; import java.net.URI; import java.net.URISyntaxException; import java.security.InvalidKeyException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.StringTokenizer; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.log4j.Logger; import com.microsoft.azure.storage.CloudStorageAccount; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.BlockEntry; import com.microsoft.azure.storage.blob.CloudAppendBlob; import com.microsoft.azure.storage.blob.CloudBlob; import com.microsoft.azure.storage.blob.CloudBlobClient; import com.microsoft.azure.storage.blob.CloudBlobContainer; import com.microsoft.azure.storage.blob.CloudBlockBlob; import com.microsoft.azure.storage.blob.CloudPageBlob; import com.microsoft.azure.storage.blob.ListBlobItem; import com.microsoft.azure.storage.blob.PageRange; class AzureBlobStorage { String name; long capacity; long size; AzureBlobStorage() { capacity = size = 0; } } public class AccessStorage { final static Logger logger = Logger.getLogger(AccessStorage.class); static String gOnBlobUri = null; private static String humanReadableByteCount(long bytes, boolean si) { int unit = si ? 1000 : 1024; if (bytes < unit) return bytes + " B"; int exp = (int) (Math.log(bytes) / Math.log(unit)); String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp-1) + (si ? 
"" : "i"); return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre); } public static String convertByte2HumanReadable(long number) { return humanReadableByteCount(number, false); //return FileUtils.byteCountToDisplaySize(number); } public static AzureBlobStorage getBlobUsage(ListBlobItem listBlobItem, String expecteBlob) { final String systemInUseError = "SystemInUse"; final String snapRateExceeded = "SnaphotOperationRateExceeded"; boolean needSnapshot = false; boolean needWait = false; int waitTrylimit = 5; long blobSize = 0; AzureBlobStorage result = new AzureBlobStorage(); if (listBlobItem instanceof CloudPageBlob) { CloudPageBlob cpb = (CloudPageBlob)listBlobItem; result.capacity = cpb.getProperties().getLength(); try { result.name = cpb.getName(); } catch (URISyntaxException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } List<PageRange> pageRangeList = null; try { pageRangeList = cpb.downloadPageRanges(); for (PageRange pr : pageRangeList) { blobSize += pr.getEndOffset() - pr.getStartOffset() + 12; } } catch (StorageException se) { String exception = se.getErrorCode() + ":" + se.getLocalizedMessage(); logger.info(exception); if (systemInUseError.equals(se.getErrorCode())) { // if the storage is in use, we need to take snapshot. // This is by design. needSnapshot = true; } } if (needSnapshot) { int tryNum = 0; do { try { CloudBlob cbSnap = cpb.createSnapshot(); CloudPageBlob cpbSnap = (CloudPageBlob)cbSnap; pageRangeList = cpbSnap.downloadPageRanges(); for (PageRange pr : pageRangeList) { blobSize += pr.getEndOffset() - pr.getStartOffset() + 12; } needWait = false; } catch (StorageException se) { String exception = se.getErrorCode() + ":" + se.getLocalizedMessage(); System.out.println(exception); logger.info(exception); if (snapRateExceeded.equals(se.getErrorCode())) { // handle 'SnaphotOperationRateExceeded' // with error message 'The rate of snapshot blob calls is exceeded' // Azure does not allow to snapshot frequently. 
// For premium storage, the rate of snapshot is ~2min. needWait = true; } } if (needWait) { try { logger.info("Wait for next snapshot"); Thread.currentThread().sleep(60 * 1000); // wait for 1min } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } tryNum++; } while (tryNum < waitTrylimit && needWait); } result.size = blobSize; } else if (listBlobItem instanceof CloudBlockBlob) { CloudBlockBlob cbb = (CloudBlockBlob)listBlobItem; try { result.name = cbb.getName(); } catch (URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } try { ArrayList<BlockEntry> blockEntryList = cbb.downloadBlockList(); for (BlockEntry be : blockEntryList) { blobSize += be.getSize(); } } catch (StorageException e) { // TODO Auto-generated catch block e.printStackTrace(); } result.size = blobSize; } else if (listBlobItem instanceof CloudAppendBlob) { //TODO CloudAppendBlob cab = (CloudAppendBlob)listBlobItem; try { result.name = cab.getName(); } catch (URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.info("CloudAppendBlob"); } return result; } public static void storageUsage(final String storageConnectionString, String expectedBlob) throws InvalidKeyException, URISyntaxException, StorageException { long blobCapacity = 0; long blobUsage = 0; boolean matched = false; String expectedContainer = null, expectedBlobName = null; if (expectedBlob != null) { URI uri = new URI(expectedBlob); StringTokenizer st = new StringTokenizer(uri.getPath(), "/"); expectedContainer = st.nextToken(); expectedBlobName = st.nextToken(); } CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString); CloudBlobClient cbc = storageAccount.createCloudBlobClient(); Iterable<CloudBlobContainer> cIter = cbc.listContainers(); Iterator<CloudBlobContainer> iter = cIter.iterator(); while (iter.hasNext()) { CloudBlobContainer container = iter.next(); String name = container.getName(); if 
(expectedContainer != null && !expectedContainer.equals(name)) { continue; } /* * Blob Containers: The following is how to estimate the amount * of storage consumed per blob container: * 48 bytes + Len(ContainerName) * 2 bytes + * For-Each Metadata[3 bytes + Len(MetadataName) + Len(Value)] + * For-Each Signed Identifier[512 bytes] * * The following is the breakdown: * 1) 48 bytes of overhead for each container includes the Last * Modified Time, Permissions, Public Settings, and some system metadata. * 2) the container name is stored as Unicode so take the * number of characters and multiply by 2. * 3) or each blob container metadata stored, we store the * length of the name (stored as ASCII), plus the length of the string value. * 4) the 512 bytes per Signed Identifier includes signed identifier name, * start time, expiry time and permissions. */ long containerSize = 48 + name.length() * 2; long metaSize = 0; HashMap<String, String> metaData = container.getMetadata(); Set<String> keys = metaData.keySet(); for (Iterator<String> kIter = keys.iterator(); kIter.hasNext();) { String key = kIter.next(); metaSize += 3 + key.length() + metaData.get(key).length(); } long blobSize = 0; Iterable<ListBlobItem> blobList = container.listBlobs(); Iterator<ListBlobItem> blobIter = blobList.iterator(); while (blobIter.hasNext()) { ListBlobItem lbi = blobIter.next(); if (expectedBlobName != null) { boolean find = false; if (lbi instanceof CloudPageBlob) { if (((CloudPageBlob)lbi).getName().equals(expectedBlobName)) { find = true; } } else if (lbi instanceof CloudBlockBlob) { if (((CloudBlockBlob)lbi).getName().equals(expectedBlobName)) { find = true; } } if (!find) { continue; } else { AzureBlobStorage as = getBlobUsage(lbi, expectedBlob); blobCapacity = as.capacity; blobUsage = as.size; matched = true; } } else { AzureBlobStorage as = getBlobUsage(lbi, expectedBlob); blobSize += as.size; if (expectedBlob == null) { String result = "container '" + name + "' blob '" + as.name + "' 
capacity is " + convertByte2HumanReadable(as.capacity) + " (" + as.capacity + ")" + ", used size is " + convertByte2HumanReadable(as.size) + " (" + as.size + ")"; logger.info(result); } } } containerSize += metaSize + blobSize; if (expectedBlob == null) { String containerUsedSize = "container '" + name + "' size is " + convertByte2HumanReadable(containerSize) + " (" + containerSize + ")"; logger.info(containerUsedSize); } else if (matched) { String result = "Processing " + expectedBlob; logger.info(result); result = "Size: " + convertByte2HumanReadable(blobCapacity) + " (" + blobCapacity + ")"; logger.info(result); result = "BillingSize: " + convertByte2HumanReadable(blobUsage) + " (" + blobUsage + ")"; logger.info(result); break; } } } public static String parseConnectString(String args[]) { final String storageAccount = "storageAccount"; final String storagePrimaryKey = "storagePrimaryKey"; final String onMooncake = "onMooncake"; final String blobUriPara = "blobUri"; Options options = new Options(); Option input = new Option("n", storageAccount, true, "input storage account"); input.setRequired(true); options.addOption(input); Option primaryKey = new Option("k", storagePrimaryKey, true, "input the storage primary key"); primaryKey.setRequired(true); options.addOption(primaryKey); Option chinacloud = new Option("c", onMooncake, true, "true: run on Mooncake (azure china), false: not on Mooncake"); chinacloud.setRequired(true); options.addOption(chinacloud); Option blobUri = new Option("u", blobUriPara, true, "Optional: only return the usage for your specified blob URI"); blobUri.setRequired(false); options.addOption(blobUri); CommandLineParser parser = new DefaultParser(); HelpFormatter formatter = new HelpFormatter(); CommandLine cmd; try { cmd = parser.parse(options, args); } catch (ParseException e) { System.out.println(e.getMessage()); formatter.printHelp("StorageUsage", options); System.exit(1); return null; } String storageAccountValue = 
cmd.getOptionValue(storageAccount); String storagePrimaryKeyValue = cmd.getOptionValue(storagePrimaryKey); String onMooncakeCloud = cmd.getOptionValue(onMooncake); gOnBlobUri = cmd.getOptionValue(blobUriPara); StringBuilder sb = new StringBuilder(); if (onMooncakeCloud != null && Boolean.valueOf(onMooncakeCloud)) { // Mooncake can not be accessed through https because // the server CA was not in JDK's CA trust chain sb.append("DefaultEndpointsProtocol=http;") .append("AccountName=").append(storageAccountValue) .append(";AccountKey=").append(storagePrimaryKeyValue); sb.append(";EndpointSuffix=core.chinacloudapi.cn"); } else { sb.append("DefaultEndpointsProtocol=https;") .append("AccountName=").append(storageAccountValue) .append(";AccountKey=").append(storagePrimaryKeyValue); } String ret = sb.toString(); logger.info(ret); return ret; } public static void main(String args[]) throws InvalidKeyException, URISyntaxException, StorageException { String connectStr = parseConnectString(args); storageUsage(connectStr, gOnBlobUri); } }
apache-2.0
zhe-thoughts/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
50739
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.io.RandomAccessFile; import java.io.StringWriter; import java.io.Writer; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.channels.FileChannel; import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.google.common.collect.Sets; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSConfigKeys; import 
org.apache.hadoop.hdfs.DFSInputStream; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager; import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck.Result; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.tools.DFSck; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ToolRunner; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.log4j.RollingFileAppender; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * A JUnit 
test for doing fsck */ public class TestFsck { static final String auditLogFile = System.getProperty("test.build.dir", "build/test") + "/TestFsck-audit.log"; // Pattern for: // allowed=true ugi=name ip=/address cmd=FSCK src=/ dst=null perm=null static final Pattern fsckPattern = Pattern.compile( "allowed=.*?\\s" + "ugi=.*?\\s" + "ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" + "cmd=fsck\\ssrc=\\/\\sdst=null\\s" + "perm=null\\s" + "proto=.*"); static final Pattern getfileinfoPattern = Pattern.compile( "allowed=.*?\\s" + "ugi=.*?\\s" + "ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" + "cmd=getfileinfo\\ssrc=\\/\\sdst=null\\s" + "perm=null\\s" + "proto=.*"); static final Pattern numCorruptBlocksPattern = Pattern.compile( ".*Corrupt blocks:\t\t([0123456789]*).*"); private static final String LINE_SEPARATOR = System.getProperty("line.separator"); static String runFsck(Configuration conf, int expectedErrCode, boolean checkErrorCode,String... path) throws Exception { ByteArrayOutputStream bStream = new ByteArrayOutputStream(); PrintStream out = new PrintStream(bStream, true); ((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.ALL); int errCode = ToolRunner.run(new DFSck(conf, out), path); if (checkErrorCode) { assertEquals(expectedErrCode, errCode); } ((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.INFO); FSImage.LOG.error("OUTPUT = " + bStream.toString()); return bStream.toString(); } /** do fsck */ @Test public void testFsck() throws Exception { DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck"). 
setNumFiles(20).build(); MiniDFSCluster cluster = null; FileSystem fs = null; try { Configuration conf = new HdfsConfiguration(); final long precision = 1L; conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); fs = cluster.getFileSystem(); final String fileName = "/srcdat"; util.createFiles(fs, fileName); util.waitReplication(fs, fileName, (short)3); final Path file = new Path(fileName); long aTime = fs.getFileStatus(file).getAccessTime(); Thread.sleep(precision); setupAuditLogs(); String outStr = runFsck(conf, 0, true, "/"); verifyAuditLogs(); assertEquals(aTime, fs.getFileStatus(file).getAccessTime()); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); if (fs != null) {try{fs.close();} catch(Exception e){}} cluster.shutdown(); // restart the cluster; bring up namenode but not the data nodes cluster = new MiniDFSCluster.Builder(conf) .numDataNodes(0).format(false).build(); outStr = runFsck(conf, 1, true, "/"); // expect the result is corrupt assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); System.out.println(outStr); // bring up data nodes & cleanup cluster cluster.startDataNodes(conf, 4, true, null, null); cluster.waitActive(); cluster.waitClusterUp(); fs = cluster.getFileSystem(); util.cleanup(fs, "/srcdat"); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } /** Sets up log4j logger for auditlogs */ private void setupAuditLogs() throws IOException { File file = new File(auditLogFile); if (file.exists()) { file.delete(); } Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); logger.setLevel(Level.INFO); PatternLayout layout = new PatternLayout("%m%n"); RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile); logger.addAppender(appender); } private void 
verifyAuditLogs() throws IOException { // Turn off the logs Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); logger.setLevel(Level.OFF); BufferedReader reader = null; try { // Audit log should contain one getfileinfo and one fsck reader = new BufferedReader(new FileReader(auditLogFile)); String line = reader.readLine(); assertNotNull(line); assertTrue("Expected getfileinfo event not found in audit log", getfileinfoPattern.matcher(line).matches()); line = reader.readLine(); assertNotNull(line); assertTrue("Expected fsck event not found in audit log", fsckPattern .matcher(line).matches()); assertNull("Unexpected event in audit log", reader.readLine()); } finally { // Close the reader and remove the appender to release the audit log file // handle after verifying the content of the file. if (reader != null) { reader.close(); } if (logger != null) { logger.removeAllAppenders(); } } } @Test public void testFsckNonExistent() throws Exception { DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck"). setNumFiles(20).build(); MiniDFSCluster cluster = null; FileSystem fs = null; try { Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); fs = cluster.getFileSystem(); util.createFiles(fs, "/srcdat"); util.waitReplication(fs, "/srcdat", (short)3); String outStr = runFsck(conf, 0, true, "/non-existent"); assertEquals(-1, outStr.indexOf(NamenodeFsck.HEALTHY_STATUS)); System.out.println(outStr); util.cleanup(fs, "/srcdat"); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } /** Test fsck with permission set on inodes */ @Test public void testFsckPermission() throws Exception { final DFSTestUtil util = new DFSTestUtil.Builder(). 
setName(getClass().getSimpleName()).setNumFiles(20).build(); final Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); MiniDFSCluster cluster = null; try { // Create a cluster with the current user, write some files cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); final MiniDFSCluster c2 = cluster; final String dir = "/dfsck"; final Path dirpath = new Path(dir); final FileSystem fs = c2.getFileSystem(); util.createFiles(fs, dir); util.waitReplication(fs, dir, (short) 3); fs.setPermission(dirpath, new FsPermission((short) 0700)); // run DFSck as another user, should fail with permission issue UserGroupInformation fakeUGI = UserGroupInformation.createUserForTesting( "ProbablyNotARealUserName", new String[] { "ShangriLa" }); fakeUGI.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { System.out.println(runFsck(conf, -1, true, dir)); return null; } }); // set permission and try DFSck again as the fake user, should succeed fs.setPermission(dirpath, new FsPermission((short) 0777)); fakeUGI.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { final String outStr = runFsck(conf, 0, true, dir); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); return null; } }); util.cleanup(fs, dir); } finally { if (cluster != null) { cluster.shutdown(); } } } @Test public void testFsckMove() throws Exception { Configuration conf = new HdfsConfiguration(); final int DFS_BLOCK_SIZE = 1024; final int NUM_DATANODES = 4; conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); DFSTestUtil util = new DFSTestUtil("TestFsck", 5, 3, (5 * DFS_BLOCK_SIZE) + (DFS_BLOCK_SIZE - 1), 5 * DFS_BLOCK_SIZE); MiniDFSCluster cluster = null; FileSystem fs = 
null; try { cluster = new MiniDFSCluster.Builder(conf). numDataNodes(NUM_DATANODES).build(); String topDir = "/srcdat"; fs = cluster.getFileSystem(); cluster.waitActive(); util.createFiles(fs, topDir); util.waitReplication(fs, topDir, (short)3); String outStr = runFsck(conf, 0, true, "/"); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); DFSClient dfsClient = new DFSClient(new InetSocketAddress("localhost", cluster.getNameNodePort()), conf); String fileNames[] = util.getFileNames(topDir); CorruptedTestFile ctFiles[] = new CorruptedTestFile[] { new CorruptedTestFile(fileNames[0], Sets.newHashSet(0), dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE), new CorruptedTestFile(fileNames[1], Sets.newHashSet(2, 3), dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE), new CorruptedTestFile(fileNames[2], Sets.newHashSet(4), dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE), new CorruptedTestFile(fileNames[3], Sets.newHashSet(0, 1, 2, 3), dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE), new CorruptedTestFile(fileNames[4], Sets.newHashSet(1, 2, 3, 4), dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE) }; int totalMissingBlocks = 0; for (CorruptedTestFile ctFile : ctFiles) { totalMissingBlocks += ctFile.getTotalMissingBlocks(); } for (CorruptedTestFile ctFile : ctFiles) { ctFile.removeBlocks(cluster); } // Wait for fsck to discover all the missing blocks while (true) { outStr = runFsck(conf, 1, false, "/"); String numCorrupt = null; for (String line : outStr.split(LINE_SEPARATOR)) { Matcher m = numCorruptBlocksPattern.matcher(line); if (m.matches()) { numCorrupt = m.group(1); break; } } if (numCorrupt == null) { throw new IOException("failed to find number of corrupt " + "blocks in fsck output."); } if (numCorrupt.equals(Integer.toString(totalMissingBlocks))) { assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); break; } try { Thread.sleep(100); } catch (InterruptedException ignore) { } } // Copy the non-corrupt blocks of corruptFileName to lost+found. 
outStr = runFsck(conf, 1, false, "/", "-move"); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); // Make sure that we properly copied the block files from the DataNodes // to lost+found for (CorruptedTestFile ctFile : ctFiles) { ctFile.checkSalvagedRemains(); } // Fix the filesystem by removing corruptFileName outStr = runFsck(conf, 1, true, "/", "-delete"); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); // Check to make sure we have a healthy filesystem outStr = runFsck(conf, 0, true, "/"); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); util.cleanup(fs, topDir); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } static private class CorruptedTestFile { final private String name; final private Set<Integer> blocksToCorrupt; final private DFSClient dfsClient; final private int numDataNodes; final private int blockSize; final private byte[] initialContents; public CorruptedTestFile(String name, Set<Integer> blocksToCorrupt, DFSClient dfsClient, int numDataNodes, int blockSize) throws IOException { this.name = name; this.blocksToCorrupt = blocksToCorrupt; this.dfsClient = dfsClient; this.numDataNodes = numDataNodes; this.blockSize = blockSize; this.initialContents = cacheInitialContents(); } public int getTotalMissingBlocks() { return blocksToCorrupt.size(); } private byte[] cacheInitialContents() throws IOException { HdfsFileStatus status = dfsClient.getFileInfo(name); byte[] content = new byte[(int)status.getLen()]; DFSInputStream in = null; try { in = dfsClient.open(name); IOUtils.readFully(in, content, 0, content.length); } finally { in.close(); } return content; } public void removeBlocks(MiniDFSCluster cluster) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { for (int corruptIdx : blocksToCorrupt) { // Corrupt a block by deleting it ExtendedBlock block = dfsClient.getNamenode().getBlockLocations( name, blockSize * 
corruptIdx, Long.MAX_VALUE).get(0).getBlock(); for (int i = 0; i < numDataNodes; i++) { File blockFile = cluster.getBlockFile(i, block); if(blockFile != null && blockFile.exists()) { assertTrue(blockFile.delete()); } } } } public void checkSalvagedRemains() throws IOException { int chainIdx = 0; HdfsFileStatus status = dfsClient.getFileInfo(name); long length = status.getLen(); int numBlocks = (int)((length + blockSize - 1) / blockSize); DFSInputStream in = null; byte[] blockBuffer = new byte[blockSize]; try { for (int blockIdx = 0; blockIdx < numBlocks; blockIdx++) { if (blocksToCorrupt.contains(blockIdx)) { if (in != null) { in.close(); in = null; } continue; } if (in == null) { in = dfsClient.open("/lost+found" + name + "/" + chainIdx); chainIdx++; } int len = blockBuffer.length; if (blockIdx == (numBlocks - 1)) { // The last block might not be full-length len = (int)(in.getFileLength() % blockSize); if (len == 0) len = blockBuffer.length; } IOUtils.readFully(in, blockBuffer, 0, len); int startIdx = blockIdx * blockSize; for (int i = 0; i < len; i++) { if (initialContents[startIdx + i] != blockBuffer[i]) { throw new IOException("salvaged file " + name + " differed " + "from what we expected on block " + blockIdx); } } } } finally { IOUtils.cleanup(null, in); } } } @Test public void testFsckMoveAndDelete() throws Exception { final int MAX_MOVE_TRIES = 5; DFSTestUtil util = new DFSTestUtil.Builder(). 
setName("TestFsckMoveAndDelete").setNumFiles(5).build(); MiniDFSCluster cluster = null; FileSystem fs = null; try { Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); String topDir = "/srcdat"; fs = cluster.getFileSystem(); cluster.waitActive(); util.createFiles(fs, topDir); util.waitReplication(fs, topDir, (short)3); String outStr = runFsck(conf, 0, true, "/"); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // Corrupt a block by deleting it String[] fileNames = util.getFileNames(topDir); DFSClient dfsClient = new DFSClient(new InetSocketAddress("localhost", cluster.getNameNodePort()), conf); String corruptFileName = fileNames[0]; ExtendedBlock block = dfsClient.getNamenode().getBlockLocations( corruptFileName, 0, Long.MAX_VALUE).get(0).getBlock(); for (int i=0; i<4; i++) { File blockFile = cluster.getBlockFile(i, block); if(blockFile != null && blockFile.exists()) { assertTrue(blockFile.delete()); } } // We excpect the filesystem to be corrupted outStr = runFsck(conf, 1, false, "/"); while (!outStr.contains(NamenodeFsck.CORRUPT_STATUS)) { try { Thread.sleep(100); } catch (InterruptedException ignore) { } outStr = runFsck(conf, 1, false, "/"); } // After a fsck -move, the corrupted file should still exist. 
for (int i = 0; i < MAX_MOVE_TRIES; i++) { outStr = runFsck(conf, 1, true, "/", "-move" ); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); String[] newFileNames = util.getFileNames(topDir); boolean found = false; for (String f : newFileNames) { if (f.equals(corruptFileName)) { found = true; break; } } assertTrue(found); } // Fix the filesystem by moving corrupted files to lost+found outStr = runFsck(conf, 1, true, "/", "-move", "-delete"); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); // Check to make sure we have healthy filesystem outStr = runFsck(conf, 0, true, "/"); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); util.cleanup(fs, topDir); if (fs != null) {try{fs.close();} catch(Exception e){}} cluster.shutdown(); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } @Test public void testFsckOpenFiles() throws Exception { DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck"). setNumFiles(4).build(); MiniDFSCluster cluster = null; FileSystem fs = null; try { Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); String topDir = "/srcdat"; String randomString = "HADOOP "; fs = cluster.getFileSystem(); cluster.waitActive(); util.createFiles(fs, topDir); util.waitReplication(fs, topDir, (short)3); String outStr = runFsck(conf, 0, true, "/"); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // Open a file for writing and do not close for now Path openFile = new Path(topDir + "/openFile"); FSDataOutputStream out = fs.create(openFile); int writeCount = 0; while (writeCount != 100) { out.write(randomString.getBytes()); writeCount++; } // We expect the filesystem to be HEALTHY and show one open file outStr = runFsck(conf, 0, true, topDir); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); 
assertFalse(outStr.contains("OPENFORWRITE")); // Use -openforwrite option to list open files outStr = runFsck(conf, 0, true, topDir, "-openforwrite"); System.out.println(outStr); assertTrue(outStr.contains("OPENFORWRITE")); assertTrue(outStr.contains("openFile")); // Close the file out.close(); // Now, fsck should show HEALTHY fs and should not show any open files outStr = runFsck(conf, 0, true, topDir); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); assertFalse(outStr.contains("OPENFORWRITE")); util.cleanup(fs, topDir); if (fs != null) {try{fs.close();} catch(Exception e){}} cluster.shutdown(); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } @Test public void testCorruptBlock() throws Exception { Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000); // Set short retry timeouts so this test runs faster conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 10); FileSystem fs = null; DFSClient dfsClient = null; LocatedBlocks blocks = null; int replicaCount = 0; Random random = new Random(); String outStr = null; short factor = 1; MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); cluster.waitActive(); fs = cluster.getFileSystem(); Path file1 = new Path("/testCorruptBlock"); DFSTestUtil.createFile(fs, file1, 1024, factor, 0); // Wait until file replication has completed DFSTestUtil.waitReplication(fs, file1, factor); ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, file1); // Make sure filesystem is in healthy state outStr = runFsck(conf, 0, true, "/"); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // corrupt replicas File blockFile = cluster.getBlockFile(0, block); if (blockFile != null && blockFile.exists()) { RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw"); FileChannel channel = 
raFile.getChannel(); String badString = "BADBAD"; int rand = random.nextInt((int) channel.size()/2); raFile.seek(rand); raFile.write(badString.getBytes()); raFile.close(); } // Read the file to trigger reportBadBlocks try { IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf, true); } catch (IOException ie) { // Ignore exception } dfsClient = new DFSClient(new InetSocketAddress("localhost", cluster.getNameNodePort()), conf); blocks = dfsClient.getNamenode(). getBlockLocations(file1.toString(), 0, Long.MAX_VALUE); replicaCount = blocks.get(0).getLocations().length; while (replicaCount != factor) { try { Thread.sleep(100); } catch (InterruptedException ignore) { } blocks = dfsClient.getNamenode(). getBlockLocations(file1.toString(), 0, Long.MAX_VALUE); replicaCount = blocks.get(0).getLocations().length; } assertTrue (blocks.get(0).isCorrupt()); // Check if fsck reports the same outStr = runFsck(conf, 1, true, "/"); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); assertTrue(outStr.contains("testCorruptBlock")); } finally { if (cluster != null) {cluster.shutdown();} } } /** Test if fsck can return -1 in case of failure * * @throws Exception */ @Test public void testFsckError() throws Exception { MiniDFSCluster cluster = null; try { // bring up a one-node cluster Configuration conf = new HdfsConfiguration(); cluster = new MiniDFSCluster.Builder(conf).build(); String fileName = "/test.txt"; Path filePath = new Path(fileName); FileSystem fs = cluster.getFileSystem(); // create a one-block file DFSTestUtil.createFile(fs, filePath, 1L, (short)1, 1L); DFSTestUtil.waitReplication(fs, filePath, (short)1); // intentionally corrupt NN data structure INodeFile node = (INodeFile) cluster.getNamesystem().dir.getINode (fileName, true); final BlockInfoContiguous[] blocks = node.getBlocks(); assertEquals(blocks.length, 1); blocks[0].setNumBytes(-1L); // set the block length to be negative // run fsck and expect a failure with -1 
as the error code String outStr = runFsck(conf, -1, true, fileName); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.FAILURE_STATUS)); // clean up file system fs.delete(filePath, true); } finally { if (cluster != null) {cluster.shutdown();} } } /** check if option -list-corruptfiles of fsck command works properly */ @Test public void testFsckListCorruptFilesBlocks() throws Exception { Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000); conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); FileSystem fs = null; MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).build(); cluster.waitActive(); fs = cluster.getFileSystem(); DFSTestUtil util = new DFSTestUtil.Builder(). setName("testGetCorruptFiles").setNumFiles(3).setMaxLevels(1). setMaxSize(1024).build(); util.createFiles(fs, "/corruptData", (short) 1); util.waitReplication(fs, "/corruptData", (short) 1); // String outStr = runFsck(conf, 0, true, "/corruptData", "-list-corruptfileblocks"); String outStr = runFsck(conf, 0, false, "/corruptData", "-list-corruptfileblocks"); System.out.println("1. 
good fsck out: " + outStr); assertTrue(outStr.contains("has 0 CORRUPT files")); // delete the blocks final String bpid = cluster.getNamesystem().getBlockPoolId(); for (int i=0; i<4; i++) { for (int j=0; j<=1; j++) { File storageDir = cluster.getInstanceStorageDir(i, j); File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid); List<File> metadataFiles = MiniDFSCluster.getAllBlockMetadataFiles( data_dir); if (metadataFiles == null) continue; for (File metadataFile : metadataFiles) { File blockFile = Block.metaToBlockFile(metadataFile); assertTrue("Cannot remove file.", blockFile.delete()); assertTrue("Cannot remove file.", metadataFile.delete()); } } } // wait for the namenode to see the corruption final NamenodeProtocols namenode = cluster.getNameNodeRpc(); CorruptFileBlocks corruptFileBlocks = namenode .listCorruptFileBlocks("/corruptData", null); int numCorrupt = corruptFileBlocks.getFiles().length; while (numCorrupt == 0) { Thread.sleep(1000); corruptFileBlocks = namenode .listCorruptFileBlocks("/corruptData", null); numCorrupt = corruptFileBlocks.getFiles().length; } outStr = runFsck(conf, -1, true, "/corruptData", "-list-corruptfileblocks"); System.out.println("2. bad fsck out: " + outStr); assertTrue(outStr.contains("has 3 CORRUPT files")); // Do a listing on a dir which doesn't have any corrupt blocks and validate util.createFiles(fs, "/goodData"); outStr = runFsck(conf, 0, true, "/goodData", "-list-corruptfileblocks"); System.out.println("3. good fsck out: " + outStr); assertTrue(outStr.contains("has 0 CORRUPT files")); util.cleanup(fs,"/corruptData"); util.cleanup(fs, "/goodData"); } finally { if (cluster != null) {cluster.shutdown();} } } /** * Test for checking fsck command on illegal arguments should print the proper * usage. 
*/ @Test public void testToCheckTheFsckCommandOnIllegalArguments() throws Exception { MiniDFSCluster cluster = null; try { // bring up a one-node cluster Configuration conf = new HdfsConfiguration(); cluster = new MiniDFSCluster.Builder(conf).build(); String fileName = "/test.txt"; Path filePath = new Path(fileName); FileSystem fs = cluster.getFileSystem(); // create a one-block file DFSTestUtil.createFile(fs, filePath, 1L, (short) 1, 1L); DFSTestUtil.waitReplication(fs, filePath, (short) 1); // passing illegal option String outStr = runFsck(conf, -1, true, fileName, "-thisIsNotAValidFlag"); System.out.println(outStr); assertTrue(!outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // passing multiple paths are arguments outStr = runFsck(conf, -1, true, "/", fileName); System.out.println(outStr); assertTrue(!outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // clean up file system fs.delete(filePath, true); } finally { if (cluster != null) { cluster.shutdown(); } } } /** * Tests that the # of missing block replicas and expected replicas is correct * @throws IOException */ @Test public void testFsckMissingReplicas() throws IOException { // Desired replication factor // Set this higher than NUM_REPLICAS so it's under-replicated final short REPL_FACTOR = 2; // Number of replicas to actually start final short NUM_REPLICAS = 1; // Number of blocks to write final short NUM_BLOCKS = 3; // Set a small-ish blocksize final long blockSize = 512; Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); MiniDFSCluster cluster = null; DistributedFileSystem dfs = null; try { // Startup a minicluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_REPLICAS).build(); assertNotNull("Failed Cluster Creation", cluster); cluster.waitClusterUp(); dfs = cluster.getFileSystem(); assertNotNull("Failed to get FileSystem", dfs); // Create a file that will be intentionally under-replicated final String pathString = new String("/testfile"); final 
Path path = new Path(pathString); long fileLen = blockSize * NUM_BLOCKS; DFSTestUtil.createFile(dfs, path, fileLen, REPL_FACTOR, 1); // Create an under-replicated file NameNode namenode = cluster.getNameNode(); NetworkTopology nettop = cluster.getNamesystem().getBlockManager() .getDatanodeManager().getNetworkTopology(); Map<String,String[]> pmap = new HashMap<String, String[]>(); Writer result = new StringWriter(); PrintWriter out = new PrintWriter(result, true); InetAddress remoteAddress = InetAddress.getLocalHost(); NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out, NUM_REPLICAS, (short)1, remoteAddress); // Run the fsck and check the Result final HdfsFileStatus file = namenode.getRpcServer().getFileInfo(pathString); assertNotNull(file); Result res = new Result(conf); fsck.check(pathString, file, res); // Also print the output from the fsck, for ex post facto sanity checks System.out.println(result.toString()); assertEquals(res.missingReplicas, (NUM_BLOCKS*REPL_FACTOR) - (NUM_BLOCKS*NUM_REPLICAS)); assertEquals(res.numExpectedReplicas, NUM_BLOCKS*REPL_FACTOR); } finally { if(dfs != null) { dfs.close(); } if(cluster != null) { cluster.shutdown(); } } } /** * Tests that the # of misreplaced replicas is correct * @throws IOException */ @Test public void testFsckMisPlacedReplicas() throws IOException { // Desired replication factor final short REPL_FACTOR = 2; // Number of replicas to actually start short NUM_DN = 2; // Number of blocks to write final short NUM_BLOCKS = 3; // Set a small-ish blocksize final long blockSize = 512; String [] racks = {"/rack1", "/rack1"}; String [] hosts = {"host1", "host2"}; Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); MiniDFSCluster cluster = null; DistributedFileSystem dfs = null; try { // Startup a minicluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts) .racks(racks).build(); assertNotNull("Failed Cluster Creation", cluster); 
cluster.waitClusterUp(); dfs = cluster.getFileSystem(); assertNotNull("Failed to get FileSystem", dfs); // Create a file that will be intentionally under-replicated final String pathString = new String("/testfile"); final Path path = new Path(pathString); long fileLen = blockSize * NUM_BLOCKS; DFSTestUtil.createFile(dfs, path, fileLen, REPL_FACTOR, 1); // Create an under-replicated file NameNode namenode = cluster.getNameNode(); NetworkTopology nettop = cluster.getNamesystem().getBlockManager() .getDatanodeManager().getNetworkTopology(); // Add a new node on different rack, so previous blocks' replicas // are considered to be misplaced nettop.add(DFSTestUtil.getDatanodeDescriptor("/rack2", "/host3")); NUM_DN++; Map<String,String[]> pmap = new HashMap<String, String[]>(); Writer result = new StringWriter(); PrintWriter out = new PrintWriter(result, true); InetAddress remoteAddress = InetAddress.getLocalHost(); NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out, NUM_DN, REPL_FACTOR, remoteAddress); // Run the fsck and check the Result final HdfsFileStatus file = namenode.getRpcServer().getFileInfo(pathString); assertNotNull(file); Result res = new Result(conf); fsck.check(pathString, file, res); // check misReplicatedBlock number. 
assertEquals(res.numMisReplicatedBlocks, NUM_BLOCKS); } finally { if(dfs != null) { dfs.close(); } if(cluster != null) { cluster.shutdown(); } } } /** Test fsck with FileNotFound */ @Test public void testFsckFileNotFound() throws Exception { // Number of replicas to actually start final short NUM_REPLICAS = 1; Configuration conf = new Configuration(); NameNode namenode = mock(NameNode.class); NetworkTopology nettop = mock(NetworkTopology.class); Map<String,String[]> pmap = new HashMap<String, String[]>(); Writer result = new StringWriter(); PrintWriter out = new PrintWriter(result, true); InetAddress remoteAddress = InetAddress.getLocalHost(); FSNamesystem fsName = mock(FSNamesystem.class); BlockManager blockManager = mock(BlockManager.class); DatanodeManager dnManager = mock(DatanodeManager.class); when(namenode.getNamesystem()).thenReturn(fsName); when(fsName.getBlockLocations( anyString(), anyLong(), anyLong(), anyBoolean(), anyBoolean())) .thenThrow(new FileNotFoundException()); when(fsName.getBlockManager()).thenReturn(blockManager); when(blockManager.getDatanodeManager()).thenReturn(dnManager); NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out, NUM_REPLICAS, (short)1, remoteAddress); String pathString = "/tmp/testFile"; long length = 123L; boolean isDir = false; int blockReplication = 1; long blockSize = 128 *1024L; long modTime = 123123123L; long accessTime = 123123120L; FsPermission perms = FsPermission.getDefault(); String owner = "foo"; String group = "bar"; byte [] symlink = null; byte [] path = new byte[128]; path = DFSUtil.string2Bytes(pathString); long fileId = 312321L; int numChildren = 1; byte storagePolicy = 0; HdfsFileStatus file = new HdfsFileStatus(length, isDir, blockReplication, blockSize, modTime, accessTime, perms, owner, group, symlink, path, fileId, numChildren, null, storagePolicy); Result res = new Result(conf); try { fsck.check(pathString, file, res); } catch (Exception e) { fail("Unexpected exception "+ 
e.getMessage()); } assertTrue(res.toString().contains("HEALTHY")); } /** Test fsck with symlinks in the filesystem */ @Test public void testFsckSymlink() throws Exception { final DFSTestUtil util = new DFSTestUtil.Builder(). setName(getClass().getSimpleName()).setNumFiles(1).build(); final Configuration conf = new HdfsConfiguration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); MiniDFSCluster cluster = null; FileSystem fs = null; try { final long precision = 1L; conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); fs = cluster.getFileSystem(); final String fileName = "/srcdat"; util.createFiles(fs, fileName); final FileContext fc = FileContext.getFileContext( cluster.getConfiguration(0)); final Path file = new Path(fileName); final Path symlink = new Path("/srcdat-symlink"); fc.createSymlink(file, symlink, false); util.waitReplication(fs, fileName, (short)3); long aTime = fc.getFileStatus(symlink).getAccessTime(); Thread.sleep(precision); setupAuditLogs(); String outStr = runFsck(conf, 0, true, "/"); verifyAuditLogs(); assertEquals(aTime, fc.getFileStatus(symlink).getAccessTime()); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); assertTrue(outStr.contains("Total symlinks:\t\t1")); util.cleanup(fs, fileName); } finally { if (fs != null) {try{fs.close();} catch(Exception e){}} if (cluster != null) { cluster.shutdown(); } } } /** * Test for including the snapshot files in fsck report */ @Test public void testFsckForSnapshotFiles() throws Exception { final Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1) .build(); try { String runFsck = runFsck(conf, 0, true, "/", "-includeSnapshots", "-files"); assertTrue(runFsck.contains("HEALTHY")); final String fileName = "/srcdat"; 
DistributedFileSystem hdfs = cluster.getFileSystem(); Path file1 = new Path(fileName); DFSTestUtil.createFile(hdfs, file1, 1024, (short) 1, 1000L); hdfs.allowSnapshot(new Path("/")); hdfs.createSnapshot(new Path("/"), "mySnapShot"); runFsck = runFsck(conf, 0, true, "/", "-includeSnapshots", "-files"); assertTrue(runFsck.contains("/.snapshot/mySnapShot/srcdat")); runFsck = runFsck(conf, 0, true, "/", "-files"); assertFalse(runFsck.contains("mySnapShot")); } finally { cluster.shutdown(); } } /** * Test for blockIdCK */ @Test public void testBlockIdCK() throws Exception { final short REPL_FACTOR = 2; short NUM_DN = 2; final long blockSize = 512; String [] racks = {"/rack1", "/rack2"}; String [] hosts = {"host1", "host2"}; Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2); MiniDFSCluster cluster = null; DistributedFileSystem dfs = null; cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts) .racks(racks).build(); assertNotNull("Failed Cluster Creation", cluster); cluster.waitClusterUp(); dfs = cluster.getFileSystem(); assertNotNull("Failed to get FileSystem", dfs); DFSTestUtil util = new DFSTestUtil.Builder(). 
setName(getClass().getSimpleName()).setNumFiles(1).build(); //create files final String pathString = new String("/testfile"); final Path path = new Path(pathString); util.createFile(dfs, path, 1024, REPL_FACTOR , 1000L); util.waitReplication(dfs, path, REPL_FACTOR); StringBuilder sb = new StringBuilder(); for (LocatedBlock lb: util.getAllBlocks(dfs, path)){ sb.append(lb.getBlock().getLocalBlock().getBlockName()+" "); } String[] bIds = sb.toString().split(" "); //run fsck try { //illegal input test String runFsckResult = runFsck(conf, 0, true, "/", "-blockId", "not_a_block_id"); assertTrue(runFsckResult.contains("Incorrect blockId format:")); //general test runFsckResult = runFsck(conf, 0, true, "/", "-blockId", sb.toString()); assertTrue(runFsckResult.contains(bIds[0])); assertTrue(runFsckResult.contains(bIds[1])); assertTrue(runFsckResult.contains( "Block replica on datanode/rack: host1/rack1 is HEALTHY")); assertTrue(runFsckResult.contains( "Block replica on datanode/rack: host2/rack2 is HEALTHY")); } finally { cluster.shutdown(); } } /** * Test for blockIdCK with datanode decommission */ @Test public void testBlockIdCKDecommission() throws Exception { final short REPL_FACTOR = 1; short NUM_DN = 2; final long blockSize = 512; boolean checkDecommissionInProgress = false; String [] racks = {"/rack1", "/rack2"}; String [] hosts = {"host1", "host2"}; Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2); MiniDFSCluster cluster; DistributedFileSystem dfs ; cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts) .racks(racks).build(); assertNotNull("Failed Cluster Creation", cluster); cluster.waitClusterUp(); dfs = cluster.getFileSystem(); assertNotNull("Failed to get FileSystem", dfs); DFSTestUtil util = new DFSTestUtil.Builder(). 
setName(getClass().getSimpleName()).setNumFiles(1).build(); //create files final String pathString = new String("/testfile"); final Path path = new Path(pathString); util.createFile(dfs, path, 1024, REPL_FACTOR, 1000L); util.waitReplication(dfs, path, REPL_FACTOR); StringBuilder sb = new StringBuilder(); for (LocatedBlock lb: util.getAllBlocks(dfs, path)){ sb.append(lb.getBlock().getLocalBlock().getBlockName()+" "); } String[] bIds = sb.toString().split(" "); try { //make sure datanode that has replica is fine before decommission String outStr = runFsck(conf, 0, true, "/", "-blockId", bIds[0]); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); //decommission datanode ExtendedBlock eb = util.getFirstBlock(dfs, path); DatanodeDescriptor dn = cluster.getNameNode().getNamesystem() .getBlockManager().getBlockCollection(eb.getLocalBlock()) .getBlocks()[0].getDatanode(0); cluster.getNameNode().getNamesystem().getBlockManager() .getDatanodeManager().startDecommission(dn); String dnName = dn.getXferAddr(); //wait for decommission start DatanodeInfo datanodeInfo = null; int count = 0; do { Thread.sleep(2000); for (DatanodeInfo info : dfs.getDataNodeStats()) { if (dnName.equals(info.getXferAddr())) { datanodeInfo = info; } } //check decommissioning only once if(!checkDecommissionInProgress && datanodeInfo != null && datanodeInfo.isDecommissionInProgress()) { String fsckOut = runFsck(conf, 3, true, "/", "-blockId", bIds[0]); assertTrue(fsckOut.contains(NamenodeFsck.DECOMMISSIONING_STATUS)); checkDecommissionInProgress = true; } } while (datanodeInfo != null && !datanodeInfo.isDecommissioned()); //check decommissioned String fsckOut = runFsck(conf, 2, true, "/", "-blockId", bIds[0]); assertTrue(fsckOut.contains(NamenodeFsck.DECOMMISSIONED_STATUS)); } finally { if (cluster != null) { cluster.shutdown(); } } } /** * Test for blockIdCK with block corruption */ @Test public void testBlockIdCKCorruption() throws Exception { short NUM_DN = 1; final 
long blockSize = 512; Random random = new Random(); DFSClient dfsClient; LocatedBlocks blocks; ExtendedBlock block; short repFactor = 1; String [] racks = {"/rack1"}; String [] hosts = {"host1"}; Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000); // Set short retry timeouts so this test runs faster conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 10); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); MiniDFSCluster cluster = null; DistributedFileSystem dfs = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts) .racks(racks).build(); assertNotNull("Failed Cluster Creation", cluster); cluster.waitClusterUp(); dfs = cluster.getFileSystem(); assertNotNull("Failed to get FileSystem", dfs); DFSTestUtil util = new DFSTestUtil.Builder(). setName(getClass().getSimpleName()).setNumFiles(1).build(); //create files final String pathString = new String("/testfile"); final Path path = new Path(pathString); util.createFile(dfs, path, 1024, repFactor, 1000L); util.waitReplication(dfs, path, repFactor); StringBuilder sb = new StringBuilder(); for (LocatedBlock lb: util.getAllBlocks(dfs, path)){ sb.append(lb.getBlock().getLocalBlock().getBlockName()+" "); } String[] bIds = sb.toString().split(" "); //make sure block is healthy before we corrupt it String outStr = runFsck(conf, 0, true, "/", "-blockId", bIds[0]); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); // corrupt replicas block = DFSTestUtil.getFirstBlock(dfs, path); File blockFile = cluster.getBlockFile(0, block); if (blockFile != null && blockFile.exists()) { RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw"); FileChannel channel = raFile.getChannel(); String badString = "BADBAD"; int rand = random.nextInt((int) channel.size()/2); raFile.seek(rand); raFile.write(badString.getBytes()); raFile.close(); } 
util.waitCorruptReplicas(dfs, cluster.getNamesystem(), path, block, 1); outStr = runFsck(conf, 1, false, "/", "-blockId", block.getBlockName()); System.out.println(outStr); assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); } finally { if (cluster != null) { cluster.shutdown(); } } } }
apache-2.0
arrayexpress/annotare2
prototypes/otrs-soap-client/src/main/java/uk/ac/ebi/fg/annotare/proto/soap/SoapMapFactory.java
1226
package uk.ac.ebi.fg.annotare.proto.soap;

import org.w3c.dom.DOMException;

import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPFactory;
import java.util.Map;

/**
 * Builds {@link SOAPElement}s that encode a {@link Map} using the Apache
 * "xml-soap" Map convention: one {@code <item>} child per map entry, each
 * containing a {@code <key>} and a {@code <value>} element.
 */
class SoapMapFactory {

    /**
     * Creates a SOAP element named {@code name} containing the given entries
     * encoded as an {@code xml-soap:Map}.
     *
     * <p>Fix: the original implementation created a single {@code <item>}
     * element and appended every key/value pair to that same element, which
     * produces an invalid xml-soap Map whenever the map has more than one
     * entry. Each entry now gets its own {@code <item>} (and an empty map
     * produces no {@code <item>} at all).
     *
     * @param name   element name of the resulting SOAP map
     * @param values entries to encode; keys are rendered via {@code toString()},
     *               and {@code null} values are encoded as {@code xsi:nil="true"}
     * @return the populated SOAP element
     * @throws SOAPException if the underlying SOAP implementation fails
     * @throws DOMException  if attribute manipulation fails at the DOM level
     */
    protected SOAPElement createSoapMap(String name, Map<?, ?> values) throws DOMException, SOAPException {
        SOAPElement element = SOAPFactory.newInstance().createElement(name);
        element.addNamespaceDeclaration("xml-soap", "http://xml.apache.org/xml-soap");
        element.setAttribute("xsi:type", "xml-soap:Map");
        for (Map.Entry<?, ?> entry : values.entrySet()) {
            // One <item> per entry, attached directly to the map element.
            SOAPElement item = element.addChildElement("item");
            item.addChildElement("key")
                    .addTextNode(entry.getKey().toString())
                    .setAttribute("xsi:type", "xsd:string");
            SOAPElement valueElement = item.addChildElement("value");
            if (entry.getValue() == null) {
                valueElement.setAttribute("xsi:nil", "true");
            } else {
                valueElement.addTextNode(entry.getValue().toString())
                        .setAttribute("xsi:type", "xsd:string");
            }
        }
        return element;
    }
}
apache-2.0
theborakompanioni/spring-boot-shiro-orient-ionic
spring-boot-shiro-orient-ionic-web/src/main/java/com/github/theborakompanioni/OrientDbConfiguration.java
1046
package com.github.theborakompanioni; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.data.orient.commons.repository.config.EnableOrientRepositories; import org.springframework.data.orient.object.OrientObjectDatabaseFactory; import org.springframework.data.orient.object.repository.support.OrientObjectRepositoryFactoryBean; import org.springframework.transaction.annotation.Transactional; import javax.annotation.PostConstruct; @Configuration @EnableOrientRepositories(basePackageClasses = com.github.theborakompanioni.repository._package.class, repositoryFactoryBeanClass = OrientObjectRepositoryFactoryBean.class) public class OrientDbConfiguration { @Autowired private OrientObjectDatabaseFactory factory; @PostConstruct @Transactional public void registerEntities() { factory.db().getEntityManager().registerEntityClasses(com.github.theborakompanioni.model._package.class.getPackage().getName()); } }
apache-2.0
Sable/mclab-core
languages/Natlab/src/mclint/analyses/UnreachableCode.java
2193
package mclint.analyses; import java.util.Arrays; import java.util.List; import mclint.Lint; import mclint.LintAnalysis; import mclint.Message; import mclint.Project; import natlab.utils.NodeFinder; import nodecases.AbstractNodeCaseHandler; import ast.ASTNode; import ast.BreakStmt; import ast.ContinueStmt; import ast.NameExpr; import ast.ParameterizedExpr; import ast.ReturnStmt; import ast.Stmt; public class UnreachableCode extends AbstractNodeCaseHandler implements LintAnalysis { private static final String WARNING = "This statement (and possibly following ones) cannot be reached."; private static final List<String> THROWS = Arrays.asList("throw", "rethrow", "error", "throwAsCaller"); private ASTNode<?> tree; private Lint lint; public UnreachableCode(Project project) { this.tree = project.asCompilationUnits(); } private Message unreachableCode(ASTNode<?> node) { return Message.regarding(node, "DEAD_CODE", WARNING); } @SuppressWarnings("rawtypes") @Override public void caseASTNode(ASTNode node) { for (int i = 0; i < node.getNumChild(); ++i) { node.getChild(i).analyze(this); } } @Override public void analyze(Lint lint) { this.lint = lint; tree.analyze(this); } private void caseAbruptControlFlow(ASTNode<?> node) { @SuppressWarnings("unchecked") ast.List<Stmt> body = (ast.List<Stmt>) node.getParent(); int index = body.getIndexOfChild(node); if (index < body.getNumChild() - 1) { lint.report(unreachableCode(body.getChild(index + 1))); } } @Override public void caseBreakStmt(BreakStmt node) { caseAbruptControlFlow(node); } @Override public void caseContinueStmt(ContinueStmt node) { caseAbruptControlFlow(node); } @Override public void caseReturnStmt(ReturnStmt node) { caseAbruptControlFlow(node); } @Override public void caseParameterizedExpr(ParameterizedExpr node) { if (!(node.getTarget() instanceof NameExpr)) { return; } String name = ((NameExpr)(node.getTarget())).getName().getID(); if (THROWS.contains(name)) { caseAbruptControlFlow(NodeFinder.findParent(Stmt.class, 
node)); } } }
apache-2.0
nickman/jmxlocal
src/main/java/com/heliosapm/utils/jmx/builtins/DynamicBuiltIn.java
8498
/**
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
 */
package com.heliosapm.utils.jmx.builtins;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicLong;

import javax.management.MBeanServerConnection;
import javax.tools.DiagnosticCollector;
import javax.tools.FileObject;
import javax.tools.ForwardingJavaFileManager;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.SimpleJavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import javax.tools.JavaFileObject.Kind;

import com.heliosapm.shorthand.attach.vm.VirtualMachineBootstrap;
import com.heliosapm.utils.jmx.Command;

/**
 * <p>Title: DynamicBuiltIn</p>
 * <p>Description: Built-in command that compiles user-supplied Java source
 * (either a full source file on disk or an inline snippet merged into a
 * template) at runtime and invokes its static {@code execute} method against
 * the target {@link MBeanServerConnection}.</p>
 * <p>Company: Helios Development Group LLC</p>
 * @author Whitehead (nwhitehead AT heliosdev DOT org)
 * <p><code>com.heliosapm.utils.jmx.builtins.DynamicBuiltIn</code></p>
 */
public class DynamicBuiltIn extends AbstractBuiltIn {
	// Monotonic counter used to give each generated class a unique name.
	private static final AtomicLong serial = new AtomicLong();
	// Package into which generated classes are placed.
	public static final String PACKAGE_NAME = "com.heliosapm.utils.jmx.builtins.dynamic";
	// Simple-name template for generated classes; %s is the serial id.
	public static final String SIMPLE_CLASS_NAME = "Dynamic_%s";
	// Fully qualified name template for generated classes.
	public static final String CLASS_NAME = PACKAGE_NAME + "." + SIMPLE_CLASS_NAME;

	/**
	 * Creates a new DynamicBuiltIn
	 */
	public DynamicBuiltIn() {

	}

	/**
	 * Compiles the supplied code and invokes its static
	 * {@code execute(MBeanServerConnection)} method.
	 * <p>If {@code args[0]} names an existing file, that file's text is used
	 * verbatim as the source; otherwise {@code args[0]} is treated as an
	 * inline command and spliced into the bundled template.
	 * <p>NOTE(review): on any compilation/invocation failure this prints the
	 * stack trace and calls {@code System.exit(-1)} — presumably intentional
	 * for a CLI tool, but it terminates the whole JVM; verify before reuse
	 * in an embedded context.
	 * {@inheritDoc}
	 * @see com.heliosapm.utils.jmx.builtins.AbstractBuiltIn#doExecute(javax.management.MBeanServerConnection, java.lang.String[])
	 */
	@Override
	protected Object doExecute(final MBeanServerConnection conn, final String... args) throws Exception {
		if(args==null || args.length==0) throw new IllegalArgumentException("No code supplied");
		final long id = serial.incrementAndGet();
		String code = null;
		File f = new File(args[0]);
		if(f.exists()) {
			// full class file
			code = getFileText(f.getAbsolutePath());
		} else {
			// in line java
			code = buildCode(args[0], id);
		}
		try {
			// Compile under the attach-API tools classloader so javax.tools resolves.
			final ClassLoader cl = VirtualMachineBootstrap.getToolsClassLoader();
			final ClassLoader current = Thread.currentThread().getContextClassLoader();
			Class<?> clazz = null;
			try {
				Thread.currentThread().setContextClassLoader(cl);
//				System.out.println("\n" + code + "\n");
				clazz = compile(code, String.format(CLASS_NAME, id));
				// Generated classes expose a static execute(MBeanServerConnection).
				Method m = clazz.getDeclaredMethod("execute", MBeanServerConnection.class);
				return m.invoke(null, conn);
			} finally {
				// Always restore the original context classloader ...
				Thread.currentThread().setContextClassLoader(current);
				// ... and clean up the emitted .class file from the working dir.
				File delFile = new File(String.format(SIMPLE_CLASS_NAME, id) + ".class");
				if(delFile.exists()) {
					if(!delFile.delete()) {
						delFile.deleteOnExit();
					}
				}
			}
		} catch (Exception ex) {
			ex.printStackTrace(System.err);
			System.exit(-1);
		}
		return null;
	}

	/**
	 * Builds compilable Java source by loading the classpath resource
	 * {@code command-template.java} and substituting the command text for
	 * {@code ###c###} and the serial id for {@code ###id###}.
	 * @param command the inline command snippet to embed
	 * @param id the serial id used to uniquify the generated class name
	 * @return the completed Java source
	 * @throws Exception if the template resource cannot be read
	 */
	public static String buildCode(final String command, final long id) throws Exception {
		InputStream is = null;
		InputStreamReader isr = null;
		BufferedReader br = null;
		String template = null;
		try {
			URL url = Command.class.getClassLoader().getResource("command-template.java");
			if(url==null) throw new Exception("Failed to read template");
			is = url.openStream();
			if(is==null) throw new Exception("Failed to read template");
			isr = new InputStreamReader(is, Charset.forName("UTF-8"));
			br = new BufferedReader(isr);
			StringBuilder b = new StringBuilder();
			String line = null;
			while((line = br.readLine())!=null) {
				b.append(line).append("\n");
			}
			template = b.toString();
		} catch (Exception ex) {
			throw new RuntimeException("Failed to read template", ex);
		} finally {
			// Close in reverse order of acquisition; each close is best-effort.
			if(br!=null) try { br.close(); } catch (Exception x) {/* No Op */}
			if(isr!=null) try { isr.close(); } catch (Exception x) {/* No Op */}
			if(is!=null) try { is.close(); } catch (Exception x) {/* No Op */}
		}
		return template.replace("###c###", command).replace("###id###", "" + id);
	}

	/**
	 * Compiles the supplied source in-memory via the system JavaCompiler and
	 * loads the resulting bytecode through a dedicated defining classloader.
	 * @param code the Java source to compile
	 * @param className the fully qualified name of the class being compiled
	 * @return the loaded class
	 * @throws Exception if no compiler is available (JRE instead of JDK) or
	 *         compilation fails; diagnostics are included in the message
	 */
	public static Class<?> compile(final String code, final String className) throws Exception {
		StandardJavaFileManager fileManager = null;
		try {
			final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
			if(compiler==null) throw new Exception("No compiler available. Suggestion: Use JDK if you are using JRE.");
			final Iterable<? extends JavaFileObject> compilationUnits = getJavaSourceFromString(code, className);
			final DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>();
			fileManager = compiler.getStandardFileManager(diagnostics, null, Charset.forName("UTF8"));
			// Captured by the forwarding file manager below so we can read the
			// compiled class bytes back after the task completes.
			final JavaFileManager.Location[] loc = new JavaFileManager.Location[1];
			final JavaFileObject[] jfo = new JavaFileObject[1];
			JavaFileManager fFileManager = new ForwardingJavaFileManager<JavaFileManager>(fileManager) {
				@Override
				public JavaFileObject getJavaFileForOutput(JavaFileManager.Location location, String className, Kind kind, FileObject sibling) throws IOException {
					// Remember where the compiler wrote the class file output.
					loc[0] = location;
					jfo[0] = super.getJavaFileForOutput(location, className, kind, sibling);
					return jfo[0];
				}
			};
			final StringWriter sw = new StringWriter();
			final boolean success = compiler.getTask(sw, fFileManager, diagnostics, null, null, compilationUnits).call();
			System.out.println(sw.toString());
			if(!success) throw new Exception("Failed to compile code:" + diagnostics.getDiagnostics() + "\n" + sw.toString());
			// Define the class from the captured output using the compiler's own
			// classloader as parent so javax.tools types resolve consistently.
			final DefiningClassLoader cc = new DefiningClassLoader(compiler.getClass().getClassLoader());
			return cc.gitEm(className, jfo[0].openInputStream());
		} finally {
			if(fileManager!=null) try { fileManager.close(); } catch (Exception x) {/* No Op */}
		}
	}

	/**
	 * Classloader that exposes {@link ClassLoader#defineClass} so compiled
	 * bytecode can be turned into a live class from an InputStream.
	 */
	public static class DefiningClassLoader extends ClassLoader {
		public DefiningClassLoader(final ClassLoader parent) {
			super(parent);
		}

		/**
		 * Reads all bytes from the stream and defines the named class.
		 * NOTE(review): the stream is not closed here — the caller retains
		 * ownership; confirm this is intentional.
		 * @param name the fully qualified class name
		 * @param is stream containing the class bytecode
		 * @return the defined class
		 */
		public Class<?> gitEm(final String name, final InputStream is) {
			final byte[] buff = new byte[1024];
			final ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
			int bytesRead = -1;
			try {
				while((bytesRead = is.read(buff))!=-1) {
					baos.write(buff, 0, bytesRead);
				}
				byte[] byteCode = baos.toByteArray();
				return super.defineClass(name, byteCode, 0, byteCode.length);
			} catch (Exception ex) {
				throw new RuntimeException(ex);
			}
		}
	}

	/**
	 * Wraps a single in-memory source object in a one-shot Iterable, as
	 * required by {@link JavaCompiler#getTask}.
	 */
	static Iterable<JavaSourceFromString> getJavaSourceFromString(String code, final String className) {
		final JavaSourceFromString jsfs;
		jsfs = new JavaSourceFromString(className, code);
		return new Iterable<JavaSourceFromString>() {
			public Iterator<JavaSourceFromString> iterator() {
				return new Iterator<JavaSourceFromString>() {
					// Single-element iterator: yields jsfs exactly once.
					boolean isNext = true;

					public boolean hasNext() {
						return isNext;
					}

					public JavaSourceFromString next() {
						if (!isNext) throw new NoSuchElementException();
						isNext = false;
						return jsfs;
					}

					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}
}

/**
 * A {@link SimpleJavaFileObject} backed by an in-memory source string, used to
 * feed source code to the JavaCompiler without touching the filesystem.
 */
class JavaSourceFromString extends SimpleJavaFileObject {
	// The source code this file object represents.
	final String code;

	JavaSourceFromString(String name, String code) {
		// Synthesize a pseudo-URI of the form string:///pkg/Name.java.
		super(URI.create("string:///" + name.replace('.', '/') + Kind.SOURCE.extension), Kind.SOURCE);
		this.code = code;
	}

	public CharSequence getCharContent(boolean ignoreEncodingErrors) {
		return code;
	}
}
apache-2.0
GoogleCloudPlatform/spring-cloud-gcp
spring-cloud-gcp-autoconfigure/src/main/java/com/google/cloud/spring/autoconfigure/trace/sleuth/StackdriverHttpRequestParser.java
1564
/* * Copyright 2017-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spring.autoconfigure.trace.sleuth; import brave.SpanCustomizer; import brave.http.HttpRequest; import brave.http.HttpRequestParser; import brave.http.HttpTags; import brave.propagation.TraceContext; import java.net.URI; /** * An {@link HttpRequestParser} that fills information for Stackdriver Trace. * * <p>Based on {@code org.springframework.cloud.sleuth.instrument.web.SleuthHttpClientParser}. */ public class StackdriverHttpRequestParser implements HttpRequestParser { @Override public void parse(HttpRequest request, TraceContext context, SpanCustomizer customizer) { HttpRequestParser.DEFAULT.parse(request, context, customizer); HttpTags.URL.tag(request, context, customizer); HttpTags.ROUTE.tag(request, context, customizer); String url = request.url(); URI uri = URI.create(url); if (uri.getHost() != null) { customizer.tag("http.host", uri.getHost()); } } }
apache-2.0
ssaarela/javersion
javersion-path/src/test/java/org/javersion/path/SchemaTest.java
4224
package org.javersion.path;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.javersion.path.PropertyPath.ROOT;
import static org.javersion.path.PropertyPath.parse;

import org.javersion.path.Schema.Builder;
import org.junit.Test;

/**
 * Tests for {@link Schema}: building schemas with concrete, wildcard (ANY)
 * and self-referencing nodes, and resolving {@link PropertyPath}s against them.
 */
public class SchemaTest {

    @Test
    public void secured_path_besides_anything() {
        // Schema under test:
        //   :                "root"
        //   secure.property: "secure property"
        //   *:               "any"
        //   **:              @*   (the "any" node loops back to itself, matching any depth)
        Builder<String> root = new Builder<>("root");
        Builder<String> any = new Builder<>("any");
        root.addChild(NodeId.ANY, any);
        // Self-reference: ANY under "any" points back to "any", so arbitrary
        // path depth keeps resolving to the same wildcard node.
        any.addChild(NodeId.ANY, any);
        root.getOrCreate(ROOT.property("secure").property("property"), "secure property");

        Schema<String> schema = root.build();

        // Any path not starting with "secure" falls through to the wildcard node,
        // regardless of node type (property, key, index) or depth.
        assertThat(schema.get(ROOT.property("foo")).getValue()).isEqualTo("any");
        assertThat(schema.get(ROOT.key("foo").key("bar")).getValue()).isEqualTo("any");
        assertThat(schema.get(ROOT.index(123).index(456)).getValue()).isEqualTo("any");
        // "secure" appearing mid-path (not as the first segment) is still wildcard-matched.
        assertThat(schema.get((ROOT.property("foo").property("secure"))).getValue()).isEqualTo("any");
        // The concrete secure path wins over the wildcard.
        assertThat(schema.get(ROOT.property("secure").property("property")).getValue()).isEqualTo("secure property");
        try {
            // Under the concrete "secure" node there is no wildcard fallback,
            // so an unknown child must fail.
            schema.get(ROOT.property("secure").property("insecure"));
            fail("Found insecure under secure");
        } catch (IllegalArgumentException e) {
            // as expected
        }
    }

    @Test
    public void any_property() {
        // An ANY_PROPERTY node matches any property name one level deep.
        Builder<String> root = new Builder<>("root");
        root.connect(ROOT.anyProperty(), new Builder<>("anyProperty"));
        Schema<String> schema = root.build();
        assertThat(schema.get(parse("anything")).getValue()).isEqualTo("anyProperty");
    }

    @Test
    public void anything_goes() {
        // Root wildcard-connected to itself: every path resolves back to root.
        Builder<String> root = new Builder<>("root");
        root.connect(ROOT.any(), root);
        assertThat(root.getValue()).isEqualTo("root");

        Schema<String> schema = root.build();
        assertThat(schema.getValue()).isEqualTo("root");
        // The built schema preserves the self-reference.
        assertThat(schema.getChild(NodeId.ANY)).isEqualTo(schema);
        assertThat(schema.find(ROOT)).isEqualTo(schema);
        assertThat(schema.find(ROOT.key("foo").property("bar").index(123))).isEqualTo(schema);
    }

    @Test
    public void list_of_named_objects() {
        // Schema under test:
        //   list[].name = "name"
        //   list[]      = "element"
        Builder<String> root = new Builder<>("root");
        // "list" itself carries no value; only its any-index children do.
        Builder<String> list = root.addChild(NodeId.property("list"), new Builder<>());
        list.getOrCreate(parse("[].name"), "name");
        // getOrCreate must return the already-existing any-index node created above.
        root.getOrCreate(parse("list[]")).setValue("element");

        Schema<String> schema = root.build();
        assertThat(schema.getValue()).isEqualTo("root");

        Schema<String> child = schema.get(parse("list"));
        assertThat(child.getValue()).isNull();

        child = child.getChild(NodeId.ANY_INDEX);
        assertThat(child.getValue()).isEqualTo("element");
        assertThat(child.get(parse("name")).getValue()).isEqualTo("name");
    }

    @Test
    public void has_child() {
        Builder<String> root = new Builder<>("root");
        root.getOrCreate(ROOT.property("property"));
        Schema<String> schema = root.build();
        assertThat(schema.hasChildren()).isTrue();
        // hasChild is exact: a property NodeId does not match a key NodeId
        // of the same name, and there is no wildcard fallback here.
        assertThat(schema.hasChild(NodeId.property("property"))).isTrue();
        assertThat(schema.hasChild(NodeId.key("property"))).isFalse();
        assertThat(schema.hasChild(NodeId.ANY_KEY)).isFalse();
    }

    @Test(expected = IllegalArgumentException.class)
    public void node_not_found() {
        // Looking up a path that was never created must throw.
        new Builder<>("root").get(ROOT.property("foo"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void get_by_null_throws_exception() {
        new Builder<>().get(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void builder_get_child_by_null_throws_exception() {
        new Builder<>().getChild(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void schema_get_child_by_null_throws_exception() {
        new Builder<>().build().getChild(null);
    }
}
apache-2.0
Scavi/BrainSqueeze
src/test/java/com/scavi/brainsqueeze/geometry/CircleIntersectionTest.java
1831
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.scavi.brainsqueeze.geometry;

import com.scavi.brainsqueeze.util.Circle;

import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link CircleIntersection#areIntersectingCirclesExists(Circle[])}.
 *
 * @author Michael Heymel
 * @since 18/11/16
 */
public class CircleIntersectionTest {

    /**
     * Circles at x = 9 and x = 10, each with radius 1, touch/overlap,
     * so an intersection must be reported.
     */
    @Test
    public void testExistingIntersection() {
        Circle[] circles = new Circle[]{new Circle(3, 0, 1),
                new Circle(10, 0, 1),
                new Circle(0, 0, 1),
                new Circle(9, 0, 1),
                new Circle(6, 0, 1)};
        CircleIntersection intersectionValidator = new CircleIntersection();
        boolean isIntersection = intersectionValidator.areIntersectingCirclesExists(circles);
        Assert.assertTrue(isIntersection);
    }

    /**
     * All circles are spaced at least 3 apart with radius 1 each,
     * so no intersection may be reported.
     */
    @Test
    public void testNoIntersection() {
        Circle[] circles = new Circle[]{
                new Circle(3, 0, 1),
                new Circle(12, 0, 1),
                new Circle(0, 0, 1),
                new Circle(9, 0, 1),
                new Circle(6, 0, 1)};
        CircleIntersection intersectionValidator = new CircleIntersection();
        boolean isIntersection = intersectionValidator.areIntersectingCirclesExists(circles);
        // Fixed: removed a leftover debug re-invocation of
        // areIntersectingCirclesExists inside "if (isIntersection)" — it ran only
        // when the assertion below was about to fail and contributed nothing.
        Assert.assertFalse(isIntersection);
    }
}
apache-2.0
weiwenqiang/GitHub
RefreshUpload/BGARefreshLayout-Android-master/demo/src/main/java/cn/bingoogolapple/refreshlayout/demo/ui/fragment/RefreshWebViewFragment.java
2508
package cn.bingoogolapple.refreshlayout.demo.ui.fragment;

import android.graphics.Bitmap;
import android.os.Bundle;
import android.util.Log;
import android.webkit.WebView;
import android.webkit.WebViewClient;

import cn.bingoogolapple.refreshlayout.BGAMoocStyleRefreshViewHolder;
import cn.bingoogolapple.refreshlayout.BGARefreshLayout;
import cn.bingoogolapple.refreshlayout.demo.R;

/**
 * Author: Wang Hao  Email: bingoogolapple@gmail.com
 * Created: 2015/7/21 11:42 PM
 * Description: demo fragment that wraps a {@link WebView} in a
 * {@link BGARefreshLayout}; pull-to-refresh reloads the current page.
 */
public class RefreshWebViewFragment extends BaseFragment implements BGARefreshLayout.BGARefreshLayoutDelegate {
    private static final String TAG = RefreshWebViewFragment.class.getSimpleName();
    // Pull-to-refresh container wrapping the WebView.
    private BGARefreshLayout mRefreshLayout;
    // WebView that displays the page content.
    private WebView mContentWv;

    @Override
    protected void initView(Bundle savedInstanceState) {
        setContentView(R.layout.fragment_webview_refresh);
        mRefreshLayout = getViewById(R.id.rl_webview_refresh);
        mContentWv = getViewById(R.id.wv_webview_content);
    }

    @Override
    protected void setListener() {
        mRefreshLayout.setDelegate(this);
        mContentWv.setWebViewClient(new WebViewClient() {
            @Override
            public void onPageStarted(WebView view, String url, Bitmap favicon) {
                super.onPageStarted(view, url, favicon);
            }

            @Override
            public void onPageFinished(WebView view, String url) {
                super.onPageFinished(view, url);
                // Page load finished: stop the refresh animation.
                mRefreshLayout.endRefreshing();
            }
        });
    }

    @Override
    protected void processLogic(Bundle savedInstanceState) {
        // Configure the MOOC-style refresh header before loading content.
        BGAMoocStyleRefreshViewHolder moocStyleRefreshViewHolder = new BGAMoocStyleRefreshViewHolder(mApp, false);
        moocStyleRefreshViewHolder.setOriginalImage(R.mipmap.bga_refresh_moooc);
        moocStyleRefreshViewHolder.setUltimateColor(R.color.imoocstyle);
        mRefreshLayout.setRefreshViewHolder(moocStyleRefreshViewHolder);
//        mRefreshLayout.setCustomHeaderView(DataEngine.getCustomHeaderView(mApp), true);

        mContentWv.getSettings().setJavaScriptEnabled(true);
        mContentWv.loadUrl("https://github.com/bingoogolapple");
    }

    @Override
    public void onBGARefreshLayoutBeginRefreshing(BGARefreshLayout refreshLayout) {
        // Pull-to-refresh: reload the current page; endRefreshing() is called
        // from onPageFinished above once the reload completes.
        mContentWv.reload();
    }

    @Override
    public boolean onBGARefreshLayoutBeginLoadingMore(BGARefreshLayout refreshLayout) {
        Log.i(TAG, "加载更多");
        // Load-more is not supported for this WebView demo.
        return false;
    }
}
apache-2.0
sungsoo/tez-0.3.0
tez-mapreduce-examples/src/main/java/org/apache/tez/mapreduce/examples/Sort.java
8174
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tez.mapreduce.examples;

import java.io.IOException;
import java.net.URI;
import java.util.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.InputSampler;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * This is the trivial map/reduce program that does absolutely nothing
 * other than use the framework to fragment and sort the input values.
 *
 * To run: bin/hadoop jar build/hadoop-examples.jar sort
 *            [-r <i>reduces</i>]
 *            [-inFormat <i>input format class</i>]
 *            [-outFormat <i>output format class</i>]
 *            [-outKey <i>output key class</i>]
 *            [-outValue <i>output value class</i>]
 *            [-totalOrder <i>pcnt</i> <i>num samples</i> <i>max splits</i>]
 *            <i>in-dir</i> <i>out-dir</i>
 */
public class Sort<K,V> extends Configured implements Tool {
  // Config key: when set, reduce count = #tasktrackers * reducesPerHost
  // instead of the 90%-of-cluster-capacity default.
  public static final String REDUCES_PER_HOST =
    "mapreduce.sort.reducesperhost";
  // Last job submitted via run(); exposed through getResult().
  private Job job = null;

  // Prints CLI usage and returns the exit code 2 (invalid arguments).
  static int printUsage() {
    System.out.println("sort [-r <reduces>] " +
                       "[-inFormat <input format class>] " +
                       "[-outFormat <output format class>] " +
                       "[-outKey <output key class>] " +
                       "[-outValue <output value class>] " +
                       "[-totalOrder <pcnt> <num samples> <max splits>] " +
                       "<input> <output>");
    ToolRunner.printGenericCommandUsage(System.out);
    return 2;
  }

  /**
   * The main driver for sort program.
   * Invoke this method to submit the map/reduce job.
   * @throws IOException When there is communication problems with the
   *                     job tracker.
   */
  public int run(String[] args) throws Exception {

    Configuration conf = getConf();
    JobClient client = new JobClient(conf);
    ClusterStatus cluster = client.getClusterStatus();
    // Default: use ~90% of the cluster's reduce slots.
    int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
    String sort_reduces = conf.get(REDUCES_PER_HOST);
    if (sort_reduces != null) {
       num_reduces = cluster.getTaskTrackers() *
                       Integer.parseInt(sort_reduces);
    }
    // Defaults: sequence-file in/out with BytesWritable key/value;
    // each may be overridden by the CLI flags parsed below.
    Class<? extends InputFormat> inputFormatClass =
      SequenceFileInputFormat.class;
    Class<? extends OutputFormat> outputFormatClass =
      SequenceFileOutputFormat.class;
    Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
    Class<? extends Writable> outputValueClass = BytesWritable.class;
    List<String> otherArgs = new ArrayList<String>();
    InputSampler.Sampler<K,V> sampler = null;
    for(int i=0; i < args.length; ++i) {
      try {
        if ("-r".equals(args[i])) {
          num_reduces = Integer.parseInt(args[++i]);
        } else if ("-inFormat".equals(args[i])) {
          inputFormatClass =
            Class.forName(args[++i]).asSubclass(InputFormat.class);
        } else if ("-outFormat".equals(args[i])) {
          outputFormatClass =
            Class.forName(args[++i]).asSubclass(OutputFormat.class);
        } else if ("-outKey".equals(args[i])) {
          outputKeyClass =
            Class.forName(args[++i]).asSubclass(WritableComparable.class);
        } else if ("-outValue".equals(args[i])) {
          outputValueClass =
            Class.forName(args[++i]).asSubclass(Writable.class);
        } else if ("-totalOrder".equals(args[i])) {
          // -totalOrder <pcnt> <num samples> <max splits>:
          // enables total-order sort via sampled partition boundaries.
          double pcnt = Double.parseDouble(args[++i]);
          int numSamples = Integer.parseInt(args[++i]);
          int maxSplits = Integer.parseInt(args[++i]);
          if (0 >= maxSplits) maxSplits = Integer.MAX_VALUE;
          sampler =
            new InputSampler.RandomSampler<K,V>(pcnt, numSamples, maxSplits);
        } else {
          // Positional argument (input/output dir).
          otherArgs.add(args[i]);
        }
      } catch (NumberFormatException except) {
        System.out.println("ERROR: Integer expected instead of " + args[i]);
        return printUsage();
      } catch (ArrayIndexOutOfBoundsException except) {
        // A flag above consumed args[++i] past the end of the array.
        System.out.println("ERROR: Required parameter missing from " +
            args[i-1]);
        return printUsage(); // exits
      }
    }
    // Set user-supplied (possibly default) job configs
    job = new Job(conf);
    job.setJobName("sorter");
    job.setJarByClass(Sort.class);

    // Identity mapper/reducer: the framework's shuffle does the sorting.
    job.setMapperClass(Mapper.class);
    job.setReducerClass(Reducer.class);

    job.setNumReduceTasks(num_reduces);

    job.setInputFormatClass(inputFormatClass);
    job.setOutputFormatClass(outputFormatClass);

    job.setOutputKeyClass(outputKeyClass);
    job.setOutputValueClass(outputValueClass);

    // Make sure there are exactly 2 parameters left.
    if (otherArgs.size() != 2) {
      System.out.println("ERROR: Wrong number of parameters: " +
          otherArgs.size() + " instead of 2.");
      return printUsage();
    }
    FileInputFormat.setInputPaths(job, otherArgs.get(0));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs.get(1)));

    if (sampler != null) {
      System.out.println("Sampling input to effect total-order sort...");
      job.setPartitionerClass(TotalOrderPartitioner.class);
      Path inputDir = FileInputFormat.getInputPaths(job)[0];
      inputDir = inputDir.makeQualified(inputDir.getFileSystem(conf));
      // Partition boundaries are written next to the input and distributed
      // to all tasks via the distributed cache (symlinked as _sortPartitioning).
      Path partitionFile = new Path(inputDir, "_sortPartitioning");
      TotalOrderPartitioner.setPartitionFile(conf, partitionFile);
      InputSampler.<K,V>writePartitionFile(job, sampler);
      URI partitionUri = new URI(partitionFile.toString() +
                                 "#" + "_sortPartitioning");
      DistributedCache.addCacheFile(partitionUri, conf);
    }

    System.out.println("Running on " +
        cluster.getTaskTrackers() +
        " nodes to sort from " +
        FileInputFormat.getInputPaths(job)[0] + " into " +
        FileOutputFormat.getOutputPath(job) +
        " with " + num_reduces + " reduces.");
    Date startTime = new Date();
    System.out.println("Job started: " + startTime);
    int ret = job.waitForCompletion(true) ? 0 : 1;
    Date end_time = new Date();
    System.out.println("Job ended: " + end_time);
    System.out.println("The job took " +
        (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
    return ret;
  }

  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new Sort(), args);
    System.exit(res);
  }

  /**
   * Get the last job that was run using this instance.
   * @return the results of the last job that was run
   */
  public Job getResult() {
    return job;
  }
}
apache-2.0
panelion/incubator-stratos
components/org.apache.stratos.throttling.manager/src/main/java/org/apache/stratos/throttling/manager/internal/ThrottlingManagerServiceComponent.java
5600
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.throttling.manager.internal; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.throttling.manager.utils.Util; import org.osgi.service.component.ComponentContext; import org.wso2.carbon.billing.core.BillingManager; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.rule.kernel.config.RuleEngineConfigService; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.billing.mgt.api.MultitenancyBillingInfo; import org.apache.stratos.usage.api.TenantUsageRetriever; /** * @scr.component name="org.wso2.carbon.throttling.manager" * immediate="true" * @scr.reference name="registry.service" * interface="org.wso2.carbon.registry.core.service.RegistryService" cardinality="1..1" * policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService" * @scr.reference name="user.realmservice.default" * interface="org.wso2.carbon.user.core.service.RealmService" * cardinality="1..1" policy="dynamic" bind="setRealmService" * unbind="unsetRealmService" * @scr.reference name="billingManager.service" * interface="org.wso2.carbon.billing.core.BillingManager" 
cardinality="1..1" * policy="dynamic" bind="setBillingManager" unbind="unsetBillingManager" * @scr.reference name="rule.engine.config.server.component" * interface="org.wso2.carbon.rule.kernel.config.RuleEngineConfigService" * cardinality="1..1" * policy="dynamic" bind="setRuleEngineConfigService" * unbind="unsetRuleEngineConfigService" * @scr.reference name="metering.service" * interface="org.apache.stratos.usage.api.TenantUsageRetriever" cardinality="1..1" * policy="dynamic" bind="setTenantUsageRetriever" unbind="unsetTenantUsageRetriever" * @scr.reference name="org.wso2.carbon.billing.mgt.api.MultitenancyBillingInfo" * interface="org.wso2.carbon.billing.mgt.api.MultitenancyBillingInfo" cardinality="1..1" * policy="dynamic" bind="setMultitenancyBillingInfo" unbind="unsetMultitenancyBillingInfo" */ public class ThrottlingManagerServiceComponent { private static Log log = LogFactory.getLog(ThrottlingManagerServiceComponent.class); protected void activate(ComponentContext context) { try { Util.setBundleContext(context.getBundleContext()); Util.loadThrottlingRules(); Util.registerThrottlingRuleInvoker(); Util.initializeThrottling(); log.debug(" Multitenancy Throttling Manager bundle is activated "); } catch (Throwable e) { log.error(" Multitenancy Throttling Manager bundle failed activating ", e); } } protected void deactivate(ComponentContext context) { log.debug("******* Multitenancy Throttling Manager bundle is deactivated ******* "); } protected void setRegistryService(RegistryService registryService) { Util.setRegistryService(registryService); } protected void unsetRegistryService(RegistryService registryService) { Util.setRegistryService(null); } protected void setRealmService(RealmService realmService) { Util.setRealmService(realmService); } protected void unsetRealmService(RealmService realmService) { Util.setRealmService(null); } protected void setBillingManager(BillingManager billingManager) { log.debug("Receiving billingManager service"); 
Util.setBillingManager(billingManager); } protected void unsetBillingManager(BillingManager billingManager) { log.debug("Halting billingManager service"); Util.setBillingManager(null); } protected void setRuleEngineConfigService(RuleEngineConfigService ruleEngineConfigService) { Util.setRuleEngineConfigService(ruleEngineConfigService); } protected void unsetRuleEngineConfigService(RuleEngineConfigService ruleEngineConfigService) { // we are not dynamically removing schedule helpers } protected void setTenantUsageRetriever(TenantUsageRetriever tenantUsageRetriever) { log.debug("Setting Tenant Usage Retriever service"); Util.setTenantUsageRetriever(tenantUsageRetriever); } protected void unsetTenantUsageRetriever(TenantUsageRetriever tenantUsageRetriever) { log.debug("Unsetting Tenant Usage Retriever service"); Util.setBillingManager(null); } protected void setMultitenancyBillingInfo(MultitenancyBillingInfo mtBillingInfo) { log.debug("Setting MT billing info service"); Util.setMultitenancyBillingInfo(mtBillingInfo); } protected void unsetMultitenancyBillingInfo(MultitenancyBillingInfo mtBillingInfo) { log.debug("Unsetting MT billing info service"); Util.setMultitenancyBillingInfo(null); } }
apache-2.0
graphhopper/graphhopper
core/src/test/java/com/graphhopper/util/GHUtilityTest.java
7080
/*
 *  Licensed to GraphHopper GmbH under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper GmbH licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.util;

import com.graphhopper.coll.GHIntLongHashMap;
import com.graphhopper.routing.util.AllEdgesIterator;
import com.graphhopper.routing.util.CarFlagEncoder;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.FlagEncoder;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.GraphBuilder;
import com.graphhopper.storage.NodeAccess;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Tests for {@link GHUtility}: DFS-based graph sorting and edge-key helpers.
 *
 * @author Peter Karich
 */
public class GHUtilityTest {
    private final FlagEncoder carEncoder = new CarFlagEncoder();
    private final EncodingManager encodingManager = EncodingManager.create(carEncoder);

    // Fresh empty graph using the car encoding.
    Graph createGraph() {
        return new GraphBuilder(encodingManager).create();
    }

    // Builds a small graph whose node numbering is deliberately NOT in
    // DFS order, so sortDFS has something to reorder. Rough layout:
    // 7      8\
    // | \    | 2
    // |  5   | |
    // 3    4 | |
    //   6     \1
    //      ______/
    // 0/
    Graph initUnsorted(Graph g, FlagEncoder encoder) {
        NodeAccess na = g.getNodeAccess();
        na.setNode(0, 0, 1);
        na.setNode(1, 2.5, 4.5);
        na.setNode(2, 4.5, 4.5);
        na.setNode(3, 3, 0.5);
        na.setNode(4, 2.8, 2.8);
        na.setNode(5, 4.2, 1.6);
        na.setNode(6, 2.3, 2.2);
        na.setNode(7, 5, 1.5);
        na.setNode(8, 4.6, 4);
        GHUtility.setSpeed(60, true, true, encoder, g.edge(8, 2).setDistance(0.5));
        // Note: 7->3 is one-way (forward only).
        GHUtility.setSpeed(60, true, false, encoder, g.edge(7, 3).setDistance(2.1));
        GHUtility.setSpeed(60, true, true, encoder, g.edge(1, 0).setDistance(3.9));
        GHUtility.setSpeed(60, true, true, encoder, g.edge(7, 5).setDistance(0.7));
        GHUtility.setSpeed(60, true, true, encoder, g.edge(1, 2).setDistance(1.9));
        GHUtility.setSpeed(60, true, true, encoder, g.edge(8, 1).setDistance(2.05));
        return g;
    }

    // Sums the Euclidean length of every edge's full geometry; used to verify
    // a sorted copy kept all edges (including tower-node coordinates).
    double getLengthOfAllEdges(Graph graph) {
        double distance = 0;
        DistanceCalc calc = new DistanceCalcEuclidean();
        AllEdgesIterator iter = graph.getAllEdges();
        while (iter.next()) {
            // This is meant to verify that all of the same edges (including tower nodes)
            // are included in the copied graph. Can not use iter.getDistance() since it
            // does not verify new geometry. See #1732
            distance += calc.calcDistance(iter.fetchWayGeometry(FetchMode.ALL));
        }
        return distance;
    }

    @Test
    public void testSort() {
        Graph g = initUnsorted(createGraph(), carEncoder);
        Graph newG = GHUtility.sortDFS(g, createGraph());
        // Sorting must preserve node and edge counts...
        assertEquals(g.getNodes(), newG.getNodes());
        assertEquals(g.getEdges(), newG.getEdges());
        NodeAccess na = newG.getNodeAccess();
        // ...and renumber nodes in DFS order. Each latitude below identifies
        // which ORIGINAL node (trailing comment) ended up at the new id.
        assertEquals(0, na.getLat(0), 1e-4); // 0
        assertEquals(2.5, na.getLat(1), 1e-4); // 1
        assertEquals(4.5, na.getLat(2), 1e-4); // 2
        assertEquals(4.6, na.getLat(3), 1e-4); // 8
        assertEquals(3.0, na.getLat(4), 1e-4); // 3
        assertEquals(5.0, na.getLat(5), 1e-4); // 7
        assertEquals(4.2, na.getLat(6), 1e-4); // 5
        // Total geometry length must survive the copy (see getLengthOfAllEdges).
        assertEquals(getLengthOfAllEdges(g), getLengthOfAllEdges(newG), 1e-4);

        // 0 => 1
        assertEquals(0, newG.getEdgeIteratorState(0, Integer.MIN_VALUE).getAdjNode());
        assertEquals(1, newG.getEdgeIteratorState(0, Integer.MIN_VALUE).getBaseNode());

        // 1 => 3 (was 8)
        assertEquals(1, newG.getEdgeIteratorState(1, Integer.MIN_VALUE).getAdjNode());
        assertEquals(3, newG.getEdgeIteratorState(1, Integer.MIN_VALUE).getBaseNode());

        // 2 => 1
        assertEquals(2, newG.getEdgeIteratorState(2, Integer.MIN_VALUE).getAdjNode());
        assertEquals(1, newG.getEdgeIteratorState(2, Integer.MIN_VALUE).getBaseNode());
    }

    @Test
    public void testSortDirected() {
        // Smoke test: sortDFS must not fail on one-way edges pointing both
        // towards and away from the start node.
        Graph g = createGraph();
        NodeAccess na = g.getNodeAccess();
        na.setNode(0, 0, 1);
        na.setNode(1, 2.5, 2);
        na.setNode(2, 3.5, 3);
        GHUtility.setSpeed(60, true, false, carEncoder, g.edge(0, 1).setDistance(1.1));
        GHUtility.setSpeed(60, true, false, carEncoder, g.edge(2, 1).setDistance(1.1));
        GHUtility.sortDFS(g, createGraph());
    }

    @Test
    public void testEdgeStuff() {
        // Edge key = edgeId * 2 (+1 when traversed against storage direction).
        assertEquals(6, GHUtility.createEdgeKey(1, 2, 3, false));
        assertEquals(7, GHUtility.createEdgeKey(2, 1, 3, false));
        assertEquals(7, GHUtility.createEdgeKey(1, 2, 3, true));
        assertEquals(6, GHUtility.createEdgeKey(2, 1, 3, true));

        assertEquals(8, GHUtility.createEdgeKey(1, 2, 4, false));
        assertEquals(9, GHUtility.createEdgeKey(2, 1, 4, false));

        // Loop edges (base == adj) always use the even key.
        assertEquals(6, GHUtility.createEdgeKey(1, 1, 3, false));
        assertEquals(6, GHUtility.createEdgeKey(1, 1, 3, true));

        // Keys of the same edge match regardless of direction bit; keys of
        // different edges never match.
        assertTrue(GHUtility.isSameEdgeKeys(GHUtility.createEdgeKey(1, 2, 4, false), GHUtility.createEdgeKey(1, 2, 4, false)));
        assertTrue(GHUtility.isSameEdgeKeys(GHUtility.createEdgeKey(2, 1, 4, false), GHUtility.createEdgeKey(1, 2, 4, false)));
        assertFalse(GHUtility.isSameEdgeKeys(GHUtility.createEdgeKey(1, 2, 4, false), GHUtility.createEdgeKey(1, 2, 5, false)));
    }

    @Test
    public void testZeroValue() {
        // Documents hppc map semantics around the value 0: get() on a missing
        // key also returns 0, so containsKey must be used to disambiguate.
        GHIntLongHashMap map1 = new GHIntLongHashMap();
        assertFalse(map1.containsKey(0));
        // assertFalse(map1.containsValue(0));
        map1.put(0, 3);
        map1.put(1, 0);
        map1.put(2, 1);

        // assertTrue(map1.containsValue(0));
        assertEquals(3, map1.get(0));
        assertEquals(0, map1.get(1));
        assertEquals(1, map1.get(2));

        // instead of assertEquals(-1, map1.get(3)); with hppc we have to check before:
        assertTrue(map1.containsKey(0));

        // trove4j behaviour was to return -1 if non existing:
        // TIntLongHashMap map2 = new TIntLongHashMap(100, 0.7f, -1, -1);
        // assertFalse(map2.containsKey(0));
        // assertFalse(map2.containsValue(0));
        // map2.add(0, 3);
        // map2.add(1, 0);
        // map2.add(2, 1);
        // assertTrue(map2.containsKey(0));
        // assertTrue(map2.containsValue(0));
        // assertEquals(3, map2.get(0));
        // assertEquals(0, map2.get(1));
        // assertEquals(1, map2.get(2));
        // assertEquals(-1, map2.get(3));
    }
}
apache-2.0