repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
krishnanand/test-libraries-for-java
src/main/java/com/google/common/testing/junit4/TearDownMethodRule.java
1145
// Copyright 2010 Google Inc. All Rights Reserved. package com.google.common.testing.junit4; import com.google.common.testing.TearDown; import com.google.common.testing.TearDownAccepter; import com.google.common.testing.TearDownStack; import org.junit.rules.MethodRule; import org.junit.runners.model.FrameworkMethod; import org.junit.runners.model.Statement; /** * See {@link TearDownTestCase}. * * @author Luiz-Otavio "Z" Zorzella */ public final class TearDownMethodRule implements MethodRule, TearDownAccepter { final TearDownStack stack = new TearDownStack(); /** * Registers a TearDown implementor which will be run after the test execution. */ @Override public final void addTearDown(TearDown tearDown) { stack.addTearDown(tearDown); } /** * Don't call this method directly -- it fullfils the {@link MethodRule} * interface. */ @Override public Statement apply(final Statement base, FrameworkMethod method, Object target) { return new Statement() { @Override public void evaluate() throws Throwable { base.evaluate(); stack.runTearDown(); } }; } }
apache-2.0
kwon37xi/freemarker-dynamic-ql-builder
src/main/java/kr/pe/kwonnam/freemarkerdynamicqlbuilder/DynamicQueryImpl.java
1763
package kr.pe.kwonnam.freemarkerdynamicqlbuilder;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * Default {@link DynamicQuery} implementation: holds a rendered query string
 * together with its positional parameters and can bind them to a
 * {@link PreparedStatement}.
 */
public class DynamicQueryImpl implements DynamicQuery {
    private String queryString;
    private List<Object> queryParameters;

    public DynamicQueryImpl(String queryString, List<Object> queryParameters) {
        this.queryString = queryString;
        // Normalise a missing parameter list to an empty one so callers
        // never have to deal with null.
        this.queryParameters = (queryParameters != null) ? queryParameters : new ArrayList<Object>();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getQueryString() {
        return queryString;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<Object> getQueryParameters() {
        return queryParameters;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Object[] getQueryParameterArray() {
        return queryParameters.toArray(new Object[0]);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void bindParameters(PreparedStatement preparedStatement) throws SQLException {
        if (preparedStatement == null) {
            throw new IllegalArgumentException("preparedStatement must not be null.");
        }
        // JDBC parameter indexes are 1-based while the list is 0-based.
        final int count = queryParameters.size();
        for (int i = 0; i < count; i++) {
            preparedStatement.setObject(i + 1, queryParameters.get(i));
        }
    }

    @Override
    public String toString() {
        return "DynamicQueryImpl{"
                + "queryString='" + queryString + '\''
                + ", queryParameters=" + queryParameters
                + '}';
    }
}
apache-2.0
eyal-lezmy/Android-DataLib
Android-DataLib-Sample-Netflix/src/fr/eyal/datalib/sample/netflix/data/model/filmography/Filmography_item.java
586
// Start of user code fr.eyal.datalib.sample.netflix.data.model.filmography.Filmography_item. DO NOT MODIFY THE GENERATED COMMENTS package fr.eyal.datalib.sample.netflix.data.model.filmography; import android.os.Parcel; public class Filmography_item extends Filmography_itemBase { private static final String TAG = Filmography_item.class.getSimpleName(); public Filmography_item() { super(); } public Filmography_item(final Parcel in) { super(in); } public Filmography_item(final long id) { super(id); } } // End of user code
apache-2.0
KeithXiaoY/QQKeithXiaoY
app/src/main/java/com/it/keithxiaoy/qqkeithxiaoy/util/ToastUtils.java
469
package com.it.keithxiaoy.qqkeithxiaoy.util;

import android.content.Context;
import android.widget.Toast;

/**
 * Created by xiaoY on 2017/3/2.
 *
 * Toast helper that reuses a single cached Toast: rapid successive calls
 * replace the visible text instead of queueing one toast per call.
 */
public class ToastUtils {

    // Cached singleton Toast instance, created lazily on first use.
    private static Toast sToast;

    public static void showToast(Context context, String msg) {
        if (sToast == null) {
            // Use the application context so this static field does not
            // retain an Activity instance.
            Context appContext = context.getApplicationContext();
            sToast = Toast.makeText(appContext, msg, Toast.LENGTH_SHORT);
        }
        sToast.setText(msg);
        sToast.show();
    }
}
apache-2.0
mingxin6/incu
inc2_tw2/src/com/model/Administrator.java
1119
package com.model;

/**
 * Review-committee member (審查委員): a simple bean carrying the member's
 * identity and contact details.
 */
public class Administrator extends A {

    String uid = "";
    String name = "";
    String email = "";
    String phone = "";
    String unit = "";
    String title = "";
    String remark = "";

    public String getUid() { return uid; }

    public void setUid(String uid) { this.uid = uid; }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }

    public String getEmail() { return email; }

    public void setEmail(String email) { this.email = email; }

    public String getPhone() { return phone; }

    public void setPhone(String phone) { this.phone = phone; }

    public String getUnit() { return unit; }

    public void setUnit(String unit) { this.unit = unit; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getRemark() { return remark; }

    public void setRemark(String remark) { this.remark = remark; }

    public static void main(String[] args) {
        // Intentionally empty in the original; kept for interface compatibility.
    }
}
apache-2.0
mrDarkHouse/GDefence
core/src/com/darkhouse/gdefence/Model/Level/HealthBar.java
2484
package com.darkhouse.gdefence.Model.Level; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.scenes.scene2d.Group; import com.badlogic.gdx.scenes.scene2d.ui.*; import com.badlogic.gdx.utils.Align; import com.darkhouse.gdefence.GDefence; import com.darkhouse.gdefence.Helpers.AssetLoader; import com.darkhouse.gdefence.Helpers.FontLoader; import com.darkhouse.gdefence.Level.Mob.Mob; import com.darkhouse.gdefence.Screens.LevelMap; public class HealthBar extends WidgetGroup{ protected ProgressBar healthBar; protected Label text; private int width; private int height; private int x; private int y; public HealthBar(int width, int height, int x, int y) { this.width = width; this.height = height; this.x = x; this.y = y; } public void init(){ initBar(); healthBar.getStyle().background.setMinHeight(height); healthBar.getStyle().knobBefore.setMinHeight(height - 2); healthBar.setPosition(x, y); //setPosition(Gdx.graphics.getWidth() - expBarSize[0], userlevelButton.getY() - expBarSize[1] - 4); healthBar.setSize(width, height); healthBar.setValue(LevelMap.getLevel().getHealthNumber()); healthBar.setAnimateDuration(0.5f); addActor(healthBar); //add(healthBar); initLabel(); } protected void initBar(){ healthBar = new ProgressBar(0, LevelMap.getLevel().getMaxHP(), 0.5f, false, /*GDefence.getInstance().assetLoader.getExpBarSkin()*/GDefence.getInstance().assetLoader.getSkin(), "health-bar"); } protected void initLabel(){ text = new Label(LevelMap.getLevel().getHealthNumber() + "/" + LevelMap.getLevel().getMaxHP(), FontLoader.generateStyle(0, 30, Color.BLACK)); text.setPosition(healthBar.getX() + healthBar.getWidth()/2 - text.getWidth()/2, healthBar.getY() + healthBar.getHeight()/2 - text.getHeight()/2); addActor(text); //add(text); } public void update(){ healthBar.setValue(LevelMap.getLevel().getHealthNumber()); text.setText(LevelMap.getLevel().getHealthNumber() + "/" + LevelMap.getLevel().getMaxHP()); } @Override public void act(float delta) { super.act(delta); // 
healthBar.setValue(LevelMap.getLevel().getHealthNumber()); // text.setText(LevelMap.getLevel().getHealthNumber() + "/" + LevelMap.getLevel().getMaxHP()); } }
apache-2.0
ramtej/Qi4j.Repo.4.Sync
core/api/src/main/java/org/qi4j/api/geometry/TPolygon.java
4226
/*
 * Copyright (c) 2014, Jiri Jetmar. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.qi4j.api.geometry;

import org.qi4j.api.common.Optional;
import org.qi4j.api.geometry.internal.Coordinate;
import org.qi4j.api.geometry.internal.TGeometry;
import org.qi4j.api.geometry.internal.TLinearRing;
import org.qi4j.api.geometry.internal.TShape;
import org.qi4j.api.injection.scope.Structure;
import org.qi4j.api.injection.scope.This;
import org.qi4j.api.mixin.Mixins;
import org.qi4j.api.property.Property;
import org.qi4j.api.structure.Module;

import java.util.ArrayList;
import java.util.List;

/**
 * A polygon geometry: one exterior shell ring plus zero or more hole rings.
 */
@Mixins(TPolygon.Mixin.class)
public interface TPolygon extends TShape, TGeometry {

    Property<TLinearRing> shell();

    @Optional
    Property<List<TLinearRing>> holes();

    TPolygon of(TLinearRing shell);

    TPolygon of(TLinearRing shell, @Optional TLinearRing... holes);

    TPolygon withHole(TLinearRing hole);

    TPolygon withHoles(@Optional TLinearRing... holes);

    boolean isEmpty();

    public abstract class Mixin implements TPolygon {

        @Structure
        Module module;

        @This
        TPolygon self;

        /** Lazily initialises the holes list and the geometry-type tag. */
        private void init() {
            if (self.holes().get() == null) {
                List<TLinearRing> ring = new ArrayList<>();
                self.holes().set(ring);
                // BUG FIX: the original set TGEOMETRY_TYPE.POINT here — an
                // apparent copy-paste from the point mixin. This is a polygon.
                self.geometryType().set(TGEOMETRY_TYPE.POLYGON);
            }
        }

        public TPolygon of(TLinearRing shell) {
            return of(shell, null);
        }

        public TPolygon of(TLinearRing shell, TLinearRing... holes) {
            init();
            if (shell != null) {
                self.shell().set(shell);
            }
            withHoles(holes);
            self.geometryType().set(TGEOMETRY_TYPE.POLYGON);
            return self;
        }

        /** Appends one hole ring; a null hole is silently ignored. */
        public TPolygon withHole(TLinearRing hole) {
            if (hole != null) self.holes().get().add(hole);
            return self;
        }

        /** Appends each non-null hole ring; a null/empty array is a no-op. */
        public TPolygon withHoles(TLinearRing... holes) {
            if (holes != null && holes.length != 0) {
                for (TLinearRing hole : holes) withHole(hole);
            }
            return self;
        }

        /**
         * Returns the shell coordinates followed by each hole's coordinates,
         * flattened into one array.
         */
        @Override
        public Coordinate[] getCoordinates() {
            if (isEmpty()) {
                return new Coordinate[]{};
            }
            Coordinate[] coordinates = new Coordinate[getNumPoints()];
            Coordinate[] shellCoordinates = self.shell().get().getCoordinates();
            System.arraycopy(shellCoordinates, 0, coordinates, 0, shellCoordinates.length);
            int offset = shellCoordinates.length;
            for (TLinearRing hole : self.holes().get()) {
                Coordinate[] holeCoordinates = hole.getCoordinates();
                System.arraycopy(holeCoordinates, 0, coordinates, offset, holeCoordinates.length);
                offset += holeCoordinates.length;
            }
            return coordinates;
        }

        /** A polygon is empty when it has no (non-empty) shell ring. */
        public boolean isEmpty() {
            return (self.shell() == null) || (self.shell().get() == null) || (self.shell().get().isEmpty());
        }

        /** Total point count over the shell and all holes. */
        public int getNumPoints() {
            int numPoints = self.shell().get().getNumPoints();
            for (TLinearRing hole : self.holes().get()) {
                numPoints += hole.getNumPoints();
            }
            return numPoints;
        }
    }
}
apache-2.0
chunInsane/Teaching-Assistance
src/main/java/cn/edu/nuc/acmicpc/service/DepartmentService.java
649
package cn.edu.nuc.acmicpc.service;

import cn.edu.nuc.acmicpc.dto.DepartmentDto;

import java.util.List;

/**
 * Created with IDEA
 * User: chuninsane
 * Date: 2016/3/8
 * Department service interface.
 */
public interface DepartmentService {

    /**
     * Creates a new department record.
     *
     * @param name name of the department to create
     * @return id of the created department
     */
    Long createDepartment(String name);

    /**
     * Gets a department name by department id.
     *
     * @param departmentId id of the department to look up
     * @return the matching department's name
     */
    String getDepartmentName(Long departmentId);

    /**
     * Gets all departments.
     *
     * @return list of all departments
     */
    List<DepartmentDto> getDepartments();
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/PutSchemaVersionMetadataResult.java
14288
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

/**
 * Result object for the Glue PutSchemaVersionMetadata operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/PutSchemaVersionMetadata" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PutSchemaVersionMetadataResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The Amazon Resource Name (ARN) for the schema. */
    private String schemaArn;
    /** The name for the schema. */
    private String schemaName;
    /** The name for the registry. */
    private String registryName;
    /** The latest version of the schema. */
    private Boolean latestVersion;
    /** The version number of the schema. */
    private Long versionNumber;
    /** The unique version ID of the schema version. */
    private String schemaVersionId;
    /** The metadata key. */
    private String metadataKey;
    /** The value of the metadata key. */
    private String metadataValue;

    /** @param schemaArn The Amazon Resource Name (ARN) for the schema. */
    public void setSchemaArn(String schemaArn) {
        this.schemaArn = schemaArn;
    }

    /** @return The Amazon Resource Name (ARN) for the schema. */
    public String getSchemaArn() {
        return this.schemaArn;
    }

    /** Fluent variant of {@link #setSchemaArn(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withSchemaArn(String schemaArn) {
        setSchemaArn(schemaArn);
        return this;
    }

    /** @param schemaName The name for the schema. */
    public void setSchemaName(String schemaName) {
        this.schemaName = schemaName;
    }

    /** @return The name for the schema. */
    public String getSchemaName() {
        return this.schemaName;
    }

    /** Fluent variant of {@link #setSchemaName(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withSchemaName(String schemaName) {
        setSchemaName(schemaName);
        return this;
    }

    /** @param registryName The name for the registry. */
    public void setRegistryName(String registryName) {
        this.registryName = registryName;
    }

    /** @return The name for the registry. */
    public String getRegistryName() {
        return this.registryName;
    }

    /** Fluent variant of {@link #setRegistryName(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withRegistryName(String registryName) {
        setRegistryName(registryName);
        return this;
    }

    /** @param latestVersion The latest version of the schema. */
    public void setLatestVersion(Boolean latestVersion) {
        this.latestVersion = latestVersion;
    }

    /** @return The latest version of the schema. */
    public Boolean getLatestVersion() {
        return this.latestVersion;
    }

    /** Fluent variant of {@link #setLatestVersion(Boolean)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withLatestVersion(Boolean latestVersion) {
        setLatestVersion(latestVersion);
        return this;
    }

    /** Boolean-style accessor; same value as {@link #getLatestVersion()}. */
    public Boolean isLatestVersion() {
        return this.latestVersion;
    }

    /** @param versionNumber The version number of the schema. */
    public void setVersionNumber(Long versionNumber) {
        this.versionNumber = versionNumber;
    }

    /** @return The version number of the schema. */
    public Long getVersionNumber() {
        return this.versionNumber;
    }

    /** Fluent variant of {@link #setVersionNumber(Long)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withVersionNumber(Long versionNumber) {
        setVersionNumber(versionNumber);
        return this;
    }

    /** @param schemaVersionId The unique version ID of the schema version. */
    public void setSchemaVersionId(String schemaVersionId) {
        this.schemaVersionId = schemaVersionId;
    }

    /** @return The unique version ID of the schema version. */
    public String getSchemaVersionId() {
        return this.schemaVersionId;
    }

    /** Fluent variant of {@link #setSchemaVersionId(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withSchemaVersionId(String schemaVersionId) {
        setSchemaVersionId(schemaVersionId);
        return this;
    }

    /** @param metadataKey The metadata key. */
    public void setMetadataKey(String metadataKey) {
        this.metadataKey = metadataKey;
    }

    /** @return The metadata key. */
    public String getMetadataKey() {
        return this.metadataKey;
    }

    /** Fluent variant of {@link #setMetadataKey(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withMetadataKey(String metadataKey) {
        setMetadataKey(metadataKey);
        return this;
    }

    /** @param metadataValue The value of the metadata key. */
    public void setMetadataValue(String metadataValue) {
        this.metadataValue = metadataValue;
    }

    /** @return The value of the metadata key. */
    public String getMetadataValue() {
        return this.metadataValue;
    }

    /** Fluent variant of {@link #setMetadataValue(String)}. @return this, for chaining. */
    public PutSchemaVersionMetadataResult withMetadataValue(String metadataValue) {
        setMetadataValue(metadataValue);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSchemaArn() != null)
            sb.append("SchemaArn: ").append(getSchemaArn()).append(",");
        if (getSchemaName() != null)
            sb.append("SchemaName: ").append(getSchemaName()).append(",");
        if (getRegistryName() != null)
            sb.append("RegistryName: ").append(getRegistryName()).append(",");
        if (getLatestVersion() != null)
            sb.append("LatestVersion: ").append(getLatestVersion()).append(",");
        if (getVersionNumber() != null)
            sb.append("VersionNumber: ").append(getVersionNumber()).append(",");
        if (getSchemaVersionId() != null)
            sb.append("SchemaVersionId: ").append(getSchemaVersionId()).append(",");
        if (getMetadataKey() != null)
            sb.append("MetadataKey: ").append(getMetadataKey()).append(",");
        if (getMetadataValue() != null)
            sb.append("MetadataValue: ").append(getMetadataValue());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so the explicit null check is covered.
        if (!(obj instanceof PutSchemaVersionMetadataResult))
            return false;
        PutSchemaVersionMetadataResult other = (PutSchemaVersionMetadataResult) obj;
        // Objects.equals collapses the generated per-field null-xor checks;
        // the field-by-field semantics are unchanged.
        return Objects.equals(getSchemaArn(), other.getSchemaArn())
                && Objects.equals(getSchemaName(), other.getSchemaName())
                && Objects.equals(getRegistryName(), other.getRegistryName())
                && Objects.equals(getLatestVersion(), other.getLatestVersion())
                && Objects.equals(getVersionNumber(), other.getVersionNumber())
                && Objects.equals(getSchemaVersionId(), other.getSchemaVersionId())
                && Objects.equals(getMetadataKey(), other.getMetadataKey())
                && Objects.equals(getMetadataValue(), other.getMetadataValue());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Objects.hashCode(x) == (x == null ? 0 : x.hashCode()), so the
        // resulting values are identical to the generated ternary form.
        hashCode = prime * hashCode + Objects.hashCode(getSchemaArn());
        hashCode = prime * hashCode + Objects.hashCode(getSchemaName());
        hashCode = prime * hashCode + Objects.hashCode(getRegistryName());
        hashCode = prime * hashCode + Objects.hashCode(getLatestVersion());
        hashCode = prime * hashCode + Objects.hashCode(getVersionNumber());
        hashCode = prime * hashCode + Objects.hashCode(getSchemaVersionId());
        hashCode = prime * hashCode + Objects.hashCode(getMetadataKey());
        hashCode = prime * hashCode + Objects.hashCode(getMetadataValue());
        return hashCode;
    }

    @Override
    public PutSchemaVersionMetadataResult clone() {
        try {
            return (PutSchemaVersionMetadataResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-mwaa/src/main/java/com/amazonaws/services/mwaa/model/UpdateEnvironmentResult.java
4536
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mwaa.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

/**
 * Result object for the MWAA UpdateEnvironment operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mwaa-2020-07-01/UpdateEnvironment" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateEnvironmentResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * The Amazon Resource Name (ARN) of the Amazon MWAA environment. For example,
     * <code>arn:aws:airflow:us-east-1:123456789012:environment/MyMWAAEnvironment</code>.
     */
    private String arn;

    /**
     * @param arn
     *        The Amazon Resource Name (ARN) of the Amazon MWAA environment. For example,
     *        <code>arn:aws:airflow:us-east-1:123456789012:environment/MyMWAAEnvironment</code>.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * @return The Amazon Resource Name (ARN) of the Amazon MWAA environment.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * Fluent variant of {@link #setArn(String)}.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateEnvironmentResult withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so the explicit null check is covered.
        if (!(obj instanceof UpdateEnvironmentResult))
            return false;
        UpdateEnvironmentResult other = (UpdateEnvironmentResult) obj;
        // Objects.equals collapses the generated null-xor check; semantics unchanged.
        return Objects.equals(getArn(), other.getArn());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Objects.hashCode(x) == (x == null ? 0 : x.hashCode()); value-identical
        // to the generated ternary form.
        hashCode = prime * hashCode + Objects.hashCode(getArn());
        return hashCode;
    }

    @Override
    public UpdateEnvironmentResult clone() {
        try {
            return (UpdateEnvironmentResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
ops4j/org.ops4j.pax.wicket
samples/blueprint/mount/src/main/java/org/ops4j/pax/wicket/samples/blueprint/mount/internal/ManuallyMountedPage.java
1022
/**
 * Copyright OPS4J
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ops4j.pax.wicket.samples.blueprint.mount.internal;

import org.apache.wicket.markup.html.WebPage;

/**
 * This page is mounted by blueprint but has a similar effect to the
 * {@link org.ops4j.pax.wicket.samples.blueprint.mount.internal.AutomountedPage}, with the difference that it is
 * possible to choose any mount point you like.
 *
 * @author nmw
 * @version $Id: $Id
 */
public class ManuallyMountedPage extends WebPage {
}
apache-2.0
tlhhup/spring-boot
spring-boot-chapter-8/src/main/java/com/tlh/springboot/config/SpringSecurityConfig.java
1673
package com.tlh.springboot.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;

import com.tlh.springboot.service.impl.SysUserServiceImpl;

// Spring Security configuration: registers a custom UserDetailsService with
// BCrypt password hashing, and requires form login for every request.
@Configuration
public class SpringSecurityConfig extends WebSecurityConfigurerAdapter {

    // Custom user lookup, backed by SysUserServiceImpl.
    @Bean
    UserDetailsService customUserDetailsService() {
        return new SysUserServiceImpl();
    }

    // BCrypt hashing for stored passwords.
    @Bean
    PasswordEncoder passwordEncoder(){
        return new BCryptPasswordEncoder();
    }

    // Configure user authentication. (translated from Chinese: 配置用户认证信息)
    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        // Wire the custom user data source and its password encoding scheme.
        auth.userDetailsService(customUserDetailsService()).passwordEncoder(passwordEncoder());
    }

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.authorizeRequests()            // enable request authorization
            .anyRequest().authenticated()   // every request must be authenticated first
            .and()
            .formLogin()                    // form-based login
            .loginPage("/login")            // login page; original note: must be submitted via POST
            .failureUrl("/login?error")     // redirect target on authentication failure
            .permitAll()                    // login endpoints accessible to everyone
            .and()
            .logout()
            .permitAll();
    }
}
apache-2.0
jjeb/kettle-trunk
engine/src/org/pentaho/di/trans/steps/rowgenerator/RowGeneratorMeta.java
17062
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.rowgenerator; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Counter; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepIOMeta; 
import org.pentaho.di.trans.step.StepIOMetaInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

import org.w3c.dom.Node;

/*
 * Created on 4-apr-2003
 */

/**
 * Step metadata for the Row Generator step.
 *
 * <p>Holds the per-field definitions (name, type, format, currency/decimal/grouping
 * symbols, length, precision, "null if" value and the set-empty-string flag) together
 * with the row limit, and (de)serializes them to and from the step XML fragment and
 * the repository. The runtime behaviour lives in {@link RowGenerator}.</p>
 */
public class RowGeneratorMeta extends BaseStepMeta implements StepMetaInterface {
    private static Class<?> PKG = RowGeneratorMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    // Number of rows to generate. Kept as a String because it may hold a variable
    // expression (check() runs it through environmentSubstitute before validating).
    private String rowLimit;

    // Parallel per-field arrays, all indexed by field position; see allocate().
    private String currency[];
    private String decimal[];
    private String group[];
    private String value[]; // the "null if" value; serialized under the tag "nullif"
    private String fieldName[];
    private String fieldType[];
    private String fieldFormat[];
    private int fieldLength[];
    private int fieldPrecision[];

    /** Flag : set empty string **/
    private boolean setEmptyString[];

    public RowGeneratorMeta() {
        super(); // allocate BaseStepMeta
    }

    /**
     * @return Returns the currency.
     */
    public String[] getCurrency() {
        return currency;
    }

    /**
     * @param currency The currency to set.
     */
    public void setCurrency(String[] currency) {
        this.currency = currency;
    }

    /**
     * @return Returns the decimal.
     */
    public String[] getDecimal() {
        return decimal;
    }

    /**
     * @param decimal The decimal to set.
     */
    public void setDecimal(String[] decimal) {
        this.decimal = decimal;
    }

    /**
     * @return Returns the fieldFormat.
     */
    public String[] getFieldFormat() {
        return fieldFormat;
    }

    /**
     * @param fieldFormat The fieldFormat to set.
     */
    public void setFieldFormat(String[] fieldFormat) {
        this.fieldFormat = fieldFormat;
    }

    /**
     * @return Returns the fieldLength.
     */
    public int[] getFieldLength() {
        return fieldLength;
    }

    /**
     * @param fieldLength The fieldLength to set.
     */
    public void setFieldLength(int[] fieldLength) {
        this.fieldLength = fieldLength;
    }

    /**
     * @return Returns the fieldName.
     */
    public String[] getFieldName() {
        return fieldName;
    }

    /**
     * @param fieldName The fieldName to set.
     */
    public void setFieldName(String[] fieldName) {
        this.fieldName = fieldName;
    }

    /**
     * @return Returns the fieldPrecision.
     */
    public int[] getFieldPrecision() {
        return fieldPrecision;
    }

    /**
     * @param fieldPrecision The fieldPrecision to set.
     */
    public void setFieldPrecision(int[] fieldPrecision) {
        this.fieldPrecision = fieldPrecision;
    }

    /**
     * @return Returns the fieldType.
     */
    public String[] getFieldType() {
        return fieldType;
    }

    /**
     * @param fieldType The fieldType to set.
     */
    public void setFieldType(String[] fieldType) {
        this.fieldType = fieldType;
    }

    /**
     * @return Returns the group.
     */
    public String[] getGroup() {
        return group;
    }

    /**
     * @param group The group to set.
     */
    public void setGroup(String[] group) {
        this.group = group;
    }

    /**
     * @return the setEmptyString
     */
    public boolean[] isSetEmptyString() {
        return setEmptyString;
    }

    /**
     * @param setEmptyString the setEmptyString to set
     */
    public void setEmptyString(boolean[] setEmptyString) {
        this.setEmptyString = setEmptyString;
    }

    /**
     * @return Returns the rowLimit.
     */
    public String getRowLimit() {
        return rowLimit;
    }

    /**
     * @param rowLimit The rowLimit to set.
     */
    public void setRowLimit(String rowLimit) {
        this.rowLimit = rowLimit;
    }

    /**
     * @return Returns the value.
     */
    public String[] getValue() {
        return value;
    }

    /**
     * @param value The value to set.
     */
    public void setValue(String[] value) {
        this.value = value;
    }

    /**
     * Loads the step configuration from its XML node; delegates to {@link #readData(Node)}.
     */
    public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException {
        readData(stepnode);
    }

    /**
     * (Re)allocates every parallel per-field array for the given field count.
     * Existing contents are discarded.
     *
     * @param nrfields number of generated fields
     */
    public void allocate(int nrfields) {
        fieldName = new String[nrfields];
        fieldType = new String[nrfields];
        fieldFormat = new String[nrfields];
        fieldLength = new int[nrfields];
        fieldPrecision = new int[nrfields];
        currency = new String[nrfields];
        decimal = new String[nrfields];
        group = new String[nrfields];
        value = new String[nrfields];
        setEmptyString = new boolean[nrfields];
    }

    /**
     * Deep copy: clones the base metadata and element-wise copies all per-field arrays
     * so the clone does not share array instances with this object.
     */
    public Object clone() {
        RowGeneratorMeta retval = (RowGeneratorMeta) super.clone();
        int nrfields = fieldName.length;
        retval.allocate(nrfields);
        for (int i = 0; i < nrfields; i++) {
            retval.fieldName[i] = fieldName[i];
            retval.fieldType[i] = fieldType[i];
            retval.fieldFormat[i] = fieldFormat[i];
            retval.currency[i] = currency[i];
            retval.decimal[i] = decimal[i];
            retval.group[i] = group[i];
            retval.value[i] = value[i];
            retval.fieldLength[i] = fieldLength[i];
            retval.fieldPrecision[i] = fieldPrecision[i];
            retval.setEmptyString[i] = setEmptyString[i];
        }
        return retval;
    }

    /**
     * Parses the per-field settings and the row limit from the step's XML node.
     *
     * @param stepnode the step's XML node
     * @throws KettleXMLException if any error occurs while reading the XML
     */
    private void readData(Node stepnode) throws KettleXMLException {
        try {
            Node fields = XMLHandler.getSubNode(stepnode, "fields");
            int nrfields = XMLHandler.countNodes(fields, "field");
            allocate(nrfields);
            String slength, sprecision;
            for (int i = 0; i < nrfields; i++) {
                Node fnode = XMLHandler.getSubNodeByNr(fields, "field", i);
                fieldName[i] = XMLHandler.getTagValue(fnode, "name");
                fieldType[i] = XMLHandler.getTagValue(fnode, "type");
                fieldFormat[i] = XMLHandler.getTagValue(fnode, "format");
                currency[i] = XMLHandler.getTagValue(fnode, "currency");
                decimal[i] = XMLHandler.getTagValue(fnode, "decimal");
                group[i] = XMLHandler.getTagValue(fnode, "group");
                value[i] = XMLHandler.getTagValue(fnode, "nullif");
                slength = XMLHandler.getTagValue(fnode, "length");
                sprecision = XMLHandler.getTagValue(fnode, "precision");
                // Missing or non-numeric length/precision default to -1 (undefined).
                fieldLength[i] = Const.toInt(slength, -1);
                fieldPrecision[i] = Const.toInt(sprecision, -1);
                String emptyString = XMLHandler.getTagValue(fnode, "set_empty_string");
                setEmptyString[i] = !Const.isEmpty(emptyString) && "Y".equalsIgnoreCase(emptyString);
            }
            // Is there a limit on the number of rows we process?
            rowLimit = XMLHandler.getTagValue(stepnode, "limit");
        } catch (Exception e) {
            throw new KettleXMLException("Unable to load step info from XML", e);
        }
    }

    /**
     * Resets this metadata to its defaults: zero fields and a row limit of "10".
     * NOTE: the loop below prepares locale-aware per-field defaults, but with
     * nrfields fixed at 0 it never executes.
     */
    public void setDefault() {
        int i, nrfields = 0;
        allocate(nrfields);
        DecimalFormat decimalFormat = new DecimalFormat();
        for (i = 0; i < nrfields; i++) {
            fieldName[i] = "field" + i;
            fieldType[i] = "Number";
            fieldFormat[i] = "\u00A40,000,000.00;\u00A4-0,000,000.00";
            fieldLength[i] = 9;
            fieldPrecision[i] = 2;
            currency[i] = decimalFormat.getDecimalFormatSymbols().getCurrencySymbol();
            decimal[i] = new String(new char[] { decimalFormat.getDecimalFormatSymbols().getDecimalSeparator() });
            group[i] = new String(new char[] { decimalFormat.getDecimalFormatSymbols().getGroupingSeparator() });
            value[i] = "-";
            setEmptyString[i] = false;
        }
        rowLimit = "10";
    }

    /**
     * Describes this step's output row layout by building a template row via
     * {@link RowGenerator#buildRow} and merging its row metadata into {@code row}.
     * Any remarks produced while building the row are treated as fatal and
     * rethrown as a single {@link KettleStepException}.
     */
    public void getFields(RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
        try {
            List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
            RowMetaAndData rowMetaAndData = RowGenerator.buildRow(this, remarks, origin);
            if (!remarks.isEmpty()) {
                StringBuffer stringRemarks = new StringBuffer();
                for (CheckResultInterface remark : remarks) {
                    stringRemarks.append(remark.toString()).append(Const.CR);
                }
                throw new KettleStepException(stringRemarks.toString());
            }
            for (ValueMetaInterface valueMeta : rowMetaAndData.getRowMeta().getValueMetaList()) {
                valueMeta.setOrigin(origin);
            }
            row.mergeRowMeta(rowMetaAndData.getRowMeta());
        } catch (Exception e) {
            throw new KettleStepException(e);
        }
    }

    /**
     * Serializes this metadata to the XML fragment stored in the transformation file.
     * Fields whose name is null or empty are skipped.
     */
    public String getXML() {
        StringBuffer retval = new StringBuffer(300);
        retval.append(" <fields>").append(Const.CR);
        for (int i = 0; i < fieldName.length; i++) {
            if (fieldName[i] != null && fieldName[i].length() != 0) {
                retval.append(" <field>").append(Const.CR);
                retval.append(" ").append(XMLHandler.addTagValue("name", fieldName[i]));
                retval.append(" ").append(XMLHandler.addTagValue("type", fieldType[i]));
                retval.append(" ").append(XMLHandler.addTagValue("format", fieldFormat[i]));
                retval.append(" ").append(XMLHandler.addTagValue("currency", currency[i]));
                retval.append(" ").append(XMLHandler.addTagValue("decimal", decimal[i]));
                retval.append(" ").append(XMLHandler.addTagValue("group", group[i]));
                retval.append(" ").append(XMLHandler.addTagValue("nullif", value[i]));
                retval.append(" ").append(XMLHandler.addTagValue("length", fieldLength[i]));
                retval.append(" ").append(XMLHandler.addTagValue("precision", fieldPrecision[i]));
                retval.append(" ").append(XMLHandler.addTagValue("set_empty_string", setEmptyString[i]));
                retval.append(" </field>").append(Const.CR);
            }
        }
        retval.append(" </fields>").append(Const.CR);
        retval.append(" ").append(XMLHandler.addTagValue("limit", rowLimit));
        return retval.toString();
    }

    /**
     * Reads this metadata from the repository. The row limit is first read as an
     * integer attribute; when that is missing or non-positive, the string attribute
     * is read instead so non-numeric (e.g. variable) limit values are preserved.
     *
     * @throws KettleException if the repository cannot be read
     */
    public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException {
        try {
            int nrfields = rep.countNrStepAttributes(id_step, "field_name");
            allocate(nrfields);
            for (int i = 0; i < nrfields; i++) {
                fieldName[i] = rep.getStepAttributeString(id_step, i, "field_name");
                fieldType[i] = rep.getStepAttributeString(id_step, i, "field_type");
                fieldFormat[i] = rep.getStepAttributeString(id_step, i, "field_format");
                currency[i] = rep.getStepAttributeString(id_step, i, "field_currency");
                decimal[i] = rep.getStepAttributeString(id_step, i, "field_decimal");
                group[i] = rep.getStepAttributeString(id_step, i, "field_group");
                value[i] = rep.getStepAttributeString(id_step, i, "field_nullif");
                fieldLength[i] = (int) rep.getStepAttributeInteger(id_step, i, "field_length");
                fieldPrecision[i] = (int) rep.getStepAttributeInteger(id_step, i, "field_precision");
                setEmptyString[i] = rep.getStepAttributeBoolean(id_step, i, "set_empty_string", false);
            }
            long longLimit = rep.getStepAttributeInteger(id_step, "limit");
            if (longLimit <= 0) {
                rowLimit = rep.getStepAttributeString(id_step, "limit");
            } else {
                rowLimit = Long.toString(longLimit);
            }
        } catch (Exception e) {
            throw new KettleException("Unexpected error reading step information from the repository", e);
        }
    }

    /**
     * Writes this metadata to the repository. Fields whose name is null or empty
     * are skipped, mirroring {@link #getXML()}.
     *
     * @throws KettleException if saving fails
     */
    public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException {
        try {
            for (int i = 0; i < fieldName.length; i++) {
                if (fieldName[i] != null && fieldName[i].length() != 0) {
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_name", fieldName[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_type", fieldType[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_format", fieldFormat[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_currency", currency[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_decimal", decimal[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_group", group[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_nullif", value[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_length", fieldLength[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "field_precision", fieldPrecision[i]);
                    rep.saveStepAttribute(id_transformation, id_step, i, "set_empty_string", setEmptyString[i]);
                }
            }
            rep.saveStepAttribute(id_transformation, id_step, "limit", rowLimit);
        } catch (Exception e) {
            throw new KettleException("Unable to save step information to the repository for id_step=" + id_step, e);
        }
    }

    /**
     * Validates the step configuration: a row generator must not have any incoming
     * rows or input connections, and a warning is added when the (variable-substituted)
     * row limit is not a positive number.
     */
    public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) {
        CheckResult cr;
        if (prev != null && prev.size() > 0) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.NoInputStreamsError"), stepMeta);
            remarks.add(cr);
        } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.NoInputStreamOk"), stepMeta);
            remarks.add(cr);
            String strLimit = transMeta.environmentSubstitute(rowLimit);
            if (Const.toLong(strLimit, -1L) <= 0) {
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.WarnNoRows"), stepMeta);
                remarks.add(cr);
            } else {
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.WillReturnRows", strLimit), stepMeta);
                remarks.add(cr);
            }
        }
        // See if we have input streams leading to this step!
        if (input.length > 0) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.NoInputError"), stepMeta);
            remarks.add(cr);
        } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "RowGeneratorMeta.CheckResult.NoInputOk"), stepMeta);
            remarks.add(cr);
        }
    }

    /**
     * Creates the runtime step implementation for this metadata.
     */
    public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans) {
        return new RowGenerator(stepMeta, stepDataInterface, cnr, transMeta, trans);
    }

    /**
     * Creates the per-execution data holder for this step.
     */
    public StepDataInterface getStepData() {
        return new RowGeneratorData();
    }

    /**
     * Returns the Input/Output metadata for this step.
     * The generator step only produces output, does not accept input!
     */
    public StepIOMetaInterface getStepIOMeta() {
        return new StepIOMeta(false, true, false, false, false, false);
    }
}
apache-2.0
DiligentGraphics/DiligentSamples
Android/Tutorial13_ShadowMap/src/main/java/com/diligentengine/tutorial13_shadowmap/Tutorial13NativeActivity.java
1568
/*
 * Copyright 2019-2021 Diligent Graphics LLC
 * Copyright 2015-2019 Egor Yusov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * In no event and under no legal theory, whether in tort (including negligence),
 * contract, or otherwise, unless required by applicable law (such as deliberate
 * and grossly negligent acts) or agreed to in writing, shall any Contributor be
 * liable for any damages, including any direct, indirect, special, incidental,
 * or consequential damages of any character arising as a result of this License or
 * out of the use or inability to use the software (including but not limited to damages
 * for loss of goodwill, work stoppage, computer failure or malfunction, or any and
 * all other commercial damages or losses), even if such Contributor has been advised
 * of the possibility of such damages.
 */

package com.diligentengine.tutorial13_shadowmap;

import com.diligentengine.android.common.NativeActivityBase;

/**
 * Native activity for Tutorial 13 (Shadow Map).
 *
 * <p>Adds no members of its own: all behaviour is inherited from
 * {@link NativeActivityBase}. This empty subclass exists solely to give the
 * tutorial its own concrete activity class (presumably so it can be referenced
 * by name from the app's manifest — confirm against the module configuration).</p>
 */
public class Tutorial13NativeActivity extends NativeActivityBase {
}
apache-2.0
queshaw/dita-ot
src/main/java/org/dita/dost/util/Job.java
36354
/* * This file is part of the DITA Open Toolkit project. * See the accompanying license.txt file for applicable licenses. */ package org.dita.dost.util; import static org.dita.dost.util.Constants.*; import static org.dita.dost.util.URLUtils.*; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Field; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import org.dita.dost.util.Job.FileInfo.Filter; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; /** * Definition of current job. 
* * <p>Instances are thread-safe.</p> * * @since 1.5.4 */ public final class Job { private static final String JOB_FILE = ".job.xml"; private static final String ELEMENT_JOB = "job"; private static final String ATTRIBUTE_KEY = "key"; private static final String ELEMENT_ENTRY = "entry"; private static final String ELEMENT_MAP = "map"; private static final String ELEMENT_SET = "set"; private static final String ELEMENT_STRING = "string"; private static final String ATTRIBUTE_NAME = "name"; private static final String ELEMENT_PROPERTY = "property"; private static final String ELEMENT_FILES = "files"; private static final String ELEMENT_FILE = "file"; private static final String ATTRIBUTE_SRC = "src"; private static final String ATTRIBUTE_URI = "uri"; private static final String ATTRIBUTE_PATH = "path"; private static final String ATTRIBUTE_FORMAT = "format"; private static final String ATTRIBUTE_CHUNKED = "chunked"; private static final String ATTRIBUTE_HAS_CONREF = "has-conref"; private static final String ATTRIBUTE_HAS_KEYREF = "has-keyref"; private static final String ATTRIBUTE_HAS_CODEREF = "has-coderef"; private static final String ATTRIBUTE_RESOURCE_ONLY = "resource-only"; private static final String ATTRIBUTE_TARGET = "target"; private static final String ATTRIBUTE_CONREF_TARGET = "conref-target"; private static final String ATTRIBUTE_NON_CONREF_TARGET = "non-conref-target"; private static final String ATTRIBUTE_CONREF_PUSH = "conrefpush"; private static final String ATTRIBUTE_SUBJECT_SCHEME = "subjectscheme"; private static final String ATTRIBUTE_HAS_LINK = "has-link"; private static final String ATTRIBUTE_COPYTO_SOURCE_LIST = "copy-to-source"; private static final String ATTRIBUTE_OUT_DITA_FILES_LIST = "out-dita"; private static final String ATTRIBUTE_CHUNKED_DITAMAP_LIST = "chunked-ditamap"; private static final String ATTRIBUTE_FLAG_IMAGE_LIST = "flag-image"; private static final String ATTRIBUTE_SUBSIDIARY_TARGET_LIST = "subtarget"; private static final 
String ATTRIBUTE_CHUNK_TOPIC_LIST = "skip-chunk"; private static final String PROPERTY_OUTER_CONTROL = ANT_INVOKER_EXT_PARAM_OUTTERCONTROL; private static final String PROPERTY_ONLY_TOPIC_IN_MAP = ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP; private static final String PROPERTY_GENERATE_COPY_OUTER = ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER; private static final String PROPERTY_OUTPUT_DIR = ANT_INVOKER_EXT_PARAM_OUTPUTDIR; /** Deprecated since 2.2 */ @Deprecated private static final String PROPERTY_INPUT_MAP = "InputMapDir"; private static final String PROPERTY_INPUT_MAP_URI = "InputMapDir.uri"; /** File name for key definition file */ public static final String KEYDEF_LIST_FILE = "keydef.xml"; /** File name for key definition file */ public static final String SUBJECT_SCHEME_KEYDEF_LIST_FILE = "schemekeydef.xml"; /** File name for temporary input file list file */ public static final String USER_INPUT_FILE_LIST_FILE = "usr.input.file.list"; /** Map of serialization attributes to file info boolean fields. 
*/ private static final Map<String, Field> attrToFieldMap= new HashMap<>(); static { try { attrToFieldMap.put(ATTRIBUTE_CHUNKED, FileInfo.class.getField("isChunked")); attrToFieldMap.put(ATTRIBUTE_HAS_LINK, FileInfo.class.getField("hasLink")); attrToFieldMap.put(ATTRIBUTE_HAS_CONREF, FileInfo.class.getField("hasConref")); attrToFieldMap.put(ATTRIBUTE_HAS_KEYREF, FileInfo.class.getField("hasKeyref")); attrToFieldMap.put(ATTRIBUTE_HAS_CODEREF, FileInfo.class.getField("hasCoderef")); attrToFieldMap.put(ATTRIBUTE_RESOURCE_ONLY, FileInfo.class.getField("isResourceOnly")); attrToFieldMap.put(ATTRIBUTE_TARGET, FileInfo.class.getField("isTarget")); attrToFieldMap.put(ATTRIBUTE_CONREF_TARGET, FileInfo.class.getField("isConrefTarget")); attrToFieldMap.put(ATTRIBUTE_NON_CONREF_TARGET, FileInfo.class.getField("isNonConrefTarget")); attrToFieldMap.put(ATTRIBUTE_CONREF_PUSH, FileInfo.class.getField("isConrefPush")); attrToFieldMap.put(ATTRIBUTE_SUBJECT_SCHEME, FileInfo.class.getField("isSubjectScheme")); attrToFieldMap.put(ATTRIBUTE_COPYTO_SOURCE_LIST, FileInfo.class.getField("isCopyToSource")); attrToFieldMap.put(ATTRIBUTE_OUT_DITA_FILES_LIST, FileInfo.class.getField("isOutDita")); attrToFieldMap.put(ATTRIBUTE_FLAG_IMAGE_LIST, FileInfo.class.getField("isFlagImage")); attrToFieldMap.put(ATTRIBUTE_SUBSIDIARY_TARGET_LIST, FileInfo.class.getField("isSubtarget")); attrToFieldMap.put(ATTRIBUTE_CHUNK_TOPIC_LIST, FileInfo.class.getField("isSkipChunk")); } catch (final NoSuchFieldException e) { throw new RuntimeException(e); } } private final Map<String, Object> prop; public final File tempDir; private final File jobFile; private final ConcurrentMap<URI, FileInfo> files = new ConcurrentHashMap<>(); private long lastModified; /** * Create new job configuration instance. Initialise by reading temporary configuration files. 
* * @param tempDir temporary directory * @throws IOException if reading configuration files failed * @throws IllegalStateException if configuration files are missing */ public Job(final File tempDir) throws IOException { if (!tempDir.isAbsolute()) { throw new IllegalArgumentException("Temporary directory " + tempDir + " must be absolute"); } this.tempDir = tempDir; jobFile = new File(tempDir, JOB_FILE); prop = new HashMap<>(); read(); } /** * Test if serialized configuration file has been updated. * @param tempDir job configuration directory * @return {@code true} if configuration file has been update after this object has been created or serialized */ public boolean isStale(final File tempDir) { return jobFile.lastModified() > lastModified; } /** * Read temporary configuration files. If configuration files are not found, * assume an empty job object is being created. * * @throws IOException if reading configuration files failed * @throws IllegalStateException if configuration files are missing */ private void read() throws IOException { lastModified = jobFile.lastModified(); if (jobFile.exists()) { InputStream in = null; try { final XMLReader parser = XMLUtils.getXMLReader(); parser.setContentHandler(new JobHandler(prop, files)); in = new FileInputStream(jobFile); parser.parse(new InputSource(in)); } catch (final SAXException e) { throw new IOException("Failed to read job file: " + e.getMessage()); } finally { if (in != null) { in.close(); } } } else { // defaults prop.put(PROPERTY_GENERATE_COPY_OUTER, Generate.NOT_GENERATEOUTTER.toString()); prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(false)); prop.put(PROPERTY_OUTER_CONTROL, OutterControl.WARN.toString()); } } private final static class JobHandler extends DefaultHandler { private final Map<String, Object> prop; private final Map<URI, FileInfo> files; private StringBuilder buf; private String name; private String key; private Set<String> set; private Map<String, String> map; JobHandler(final Map<String, 
Object> prop, final Map<URI, FileInfo> files) { this.prop = prop; this.files = files; } @Override public void characters(final char[] ch, final int start, final int length) throws SAXException { if (buf != null) { buf.append(ch, start, length); } } @Override public void ignorableWhitespace(final char[] ch, final int start, final int length) throws SAXException { if (buf != null) { buf.append(ch, start, length); } } @Override public void startElement(final String ns, final String localName, final String qName, final Attributes atts) throws SAXException { final String n = localName != null ? localName : qName; switch (n) { case ELEMENT_PROPERTY: name = atts.getValue(ATTRIBUTE_NAME); break; case ELEMENT_STRING: buf = new StringBuilder(); break; case ELEMENT_SET: set = new HashSet<>(); break; case ELEMENT_MAP: map = new HashMap<>(); break; case ELEMENT_ENTRY: key = atts.getValue(ATTRIBUTE_KEY); break; case ELEMENT_FILE: final URI src = toURI(atts.getValue(ATTRIBUTE_SRC)); final URI uri = toURI(atts.getValue(ATTRIBUTE_URI)); final File path = toFile(atts.getValue(ATTRIBUTE_PATH)); FileInfo i; if (uri != null) { i = new FileInfo(src, uri, toFile(uri)); } else { i = new FileInfo(src, toURI(path), path); } i.format = atts.getValue(ATTRIBUTE_FORMAT); try { for (Map.Entry<String, Field> e : attrToFieldMap.entrySet()) { e.getValue().setBoolean(i, Boolean.parseBoolean(atts.getValue(e.getKey()))); } } catch (final IllegalAccessException ex) { throw new RuntimeException(ex); } files.put(i.uri, i); break; } } @Override public void endElement(final String uri, final String localName, final String qName) throws SAXException { final String n = localName != null ? 
localName : qName; switch (n) { case ELEMENT_PROPERTY: name = null; break; case ELEMENT_STRING: if (set != null) { set.add(buf.toString()); } else if (map != null) { map.put(key, buf.toString()); } else { prop.put(name, buf.toString()); } buf = null; break; case ELEMENT_SET: prop.put(name, set); set = null; break; case ELEMENT_MAP: prop.put(name, map); map = null; break; case ELEMENT_ENTRY: key = null; break; } } } /** * Store job into temporary configuration files. * * @throws IOException if writing configuration files failed */ public void write() throws IOException { OutputStream outStream = null; XMLStreamWriter out = null; try { outStream = new FileOutputStream(jobFile); out = XMLOutputFactory.newInstance().createXMLStreamWriter(outStream, "UTF-8"); out.writeStartDocument(); out.writeStartElement(ELEMENT_JOB); for (final Map.Entry<String, Object> e: prop.entrySet()) { out.writeStartElement(ELEMENT_PROPERTY); out.writeAttribute(ATTRIBUTE_NAME, e.getKey()); if (e.getValue() instanceof String) { out.writeStartElement(ELEMENT_STRING); out.writeCharacters(e.getValue().toString()); out.writeEndElement(); //string } else if (e.getValue() instanceof Set) { out.writeStartElement(ELEMENT_SET); final Set<?> s = (Set<?>) e.getValue(); for (final Object o: s) { out.writeStartElement(ELEMENT_STRING); out.writeCharacters(o.toString()); out.writeEndElement(); //string } out.writeEndElement(); //set } else if (e.getValue() instanceof Map) { out.writeStartElement(ELEMENT_MAP); final Map<?, ?> s = (Map<?, ?>) e.getValue(); for (final Map.Entry<?, ?> o: s.entrySet()) { out.writeStartElement(ELEMENT_ENTRY); out.writeAttribute(ATTRIBUTE_KEY, o.getKey().toString()); out.writeStartElement(ELEMENT_STRING); out.writeCharacters(o.getValue().toString()); out.writeEndElement(); //string out.writeEndElement(); //entry } out.writeEndElement(); //string } else { out.writeStartElement(e.getValue().getClass().getName()); out.writeCharacters(e.getValue().toString()); out.writeEndElement(); 
//string } out.writeEndElement(); //property } out.writeStartElement(ELEMENT_FILES); for (final FileInfo i: files.values()) { out.writeStartElement(ELEMENT_FILE); if (i.src != null) { out.writeAttribute(ATTRIBUTE_SRC, i.src.toString()); } out.writeAttribute(ATTRIBUTE_URI, i.uri.toString()); out.writeAttribute(ATTRIBUTE_PATH, i.file.getPath()); if (i.format != null) { out.writeAttribute(ATTRIBUTE_FORMAT, i.format); } try { for (Map.Entry<String, Field> e: attrToFieldMap.entrySet()) { final boolean v = e.getValue().getBoolean(i); if (v) { out.writeAttribute(e.getKey(), Boolean.TRUE.toString()); } } } catch (final IllegalAccessException ex) { throw new RuntimeException(ex); } out.writeEndElement(); //file } out.writeEndElement(); //files out.writeEndElement(); //job out.writeEndDocument(); } catch (final IOException e) { throw new IOException("Failed to write file: " + e.getMessage()); } catch (final XMLStreamException e) { throw new IOException("Failed to serialize job file: " + e.getMessage()); } finally { if (out != null) { try { out.close(); } catch (final XMLStreamException e) { throw new IOException("Failed to close file: " + e.getMessage()); } } if (outStream != null) { try { outStream.close(); } catch (final IOException e) { throw new IOException("Failed to close file: " + e.getMessage()); } } } lastModified = jobFile.lastModified(); } /** * Add file info. If file info with the same file already exists, it will be replaced. */ public void add(final FileInfo fileInfo) { files.put(fileInfo.uri, fileInfo); } /** * Remove file info. * * @return removed file info, {@code null} if not found */ public FileInfo remove(final FileInfo fileInfo) { return files.remove(fileInfo.uri); } /** * Searches for the property with the specified key in this property list. 
* * @param key property key * @return the value in this property list with the specified key value, {@code null} if not found */ public String getProperty(final String key) { return (String) prop.get(key); } /** * Get a map of string properties. * * @return map of properties, may be an empty map */ public Map<String, String> getProperties() { final Map<String, String> res = new HashMap<>(); for (final Map.Entry<String, Object> e: prop.entrySet()) { if (e.getValue() instanceof String) { res.put(e.getKey(), (String) e.getValue()); } } return Collections.unmodifiableMap(res); } /** * Set property value. * * @param key property key * @param value property value * @return the previous value of the specified key in this property list, or {@code null} if it did not have one */ public Object setProperty(final String key, final String value) { return prop.put(key, value); } /** * Return the copy-to map from target to source. * * @return copy-to map, empty map if no mapping is defined */ public Map<URI, URI> getCopytoMap() { final Map<String, String> value = (Map<String, String>) prop.get(COPYTO_TARGET_TO_SOURCE_MAP_LIST); if (value == null) { return Collections.emptyMap(); } else { final Map<URI, URI> res = new HashMap<>(); for (final Map.Entry<String, String> e: value.entrySet()) { res.put(toURI(e.getKey()), toURI(e.getValue())); } return Collections.unmodifiableMap(res); } } /** * Set copy-to map from target to source. */ public void setCopytoMap(final Map<URI, URI> value) { final Map<String, String> res = new HashMap<>(); for (final Map.Entry<URI, URI> e: value.entrySet()) { res.put(e.getKey().toString(), e.getValue().toString()); } prop.put(COPYTO_TARGET_TO_SOURCE_MAP_LIST, res); } /** * Get input file * * @return input file path relative to input directory */ public URI getInputMap() { return toURI(getProperty(INPUT_DITAMAP_URI)); } /** * Get input directory. 
* * @return absolute input directory path */ public URI getInputDir() { return toURI(getProperty(INPUT_DIR_URI)); } /** * Get all file info objects as a map * * @return map of file info objects, where the key is the {@link FileInfo#file} value. May be empty */ public Map<File, FileInfo> getFileInfoMap() { final Map<File, FileInfo> ret = new HashMap<>(); for (final Map.Entry<URI, FileInfo> e: files.entrySet()) { ret.put(e.getValue().file, e.getValue()); } return Collections.unmodifiableMap(ret); } /** * Get all file info objects * * @return collection of file info objects, may be empty */ public Collection<FileInfo> getFileInfo() { return Collections.unmodifiableCollection(new ArrayList<>(files.values())); } /** * Get file info objects that pass the filter * * @param filter filter file info object must pass * @return collection of file info objects that pass the filter, may be empty */ public Collection<FileInfo> getFileInfo(final Filter filter) { final Collection<FileInfo> ret = new ArrayList<>(); for (final FileInfo f: files.values()) { if (filter.accept(f)) { ret.add(f); } } return ret; } /** * Get file info object * * @param file file URI * @return file info object, {@code null} if not found */ public FileInfo getFileInfo(final URI file) { if (file == null) { return null; } else if (files.containsKey(file)) { return files.get(file); } else if (file.isAbsolute()) { final URI relative = getRelativePath(jobFile.toURI(), file); return files.get(relative); } else { return null; } } /** * Get or create FileInfo for given path. 
* @param file relative URI to temporary directory * @return created or existing file info object */ public FileInfo getOrCreateFileInfo(final URI file) { assert file.getFragment() == null; final URI f = file.normalize(); FileInfo i = files.get(f); if (i == null) { i = new FileInfo(f); files.put(i.uri, i); } return i; } /** * Add a collection of file info objects * * @param fs file info objects */ public void addAll(final Collection<FileInfo> fs) { for (final FileInfo f: fs) { files.put(f.uri, f); } } /** * File info object. */ public static final class FileInfo { /** Absolute source URI. */ public final URI src; /** File URI. */ public final URI uri; /** File path. */ public final File file; /** File format. */ public String format; /** File has a conref. */ public boolean hasConref; /** File is part of chunk. */ public boolean isChunked; /** File has links. Only applies to topics. */ public boolean hasLink; /** File is resource only. */ public boolean isResourceOnly; /** File is a link target. */ public boolean isTarget; /** File is a push conref target. */ public boolean isConrefTarget; /** File is a target in non-conref link. */ public boolean isNonConrefTarget; /** File is a push conref source. */ public boolean isConrefPush; /** File has a keyref. */ public boolean hasKeyref; /** File has coderef. */ public boolean hasCoderef; /** File is a subject scheme. */ public boolean isSubjectScheme; /** File is a target in conref link. Opposite of {@link #isNonConrefTarget}. */ public boolean isSkipChunk; /** File is a coderef target. */ public boolean isSubtarget; /** File is a flagging image. */ public boolean isFlagImage; /** Source file is outside base directory. */ public boolean isOutDita; /** File is used only as a source of a copy-to. 
*/ public boolean isCopyToSource; FileInfo(final URI src, final URI uri, final File file) { if (src == null && uri == null && file == null) throw new IllegalArgumentException(new NullPointerException()); this.src = src; this.uri = uri != null ? uri : toURI(file); this.file = uri != null ? toFile(uri) : file; } FileInfo(final URI uri) { if (uri == null) throw new IllegalArgumentException(new NullPointerException()); this.src = null; this.uri = uri; this.file = toFile(uri); } FileInfo(final File file) { if (file == null) throw new IllegalArgumentException(new NullPointerException()); this.src = null; this.uri = toURI(file); this.file = file; } @Override public String toString() { return "FileInfo{" + "uri=" + uri + ", file=" + file + ", format='" + format + '\'' + ", hasConref=" + hasConref + ", isChunked=" + isChunked + ", hasLink=" + hasLink + ", isResourceOnly=" + isResourceOnly + ", isTarget=" + isTarget + ", isConrefTarget=" + isConrefTarget + ", isNonConrefTarget=" + isNonConrefTarget + ", isConrefPush=" + isConrefPush + ", hasKeyref=" + hasKeyref + ", hasCoderef=" + hasCoderef + ", isSubjectScheme=" + isSubjectScheme + ", isSkipChunk=" + isSkipChunk + ", isSubtarget=" + isSubtarget + ", isFlagImage=" + isFlagImage + ", isOutDita=" + isOutDita + ", isCopyToSource=" + isCopyToSource + '}'; } public interface Filter { boolean accept(FileInfo f); } public static class Builder { private URI src; private URI uri; private File file; private String format; private boolean hasConref; private boolean isChunked; private boolean hasLink; private boolean isResourceOnly; private boolean isTarget; private boolean isConrefTarget; private boolean isNonConrefTarget; private boolean isConrefPush; private boolean hasKeyref; private boolean hasCoderef; private boolean isSubjectScheme; private boolean isSkipChunk; private boolean isSubtarget; private boolean isFlagImage; private boolean isOutDita; private boolean isCopyToSource; public Builder() {} public Builder(final FileInfo 
orig) { src = orig.src; uri = orig.uri; file = orig.file; format = orig.format; hasConref = orig.hasConref; isChunked = orig.isChunked; hasLink = orig.hasLink; isResourceOnly = orig.isResourceOnly; isTarget = orig.isTarget; isConrefTarget = orig.isConrefTarget; isNonConrefTarget = orig.isNonConrefTarget; isConrefPush = orig.isConrefPush; hasKeyref = orig.hasKeyref; hasCoderef = orig.hasCoderef; isSubjectScheme = orig.isSubjectScheme; isSkipChunk = orig.isSkipChunk; isSubtarget = orig.isSubtarget; isFlagImage = orig.isFlagImage; isOutDita = orig.isOutDita; isCopyToSource = orig.isCopyToSource; } /** * Add file info to this builder. Only non-null and true values will be added. */ public Builder add(final FileInfo orig) { if (orig.src != null) src = orig.src; if (orig.uri != null) uri = orig.uri; if (orig.file != null) file = orig.file; if (orig.format != null) format = orig.format; if (orig.hasConref) hasConref = orig.hasConref; if (orig.isChunked) isChunked = orig.isChunked; if (orig.hasLink) hasLink = orig.hasLink; if (orig.isResourceOnly) isResourceOnly = orig.isResourceOnly; if (orig.isTarget) isTarget = orig.isTarget; if (orig.isConrefTarget) isConrefTarget = orig.isConrefTarget; if (orig.isNonConrefTarget) isNonConrefTarget = orig.isNonConrefTarget; if (orig.isConrefPush) isConrefPush = orig.isConrefPush; if (orig.hasKeyref) hasKeyref = orig.hasKeyref; if (orig.hasCoderef) hasCoderef = orig.hasCoderef; if (orig.isSubjectScheme) isSubjectScheme = orig.isSubjectScheme; if (orig.isSkipChunk) isSkipChunk = orig.isSkipChunk; if (orig.isSubtarget) isSubtarget = orig.isSubtarget; if (orig.isFlagImage) isFlagImage = orig.isFlagImage; if (orig.isOutDita) isOutDita = orig.isOutDita; if (orig.isCopyToSource) isCopyToSource = orig.isCopyToSource; return this; } public Builder src(final URI src) { this.src = src; return this; } public Builder uri(final URI uri) { this.uri = uri; this.file = null; return this; } public Builder file(final File file) { this.file = file; 
this.uri = null; return this; } public Builder format(final String format) { this.format = format; return this; } public Builder hasConref(final boolean hasConref) { this.hasConref = hasConref; return this; } public Builder isChunked(final boolean isChunked) { this.isChunked = isChunked; return this; } public Builder hasLink(final boolean hasLink) { this.hasLink = hasLink; return this; } public Builder isResourceOnly(final boolean isResourceOnly) { this.isResourceOnly = isResourceOnly; return this; } public Builder isTarget(final boolean isTarget) { this.isTarget = isTarget; return this; } public Builder isConrefTarget(final boolean isConrefTarget) { this.isConrefTarget = isConrefTarget; return this; } public Builder isNonConrefTarget(final boolean isNonConrefTarget) { this.isNonConrefTarget = isNonConrefTarget; return this; } public Builder isConrefPush(final boolean isConrefPush) { this.isConrefPush = isConrefPush; return this; } public Builder hasKeyref(final boolean hasKeyref) { this.hasKeyref = hasKeyref; return this; } public Builder hasCoderef(final boolean hasCoderef) { this.hasCoderef = hasCoderef; return this; } public Builder isSubjectScheme(final boolean isSubjectScheme) { this.isSubjectScheme = isSubjectScheme; return this; } public Builder isSkipChunk(final boolean isSkipChunk) { this.isSkipChunk = isSkipChunk; return this; } public Builder isSubtarget(final boolean isSubtarget) { this.isSubtarget = isSubtarget; return this; } public Builder isFlagImage(final boolean isFlagImage) { this.isFlagImage = isFlagImage; return this; } public Builder isOutDita(final boolean isOutDita) { this.isOutDita = isOutDita; return this; } public Builder isCopyToSource(final boolean isCopyToSource) { this.isCopyToSource = isCopyToSource; return this; } public FileInfo build() { if (src == null && uri == null && file == null) { throw new IllegalStateException("src, uri, and file may not be null"); } final FileInfo fi = new FileInfo(src, uri, file); fi.format = format; 
fi.hasConref = hasConref; fi.isChunked = isChunked; fi.hasLink = hasLink; fi.isResourceOnly = isResourceOnly; fi.isTarget = isTarget; fi.isConrefTarget = isConrefTarget; fi.isNonConrefTarget = isNonConrefTarget; fi.isConrefPush = isConrefPush; fi.hasKeyref = hasKeyref; fi.hasCoderef = hasCoderef; fi.isSubjectScheme = isSubjectScheme; fi.isSkipChunk = isSkipChunk; fi.isSubtarget = isSubtarget; fi.isFlagImage = isFlagImage; fi.isOutDita = isOutDita; fi.isCopyToSource = isCopyToSource; return fi; } } } public enum OutterControl { /** Fail behavior. */ FAIL, /** Warn behavior. */ WARN, /** Quiet behavior. */ QUIET } public enum Generate { /** Not generate outer files. */ NOT_GENERATEOUTTER(1), /** Old solution. */ OLDSOLUTION(3); public final int type; Generate(final int type) { this.type = type; } public static Generate get(final int type) { for (final Generate g: Generate.values()) { if (g.type == type) { return g; } } throw new IllegalArgumentException(); } } /** * Retrieve the outercontrol. * @return String outercontrol behavior * */ public OutterControl getOutterControl(){ return OutterControl.valueOf(prop.get(PROPERTY_OUTER_CONTROL).toString()); } /** * Set the outercontrol. * @param control control */ public void setOutterControl(final String control){ prop.put(PROPERTY_OUTER_CONTROL, OutterControl.valueOf(control.toUpperCase()).toString()); } /** * Retrieve the flag of onlytopicinmap. * @return boolean if only topic in map */ public boolean getOnlyTopicInMap(){ return Boolean.parseBoolean(prop.get(PROPERTY_ONLY_TOPIC_IN_MAP).toString()); } /** * Set the onlytopicinmap. * @param flag onlytopicinmap flag */ public void setOnlyTopicInMap(final boolean flag){ prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(flag)); } public Generate getGeneratecopyouter(){ return Generate.valueOf(prop.get(PROPERTY_GENERATE_COPY_OUTER).toString()); } /** * Set the generatecopyouter. 
* @param flag generatecopyouter flag */ public void setGeneratecopyouter(final String flag){ setGeneratecopyouter(Generate.get(Integer.parseInt(flag))); } /** * Set the generatecopyouter. * @param flag generatecopyouter flag */ public void setGeneratecopyouter(final Generate flag){ prop.put(PROPERTY_GENERATE_COPY_OUTER, flag.toString()); } /** * Get output dir. * @return absolute output dir */ public File getOutputDir(){ return new File(prop.get(PROPERTY_OUTPUT_DIR).toString()); } /** * Set output dir. * @param outputDir absolute output dir */ public void setOutputDir(final File outputDir){ prop.put(PROPERTY_OUTPUT_DIR, outputDir.getAbsolutePath()); } /** * Get input file path. * @return absolute input file path */ public URI getInputFile() { return toURI(prop.get(PROPERTY_INPUT_MAP_URI).toString()); } /** * Set input map path. * @param inputFile absolute input map path */ public void setInputFile(final URI inputFile) { assert inputFile.isAbsolute(); prop.put(PROPERTY_INPUT_MAP_URI, inputFile.toString()); // Deprecated since 2.1 if (inputFile.getScheme().equals("file")) { prop.put(PROPERTY_INPUT_MAP, new File(inputFile).getAbsolutePath()); } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/InvalidClusterSubnetGroupStateExceptionUnmarshaller.java
1685
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.redshift.model.transform; import org.w3c.dom.Node; import javax.annotation.Generated; import com.amazonaws.AmazonServiceException; import com.amazonaws.transform.StandardErrorUnmarshaller; import com.amazonaws.services.redshift.model.InvalidClusterSubnetGroupStateException; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class InvalidClusterSubnetGroupStateExceptionUnmarshaller extends StandardErrorUnmarshaller { public InvalidClusterSubnetGroupStateExceptionUnmarshaller() { super(InvalidClusterSubnetGroupStateException.class); } @Override public AmazonServiceException unmarshall(Node node) throws Exception { // Bail out if this isn't the right error code that this // marshaller understands String errorCode = parseErrorCode(node); if (errorCode == null || !errorCode.equals("InvalidClusterSubnetGroupStateFault")) return null; InvalidClusterSubnetGroupStateException e = (InvalidClusterSubnetGroupStateException) super.unmarshall(node); return e; } }
apache-2.0
hernad/oo-netbeans
src/org/openoffice/extensions/config/office/PlatformLocator.java
7842
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ package org.openoffice.extensions.config.office; import java.io.File; import java.io.IOException; import org.openide.util.Exceptions; import org.openoffice.extensions.util.LogWriter; /** * * @author steffen */ public abstract class PlatformLocator { /** derived classes have to fill this variable in locateOffice method **/ protected File mOfficePath; /** derived classes have to fill this variable in locateSdk method **/ protected File mSdkPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected String mJutUnoilPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected String mJuhJurtRidlPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected String mPathVariable; /** derived classes have to fill this variable in locateOfficePaths method **/ protected String mUnorcPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected String mUreBinPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected 
String[] mTypesPath; /** derived classes have to fill this variable in locateOfficePaths method **/ protected boolean mThreeLayerOffice; /** derived clases can set this to show that something went wrong with office paths **/ protected boolean mOfficePathsSet; /** * C'tor for this class: only called from derived classes * @param office path to the office, can be null * @param sdk path to the sdk, can be null */ protected PlatformLocator(String officePath, String sdkPath, boolean guessOfficePaths) { if (guessOfficePaths && (officePath == null || officePath.length() == 0)) { // sdk path may be found, too locateOffice(); } else { if (officePath != null && officePath.length() > 0) mOfficePath = new File(officePath); } if (guessOfficePaths && (sdkPath == null || sdkPath.length() == 0)) { locateSDK(); } else { if (sdkPath != null && sdkPath.length() > 0) mSdkPath = new File(sdkPath); } if (mOfficePath != null && mSdkPath != null && mOfficePath.exists() && mSdkPath.exists()) { locateOfficePaths(); // produce path variables } else { mOfficePathsSet = false; // gets also set in locateOfficePaths mOfficePath = null; mSdkPath = null; } logAllVariables(); } private void logAllVariables() { try { LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mOfficePath: " + (mOfficePath == null ? "" : mOfficePath.getCanonicalPath())); LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mSdkPath: " + (mSdkPath == null ? 
"" : mSdkPath.getCanonicalPath())); LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mJutUnoilPath: " + mJutUnoilPath); LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mJuhJurtRidlPath: " + mJuhJurtRidlPath); LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mPathVariable: " + mPathVariable); String typesPath = ""; if (mTypesPath != null) { for (int i = 0; i < mTypesPath.length; i++) { typesPath = typesPath.concat(mTypesPath[i]).concat(":"); } } LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mTypesPath: " + typesPath); LogWriter.getLogWriter().log(LogWriter.LEVEL_INFO, "mThreeLayerOffice: " + mThreeLayerOffice); } catch (IOException ex) { Exceptions.printStackTrace(ex); } } // derived classes must implement the following functions: /** * Locate the office directory for this platform: after executing this * function, mOfficePath should denote the path to the office, null * otherwise */ protected abstract void locateOffice(); /** * Locate the sdk directory for this platform: after executing this * function, mSdkPath should denote the path to the sdk, null * otherwise */ protected abstract void locateSDK(); /** * Fill all other variables with meaningful values, when officePath and * sdkPath are known. */ protected abstract void locateOfficePaths(); /** * Get the full path for a special jar. 
Note: WinLocator overrides this, * because of stupid Java problems when writing \\ * @param jarName the name of the jar * @return the full path including the jar */ public String getFullPathForJar(String jarName) { if (jarName.equals("juh.jar") || jarName.equals("jurt.jar") || jarName.equals("ridl.jar") || jarName.equals("java_uno.jar") || jarName.equals("unoloader.jar")) { return mJuhJurtRidlPath.concat(File.separator).concat(jarName); } return mJutUnoilPath.concat(File.separator).concat(jarName); } /** * Get the path to the Office * @return the path to the Office */ public String getOfficePath() { try { if (mOfficePath != null) { return mOfficePath.getCanonicalPath(); } } catch (IOException ex) { Exceptions.printStackTrace(ex); } return ""; } /** * Get the path to the SDK * @return the path to the sdk */ public String getSdkPath() { try { if (mSdkPath != null) { return mSdkPath.getCanonicalPath(); } } catch (IOException ex) { Exceptions.printStackTrace(ex); } return ""; } /** * Get the path variable * @return the path variable */ public String getPathVariable() { if (mPathVariable == null) { return ""; } return mPathVariable; } /** * Get the ure bin path for tools * @return the path variable */ public String getUreBinPath() { if (mUreBinPath == null) { return ""; } return mUreBinPath; } /** * get the types path * @return the types path */ public String[] getTypesPath() { if (mTypesPath == null) { return new String[]{""}; } return mTypesPath; } /** * Is this a three layer office? * @return true, when it's athree layer office, false else. */ public boolean isThreeLayerOffice() { return mThreeLayerOffice; } /** * Get the path to uno(.exe) * */ public String getUnorcPath() { if (mUnorcPath == null) { return ""; } return mUnorcPath; } }
apache-2.0
falko/camunda-bpm-platform
qa/integration-tests-engine/src/test/java/org/camunda/bpm/integrationtest/functional/cdi/CdiBeanResolutionTest.java
4099
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.integrationtest.functional.cdi; import org.camunda.bpm.engine.cdi.impl.util.ProgrammaticBeanLookup; import org.camunda.bpm.integrationtest.functional.cdi.beans.ExampleBean; import org.camunda.bpm.integrationtest.util.AbstractFoxPlatformIntegrationTest; import org.camunda.bpm.integrationtest.util.DeploymentHelper; import org.camunda.bpm.integrationtest.util.TestContainer; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.container.test.api.OperateOnDeployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.EmptyAsset; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; /** * <p>Deploys two different applications, a process archive and a client application.</p> * * <p>This test ensures that when the process is started from the client, * it is able to make the context switch to the process archive and resolve cdi beans * from the process archive.</p> * * * @author Daniel Meyer */ @RunWith(Arquillian.class) public class CdiBeanResolutionTest extends 
AbstractFoxPlatformIntegrationTest { @Deployment public static WebArchive processArchive() { return initWebArchiveDeployment() .addClass(ExampleBean.class) .addAsResource("org/camunda/bpm/integrationtest/functional/cdi/CdiBeanResolutionTest.testResolveBean.bpmn20.xml") .addAsResource("org/camunda/bpm/integrationtest/functional/cdi/CdiBeanResolutionTest.testResolveBeanFromJobExecutor.bpmn20.xml"); } @Deployment(name="clientDeployment") public static WebArchive clientDeployment() { WebArchive deployment = ShrinkWrap.create(WebArchive.class, "client.war") .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml") .addClass(AbstractFoxPlatformIntegrationTest.class) .addAsLibraries(DeploymentHelper.getEngineCdi()); TestContainer.addContainerSpecificResourcesForNonPa(deployment); return deployment; } @Test @OperateOnDeployment("clientDeployment") public void testResolveBean() { // assert that we cannot resolve the bean here: Assert.assertNull(ProgrammaticBeanLookup.lookup("exampleBean")); Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().processDefinitionKey("testResolveBean").count()); // but the process engine can: runtimeService.startProcessInstanceByKey("testResolveBean"); Assert.assertEquals(0,runtimeService.createProcessInstanceQuery().processDefinitionKey("testResolveBean").count()); } @Test @OperateOnDeployment("clientDeployment") public void testResolveBeanFromJobExecutor() { Assert.assertEquals(0,runtimeService.createProcessInstanceQuery().processDefinitionKey("testResolveBeanFromJobExecutor").count()); runtimeService.startProcessInstanceByKey("testResolveBeanFromJobExecutor"); Assert.assertEquals(1,runtimeService.createProcessInstanceQuery().processDefinitionKey("testResolveBeanFromJobExecutor").count()); waitForJobExecutorToProcessAllJobs(); Assert.assertEquals(0,runtimeService.createProcessInstanceQuery().processDefinitionKey("testResolveBeanFromJobExecutor").count()); } }
apache-2.0
Ansafari/melon
melon-mock/src/test/java/com/melon/mock/SampleMockService.java
470
package com.melon.mock; import org.apache.commons.collections4.CollectionUtils; import java.util.List; /** * . * User: xiongjinteng@raycloud.com * Date: 2017/7/8 * Time: 13:35 */ public class SampleMockService { public int print(List<String> list) { if (CollectionUtils.isNotEmpty(list)) { for (String s : list) { System.out.println(s); } return list.size(); } return 0; } }
apache-2.0
googleads/google-ads-java
google-ads-stubs-v10/src/main/java/com/google/ads/googleads/v10/resources/BillingSetupName.java
5786
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v10.resources; import com.google.api.pathtemplate.PathTemplate; import com.google.api.resourcenames.ResourceName; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. @Generated("by gapic-generator-java") public class BillingSetupName implements ResourceName { private static final PathTemplate CUSTOMER_ID_BILLING_SETUP_ID = PathTemplate.createWithoutUrlEncoding( "customers/{customer_id}/billingSetups/{billing_setup_id}"); private volatile Map<String, String> fieldValuesMap; private final String customerId; private final String billingSetupId; @Deprecated protected BillingSetupName() { customerId = null; billingSetupId = null; } private BillingSetupName(Builder builder) { customerId = Preconditions.checkNotNull(builder.getCustomerId()); billingSetupId = Preconditions.checkNotNull(builder.getBillingSetupId()); } public String getCustomerId() { return customerId; } public String getBillingSetupId() { return billingSetupId; } public static Builder newBuilder() { return new Builder(); } public Builder toBuilder() { return new Builder(this); } public static BillingSetupName of(String customerId, String billingSetupId) { return 
newBuilder().setCustomerId(customerId).setBillingSetupId(billingSetupId).build(); } public static String format(String customerId, String billingSetupId) { return newBuilder() .setCustomerId(customerId) .setBillingSetupId(billingSetupId) .build() .toString(); } public static BillingSetupName parse(String formattedString) { if (formattedString.isEmpty()) { return null; } Map<String, String> matchMap = CUSTOMER_ID_BILLING_SETUP_ID.validatedMatch( formattedString, "BillingSetupName.parse: formattedString not in valid format"); return of(matchMap.get("customer_id"), matchMap.get("billing_setup_id")); } public static List<BillingSetupName> parseList(List<String> formattedStrings) { List<BillingSetupName> list = new ArrayList<>(formattedStrings.size()); for (String formattedString : formattedStrings) { list.add(parse(formattedString)); } return list; } public static List<String> toStringList(List<BillingSetupName> values) { List<String> list = new ArrayList<>(values.size()); for (BillingSetupName value : values) { if (value == null) { list.add(""); } else { list.add(value.toString()); } } return list; } public static boolean isParsableFrom(String formattedString) { return CUSTOMER_ID_BILLING_SETUP_ID.matches(formattedString); } @Override public Map<String, String> getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder(); if (customerId != null) { fieldMapBuilder.put("customer_id", customerId); } if (billingSetupId != null) { fieldMapBuilder.put("billing_setup_id", billingSetupId); } fieldValuesMap = fieldMapBuilder.build(); } } } return fieldValuesMap; } public String getFieldValue(String fieldName) { return getFieldValuesMap().get(fieldName); } @Override public String toString() { return CUSTOMER_ID_BILLING_SETUP_ID.instantiate( "customer_id", customerId, "billing_setup_id", billingSetupId); } @Override public boolean equals(Object o) { if (o == 
this) { return true; } if (o != null || getClass() == o.getClass()) { BillingSetupName that = ((BillingSetupName) o); return Objects.equals(this.customerId, that.customerId) && Objects.equals(this.billingSetupId, that.billingSetupId); } return false; } @Override public int hashCode() { int h = 1; h *= 1000003; h ^= Objects.hashCode(customerId); h *= 1000003; h ^= Objects.hashCode(billingSetupId); return h; } /** Builder for customers/{customer_id}/billingSetups/{billing_setup_id}. */ public static class Builder { private String customerId; private String billingSetupId; protected Builder() {} public String getCustomerId() { return customerId; } public String getBillingSetupId() { return billingSetupId; } public Builder setCustomerId(String customerId) { this.customerId = customerId; return this; } public Builder setBillingSetupId(String billingSetupId) { this.billingSetupId = billingSetupId; return this; } private Builder(BillingSetupName billingSetupName) { this.customerId = billingSetupName.customerId; this.billingSetupId = billingSetupName.billingSetupId; } public BillingSetupName build() { return new BillingSetupName(this); } } }
apache-2.0
somnus-sir/coolweather
src/com/coolweather/app/db/CoolWeatherOpenHelper.java
1553
package com.coolweather.app.db; import android.R.id; import android.R.integer; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteDatabase.CursorFactory; import android.database.sqlite.SQLiteOpenHelper; public class CoolWeatherOpenHelper extends SQLiteOpenHelper{ /** * province表剪标语句 */ public static final String CREATE_PROVINCE = "create table Province(" + "id integer primary key autoincrement," + "province_name text," + "province_code text)"; /** * city表建表语句 */ public static final String CREATE_CITY = "create table City(" + "id integer primary key autoincrement," + "city_name text," + "city_code text," +"province_id integer)"; /** * County表建表语句 */ public static final String CREATE_COUNTY = "create table County(" + "id integer primary key autoincrement," + "county_name text," + "county_code text," + "city_id integer)"; public CoolWeatherOpenHelper(Context context, String name, CursorFactory factory, int version) { super(context, name, factory, version); // TODO Auto-generated constructor stub } @Override public void onCreate(SQLiteDatabase db) { // TODO Auto-generated method stub db.execSQL(CREATE_PROVINCE);//创建Province表 db.execSQL(CREATE_CITY);//创建City表 db.execSQL(CREATE_COUNTY);//创建County表 } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { // TODO Auto-generated method stub } }
apache-2.0
abjugard/DAT255-EpiClock
AlarmedTestTest/src/edu/chalmers/dat255/group09/Alarmed/test/modules/mathModule/factory/MathProblemFactoryTest.java
1854
/* * Copyright (C) 2012 Joakim Persson, Daniel Augurell, Adrian Bjugard, Andreas Rolen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.chalmers.dat255.group09.Alarmed.test.modules.mathModule.factory; import junit.framework.TestCase; import edu.chalmers.dat255.group09.Alarmed.modules.mathModule.factory.MathProblemFactory; import edu.chalmers.dat255.group09.Alarmed.modules.mathModule.model.MathProblem; import edu.chalmers.dat255.group09.Alarmed.modules.mathModule.model.constants.Difficulty; /** * A test class for the MathProblemFactory class. * * @author Joakim Persson * */ public class MathProblemFactoryTest extends TestCase { private MathProblemFactory generator; @Override protected void setUp() throws Exception { super.setUp(); generator = new MathProblemFactory(); } /** * Tests the get problem method. By testing that it returns mathproblems * types. */ public void testGetProblem() { final int delta = 3; int nbrOfNumbers = delta; MathProblem problem = generator.generateProblem(Difficulty.EASY); int[] nbrs = problem.getNumbers(); assertEquals(nbrOfNumbers, nbrs.length, delta); nbrs = problem.getNumbers(); assertEquals(nbrOfNumbers, nbrs.length, delta); } @Override protected void tearDown() throws Exception { super.tearDown(); generator = null; } }
apache-2.0
darranl/keycloak
testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/user/profile/AbstractUserProfileTest.java
6410
/*
 * Copyright 2021 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.testsuite.user.profile;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.sessions.AuthenticationSessionModel;
import org.keycloak.sessions.RootAuthenticationSessionModel;
import org.keycloak.testsuite.AbstractTestRealmKeycloakTest;
import org.keycloak.testsuite.user.profile.config.DeclarativeUserProfileProvider;
import org.keycloak.userprofile.UserProfileProvider;

/**
 * Base class for user-profile tests. Provides static helpers that wire the
 * test realm, a stub authentication session, and the declarative user-profile
 * provider into a server-side {@link KeycloakSession}.
 *
 * @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
 */
public abstract class AbstractUserProfileTest extends AbstractTestRealmKeycloakTest {

    /**
     * Convenience overload: binds the test realm to the session context and
     * installs an authentication session for client {@code "client-a"}
     * requesting the single {@code "customer"} scope.
     */
    protected static void configureAuthenticationSession(KeycloakSession session) {
        configureSessionRealm(session);
        Set<String> scopes = new HashSet<>();
        scopes.add("customer");
        configureAuthenticationSession(session, "client-a", scopes);
    }

    /**
     * Attaches a stub authentication session for the given client and requested
     * scopes to the session context. The client is looked up by client id in
     * whichever realm is already set on the context.
     */
    protected static void configureAuthenticationSession(KeycloakSession session, String clientId, Set<String> requestedScopes) {
        RealmModel realm = session.getContext().getRealm();
        session.getContext().setAuthenticationSession(createAuthenticationSession(realm.getClientByClientId(clientId), requestedScopes));
    }

    /**
     * Resolves the shared test realm ({@code TEST_REALM_NAME}) and installs it
     * on the session context.
     *
     * @return the resolved realm, for further configuration by the caller
     */
    protected static RealmModel configureSessionRealm(KeycloakSession session) {
        RealmModel realm = session.realms().getRealm(TEST_REALM_NAME);
        session.getContext().setRealm(realm);
        return realm;
    }

    /** Looks up the declarative (dynamically configurable) user-profile provider. */
    protected static DeclarativeUserProfileProvider getDynamicUserProfileProvider(KeycloakSession session) {
        return (DeclarativeUserProfileProvider) session.getProvider(UserProfileProvider.class, DeclarativeUserProfileProvider.ID);
    }

    /**
     * Builds a minimal {@link AuthenticationSessionModel} stub for tests:
     * only {@link AuthenticationSessionModel#getClient()} and
     * {@link AuthenticationSessionModel#getClientScopes()} return the supplied
     * data; every other accessor returns {@code null} and every mutator is a
     * no-op.
     */
    protected static AuthenticationSessionModel createAuthenticationSession(ClientModel client, Set<String> scopes) {
        return new AuthenticationSessionModel() {

            @Override
            public String getTabId() {
                return null;
            }

            @Override
            public RootAuthenticationSessionModel getParentSession() {
                return null;
            }

            @Override
            public Map<String, ExecutionStatus> getExecutionStatus() {
                return null;
            }

            @Override
            public void setExecutionStatus(String authenticator, ExecutionStatus status) {

            }

            @Override
            public void clearExecutionStatus() {

            }

            @Override
            public UserModel getAuthenticatedUser() {
                return null;
            }

            @Override
            public void setAuthenticatedUser(UserModel user) {

            }

            @Override
            public Set<String> getRequiredActions() {
                return null;
            }

            @Override
            public void addRequiredAction(String action) {

            }

            @Override
            public void removeRequiredAction(String action) {

            }

            @Override
            public void addRequiredAction(UserModel.RequiredAction action) {

            }

            @Override
            public void removeRequiredAction(UserModel.RequiredAction action) {

            }

            @Override
            public void setUserSessionNote(String name, String value) {

            }

            @Override
            public Map<String, String> getUserSessionNotes() {
                return null;
            }

            @Override
            public void clearUserSessionNotes() {

            }

            @Override
            public String getAuthNote(String name) {
                return null;
            }

            @Override
            public void setAuthNote(String name, String value) {

            }

            @Override
            public void removeAuthNote(String name) {

            }

            @Override
            public void clearAuthNotes() {

            }

            @Override
            public String getClientNote(String name) {
                return null;
            }

            @Override
            public void setClientNote(String name, String value) {

            }

            @Override
            public void removeClientNote(String name) {

            }

            @Override
            public Map<String, String> getClientNotes() {
                return null;
            }

            @Override
            public void clearClientNotes() {

            }

            // Captured from the factory method argument: the scopes the test requested.
            @Override
            public Set<String> getClientScopes() {
                return scopes;
            }

            @Override
            public void setClientScopes(Set<String> clientScopes) {

            }

            @Override
            public String getRedirectUri() {
                return null;
            }

            @Override
            public void setRedirectUri(String uri) {

            }

            @Override
            public RealmModel getRealm() {
                return null;
            }

            // Captured from the factory method argument: the client under test.
            @Override
            public ClientModel getClient() {
                return client;
            }

            @Override
            public String getAction() {
                return null;
            }

            @Override
            public void setAction(String action) {

            }

            @Override
            public String getProtocol() {
                return null;
            }

            @Override
            public void setProtocol(String method) {

            }
        };
    }
}
apache-2.0
darkredz/DooJ
DooQuercus/src/com/doophp/util/DateTimeUtil.java
1820
package com.doophp.util;

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

/**
 * Utilities for converting date-time strings to epoch seconds.
 *
 * <p>All methods are best-effort: on any parse failure (including a null or
 * malformed input) they log the offending input to stdout and return
 * {@code -1} instead of throwing.
 *
 * Created by leng on 1/19/17.
 */
public class DateTimeUtil {

    /** Parses an ISO date-time string with offset, e.g. {@code 2011-12-03T10:15:30+01:00}. */
    public static long parse(String dateTime) {
        return parse(dateTime, DateTimeFormatter.ISO_DATE_TIME);
    }

    /** Parses a zoned date-time using the supplied pattern (see {@link DateTimeFormatter#ofPattern}). */
    public static long parse(String dateTime, String format) {
        return parse(dateTime, DateTimeFormatter.ofPattern(format));
    }

    /** Parses a date-time carrying an explicit zone offset, e.g. {@code 2011-12-03T10:15:30+01:00}. */
    public static long parseWithTimeZone(String dateTime) {
        return parse(dateTime, DateTimeFormatter.ISO_DATE_TIME);
    }

    /**
     * Parses a local (offset-free) ISO date-time, e.g. {@code 2011-12-03T10:15:30},
     * interpreting it as UTC.
     *
     * <p>Bug fix: this previously delegated to the {@link ZonedDateTime}-based
     * {@link #parse(String, DateTimeFormatter)} with {@code ISO_LOCAL_DATE_TIME};
     * a local date-time has no zone, so {@code ZonedDateTime.parse} always threw
     * and the method unconditionally returned -1. It now goes through
     * {@link #parseLocal(String, DateTimeFormatter)}.
     */
    public static long parseWithLocalTime(String dateTime) {
        return parseLocal(dateTime, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
    }

    /**
     * Parses {@code dateTime} as a {@link ZonedDateTime} with the given formatter.
     *
     * @param dateTime  text to parse; the formatter must yield a zone or offset
     * @param formatter formatter to apply
     * @return epoch seconds, or -1 if the input cannot be parsed
     */
    public static long parse(String dateTime, DateTimeFormatter formatter) {
        try {
            return ZonedDateTime.parse(dateTime, formatter).toEpochSecond();
        } catch (Exception err) {
            // Broad catch on purpose: also shields callers from null/illegal input.
            logFailure(dateTime, err);
            return -1;
        }
    }

    /** Parses with the given pattern as a local date-time interpreted as UTC. */
    public static long parseLocal(String dateTime, String format) {
        return parseLocal(dateTime, DateTimeFormatter.ofPattern(format));
    }

    /**
     * Parses {@code dateTime} as a {@link LocalDateTime} and converts it to
     * epoch seconds assuming UTC.
     *
     * @return epoch seconds, or -1 if the input cannot be parsed
     */
    public static long parseLocal(String dateTime, DateTimeFormatter formatter) {
        try {
            return LocalDateTime.parse(dateTime, formatter).toEpochSecond(ZoneOffset.UTC);
        } catch (Exception err) {
            logFailure(dateTime, err);
            return -1;
        }
    }

    /** Shared failure reporting; mirrors the original stdout diagnostics. */
    private static void logFailure(String dateTime, Exception err) {
        System.out.println(dateTime);
        System.out.println(err.toString());
    }
}
apache-2.0
jdcasey/pnc
build-executor/src/main/java/org/jboss/pnc/executor/servicefactories/RepositoryManagerFactory.java
1666
/** * JBoss, Home of Professional Open Source. * Copyright 2014-2018 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.executor.servicefactories; import org.jboss.pnc.model.TargetRepository; import org.jboss.pnc.spi.executor.exceptions.ExecutorException; import org.jboss.pnc.spi.repositorymanager.RepositoryManager; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Instance; import javax.inject.Inject; /** * Created by <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> on 2014-11-23. */ @ApplicationScoped public class RepositoryManagerFactory { @Inject Instance<RepositoryManager> availableManagers; public RepositoryManager getRepositoryManager(TargetRepository.Type managerType) throws ExecutorException { for (RepositoryManager manager : availableManagers) { if (manager.canManage(managerType)) { return manager; } } throw new ExecutorException("No repository manager available for " + managerType + " build type."); } }
apache-2.0
JimBarrows/JavaDomainObjects
Products/src/main/java/jdo/product/model/price/SaleType.java
287
package jdo.product.model.price;

import javax.persistence.Entity;

import jdo.model.BaseType;

/**
 * JPA entity representing a type of sale in the product pricing model.
 * Carries no fields of its own; identity and common attributes come from
 * {@link BaseType}.
 *
 * @author Jim
 * @version 1.0
 * @created 25-Dec-2007 9:54:38 AM
 */
@Entity
public class SaleType extends BaseType {

	// Required because BaseType is Serializable; bump on incompatible change.
	private static final long serialVersionUID = 1L;

}
apache-2.0
trasa/aws-sdk-java
aws-java-sdk-elasticbeanstalk/src/main/java/com/amazonaws/services/elasticbeanstalk/model/TooManyConfigurationTemplatesException.java
1229
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticbeanstalk.model;

import com.amazonaws.AmazonServiceException;

/**
 * <p>
 * The specified account has reached its limit of configuration templates.
 * </p>
 * Service exception thrown by the Elastic Beanstalk client; unchecked, like all
 * {@link AmazonServiceException} subclasses.
 */
public class TooManyConfigurationTemplatesException extends AmazonServiceException {
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new TooManyConfigurationTemplatesException with the
     * specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public TooManyConfigurationTemplatesException(String message) {
        super(message);
    }

}
apache-2.0
minwoo-jung/pinpoint
profiler/src/main/java/com/navercorp/pinpoint/profiler/context/recorder/proxy/DefaultProxyRequestRecorder.java
3501
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.profiler.context.recorder.proxy; import com.navercorp.pinpoint.bootstrap.context.SpanRecorder; import com.navercorp.pinpoint.bootstrap.plugin.proxy.ProxyRequestRecorder; import com.navercorp.pinpoint.bootstrap.plugin.request.RequestAdaptor; import com.navercorp.pinpoint.common.util.Assert; import com.navercorp.pinpoint.common.util.StringUtils; import com.navercorp.pinpoint.profiler.context.DefaultTrace; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; /** * @author jaehong.kim */ public class DefaultProxyRequestRecorder<T> implements ProxyRequestRecorder<T> { private static final Logger logger = LoggerFactory.getLogger(DefaultTrace.class.getName()); private final boolean isDebug = logger.isDebugEnabled(); private final ProxyRequestParser[] proxyRequestParsers; private final RequestAdaptor<T> requestAdaptor; private final ProxyRequestAnnotationFactory annotationFactory = new ProxyRequestAnnotationFactory(); public DefaultProxyRequestRecorder(final List<ProxyRequestParser> proxyRequestParserList, final RequestAdaptor<T> requestAdaptor) { Assert.requireNonNull(proxyRequestParserList, "proxyRequestParserList"); this.proxyRequestParsers = proxyRequestParserList.toArray(new ProxyRequestParser[0]); this.requestAdaptor = Assert.requireNonNull(requestAdaptor, "requestAdaptor"); } public void record(final SpanRecorder recorder, final T request) { 
if (recorder == null) { return; } if (request == null) { return; } try { for (ProxyRequestParser parser : proxyRequestParsers) { parseAndRecord(recorder, request, parser); } } catch (Exception e) { // for handler operations. if (logger.isInfoEnabled()) { logger.info("Failed to record proxy http header. cause={}", e.getMessage()); } } } private void parseAndRecord(final SpanRecorder recorder, final T request, final ProxyRequestParser parser) { final String name = parser.getHttpHeaderName(); final String value = requestAdaptor.getHeader(request, name); if (StringUtils.isEmpty(value)) { return; } final ProxyRequestHeader header = parser.parse(value); if (header.isValid()) { recorder.recordAttribute(annotationFactory.getAnnotationKey(), annotationFactory.getAnnotationValue(parser.getCode(), header)); if (isDebug) { logger.debug("Record proxy request header. name={}, value={}", name, value); } } else { if (logger.isInfoEnabled()) { logger.info("Failed to parse proxy request header. name={}. value={}, cause={}", name, value, header.getCause()); } } } }
apache-2.0
actframework/actframework
src/main/java/act/util/ObjectMetaInfo.java
6774
package act.util;

/*-
 * #%L
 * ACT Framework
 * %%
 * Copyright (C) 2014 - 2017 ActFramework
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import act.asm.Label;
import act.asm.MethodVisitor;
import act.asm.Opcodes;
import act.asm.Type;
import act.data.annotation.Data;
import org.osgl.$;
import org.osgl.util.S;

import java.util.ArrayList;
import java.util.List;

/**
 * Datastructure captures a class's meta information in related to {@link DataObjectEnhancer}.
 * The following info will be captured:
 * <ul>
 * <li>Is the class annotated with {@link Data} annotation</li>
 * <li>A list of {@link FieldMetaInfo} of all declared fields</li>
 * </ul>
 */
class ObjectMetaInfo implements Opcodes {

    /**
     * Datastructure captures a class's declared field meta info.
     * Also knows how to emit the ASM instructions that compare/hash the field
     * inside a generated {@code equals}/{@code hashCode} method.
     */
    static class FieldMetaInfo implements Opcodes {
        // Field name as declared on the host class.
        private String name;
        // Transient fields are skipped from equals/hashCode unless equalForce is set.
        private boolean isTransient = false;
        // Forces inclusion of a transient field in equality checks.
        private boolean equalForce = false;
        // Excludes the field from equality checks entirely; wins over equalForce.
        private boolean equalIgnore = false;
        // ASM type of the field.
        private Type type;

        FieldMetaInfo(String name, Type type, boolean isTransient) {
            // $.NPE rejects null arguments — presumably throws NPE (TODO confirm osgl semantics).
            this.name = $.NPE(name);
            this.type = $.NPE(type);
            this.isTransient = isTransient;
        }

        void setEqualForce() {
            equalForce = true;
        }

        void setEqualIgnore() {
            equalIgnore = true;
        }

        /**
         * Emits bytecode comparing this field on {@code this} (local 0) and the
         * cast counterpart (local 2); jumps to {@code jumpTo} when they differ.
         * No-op for fields excluded by {@link #eligible()}.
         */
        void addEqualInstructions(Type host, MethodVisitor mv, Label jumpTo) {
            if (!eligible()) {
                return;
            }
            String typeDesc = type.getDescriptor();
            // ALOAD 0: this
            // ALOAD 2: that = (Type) obj (which is 1)
            mv.visitVarInsn(ALOAD, 2);
            mv.visitFieldInsn(GETFIELD, host.getInternalName(), name, typeDesc);
            mv.visitVarInsn(ALOAD, 0);
            mv.visitFieldInsn(GETFIELD, host.getInternalName(), name, typeDesc);
            String s = typeDesc;
            if (s.length() > 1) {
                // Non-primitive descriptor: compare via the Object overload.
                s = OBJECT_TYPE.getDescriptor();
            }
            String op = "eq";
            if (typeDesc.startsWith("[")) {
                // array needs deep eq
                op = "eq2";
            }
            // Delegate the actual comparison to org.osgl.Osgl.eq / Osgl.eq2.
            mv.visitMethodInsn(INVOKESTATIC, "org/osgl/Osgl", op, S.fmt("(%s%s)Z", s, s), false);
            mv.visitJumpInsn(IFEQ, jumpTo);
        }

        /**
         * Emits bytecode that pushes this field's (boxed) value onto the stack
         * for hash computation.
         *
         * @return {@code true} if instructions were emitted, {@code false} if the
         *         field is excluded by {@link #eligible()}
         */
        boolean addHashCodeInstruction(Type host, MethodVisitor mv) {
            if (!eligible()) {
                return false;
            }
            mv.visitVarInsn(ALOAD, 0); // load this pointer
            mv.visitFieldInsn(GETFIELD, host.getInternalName(), name, type.getDescriptor());
            convertFromPrimaryType(type, mv);
            return true;
        }

        // Boxes a primitive stack value via the wrapper's valueOf; leaves
        // reference types untouched (default branch).
        private void convertFromPrimaryType(Type fieldType, MethodVisitor mv) {
            switch (fieldType.getSort()) {
                case Type.BOOLEAN:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;", false);
                    break;
                case Type.BYTE:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;", false);
                    break;
                case Type.CHAR:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Character", "valueOf", "(C)Ljava/lang/Character;", false);
                    break;
                case Type.SHORT:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;", false);
                    break;
                case Type.INT:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;", false);
                    break;
                case Type.LONG:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;", false);
                    break;
                case Type.FLOAT:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;", false);
                    break;
                case Type.DOUBLE:
                    mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;", false);
                    break;
                default:
                    // do nothing
            }
        }

        // A field participates in equals/hashCode unless explicitly ignored,
        // and transient fields only participate when forced.
        boolean eligible() {
            return !equalIgnore && (!isTransient || equalForce);
        }
    }

    // Super type, kept only when it is a real super class (not java.lang.Object).
    private Type superType;
    // The enhanced class itself.
    private Type type;
    // When set, generated equals/hashCode should also consult the super class.
    private boolean callSuper;
    // Declared fields collected during class scanning.
    private List<FieldMetaInfo> fields = new ArrayList<>();
    // Flags for members already present on the class (so the enhancer skips generating them).
    private boolean hasEqualMethod = false;
    private boolean hasHashCodeMethod = false;
    // NOTE(review): set via toStringMethodFound() but not read in this class —
    // presumably consumed by the enhancer elsewhere; verify before removing.
    private boolean hasToStringMethod = false;
    // Whether the class carries the @Data annotation that opts in to enhancement.
    private boolean hasAutoObjectAnnotation = false;

    private static Type OBJECT_TYPE = Type.getType(Object.class);

    ObjectMetaInfo(Type type, Type superType) {
        this.type = $.NPE(type);
        // java.lang.Object as the parent is treated the same as "no super type".
        if (null != superType && !OBJECT_TYPE.equals(superType)) {
            this.superType = superType;
        }
    }

    Type type() {
        return type;
    }

    /** May be {@code null} when the super class is java.lang.Object. */
    Type superType() {
        return superType;
    }

    List<FieldMetaInfo> fields() {
        return fields;
    }

    /** Registers a declared field and returns its meta info for further flagging. */
    FieldMetaInfo addField(String fieldName, Type fieldType, boolean isTransient) {
        FieldMetaInfo fi = new FieldMetaInfo(fieldName, fieldType, isTransient);
        fields.add(fi);
        return fi;
    }

    void requireCallSuper() {
        callSuper = true;
    }

    // Only meaningful when a non-Object super type exists.
    boolean shouldCallSuper() {
        return callSuper && null != superType;
    }

    void equalMethodFound() {
        hasEqualMethod = true;
    }

    void hashCodeMethodFound() {
        hasHashCodeMethod = true;
    }

    void toStringMethodFound() {
        hasToStringMethod = true;
    }

    void autoObjectAnnotationFound() {
        hasAutoObjectAnnotation = true;
    }

    boolean hasDataAnnotation() {
        return hasAutoObjectAnnotation;
    }

    // Generate equals only for @Data classes that did not hand-write one.
    boolean shouldGenerateEqualsMethod() {
        return hasAutoObjectAnnotation && !hasEqualMethod;
    }

    // Generate hashCode only for @Data classes that did not hand-write one.
    boolean shouldGenerateHashCodeMethod() {
        return hasAutoObjectAnnotation && !hasHashCodeMethod;
    }
}
apache-2.0
arenadata/ambari
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
34540
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.actionmanager; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.annotation.Nullable; import org.apache.ambari.server.Role; import org.apache.ambari.server.RoleCommand; import org.apache.ambari.server.agent.AgentCommand.AgentCommandType; import org.apache.ambari.server.agent.ExecutionCommand; import org.apache.ambari.server.metadata.RoleCommandPair; import org.apache.ambari.server.orm.dao.HostRoleCommandDAO; import org.apache.ambari.server.orm.entities.HostRoleCommandEntity; import org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity; import org.apache.ambari.server.orm.entities.StageEntity; import org.apache.ambari.server.serveraction.ServerAction; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Host; import org.apache.ambari.server.state.ServiceComponentHostEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent; import 
org.apache.ambari.server.utils.StageUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.Assert; import com.google.inject.Inject; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import com.google.inject.persist.Transactional; //This class encapsulates the stage. The stage encapsulates all the information //required to persist an action. public class Stage { /** * Used because in-memory storage of commands requires a hostname for maps * when the underlying store does not (host_id is {@code null}). We also * don't want stages getting confused with Ambari vs cluster hosts, so * don't use {@link StageUtils#getHostName()} */ public static final String INTERNAL_HOSTNAME = "_internal_ambari"; private static Logger LOG = LoggerFactory.getLogger(Stage.class); private final long requestId; private String clusterName; private long clusterId = -1L; private long stageId = -1; private final String logDir; private final String requestContext; private String commandParamsStage; private String hostParamsStage; private CommandExecutionType commandExecutionType = CommandExecutionType.STAGE; private boolean skippable; private boolean supportsAutoSkipOnFailure; private int stageTimeout = -1; private volatile boolean wrappersLoaded = false; //Map of roles to successFactors for this stage. Default is 1 i.e. 
100% private Map<Role, Float> successFactors = new HashMap<>(); //Map of host to host-roles Map<String, Map<String, HostRoleCommand>> hostRoleCommands = new TreeMap<>(); private Map<String, List<ExecutionCommandWrapper>> commandsToSend = new TreeMap<>(); @Inject private HostRoleCommandFactory hostRoleCommandFactory; @Inject private ExecutionCommandWrapperFactory ecwFactory; @AssistedInject public Stage(@Assisted long requestId, @Assisted("logDir") String logDir, @Assisted("clusterName") @Nullable String clusterName, @Assisted("clusterId") long clusterId, @Assisted("requestContext") @Nullable String requestContext, @Assisted("commandParamsStage") String commandParamsStage, @Assisted("hostParamsStage") String hostParamsStage, HostRoleCommandFactory hostRoleCommandFactory, ExecutionCommandWrapperFactory ecwFactory) { wrappersLoaded = true; this.requestId = requestId; this.logDir = logDir; this.clusterName = clusterName; this.clusterId = clusterId; this.requestContext = requestContext == null ? 
"" : requestContext; this.commandParamsStage = commandParamsStage; this.hostParamsStage = hostParamsStage; skippable = false; supportsAutoSkipOnFailure = false; this.hostRoleCommandFactory = hostRoleCommandFactory; this.ecwFactory = ecwFactory; } @AssistedInject public Stage(@Assisted StageEntity stageEntity, HostRoleCommandDAO hostRoleCommandDAO, ActionDBAccessor dbAccessor, Clusters clusters, HostRoleCommandFactory hostRoleCommandFactory, ExecutionCommandWrapperFactory ecwFactory) { this.hostRoleCommandFactory = hostRoleCommandFactory; this.ecwFactory = ecwFactory; requestId = stageEntity.getRequestId(); stageId = stageEntity.getStageId(); skippable = stageEntity.isSkippable(); supportsAutoSkipOnFailure = stageEntity.isAutoSkipOnFailureSupported(); logDir = stageEntity.getLogInfo(); clusterId = stageEntity.getClusterId().longValue(); if (-1L != clusterId) { try { clusterName = clusters.getClusterById(clusterId).getClusterName(); } catch (Exception e) { LOG.debug("Could not load cluster with id {}, the cluster may have been removed for stage {}", Long.valueOf(clusterId), Long.valueOf(stageId)); } } requestContext = stageEntity.getRequestContext(); commandParamsStage = stageEntity.getCommandParamsStage(); hostParamsStage = stageEntity.getHostParamsStage(); commandExecutionType = stageEntity.getCommandExecutionType(); List<Long> taskIds = hostRoleCommandDAO.findTaskIdsByStage(requestId, stageId); Collection<HostRoleCommand> commands = dbAccessor.getTasks(taskIds); for (HostRoleCommand command : commands) { // !!! some commands won't have a hostname, because they are server-side and // don't hold that information. 
In that case, use the special key to // use in the map String hostname = getSafeHost(command.getHostName()); if (!hostRoleCommands.containsKey(hostname)) { hostRoleCommands.put(hostname, new LinkedHashMap<String, HostRoleCommand>()); } hostRoleCommands.get(hostname).put(command.getRole().toString(), command); } for (RoleSuccessCriteriaEntity successCriteriaEntity : stageEntity.getRoleSuccessCriterias()) { successFactors.put(successCriteriaEntity.getRole(), successCriteriaEntity.getSuccessFactor().floatValue()); } } /** * Creates object to be persisted in database * @return StageEntity */ public synchronized StageEntity constructNewPersistenceEntity() { StageEntity stageEntity = new StageEntity(); stageEntity.setRequestId(requestId); stageEntity.setStageId(getStageId()); stageEntity.setLogInfo(logDir); stageEntity.setSkippable(skippable); stageEntity.setAutoSkipFailureSupported(supportsAutoSkipOnFailure); stageEntity.setRequestContext(requestContext); stageEntity.setHostRoleCommands(new ArrayList<HostRoleCommandEntity>()); stageEntity.setRoleSuccessCriterias(new ArrayList<RoleSuccessCriteriaEntity>()); stageEntity.setCommandParamsStage(commandParamsStage); if (null != hostParamsStage) { stageEntity.setHostParamsStage(hostParamsStage); } stageEntity.setCommandExecutionType(commandExecutionType); for (Role role : successFactors.keySet()) { RoleSuccessCriteriaEntity roleSuccessCriteriaEntity = new RoleSuccessCriteriaEntity(); roleSuccessCriteriaEntity.setRole(role); roleSuccessCriteriaEntity.setStage(stageEntity); roleSuccessCriteriaEntity.setSuccessFactor(successFactors.get(role).doubleValue()); stageEntity.getRoleSuccessCriterias().add(roleSuccessCriteriaEntity); } return stageEntity; } void checkWrappersLoaded() { if (!wrappersLoaded) { synchronized (this) { // Stages are not used concurrently now, but it won't be performance loss if (!wrappersLoaded) { loadExecutionCommandWrappers(); } } } } @Transactional void loadExecutionCommandWrappers() { for 
// NOTE(review): the `for` header opening this iteration begins before this chunk;
// the enclosing method (which rebuilds commandsToSend from hostRoleCommands) is
// not fully visible here. The fragment below is kept byte-intact.
(Map.Entry<String, Map<String, HostRoleCommand>> hostRoleCommandEntry : hostRoleCommands.entrySet()) {
  String hostname = hostRoleCommandEntry.getKey();
  // Start a fresh wrapper list for this host, then copy in the wrapper of
  // every HostRoleCommand recorded for it.
  commandsToSend.put(hostname, new ArrayList<ExecutionCommandWrapper>());
  Map<String, HostRoleCommand> roleCommandMap = hostRoleCommandEntry.getValue();
  for (Map.Entry<String, HostRoleCommand> roleCommandEntry : roleCommandMap.entrySet()) {
    commandsToSend.get(hostname).add(roleCommandEntry.getValue().getExecutionCommandWrapper());
  }
}
}

/**
 * Returns every {@link HostRoleCommand} of this stage in a flat list.
 * The host/role iteration order is stable because both nested maps are
 * ordered maps (see comment below).
 */
public List<HostRoleCommand> getOrderedHostRoleCommands() {
  List<HostRoleCommand> commands = new ArrayList<>();
  //Correct due to ordered maps
  for (Map.Entry<String, Map<String, HostRoleCommand>> hostRoleCommandEntry : hostRoleCommands.entrySet()) {
    for (Map.Entry<String, HostRoleCommand> roleCommandEntry : hostRoleCommandEntry.getValue().entrySet()) {
      commands.add(roleCommandEntry.getValue());
    }
  }
  return commands;
}

/**
 * Returns <Role, RoleCommand> pairs which are in progress.
 * @return
 */
public Set<RoleCommandPair> getHostRolesInProgress() {
  Set<RoleCommandPair> commandsToScheduleSet = new HashSet<>();
  for (Map.Entry<String, Map<String, HostRoleCommand>> hostRoleCommandEntry : hostRoleCommands.entrySet()) {
    for (Map.Entry<String, HostRoleCommand> roleCommandEntry : hostRoleCommandEntry.getValue().entrySet()) {
      // "in progress" is defined by the shared HostRoleStatus.IN_PROGRESS_STATUSES set
      if (HostRoleStatus.IN_PROGRESS_STATUSES.contains(roleCommandEntry.getValue().getStatus())) {
        commandsToScheduleSet.add(
            new RoleCommandPair(roleCommandEntry.getValue().getRole(),
                roleCommandEntry.getValue().getRoleCommand()));
      }
    }
  }
  return commandsToScheduleSet;
}

/** @return the stage-level command params (opaque string blob). */
public String getCommandParamsStage() {
  return commandParamsStage;
}

public void setCommandParamsStage(String commandParamsStage) {
  this.commandParamsStage = commandParamsStage;
}

/** @return the stage-level host params (opaque string blob). */
public String getHostParamsStage() {
  return hostParamsStage;
}

public void setHostParamsStage(String hostParamsStage) {
  this.hostParamsStage = hostParamsStage;
}

public CommandExecutionType getCommandExecutionType() {
  return commandExecutionType;
}

public void setCommandExecutionType(CommandExecutionType commandExecutionType) {
  this.commandExecutionType = commandExecutionType;
}

/**
 * Assigns the stage id once, and propagates the (requestId, stageId) pair
 * into every execution command already registered with this stage.
 *
 * @throws RuntimeException if the stage id was already set (guarded by the
 *         -1 sentinel used at construction time)
 */
public synchronized void setStageId(long stageId) {
  if (this.stageId != -1) {
    throw new RuntimeException("Attempt to set stageId again! Not allowed.");
  }
  //used on stage creation only, no need to check if wrappers loaded
  this.stageId = stageId;
  for (String host: commandsToSend.keySet()) {
    for (ExecutionCommandWrapper wrapper : commandsToSend.get(host)) {
      ExecutionCommand cmd = wrapper.getExecutionCommand();
      cmd.setRequestAndStage(requestId, stageId);
    }
  }
}

public synchronized long getStageId() {
  return stageId;
}

/** @return the combined request/stage action id string (see StageUtils). */
public String getActionId() {
  return StageUtils.getActionId(requestId, getStageId());
}

/**
 * Creates a HostRoleCommand via the factory (hostname variant) and registers
 * it together with a new ExecutionCommand wrapper.
 * Auto-skip is only effective when the stage itself is skippable AND
 * supports auto-skip — see the conjunction below.
 */
private synchronized ExecutionCommandWrapper addGenericExecutionCommand(String clusterName,
    String hostName, Role role, RoleCommand command, ServiceComponentHostEvent event,
    boolean retryAllowed, boolean autoSkipFailure) {
  boolean isHostRoleCommandAutoSkippable = autoSkipFailure && supportsAutoSkipOnFailure && skippable;

  // used on stage creation only, no need to check if wrappers loaded
  HostRoleCommand hrc = hostRoleCommandFactory.create(hostName, role, event, command,
      retryAllowed, isHostRoleCommandAutoSkippable);
  return addGenericExecutionCommand(clusterName, hostName, role, command, event, hrc);
}

/**
 * Same as above, but takes the Cluster/Host entities instead of raw names.
 */
private ExecutionCommandWrapper addGenericExecutionCommand(Cluster cluster, Host host,
    Role role, RoleCommand command, ServiceComponentHostEvent event,
    boolean retryAllowed, boolean autoSkipFailure) {
  boolean isHostRoleCommandAutoSkippable = autoSkipFailure && supportsAutoSkipOnFailure && skippable;

  HostRoleCommand hrc = hostRoleCommandFactory.create(host, role, event, command,
      retryAllowed, isHostRoleCommandAutoSkippable);
  return addGenericExecutionCommand(cluster.getClusterName(), host.getHostName(), role,
      command, event, hrc);
}

//TODO refactor method to use Host object (host_id support)
/**
 * Core registration: wires a new ExecutionCommand to the given
 * HostRoleCommand and records both in the per-host maps. A (host, role)
 * pair may only be registered once per stage — a second registration of
 * either the role command or the wrapper throws.
 */
private ExecutionCommandWrapper addGenericExecutionCommand(String clusterName, String hostName,
    Role role, RoleCommand command, ServiceComponentHostEvent event, HostRoleCommand hrc) {
  ExecutionCommand cmd = new ExecutionCommand();
  ExecutionCommandWrapper wrapper = ecwFactory.createFromCommand(cmd);
  hrc.setExecutionCommandWrapper(wrapper);

  cmd.setHostname(hostName);
  cmd.setClusterName(clusterName);
  cmd.setRequestAndStage(requestId, stageId);
  cmd.setRole(role.name());
  cmd.setRoleCommand(command);
  // service name is blank by default; callers such as
  // addHostRoleExecutionCommand overwrite it afterwards
  cmd.setServiceName("");

  Map<String, HostRoleCommand> hrcMap = hostRoleCommands.get(hostName);
  if (hrcMap == null) {
    hrcMap = new LinkedHashMap<>();
    hostRoleCommands.put(hostName, hrcMap);
  }

  if (hrcMap.get(role.toString()) != null) {
    throw new RuntimeException(
        "Setting the host role command second time for same stage: stage="
            + getActionId() + ", host=" + hostName + ", role=" + role);
  }
  hrcMap.put(role.toString(), hrc);

  List<ExecutionCommandWrapper> execCmdList = commandsToSend.get(hostName);
  if (execCmdList == null) {
    execCmdList = new ArrayList<>();
    commandsToSend.put(hostName, execCmdList);
  }

  if (execCmdList.contains(wrapper)) {
    //todo: proper exception
    throw new RuntimeException(
        "Setting the execution command second time for same stage: stage="
            + getActionId() + ", host=" + hostName + ", role=" + role + ", event=" + event);
  }
  execCmdList.add(wrapper);
  return wrapper;
}

/**
 * A new host role command is created for execution. Creates both
 * ExecutionCommand and HostRoleCommand objects and adds them to the Stage.
 * This should be called only once for a host-role for a given stage.
 */
public synchronized void addHostRoleExecutionCommand(String host, Role role, RoleCommand command,
    ServiceComponentHostEvent event, String clusterName, String serviceName,
    boolean retryAllowed, boolean autoSkipFailure) {
  boolean isHostRoleCommandAutoSkippable = autoSkipFailure && supportsAutoSkipOnFailure && skippable;

  ExecutionCommandWrapper commandWrapper = addGenericExecutionCommand(clusterName, host, role,
      command, event, retryAllowed, isHostRoleCommandAutoSkippable);

  commandWrapper.getExecutionCommand().setServiceName(serviceName);
}

/**
 * A new host role command is created for execution. Creates both
 * ExecutionCommand and HostRoleCommand objects and adds them to the Stage.
 * This should be called only once for a host-role for a given stage.
 */
public synchronized void addHostRoleExecutionCommand(Host host, Role role, RoleCommand command,
    ServiceComponentHostEvent event, Cluster cluster, String serviceName,
    boolean retryAllowed, boolean autoSkipFailure) {
  boolean isHostRoleCommandAutoSkippable = autoSkipFailure && supportsAutoSkipOnFailure && skippable;

  ExecutionCommandWrapper commandWrapper = addGenericExecutionCommand(cluster, host, role,
      command, event, retryAllowed, isHostRoleCommandAutoSkippable);

  commandWrapper.getExecutionCommand().setServiceName(serviceName);
}

/**
 * <p/>
 * Creates server-side execution command.
 * <p/>
 * The action name for this command is expected to be the classname of a
 * {@link org.apache.ambari.server.serveraction.ServerAction} implementation
 * which will be instantiated and invoked as needed.
 *
 * @param actionName
 *          a String declaring the action name (in the form of a classname) to
 *          execute
 * @param userName
 *          the name of the user who created this stage; may be null for
 *          anonymous user
 * @param role
 *          the Role for this command
 * @param command
 *          the RoleCommand for this command
 * @param clusterName
 *          a String identifying the cluster on which to to execute this
 *          command
 * @param event
 *          a ServiceComponentHostServerActionEvent
 * @param commandParams
 *          a Map of String to String data used to pass to the action - this
 *          may be empty or null if no data is relevant
 * @param commandDetail
 *          a String declaring a descriptive name to pass to the action - null
 *          or an empty string indicates no value is to be set
 * @param configTags
 *          a Map of configuration tags to set for this command - if null, no
 *          configurations will be available for the command
 * @param timeout
 *          an Integer declaring the timeout for this action - if null, a
 *          default
 * @param retryAllowed
 *          indicates whether retry after failure is allowed
 * @param autoSkipFailure
 *          indicates whether a failure of this command may be auto-skipped
 */
public synchronized void addServerActionCommand(String actionName, @Nullable String userName,
    Role role, RoleCommand command, String clusterName,
    ServiceComponentHostServerActionEvent event, @Nullable Map<String, String> commandParams,
    @Nullable String commandDetail, @Nullable Map<String, Map<String, String>> configTags,
    @Nullable Integer timeout, boolean retryAllowed, boolean autoSkipFailure) {
  boolean isHostRoleCommandAutoSkippable = autoSkipFailure && supportsAutoSkipOnFailure && skippable;

  // server actions run on the Ambari server itself, keyed by INTERNAL_HOSTNAME
  ExecutionCommandWrapper commandWrapper = addGenericExecutionCommand(clusterName,
      INTERNAL_HOSTNAME, role, command, event, retryAllowed, isHostRoleCommandAutoSkippable);

  ExecutionCommand cmd = commandWrapper.getExecutionCommand();

  Map<String, String> cmdParams = new HashMap<>();
  if (commandParams != null) {
    cmdParams.putAll(commandParams);
  }
  if (timeout != null) {
    cmdParams.put(ExecutionCommand.KeyNames.COMMAND_TIMEOUT, Long.toString(timeout));
  }
cmd.setCommandParams(cmdParams);

  // server actions carry empty configuration maps; only the tags are kept
  Map<String, Map<String, String>> configurations = new TreeMap<>();
  cmd.setConfigurations(configurations);
  Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<>();
  cmd.setConfigurationAttributes(configurationAttributes);
  if (configTags == null) {
    configTags = new TreeMap<>();
  }
  cmd.setConfigurationTags(configTags);

  // the ServerAction implementation class and acting user travel as role params
  Map<String, String> roleParams = new HashMap<>();
  roleParams.put(ServerAction.ACTION_NAME, actionName);
  if (userName != null) {
    roleParams.put(ServerAction.ACTION_USER_NAME, userName);
  }
  cmd.setRoleParams(roleParams);

  if (commandDetail != null) {
    HostRoleCommand hostRoleCommand = getHostRoleCommand(INTERNAL_HOSTNAME, role.toString());
    if (hostRoleCommand != null) {
      hostRoleCommand.setCommandDetail(commandDetail);
      hostRoleCommand.setCustomCommandName(actionName);
    }
  }
}

/**
 * Adds cancel command to stage for given cancelTargets collection of
 * task id's that has to be canceled in Agent layer.
 */
public synchronized void addCancelRequestCommand(List<Long> cancelTargets, String clusterName, String hostName) {
  ExecutionCommandWrapper commandWrapper = addGenericExecutionCommand(clusterName, hostName,
      Role.AMBARI_SERVER_ACTION, RoleCommand.ABORT, null, false, false);
  ExecutionCommand cmd = commandWrapper.getExecutionCommand();
  cmd.setCommandType(AgentCommandType.CANCEL_COMMAND);

  Assert.notEmpty(cancelTargets, "Provided targets task Id are empty.");

  Map<String, String> roleParams = new HashMap<>();
  roleParams.put("cancelTaskIdTargets", StringUtils.join(cancelTargets, ','));
  cmd.setRoleParams(roleParams);
}

/**
 *
 * @return list of hosts
 */
public synchronized List<String> getHosts() { // TODO: Check whether method should be synchronized
  List<String> hlist = new ArrayList<>();
  for (String h : hostRoleCommands.keySet()) {
    hlist.add(h);
  }
  return hlist;
}

/**
 * Returns the configured success factor for the role, falling back to a
 * built-in default: 0.5 for worker-style roles (DATANODE, TASKTRACKER,
 * GANGLIA_MONITOR, HBASE_REGIONSERVER), 1.0 otherwise.
 */
synchronized float getSuccessFactor(Role r) {
  Float f = successFactors.get(r);
  if (f == null) {
    if (r.equals(Role.DATANODE) ||
        r.equals(Role.TASKTRACKER) ||
        r.equals(Role.GANGLIA_MONITOR) ||
        r.equals(Role.HBASE_REGIONSERVER)) {
      return (float) 0.5;
    } else {
      return 1;
    }
  } else {
    return f;
  }
}

public synchronized void setSuccessFactors(Map<Role, Float> suc) {
  successFactors = suc;
}

public synchronized Map<Role, Float> getSuccessFactors() {
  return successFactors;
}

public long getRequestId() {
  return requestId;
}

public String getClusterName() {
  return clusterName;
}

public long getClusterId() {
  return clusterId;
}

public String getRequestContext() {
  return requestContext;
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the last attempt time
 */
public long getLastAttemptTime(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role).getLastAttemptTime();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the number of attempts
 */
public short getAttemptCount(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role).getAttemptCount();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 */
public void incrementAttemptCount(String hostname, String role) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).incrementAttemptCount();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param t the last time the role was attempted
 */
public void setLastAttemptTime(String hostname, String role, long t) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setLastAttemptTime(t);
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the wrapper
 */
public ExecutionCommandWrapper getExecutionCommandWrapper(String hostname, String role) {
  HostRoleCommand hrc = hostRoleCommands.get(getSafeHost(hostname)).get(role);
  if (hrc != null) {
    return hrc.getExecutionCommandWrapper();
  } else {
    return null;
  }
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @return the list of commands for the host
 */
public List<ExecutionCommandWrapper> getExecutionCommands(String hostname) {
  checkWrappersLoaded();
  return commandsToSend.get(getSafeHost(hostname));
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the start time for the task
 */
public long getStartTime(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role).getStartTime();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param startTime the start time
 */
public void setStartTime(String hostname, String role, long startTime) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setStartTime(startTime);
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the status
 */
public HostRoleStatus getHostRoleStatus(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role).getStatus();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param status the status
 */
public void setHostRoleStatus(String hostname, String role, HostRoleStatus status) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setStatus(status);
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param roleStr the role name
 * @return the wrapper event
 */
public ServiceComponentHostEventWrapper getFsmEvent(String hostname, String roleStr) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(roleStr).getEvent();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param exitCode the exit code
 */
public void setExitCode(String hostname, String role, int exitCode) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setExitCode(exitCode);
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the exit code
 */
public int getExitCode(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role).getExitCode();
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param stdErr the standard error string
 */
public void setStderr(String hostname, String role, String stdErr) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setStderr(stdErr);
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @param stdOut the standard output string
 */
public void setStdout(String hostname, String role, String stdOut) {
  hostRoleCommands.get(getSafeHost(hostname)).get(role).setStdout(stdOut);
}

/**
 * A stage is in progress while any of its commands is still PENDING,
 * QUEUED or IN_PROGRESS. NOTE(review): a null HostRoleCommand entry makes
 * this return false immediately, skipping later hosts — presumably such
 * entries cannot occur in practice; verify against callers.
 */
public synchronized boolean isStageInProgress() {
  for(String host: hostRoleCommands.keySet()) {
    for (String role : hostRoleCommands.get(host).keySet()) {
      HostRoleCommand hrc = hostRoleCommands.get(host).get(role);
      if (hrc == null) {
        return false;
      }
      if (hrc.getStatus().equals(HostRoleStatus.PENDING) ||
          hrc.getStatus().equals(HostRoleStatus.QUEUED) ||
          hrc.getStatus().equals(HostRoleStatus.IN_PROGRESS)) {
        return true;
      }
    }
  }
  return false;
}

/**
 * @return true if any command of this stage currently has one of the given
 *         statuses (same early-false-on-null caveat as isStageInProgress).
 */
public synchronized boolean doesStageHaveHostRoleStatus(
    Set<HostRoleStatus> statuses) {
  for(String host: hostRoleCommands.keySet()) {
    for (String role : hostRoleCommands.get(host).keySet()) {
      HostRoleCommand hrc = hostRoleCommands.get(host).get(role);
      if (hrc == null) {
        return false;
      }
      for (HostRoleStatus status : statuses) {
        if (hrc.getStatus().equals(status)) {
          return true;
        }
      }
    }
  }
  return false;
}

public Map<String, List<ExecutionCommandWrapper>> getExecutionCommands() {
  checkWrappersLoaded();
  return commandsToSend;
}

public String getLogDir() {
  return logDir;
}

public Map<String, Map<String, HostRoleCommand>> getHostRoleCommands() {
  return hostRoleCommands;
}

/**
 * Gets the {@link HostRoleCommand} matching the specified ID from this stage.
 * This will not hit the database, instead using the pre-cached list of HRCs
 * from the construction of the stage.
 *
 * @param taskId
 *          the ID to match
 * @return the {@link HostRoleCommand} or {@code null} if none match.
 */
public HostRoleCommand getHostRoleCommand(long taskId) {
  for (Map.Entry<String, Map<String, HostRoleCommand>> hostEntry : hostRoleCommands.entrySet()) {
    Map<String, HostRoleCommand> hostCommands = hostEntry.getValue();
    for (Map.Entry<String, HostRoleCommand> hostCommand : hostCommands.entrySet()) {
      HostRoleCommand hostRoleCommand = hostCommand.getValue();
      if (null != hostRoleCommand && hostRoleCommand.getTaskId() == taskId) {
        return hostRoleCommand;
      }
    }
  }
  return null;
}

/**
 * This method should be used only in stage planner. To add
 * a new execution command use
 * {@link #addHostRoleExecutionCommand(String, org.apache.ambari.server.Role, org.apache.ambari.server.RoleCommand, org.apache.ambari.server.state.ServiceComponentHostEvent, String, String, boolean)}
 * @param origStage the stage
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param r the role
 */
public synchronized void addExecutionCommandWrapper(Stage origStage, String hostname, Role r) {
  //used on stage creation only, no need to check if wrappers loaded
  hostname = getSafeHost(hostname);
  String role = r.toString();
  if (commandsToSend.get(hostname) == null) {
    commandsToSend.put(hostname, new ArrayList<ExecutionCommandWrapper>());
  }
  commandsToSend.get(hostname).add(
      origStage.getExecutionCommandWrapper(hostname, role));
  if (hostRoleCommands.get(hostname) == null) {
    hostRoleCommands.put(hostname, new LinkedHashMap<String, HostRoleCommand>());
  }
  // TODO add reference to ExecutionCommand into HostRoleCommand
  hostRoleCommands.get(hostname).put(role, origStage.getHostRoleCommand(hostname, role));
}

/**
 * @param hostname the hostname; {@code null} for a server-side stage
 * @param role the role
 * @return the role command
 */
public HostRoleCommand getHostRoleCommand(String hostname, String role) {
  return hostRoleCommands.get(getSafeHost(hostname)).get(role);
}

/**
 * In this method we sum up all timeout values for all commands inside stage
 */
public synchronized int getStageTimeout() {
  checkWrappersLoaded();
  // lazily computed; -1 is the "not yet computed" sentinel. The result is
  // the MAXIMUM over hosts of the per-host sum of command timeouts.
  if (stageTimeout == -1) {
    for (String host: commandsToSend.keySet()) {
      int summaryTaskTimeoutForHost = 0;
      for (ExecutionCommandWrapper command : commandsToSend.get(host)) {
        Map<String, String> commandParams = command.getExecutionCommand().getCommandParams();
        String timeoutKey = ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
        if (commandParams != null && commandParams.containsKey(timeoutKey)) {
          String timeoutStr = commandParams.get(timeoutKey);
          long commandTimeout = Long.parseLong(timeoutStr) * 1000; // Converting to milliseconds
          summaryTaskTimeoutForHost += commandTimeout;
        } else {
          LOG.error("Execution command has no timeout parameter" + command.toString());
        }
      }
      if (summaryTaskTimeoutForHost > stageTimeout) {
        stageTimeout = summaryTaskTimeoutForHost;
      }
    }
  }
  return stageTimeout;
}

/**
 * Determine whether or not this stage is skippable.
 *
 * A skippable stage can be skipped on failure so that the
 * remaining stages of the request can execute.
 * If a stage is not skippable, a failure will cause the
 * remaining stages of the request to be aborted.
 *
 * @return true if this stage is skippable
 */
public boolean isSkippable() {
  return skippable;
}

/**
 * Set skippable for this stage.
 *
 * A skippable stage can be skipped on failure so that the
 * remaining stages of the request can execute.
 * If a stage is not skippable, a failure will cause the
 * remaining stages of the request to be aborted.
 *
 * @param skippable true if this stage should be skippable
 */
public void setSkippable(boolean skippable) {
  this.skippable = skippable;
}

/**
 * Determine whether this stage supports automatically skipping failures of
 * its commands.
 *
 * @return {@code true} if this stage supports automatically skipping failures
 *         of its commands.
 */
public boolean isAutoSkipOnFailureSupported() {
  return supportsAutoSkipOnFailure;
}

/**
 * Sets whether this stage supports automatically skipping failures of its
 * commands.
 *
 * @param supportsAutoSkipOnFailure
 *          {@code true} if this stage supports automatically skipping
 *          failures of its commands.
 */
public void setAutoSkipFailureSupported(boolean supportsAutoSkipOnFailure) {
  this.supportsAutoSkipOnFailure = supportsAutoSkipOnFailure;
}

@Override //Object
public synchronized String toString() {
  StringBuilder builder = new StringBuilder();
  builder.append("STAGE DESCRIPTION BEGIN\n");
  builder.append("requestId="+requestId+"\n");
  builder.append("stageId="+stageId+"\n");
  builder.append("clusterName="+clusterName+"\n");
  builder.append("logDir=" + logDir+"\n");
  builder.append("requestContext="+requestContext+"\n");
  builder.append("commandParamsStage="+commandParamsStage+"\n");
  builder.append("hostParamsStage="+hostParamsStage+"\n");
  builder.append("Success Factors:\n");
  for (Role r : successFactors.keySet()) {
    builder.append(" role: "+r+", factor: "+successFactors.get(r)+"\n");
  }
  for (HostRoleCommand hostRoleCommand : getOrderedHostRoleCommands()) {
    builder.append("HOST: ").append(hostRoleCommand.getHostName()).append(" :\n");
    builder.append(hostRoleCommand.getExecutionCommandWrapper().getJson());
    builder.append("\n");
    builder.append(hostRoleCommand.toString());
    builder.append("\n");
  }
  builder.append("STAGE DESCRIPTION END\n");
  return builder.toString();
}

/**
 * Helper to make sure the hostname is non-null for internal command map.
 * @param hostname the hostname for the map key
 * @return the hostname when not {@code null}, otherwise {@link #INTERNAL_HOSTNAME}
 */
private static String getSafeHost(String hostname) {
  return (null == hostname) ? INTERNAL_HOSTNAME : hostname;
}
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-mediapackage/src/main/java/com/amazonaws/services/mediapackage/model/RotateChannelCredentialsResult.java
8800
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediapackage.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result shape of the (deprecated) RotateChannelCredentials MediaPackage
 * operation. Code-generated by the AWS SDK generator — do not hand-edit logic.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/RotateChannelCredentials"
 *      target="_top">AWS API Documentation</a>
 */
@Deprecated
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RotateChannelCredentialsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The Amazon Resource Name (ARN) assigned to the Channel. */
    private String arn;
    /** A short text description of the Channel. */
    private String description;

    /** The ingest endpoints/credentials bundle for the Channel. */
    private HlsIngest hlsIngest;
    /** The ID of the Channel. */
    private String id;

    /** Resource tags attached to the Channel; may be null when untagged. */
    private java.util.Map<String, String> tags;

    /**
     * The Amazon Resource Name (ARN) assigned to the Channel.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) assigned to the Channel.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * The Amazon Resource Name (ARN) assigned to the Channel.
     *
     * @return The Amazon Resource Name (ARN) assigned to the Channel.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * The Amazon Resource Name (ARN) assigned to the Channel.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) assigned to the Channel.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * A short text description of the Channel.
     *
     * @param description
     *        A short text description of the Channel.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * A short text description of the Channel.
     *
     * @return A short text description of the Channel.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * A short text description of the Channel.
     *
     * @param description
     *        A short text description of the Channel.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * @param hlsIngest
     */
    public void setHlsIngest(HlsIngest hlsIngest) {
        this.hlsIngest = hlsIngest;
    }

    /**
     * @return
     */
    public HlsIngest getHlsIngest() {
        return this.hlsIngest;
    }

    /**
     * @param hlsIngest
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult withHlsIngest(HlsIngest hlsIngest) {
        setHlsIngest(hlsIngest);
        return this;
    }

    /**
     * The ID of the Channel.
     *
     * @param id
     *        The ID of the Channel.
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * The ID of the Channel.
     *
     * @return The ID of the Channel.
     */
    public String getId() {
        return this.id;
    }

    /**
     * The ID of the Channel.
     *
     * @param id
     *        The ID of the Channel.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult withId(String id) {
        setId(id);
        return this;
    }

    /**
     * @return
     */
    public java.util.Map<String, String> getTags() {
        return tags;
    }

    /**
     * @param tags
     */
    public void setTags(java.util.Map<String, String> tags) {
        this.tags = tags;
    }

    /**
     * @param tags
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Adds a single tag entry, lazily creating the map; duplicate keys are
     * rejected rather than overwritten.
     */
    public RotateChannelCredentialsResult addTagsEntry(String key, String value) {
        if (null == this.tags) {
            this.tags = new java.util.HashMap<String, String>();
        }
        if (this.tags.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
        this.tags.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tags.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RotateChannelCredentialsResult clearTagsEntries() {
        this.tags = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getHlsIngest() != null)
            sb.append("HlsIngest: ").append(getHlsIngest()).append(",");
        if (getId() != null)
            sb.append("Id: ").append(getId()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        // generated field-by-field comparison; the ^ (XOR) checks catch the
        // one-side-null case before the equals() comparison
        if (obj instanceof RotateChannelCredentialsResult == false)
            return false;
        RotateChannelCredentialsResult other = (RotateChannelCredentialsResult) obj;
        if (other.getArn() == null ^ this.getArn() == null)
            return false;
        if (other.getArn() != null && other.getArn().equals(this.getArn()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getHlsIngest() == null ^ this.getHlsIngest() == null)
            return false;
        if (other.getHlsIngest() != null && other.getHlsIngest().equals(this.getHlsIngest()) == false)
            return false;
        if (other.getId() == null ^ this.getId() == null)
            return false;
        if (other.getId() != null && other.getId().equals(this.getId()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getArn() == null) ? 0 : getArn().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getHlsIngest() == null) ? 0 : getHlsIngest().hashCode());
        hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public RotateChannelCredentialsResult clone() {
        try {
            return (RotateChannelCredentialsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
apache-2.0
centic9/github-version-statistics
src/main/java/org/dstadler/github/search/BaseSearch.java
6547
package org.dstadler.github.search; import com.google.common.base.Preconditions; import com.google.common.collect.Multimap; import org.apache.commons.io.IOUtils; import org.kohsuke.github.GHContent; import org.kohsuke.github.GHRepository; import org.kohsuke.github.GitHub; import org.kohsuke.github.GitHubBuilder; import org.kohsuke.github.HttpException; import org.kohsuke.github.RateLimitChecker; import org.kohsuke.github.RateLimitTarget; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Base class for different code-searchers */ public abstract class BaseSearch { // main definition of which library you are looking for protected final static String GROUP_REGEX = "org\\.apache\\.poi"; protected final static String GROUP = "org.apache.poi"; protected final static String VERSION = "([-0-9A-Za-z.$_{}()\\[\\]+]+)"; protected final static String QUOTE = "[\"']?"; // parse out the name of the repository from the URL returned by the GitHub search private final static Pattern REPO_NAME = Pattern.compile("https://github\\.com/([-a-zA-Z0-9_.]+/[-a-zA-Z0-9_.]+)/blob/.*"); protected void processResults(GitHub github, Multimap<String, String> versions, Iterable<GHContent> list) throws IOException { // try up to three times to cater for some connection issues that // we see from time to time. // This is done around the whole loop as the iterator also // fetches new pages. 
// Note that getNonForkRepository() does it's own retry as well int retries = 3; while(true) { try { int i = 0; for(GHContent match : list) { i++; final String htmlUrl = match.getHtmlUrl(); String repo = getNonForkRepository(github, htmlUrl); if (repo == null) { continue; } String str = readFileContent(match, htmlUrl, repo); if (str == null) { continue; } try { parseVersion(versions, htmlUrl, repo, str); } catch (RuntimeException e) { throw new IllegalStateException("Failed for " + htmlUrl + ", repo; " + repo + ", str: " + str, e); } if(i % 1000 == 0) { System.out.println("Having " + i + " results"); } } System.out.println("Processed " + i + " results overall"); //noinspection BreakStatement break; } catch (HttpException e) { retries--; if(retries <= 0) { throw e; } // retry once more System.out.println("Retry " + retries + " after failing to talk to Github"); e.printStackTrace(System.out); } } } protected String readFileContent(GHContent match, String htmlUrl, String repo) throws IOException { // This is a workaround for https://github.com/github-api/github-api/issues/729 match.refresh(); if(match.getEncoding() == null) { System.out.println("Could not read content of " + htmlUrl + " of repo " + repo + ", encoding is not set: " + match.getHtmlUrl()); return null; } final InputStream stream; try { stream = match.read(); } catch (IOException e) { System.out.println("Could not read content of " + htmlUrl + " of repo " + repo + ": " + e); return null; } String str = IOUtils.toString(stream, StandardCharsets.UTF_8); // filter out some unwanted matches str = str.replaceAll(getExcludeRegex(), ""); // skip this if the group-tag is not found any more now if(!str.contains(GROUP)) { //System.out.println("Did not find " + GROUP + " in content of repo " + repo + " at " + htmlUrl); return null; } return str; } abstract void search(GitHub github, Multimap<String, String> versions) throws IOException; abstract String getExcludeRegex(); abstract void parseVersion(Multimap<String, 
String> versions, String htmlUrl, String repo, String str); protected String reducedContent(String str, String htmlUrl) { int pos = str.indexOf(GROUP); Preconditions.checkState(pos >= 0, "Did not find " + GROUP + " at " + htmlUrl); return str.substring(Math.max(0, pos - 100), Math.min(str.length(), pos + 100)); } protected String getNonForkRepository(GitHub github, CharSequence htmlUrl) throws IOException { String repo = getRepository(htmlUrl); if(repo == null) { return null; } // try up to three times to cater for some connection issues that // we see from time to time int retries = 3; while(true) { try { final GHRepository repository = github.getRepository(repo); if (repository.isFork()) { //System.out.println("Ignoring forked repo " + repo); return null; } return repo; } catch (HttpException e) { retries--; if(retries <= 0) { throw e; } // retry once more System.out.println("Retry " + retries + " after failing to talk to Github"); e.printStackTrace(System.out); } } } public static String getRepository(CharSequence htmlUrl) { Matcher matcher = REPO_NAME.matcher(htmlUrl); if(!matcher.matches()) { System.out.println("Could not parse repo of " + htmlUrl + " with regex " + REPO_NAME.pattern()); return null; } return matcher.group(1); } public static GitHub connect() throws IOException { return GitHubBuilder.fromEnvironment(). // observe rate-limits and wait if we get near the returned remaining number of requests per timeframe withRateLimitChecker(new RateLimitChecker.LiteralValue(1), RateLimitTarget.SEARCH). build(); } }
apache-2.0
Heart2009/buck
test/com/facebook/buck/apple/AppleDescriptionsTest.java
11041
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.apple;

import static org.junit.Assert.assertEquals;

import com.facebook.buck.apple.xcode.xcodeproj.PBXReference;
import com.facebook.buck.apple.xcode.xcodeproj.SourceTreePath;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TestSourcePath;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.rules.coercer.SourceList;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;

import org.junit.Test;

import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Unit tests for the static helpers in {@code AppleDescriptions}: header-map
 * construction and the framework-path / SDK-variable transformer functions.
 */
public class AppleDescriptionsTest {

  // Unnamed header sets exported to other targets get flattened under the
  // given "prefix/" namespace, keyed by bare file name.
  @Test
  public void parseAppleHeadersForUseFromOtherTargetsFromSet() {
    assertEquals(
        ImmutableMap.<String, SourcePath>of(
            "prefix/some_file.h", new TestSourcePath("path/to/some_file.h"),
            "prefix/another_file.h", new TestSourcePath("path/to/another_file.h"),
            "prefix/a_file.h", new TestSourcePath("different/path/to/a_file.h"),
            "prefix/file.h", new TestSourcePath("file.h")),
        AppleDescriptions.parseAppleHeadersForUseFromOtherTargets(
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            Paths.get("prefix"),
            SourceList.ofUnnamedSources(
                ImmutableSortedSet.<SourcePath>of(
                    new TestSourcePath("path/to/some_file.h"),
                    new TestSourcePath("path/to/another_file.h"),
                    new TestSourcePath("different/path/to/a_file.h"),
                    new TestSourcePath("file.h")))));
  }

  // For the same target the headers are flattened with no prefix at all.
  @Test
  public void parseAppleHeadersForUseFromTheSameFromSet() {
    assertEquals(
        ImmutableMap.<String, SourcePath>of(
            "some_file.h", new TestSourcePath("path/to/some_file.h"),
            "another_file.h", new TestSourcePath("path/to/another_file.h"),
            "a_file.h", new TestSourcePath("different/path/to/a_file.h"),
            "file.h", new TestSourcePath("file.h")),
        AppleDescriptions.parseAppleHeadersForUseFromTheSameTarget(
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            SourceList.ofUnnamedSources(
                ImmutableSortedSet.<SourcePath>of(
                    new TestSourcePath("path/to/some_file.h"),
                    new TestSourcePath("path/to/another_file.h"),
                    new TestSourcePath("different/path/to/a_file.h"),
                    new TestSourcePath("file.h")))));
  }

  // Explicitly named header maps pass through unchanged for other targets
  // (the prefix is ignored when the user provided virtual paths).
  @Test
  public void parseAppleHeadersForUseFromOtherTargetsFromMap() {
    ImmutableSortedMap<String, SourcePath> headerMap =
        ImmutableSortedMap.<String, SourcePath>of(
            "virtual/path.h", new TestSourcePath("path/to/some_file.h"),
            "another/path.h", new TestSourcePath("path/to/another_file.h"),
            "another/file.h", new TestSourcePath("different/path/to/a_file.h"),
            "file.h", new TestSourcePath("file.h"));
    assertEquals(
        headerMap,
        AppleDescriptions.parseAppleHeadersForUseFromOtherTargets(
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            Paths.get("prefix"),
            SourceList.ofNamedSources(headerMap)));
  }

  // Named header maps contribute nothing to the same-target header map.
  @Test
  public void parseAppleHeadersForUseFromTheSameTargetFromMap() {
    ImmutableSortedMap<String, SourcePath> headerMap =
        ImmutableSortedMap.<String, SourcePath>of(
            "virtual/path.h", new TestSourcePath("path/to/some_file.h"),
            "another/path.h", new TestSourcePath("path/to/another_file.h"),
            "another/file.h", new TestSourcePath("different/path/to/a_file.h"),
            "file.h", new TestSourcePath("file.h"));
    assertEquals(
        ImmutableMap.of(),
        AppleDescriptions.parseAppleHeadersForUseFromTheSameTarget(
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            SourceList.ofNamedSources(headerMap)));
  }

  // convertToFlatCxxHeaders maps each source file to "prefix/<basename>".
  @Test
  public void convertToFlatCxxHeadersWithPrefix() {
    assertEquals(
        ImmutableMap.<String, SourcePath>of(
            "prefix/some_file.h", new TestSourcePath("path/to/some_file.h"),
            "prefix/another_file.h", new TestSourcePath("path/to/another_file.h"),
            "prefix/a_file.h", new TestSourcePath("different/path/to/a_file.h"),
            "prefix/file.h", new TestSourcePath("file.h")),
        AppleDescriptions.convertToFlatCxxHeaders(
            Paths.get("prefix"),
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            ImmutableSet.<SourcePath>of(
                new TestSourcePath("path/to/some_file.h"),
                new TestSourcePath("path/to/another_file.h"),
                new TestSourcePath("different/path/to/a_file.h"),
                new TestSourcePath("file.h"))));
  }

  // With an empty prefix only the basename remains.
  @Test
  public void convertToFlatCxxHeadersWithoutPrefix() {
    assertEquals(
        ImmutableMap.<String, SourcePath>of(
            "some_file.h", new TestSourcePath("path/to/some_file.h"),
            "another_file.h", new TestSourcePath("path/to/another_file.h"),
            "a_file.h", new TestSourcePath("different/path/to/a_file.h"),
            "file.h", new TestSourcePath("file.h")),
        AppleDescriptions.convertToFlatCxxHeaders(
            Paths.get(""),
            new SourcePathResolver(new BuildRuleResolver()).getPathFunction(),
            ImmutableSet.<SourcePath>of(
                new TestSourcePath("path/to/some_file.h"),
                new TestSourcePath("path/to/another_file.h"),
                new TestSourcePath("different/path/to/a_file.h"),
                new TestSourcePath("file.h"))));
  }

  // Framework paths become "-l<name>" for libraries and
  // "-framework <Name>" for frameworks, in the transformer's output order.
  @Test
  public void frameworksToLinkerFlagsTransformer() {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver());
    Function<
        ImmutableSortedSet<FrameworkPath>,
        ImmutableList<String>> frameworksToLinkerFlagsTransformer =
        AppleDescriptions.frameworksToLinkerFlagsFunction(resolver);

    ImmutableList<String> linkerFlags = frameworksToLinkerFlagsTransformer.apply(
        ImmutableSortedSet.of(
            FrameworkPath.ofSourceTreePath(
                new SourceTreePath(
                    PBXReference.SourceTree.SDKROOT,
                    Paths.get("usr/lib/libz.dylib"),
                    Optional.<String>absent())),
            FrameworkPath.ofSourcePath(
                new PathSourcePath(projectFilesystem, Paths.get("Vendor/Foo/libFoo.a"))),
            FrameworkPath.ofSourceTreePath(
                new SourceTreePath(
                    PBXReference.SourceTree.DEVELOPER_DIR,
                    Paths.get("Library/Frameworks/XCTest.framework"),
                    Optional.<String>absent())),
            FrameworkPath.ofSourcePath(
                new PathSourcePath(projectFilesystem, Paths.get("Vendor/Bar/Bar.framework")))));

    assertEquals(
        ImmutableList.of(
            "-lz",
            "-framework", "XCTest",
            "-framework", "Bar",
            "-lFoo"),
        linkerFlags);
  }

  // Framework paths are resolved to search directories relative to the SDK
  // layout (SDKROOT/DEVELOPER_DIR) or to the project for plain source paths.
  @Test
  public void frameworksToSearchPathsTransformer() {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver());
    Path appleSdkRoot = Paths.get("Root");
    AppleSdkPaths appleSdkPaths =
        AppleSdkPaths.builder()
            .setDeveloperPath(appleSdkRoot)
            .addToolchainPaths(appleSdkRoot.resolve("Toolchain"))
            .setPlatformPath(appleSdkRoot.resolve("Platform"))
            .setSdkPath(appleSdkRoot.resolve("SDK"))
            .build();

    Function<
        ImmutableSortedSet<FrameworkPath>,
        ImmutableList<Path>> frameworksToSearchPathsTransformer =
        AppleDescriptions.frameworksToSearchPathsFunction(resolver, appleSdkPaths);

    ImmutableList<Path> searchPaths = frameworksToSearchPathsTransformer.apply(
        ImmutableSortedSet.of(
            FrameworkPath.ofSourceTreePath(
                new SourceTreePath(
                    PBXReference.SourceTree.SDKROOT,
                    Paths.get("usr/lib/libz.dylib"),
                    Optional.<String>absent())),
            FrameworkPath.ofSourcePath(
                new PathSourcePath(projectFilesystem, Paths.get("Vendor/Foo/libFoo.a"))),
            FrameworkPath.ofSourceTreePath(
                new SourceTreePath(
                    PBXReference.SourceTree.DEVELOPER_DIR,
                    Paths.get("Library/Frameworks/XCTest.framework"),
                    Optional.<String>absent())),
            FrameworkPath.ofSourcePath(
                new PathSourcePath(projectFilesystem, Paths.get("Vendor/Bar/Bar.framework")))));

    assertEquals(
        ImmutableList.of(
            Paths.get("Root/SDK/usr/lib"),
            Paths.get("Root/Library/Frameworks"),
            Paths.get("Vendor/Bar"),
            Paths.get("Vendor/Foo")),
        searchPaths);
  }

  // $DEVELOPER_DIR / $SDKROOT / $PLATFORM_DIR placeholders in flags are
  // expanded against the SDK layout; other flags pass through untouched.
  @Test
  public void expandSdkVariableReferences() {
    Path appleSdkRoot = Paths.get("Root");
    AppleSdkPaths appleSdkPaths =
        AppleSdkPaths.builder()
            .setDeveloperPath(appleSdkRoot)
            .addToolchainPaths(appleSdkRoot.resolve("Toolchain"))
            .setPlatformPath(appleSdkRoot.resolve("Platform"))
            .setSdkPath(appleSdkRoot.resolve("SDK"))
            .build();

    Function<ImmutableList<String>, ImmutableList<String>> expandSdkVariableRefs =
        AppleDescriptions.expandSdkVariableReferencesFunction(appleSdkPaths);

    ImmutableList<String> expandedRefs = expandSdkVariableRefs.apply(
        ImmutableList.of(
            "-Ifoo/bar/baz",
            "-L$DEVELOPER_DIR/blech",
            "-I$SDKROOT/quux",
            "-F$PLATFORM_DIR/xyzzy"));

    assertEquals(
        ImmutableList.of(
            "-Ifoo/bar/baz",
            "-LRoot/blech",
            "-IRoot/SDK/quux",
            "-FRoot/Platform/xyzzy"),
        expandedRefs);
  }
}
apache-2.0
Traubenfuchs/loup
loup-commons/src/main/java/at/loup/commons/services/SpringAwareThread.java
3165
package at.loup.commons.services; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.event.ContextClosedEvent; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.ContextStartedEvent; import org.springframework.context.event.ContextStoppedEvent; import org.springframework.context.event.EventListener; /** * Executes a thread on ContextRefreshedEvent, interrupts it on * ContextClosedEvent or ContextStoppedEvent */ public abstract class SpringAwareThread { private static final Logger logger = LoggerFactory.getLogger(SpringAwareThread.class); private final Object lockObject = new Object(); private final String threadName; private volatile Thread thread = null; public SpringAwareThread(String threadName) { this.threadName = threadName; } /** * Stops the thread if one exists. */ protected void stop() { synchronized (lockObject) { if (thread == null) { logger.debug("\"SpringAwareThread<" + threadName + "> stop has been called, but no thread is running."); return; } thread.interrupt(); thread = null; logger.debug( "\"SpringAwareThread<" + threadName + "> stop has been called and the thread was interrupted."); } } /** * Starts the thread if none exists or it is no longer alive. 
*/ protected void start() { synchronized (lockObject) { if (thread != null) { if (thread.isAlive()) { logger.debug("SpringAwareThread<" + threadName + "> start has been called, but a thread that is alive already exists."); return; } else { logger.debug("SpringAwareThread<" + threadName + "> start has been called, a dead thread exists and a new one will be created."); } return; } thread = new Thread(() -> { try { this.threadMethod(); } catch (InterruptedException e) { logger.debug("SpringAwareThread<" + threadName + "> was interrupted and will stop working now."); return; } }); thread.setName(threadName); thread.start(); logger.debug( "SpringAwareThread<" + threadName + "> start has been called and the thread has been started."); } } /** * The method that will be started and stopped by this * SpringAwareThread.<br> * Catches InterruptedException and gracefully shuts down.<br> * Does NOT take care of any other exceptions and does NOT loop:<br> * You need to implement while(!notInterrupted and try/catch yourself! * * @throws InterruptedException */ protected abstract void threadMethod() throws InterruptedException; public String getThreadName() { return threadName; } @EventListener({ ContextStartedEvent.class }) private void contextStartedEvent() { start(); } @EventListener({ ContextRefreshedEvent.class }) private void contextRefreshedEvent() { start(); } @EventListener({ ContextClosedEvent.class }) private void contextClosedEvent() { stop(); } @EventListener({ ContextStoppedEvent.class }) private void contextStoppedEvent() { stop(); } }
apache-2.0
nafae/developer
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201306/ReconciliationReportServiceLocator.java
6362
/**
 * ReconciliationReportServiceLocator.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

// NOTE(review): auto-generated Axis service locator — do not hand-edit logic;
// raw types (Class, HashSet) and the swallowed AxisFault below are generation
// artifacts that will be reproduced on the next WSDL2Java run.

package com.google.api.ads.dfp.axis.v201306;

public class ReconciliationReportServiceLocator extends org.apache.axis.client.Service implements com.google.api.ads.dfp.axis.v201306.ReconciliationReportService {

    public ReconciliationReportServiceLocator() {
    }

    public ReconciliationReportServiceLocator(org.apache.axis.EngineConfiguration config) {
        super(config);
    }

    public ReconciliationReportServiceLocator(java.lang.String wsdlLoc, javax.xml.namespace.QName sName) throws javax.xml.rpc.ServiceException {
        super(wsdlLoc, sName);
    }

    // Use to get a proxy class for ReconciliationReportServiceInterfacePort
    private java.lang.String ReconciliationReportServiceInterfacePort_address = "https://ads.google.com/apis/ads/publisher/v201306/ReconciliationReportService";

    public java.lang.String getReconciliationReportServiceInterfacePortAddress() {
        return ReconciliationReportServiceInterfacePort_address;
    }

    // The WSDD service name defaults to the port name.
    private java.lang.String ReconciliationReportServiceInterfacePortWSDDServiceName = "ReconciliationReportServiceInterfacePort";

    public java.lang.String getReconciliationReportServiceInterfacePortWSDDServiceName() {
        return ReconciliationReportServiceInterfacePortWSDDServiceName;
    }

    public void setReconciliationReportServiceInterfacePortWSDDServiceName(java.lang.String name) {
        ReconciliationReportServiceInterfacePortWSDDServiceName = name;
    }

    // Builds a stub against the default (hard-coded) endpoint address.
    public com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceInterface getReconciliationReportServiceInterfacePort() throws javax.xml.rpc.ServiceException {
        java.net.URL endpoint;
        try {
            endpoint = new java.net.URL(ReconciliationReportServiceInterfacePort_address);
        } catch (java.net.MalformedURLException e) {
            throw new javax.xml.rpc.ServiceException(e);
        }
        return getReconciliationReportServiceInterfacePort(endpoint);
    }

    // NOTE(review): returns null when stub creation fails (AxisFault swallowed)
    // — generated behavior; callers must null-check.
    public com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceInterface getReconciliationReportServiceInterfacePort(java.net.URL portAddress) throws javax.xml.rpc.ServiceException {
        try {
            com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceSoapBindingStub _stub = new com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceSoapBindingStub(portAddress, this);
            _stub.setPortName(getReconciliationReportServiceInterfacePortWSDDServiceName());
            return _stub;
        } catch (org.apache.axis.AxisFault e) {
            return null;
        }
    }

    public void setReconciliationReportServiceInterfacePortEndpointAddress(java.lang.String address) {
        ReconciliationReportServiceInterfacePort_address = address;
    }

    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     */
    public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        try {
            if (com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
                com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceSoapBindingStub _stub = new com.google.api.ads.dfp.axis.v201306.ReconciliationReportServiceSoapBindingStub(new java.net.URL(ReconciliationReportServiceInterfacePort_address), this);
                _stub.setPortName(getReconciliationReportServiceInterfacePortWSDDServiceName());
                return _stub;
            }
        } catch (java.lang.Throwable t) {
            throw new javax.xml.rpc.ServiceException(t);
        }
        throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: " + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
    }

    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     */
    public java.rmi.Remote getPort(javax.xml.namespace.QName portName, Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        if (portName == null) {
            return getPort(serviceEndpointInterface);
        }
        java.lang.String inputPortName = portName.getLocalPart();
        if ("ReconciliationReportServiceInterfacePort".equals(inputPortName)) {
            return getReconciliationReportServiceInterfacePort();
        } else {
            java.rmi.Remote _stub = getPort(serviceEndpointInterface);
            ((org.apache.axis.client.Stub) _stub).setPortName(portName);
            return _stub;
        }
    }

    public javax.xml.namespace.QName getServiceName() {
        return new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "ReconciliationReportService");
    }

    // lazily-initialized set of port QNames; not thread-safe (generated code)
    private java.util.HashSet ports = null;

    public java.util.Iterator getPorts() {
        if (ports == null) {
            ports = new java.util.HashSet();
            ports.add(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "ReconciliationReportServiceInterfacePort"));
        }
        return ports.iterator();
    }

    /**
     * Set the endpoint address for the specified port name.
     */
    public void setEndpointAddress(java.lang.String portName, java.lang.String address) throws javax.xml.rpc.ServiceException {
        if ("ReconciliationReportServiceInterfacePort".equals(portName)) {
            setReconciliationReportServiceInterfacePortEndpointAddress(address);
        } else {
            // Unknown Port Name
            throw new javax.xml.rpc.ServiceException(" Cannot set Endpoint Address for Unknown Port" + portName);
        }
    }

    /**
     * Set the endpoint address for the specified port name.
     */
    public void setEndpointAddress(javax.xml.namespace.QName portName, java.lang.String address) throws javax.xml.rpc.ServiceException {
        setEndpointAddress(portName.getLocalPart(), address);
    }

}
apache-2.0
gawkermedia/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201511/PremiumRateServiceInterface.java
2479
/** * PremiumRateServiceInterface.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.dfp.axis.v201511; public interface PremiumRateServiceInterface extends java.rmi.Remote { /** * Creates a list of new {@link PremiumRate} objects. * * * @param premiumRates the premium rates to be created * * @return the premium rates with their IDs filled in */ public com.google.api.ads.dfp.axis.v201511.PremiumRate[] createPremiumRates(com.google.api.ads.dfp.axis.v201511.PremiumRate[] premiumRates) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException; /** * Gets a {@link PremiumRatePage} of {@link PremiumRate} objects * that * satisfy the given {@link Statement#query}. The following fields * are * supported for filtering: * * <table> * <tr> * <th scope="col">PQL Property</th> <th scope="col">Object Property</th> * </tr> * <tr> * <td>{@code id}</td> * <td>{@link PremiumRate#id}</td> * </tr> * <tr> * <td>{@code rateCardId}</td> * <td>{@link PremiumRate#rateCardId}</td> * </tr> * <tr> * <td>{@code pricingMethod}</td> * <td>{@link PremiumRate#pricingMethod}</td> * </tr> * </table> * * * @param filterStatement a Publisher Query Language statement to filter * a * list of premium rates. * * @return the premium rates that match the filter */ public com.google.api.ads.dfp.axis.v201511.PremiumRatePage getPremiumRatesByStatement(com.google.api.ads.dfp.axis.v201511.Statement filterStatement) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException; /** * Updates the specified {@link PremiumRate} objects. * * * @param premiumRates the premium rates to be updated * * @return the updated premium rates */ public com.google.api.ads.dfp.axis.v201511.PremiumRate[] updatePremiumRates(com.google.api.ads.dfp.axis.v201511.PremiumRate[] premiumRates) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException; }
apache-2.0
java110/MicroCommunity
service-front/src/main/java/com/java110/front/smo/store/impl/ListStoreSMOImpl.java
2348
package com.java110.front.smo.store.impl; import com.alibaba.fastjson.JSONObject; import com.java110.core.component.AbstractComponentSMO; import com.java110.core.context.IPageData; import com.java110.entity.component.ComponentValidateResult; import com.java110.utils.constant.ServiceConstant; import com.java110.utils.exception.SMOException; import com.java110.front.smo.store.IListStoreSMO; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import org.springframework.web.client.RestTemplate; /** * 查询carInout服务类 */ @Service("listStoreSMOImpl") public class ListStoreSMOImpl extends AbstractComponentSMO implements IListStoreSMO { @Autowired private RestTemplate restTemplate; @Override public ResponseEntity<String> listStores(IPageData pd) throws SMOException { return businessProcess(pd); } @Override protected void validate(IPageData pd, JSONObject paramIn) { //Assert.hasKeyAndValue(paramIn, "communityId", "必填,请填写小区信息"); super.validatePageInfo(pd); //super.checkUserHasPrivilege(pd, restTemplate, PrivilegeCodeConstant.AGENT_HAS_LIST_CARINOUT); } @Override protected ResponseEntity<String> doBusinessProcess(IPageData pd, JSONObject paramIn) { ComponentValidateResult result = super.validateStoreStaffCommunityRelationship(pd, restTemplate); //只有管理员才能查询,这里以防权限控制不住在控制一层 if (!"800900000001".equals(result.getStoreTypeCd())) { throw new IllegalArgumentException("您当前没有权限访问"); } // Map paramMap = BeanConvertUtil.beanCovertMap(result); // paramIn.putAll(paramMap); String apiUrl = ServiceConstant.SERVICE_API_URL + "/api/store.listStores" + mapToUrlParam(paramIn); ResponseEntity<String> responseEntity = this.callCenterService(restTemplate, pd, "", apiUrl, HttpMethod.GET); return responseEntity; } public RestTemplate getRestTemplate() { return restTemplate; } public void setRestTemplate(RestTemplate restTemplate) { this.restTemplate = 
restTemplate; } }
apache-2.0
jboss-developer/jboss-jdg-quickstarts
spark/temperature-client/src/main/java/org/infinispan/quickstart/spark/TemperatureClient.java
4135
package org.infinispan.quickstart.spark; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.infinispan.client.hotrod.RemoteCache; import org.infinispan.client.hotrod.RemoteCacheManager; import org.infinispan.client.hotrod.annotation.ClientCacheEntryCreated; import org.infinispan.client.hotrod.annotation.ClientCacheEntryModified; import org.infinispan.client.hotrod.annotation.ClientListener; import org.infinispan.client.hotrod.configuration.ConfigurationBuilder; import org.infinispan.client.hotrod.event.ClientCacheEntryCreatedEvent; import org.infinispan.client.hotrod.event.ClientCacheEntryModifiedEvent; import org.infinispan.client.hotrod.impl.ConfigurationProperties; /** * <p> * Simulates client application, which is interested on most up-to-date average temperature in specified places. It uses * Data Grid {@link ClientListener} for obtaining notifications that the average temperature has changed for the * subscribed cities. * </p> * <p> * The TemperatureClient needs at least one argument - place where we are interested in average temperature changes. In * can be also a comma separated list of places. 
* </p> * * @author vjuranek */ public class TemperatureClient { public static final String ISPN_IP = "127.0.0.1"; public static final String CACHE_NAME = "avg-temperatures"; public static final int LISTEN_TIME = 5; // how long the client should listen to changes, in minutes public static void main(String[] args) throws Exception { // check provided arguments - at least one place of interest needs to be specified if (args.length < 1) { System.err.println("You have to provide list of places to watch, at least one!"); System.exit(1); } Set<String> placesToWatch = new HashSet<>(args.length); Collections.addAll(placesToWatch, args); // Configure remote cache ConfigurationBuilder builder = new ConfigurationBuilder(); builder.addServer().host(ISPN_IP).port(ConfigurationProperties.DEFAULT_HOTROD_PORT); RemoteCacheManager cacheManager = new RemoteCacheManager(builder.build()); RemoteCache<String, Double> cache = cacheManager.getCache(CACHE_NAME); // Add cache listener and wait for specified amount of time AvgTemperatureListener avgTempListener = new AvgTemperatureListener(cache, placesToWatch); cache.addClientListener(avgTempListener); System.out.printf("Client will be listening to avg. temperature updates for %d minutes%n", LISTEN_TIME); Thread.sleep(LISTEN_TIME * 60 * 1000); System.out.println("Stopping client"); cache.removeClientListener(avgTempListener); cacheManager.stop(); System.exit(0); } /** * Listens for updates in avg. temperature cache and takes action (printing to std. out) when avg. temperature in * watched place has changed. 
* * @author vjuranek */ @ClientListener @SuppressWarnings("unused") public static class AvgTemperatureListener { private final RemoteCache<String, Double> cache; private final Set<String> watchedPlaces; private final ExecutorService executorService; public AvgTemperatureListener(RemoteCache<String, Double> cache, Set<String> watchedPlaces) { this.cache = cache; this.watchedPlaces = watchedPlaces; this.executorService = Executors.newSingleThreadExecutor(); } @ClientCacheEntryCreated public void entryCreated(ClientCacheEntryCreatedEvent<String> event) { if (watchedPlaces.contains(event.getKey())) updateAction(event.getKey()); } @ClientCacheEntryModified public void entryModified(ClientCacheEntryModifiedEvent<String> event) { if (watchedPlaces.contains(event.getKey())) updateAction(event.getKey()); } private void updateAction(String key) { executorService.submit(() -> System.out.printf("[%s] avg. temperature is now %.1f \u00B0C%n", key, cache.get(key))); } } }
apache-2.0
JuKu/test-rpg-game-server
game-network-client/src/main/java/com/jukusoft/libgdx/rpg/game/client/message/receiver/AuthResponseReceiver.java
1766
package com.jukusoft.libgdx.rpg.game.client.message.receiver; import com.jukusoft.libgdx.rpg.game.client.listener.AuthListener; import com.jukusoft.libgdx.rpg.network.channel.ChannelAttributes; import com.jukusoft.libgdx.rpg.network.message.MessageReceiver; import com.jukusoft.libgdx.rpg.network.message.NetMessage; import com.jukusoft.libgdx.rpg.network.utils.ByteUtils; import io.netty.channel.ChannelHandlerContext; import java.nio.charset.StandardCharsets; /** * Created by Justin on 24.03.2017. */ public class AuthResponseReceiver implements MessageReceiver<NetMessage> { protected AuthListener authListener = null; public AuthResponseReceiver (AuthListener authListener) { if (authListener == null) { throw new NullPointerException("auth listener cannot be null."); } this.authListener = authListener; } @Override public void onReceive(ChannelHandlerContext ctx, long connID, ChannelAttributes attributes, NetMessage msg) { //read success flag, errorCode and userID boolean success = msg.content().readBoolean(); int errorCode = msg.content().readInt(); long userID = msg.content().readLong(); //read message int messageLength = msg.content().readInt(); byte[] messageBytes = new byte[messageLength]; for (int i = 0; i < messageLength; i++) { messageBytes[i] = msg.content().readByte(); } //convert bytes to string String message = ByteUtils.getStringFromBytes(messageBytes, StandardCharsets.UTF_8); if (success) { attributes.setAuth(userID, "You"); } //call listener this.authListener.onAuth(success, errorCode, userID, message); } }
apache-2.0
irontable/genie
genie-core/src/main/java/com/netflix/genie/core/services/impl/JobCoordinatorServiceImpl.java
24801
/* * * Copyright 2016 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.genie.core.services.impl; import com.google.common.collect.ImmutableList; import com.netflix.genie.common.dto.Application; import com.netflix.genie.common.dto.Cluster; import com.netflix.genie.common.dto.Command; import com.netflix.genie.common.dto.CommandStatus; import com.netflix.genie.common.dto.Job; import com.netflix.genie.common.dto.JobExecution; import com.netflix.genie.common.dto.JobMetadata; import com.netflix.genie.common.dto.JobRequest; import com.netflix.genie.common.dto.JobStatus; import com.netflix.genie.common.exceptions.GenieConflictException; import com.netflix.genie.common.exceptions.GenieException; import com.netflix.genie.common.exceptions.GeniePreconditionException; import com.netflix.genie.common.exceptions.GenieServerException; import com.netflix.genie.common.exceptions.GenieServerUnavailableException; import com.netflix.genie.common.exceptions.GenieUserLimitExceededException; import com.netflix.genie.core.jobs.JobConstants; import com.netflix.genie.core.properties.JobsProperties; import com.netflix.genie.core.properties.JobsUsersActiveLimitProperties; import com.netflix.genie.core.services.ApplicationService; import com.netflix.genie.core.services.ClusterLoadBalancer; import com.netflix.genie.core.services.ClusterService; import com.netflix.genie.core.services.CommandService; import com.netflix.genie.core.services.JobCoordinatorService; import 
com.netflix.genie.core.services.JobKillService; import com.netflix.genie.core.services.JobPersistenceService; import com.netflix.genie.core.services.JobSearchService; import com.netflix.genie.core.services.JobStateService; import com.netflix.spectator.api.Counter; import com.netflix.spectator.api.Id; import com.netflix.spectator.api.Registry; import com.netflix.spectator.api.Timer; import lombok.extern.slf4j.Slf4j; import org.hibernate.validator.constraints.NotBlank; import org.hibernate.validator.constraints.NotEmpty; import org.springframework.aop.TargetClassAware; import javax.validation.Valid; import javax.validation.constraints.NotNull; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; /** * Implementation of the JobCoordinatorService APIs. * * @author amsharma * @author tgianos * @since 3.0.0 */ @Slf4j public class JobCoordinatorServiceImpl implements JobCoordinatorService { private static final String NO_ID_FOUND = "No id found"; private static final String LOAD_BALANCER_CLASS_TAG = "class"; private static final String LOAD_BALANCER_STATUS_TAG = "status"; private static final String LOAD_BALANCER_STATUS_SUCCESS = "success"; private static final String LOAD_BALANCER_STATUS_NO_PREFERENCE = "no preference"; private static final String LOAD_BALANCER_STATUS_EXCEPTION = "exception"; private static final String LOAD_BALANCER_STATUS_INVALID = "invalid"; private final JobPersistenceService jobPersistenceService; private final JobKillService jobKillService; private final JobStateService jobStateService; private final ApplicationService applicationService; private final JobSearchService jobSearchService; private final ClusterService clusterService; private final CommandService commandService; private final List<ClusterLoadBalancer> clusterLoadBalancers; private final JobsProperties jobsProperties; private final String 
hostName; // For reuse in queries private final Set<CommandStatus> commandStatuses; // Metrics private final Registry registry; private final Timer coordinationTimer; private final Timer selectClusterTimer; private final Timer selectCommandTimer; private final Timer selectApplicationsTimer; private final Timer setJobEnvironmentTimer; private final Counter noClusterFoundCounter; private final Id loadBalancerId; /** * Constructor. * * @param jobPersistenceService implementation of job persistence service interface * @param jobKillService The job kill service to use * @param jobStateService The service where we report the job state and keep track of various metrics about * jobs currently running * @param jobsProperties The jobs properties to use * @param applicationService Implementation of application service interface * @param jobSearchService Implementation of job search service * @param clusterService Implementation of cluster service interface * @param commandService Implementation of command service interface * @param clusterLoadBalancers Implementations of the cluster load balancer interface in invocation order * @param registry The registry * @param hostName The name of the host this Genie instance is running on */ public JobCoordinatorServiceImpl( @NotNull final JobPersistenceService jobPersistenceService, @NotNull final JobKillService jobKillService, @NotNull final JobStateService jobStateService, @NotNull final JobsProperties jobsProperties, @NotNull final ApplicationService applicationService, @NotNull final JobSearchService jobSearchService, @NotNull final ClusterService clusterService, @NotNull final CommandService commandService, @NotNull @NotEmpty final List<ClusterLoadBalancer> clusterLoadBalancers, @NotNull final Registry registry, @NotBlank final String hostName ) { this.jobPersistenceService = jobPersistenceService; this.jobKillService = jobKillService; this.jobStateService = jobStateService; this.applicationService = applicationService; 
this.jobSearchService = jobSearchService; this.clusterService = clusterService; this.commandService = commandService; this.clusterLoadBalancers = clusterLoadBalancers; this.jobsProperties = jobsProperties; this.hostName = hostName; // We'll only care about active statuses this.commandStatuses = EnumSet.noneOf(CommandStatus.class); this.commandStatuses.add(CommandStatus.ACTIVE); // Metrics this.registry = registry; this.coordinationTimer = registry.timer("genie.jobs.coordination.timer"); this.selectClusterTimer = registry.timer("genie.jobs.submit.localRunner.selectCluster.timer"); this.selectCommandTimer = registry.timer("genie.jobs.submit.localRunner.selectCommand.timer"); this.selectApplicationsTimer = registry.timer("genie.jobs.submit.localRunner.selectApplications.timer"); this.setJobEnvironmentTimer = registry.timer("genie.jobs.submit.localRunner.setJobEnvironment.timer"); this.loadBalancerId = registry.createId("genie.jobs.submit.selectCluster.loadBalancer.counter"); this.noClusterFoundCounter = registry.counter("genie.jobs.submit.selectCluster.notFound.counter"); } /** * {@inheritDoc} */ @Override public String coordinateJob( @Valid @NotNull(message = "No job request provided. Unable to execute.") final JobRequest jobRequest, @Valid @NotNull(message = "No job metadata provided. 
Unable to execute.") final JobMetadata jobMetadata ) throws GenieException { final long coordinationStart = System.nanoTime(); final String jobId = jobRequest .getId() .orElseThrow(() -> new GenieServerException("Id of the jobRequest cannot be null")); JobStatus jobStatus = JobStatus.FAILED; try { log.info("Called to schedule job launch for job {}", jobId); // create the job object in the database with status INIT final Job.Builder jobBuilder = new Job.Builder( jobRequest.getName(), jobRequest.getUser(), jobRequest.getVersion(), jobRequest.getCommandArgs() ) .withId(jobId) .withTags(jobRequest.getTags()) .withStatus(JobStatus.INIT) .withStatusMsg("Job Accepted and in initialization phase."); jobRequest.getDescription().ifPresent(jobBuilder::withDescription); if (!jobRequest.isDisableLogArchival()) { jobBuilder.withArchiveLocation( this.jobsProperties.getLocations().getArchives() + JobConstants.FILE_PATH_DELIMITER + jobId + ".tar.gz" ); } final JobExecution jobExecution = new JobExecution.Builder( this.hostName ) .withId(jobId) .build(); // Log all the job initial job information this.jobPersistenceService.createJob(jobRequest, jobMetadata, jobBuilder.build(), jobExecution); jobStateService.init(jobId); //TODO: Combine the cluster and command selection into a single method/database query for efficiency // Resolve the cluster for the job request based on the tags specified final Cluster cluster = this.getCluster(jobRequest); // Resolve the command for the job request based on command tags and cluster chosen final Command command = this.getCommand(jobRequest, cluster); // Resolve the applications to use based on the command that was selected final List<Application> applications = this.getApplications(jobRequest, command); // Now that we have command how much memory should the job use? 
final int memory = jobRequest.getMemory() .orElse(command.getMemory().orElse(this.jobsProperties.getMemory().getDefaultJobMemory())); // Save all the runtime information this.setRuntimeEnvironment(jobId, cluster, command, applications, memory); final int maxJobMemory = this.jobsProperties.getMemory().getMaxJobMemory(); if (memory > maxJobMemory) { jobStatus = JobStatus.INVALID; throw new GeniePreconditionException( "Requested " + memory + " MB to run job which is more than the " + maxJobMemory + " MB allowed" ); } log.info("Checking if can run job {} from user {}", jobRequest.getId(), jobRequest.getUser()); final JobsUsersActiveLimitProperties activeLimit = this.jobsProperties.getUsers().getActiveLimit(); if (activeLimit.isEnabled()) { final long activeJobsLimit = activeLimit.getCount(); final long activeJobsCount = this.jobSearchService.getActiveJobCountForUser(jobRequest.getUser()); if (activeJobsCount >= activeJobsLimit) { throw GenieUserLimitExceededException.createForActiveJobsLimit( jobRequest.getUser(), activeJobsCount, activeJobsLimit); } } synchronized (this) { log.info("Checking if can run job {} on this node", jobRequest.getId()); final int maxSystemMemory = this.jobsProperties.getMemory().getMaxSystemMemory(); final int usedMemory = this.jobStateService.getUsedMemory(); if (usedMemory + memory <= maxSystemMemory) { log.info( "Job {} can run on this node as only {}/{} MB are used and requested {} MB", jobId, usedMemory, maxSystemMemory, memory ); // Tell the system a new job has been scheduled so any actions can be taken log.info("Publishing job scheduled event for job {}", jobId); jobStateService.schedule(jobId, jobRequest, cluster, command, applications, memory); return jobId; } else { throw new GenieServerUnavailableException( "Job " + jobId + " can't run on this node " + usedMemory + "/" + maxSystemMemory + " MB are used and requested " + memory + " MB" ); } } } catch (GenieConflictException e) { // Job has not been initiated so we don't have to call 
JobStateService.done() throw e; } catch (GenieException e) { // // Need to check if the job exists in the JobStateService // because this error can happen before the job is initiated. // if (jobStateService.jobExists(jobId)) { jobStateService.done(jobId); jobPersistenceService.updateJobStatus(jobId, jobStatus, e.getMessage()); } throw e; } catch (Exception e) { // // Need to check if the job exists in the JobStateService // because this error can happen before the job is initiated. // if (jobStateService.jobExists(jobId)) { jobStateService.done(jobId); jobPersistenceService.updateJobStatus(jobId, jobStatus, e.getMessage()); } throw new GenieServerException(e); } finally { this.coordinationTimer.record(System.nanoTime() - coordinationStart, TimeUnit.NANOSECONDS); } } /** * {@inheritDoc} */ @Override public void killJob(@NotBlank final String jobId, @NotBlank final String reason) throws GenieException { this.jobKillService.killJob(jobId, reason); } private void setRuntimeEnvironment( final String jobId, final Cluster cluster, final Command command, final List<Application> applications, final int memory ) throws GenieException { final long jobEnvironmentStart = System.nanoTime(); final String clusterId = cluster .getId() .orElseThrow(() -> new GenieServerException("Cluster has no id")); final String commandId = command .getId() .orElseThrow(() -> new GenieServerException("Command has no id")); try { this.jobPersistenceService.updateJobWithRuntimeEnvironment( jobId, clusterId, commandId, applications .stream() .map(Application::getId) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toList()), memory ); } finally { this.setJobEnvironmentTimer.record(System.nanoTime() - jobEnvironmentStart, TimeUnit.NANOSECONDS); } } private Cluster getCluster(final JobRequest jobRequest) throws GenieException { final long start = System.nanoTime(); try { log.info("Selecting cluster for job {}", jobRequest.getId().orElse(NO_ID_FOUND)); final List<Cluster> clusters = 
ImmutableList.copyOf( this.clusterService.chooseClusterForJobRequest(jobRequest) ); Cluster cluster = null; if (clusters.isEmpty()) { throw new GeniePreconditionException( "No cluster/command combination found for the given criteria. Unable to continue" ); } else if (clusters.size() == 1) { cluster = clusters.get(0); } else { for (final ClusterLoadBalancer loadBalancer : this.clusterLoadBalancers) { final String loadBalancerClass = ( loadBalancer instanceof TargetClassAware ? ((TargetClassAware) loadBalancer).getTargetClass() : loadBalancer.getClass() ).getCanonicalName(); try { final Cluster selectedCluster = loadBalancer.selectCluster(clusters, jobRequest); if (selectedCluster != null) { // Make sure the cluster existed in the original list of clusters if (clusters.contains(selectedCluster)) { log.debug( "Successfully selected cluster {} using load balancer {}", selectedCluster.getId().orElse(NO_ID_FOUND), loadBalancerClass ); this.registry.counter( this.loadBalancerId .withTag( LOAD_BALANCER_CLASS_TAG, loadBalancerClass ) .withTag( LOAD_BALANCER_STATUS_TAG, LOAD_BALANCER_STATUS_SUCCESS ) ).increment(); cluster = selectedCluster; break; } else { log.error( "Successfully selected cluster {} using load balancer {} but " + "it wasn't in original cluster list {}", selectedCluster.getId().orElse(NO_ID_FOUND), loadBalancerClass, clusters ); this.registry.counter( this.loadBalancerId .withTag( LOAD_BALANCER_CLASS_TAG, loadBalancerClass ) .withTag( LOAD_BALANCER_STATUS_TAG, LOAD_BALANCER_STATUS_INVALID ) ).increment(); } } else { this.registry.counter( this.loadBalancerId .withTag( LOAD_BALANCER_CLASS_TAG, loadBalancerClass ) .withTag( LOAD_BALANCER_STATUS_TAG, LOAD_BALANCER_STATUS_NO_PREFERENCE ) ).increment(); } } catch (final Exception e) { log.error("Cluster load balancer {} threw exception:", loadBalancer, e); this.registry.counter( this.loadBalancerId .withTag( LOAD_BALANCER_CLASS_TAG, loadBalancerClass ) .withTag( LOAD_BALANCER_STATUS_TAG, 
LOAD_BALANCER_STATUS_EXCEPTION ) ).increment(); } } // Make sure we found a cluster if (cluster == null) { this.noClusterFoundCounter.increment(); throw new GeniePreconditionException( "Unable to select a cluster from using any of the available load balancers." ); } } log.info( "Selected cluster {} for job {}", cluster.getId().orElse(NO_ID_FOUND), jobRequest.getId().orElse(NO_ID_FOUND) ); return cluster; } finally { this.selectClusterTimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS); } } private Command getCommand(final JobRequest jobRequest, final Cluster cluster) throws GenieException { final long start = System.nanoTime(); try { final String clusterId = cluster.getId().orElseThrow(() -> new GenieServerException("No cluster id.")); final String jobId = jobRequest.getId().orElseThrow(() -> new GenieServerException("No job id")); log.info("Selecting command attached to cluster {} for job {} ", clusterId, jobId); final Set<String> commandCriteria = jobRequest.getCommandCriteria(); // TODO: what happens if the get method throws an error we don't mark the job failed here for ( final Command command : this.clusterService.getCommandsForCluster(clusterId, this.commandStatuses) ) { if (command.getTags().containsAll(jobRequest.getCommandCriteria())) { log.info("Selected command {} for job {} ", command.getId(), jobRequest.getId()); return command; } } throw new GeniePreconditionException( "No command found matching all command criteria [" + commandCriteria + "] attached to cluster with id: " + cluster.getId().orElse(NO_ID_FOUND) ); } finally { this.selectCommandTimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS); } } private List<Application> getApplications( final JobRequest jobRequest, final Command command ) throws GenieException { final long start = System.nanoTime(); try { final String jobId = jobRequest.getId().orElseThrow(() -> new GenieServerException("No job Id")); final String commandId = command.getId().orElseThrow(() -> new 
GenieServerException("No command Id")); log.info("Selecting applications for job {} and command {}", jobId, commandId); // TODO: What do we do about application status? Should probably check here final List<Application> applications = new ArrayList<>(); if (jobRequest.getApplications().isEmpty()) { applications.addAll(this.commandService.getApplicationsForCommand(commandId)); } else { for (final String applicationId : jobRequest.getApplications()) { applications.add(this.applicationService.getApplication(applicationId)); } } log.info( "Selected applications {} for job {}", applications .stream() .map(Application::getId) .filter(Optional::isPresent) .map(Optional::get) .reduce((one, two) -> one + "," + two), jobRequest.getId().orElse(NO_ID_FOUND) ); return applications; } finally { this.selectApplicationsTimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS); } } }
apache-2.0
spring-projects/spring-data-examples
jpa/deferred/src/main/java/example/model/Customer644.java
624
package example.model; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; @Entity public class Customer644 { @Id @GeneratedValue(strategy = GenerationType.AUTO) private long id; private String firstName; private String lastName; protected Customer644() {} public Customer644(String firstName, String lastName) { this.firstName = firstName; this.lastName = lastName; } @Override public String toString() { return String.format("Customer644[id=%d, firstName='%s', lastName='%s']", id, firstName, lastName); } }
apache-2.0
Ericliu001/RosterManager
app/src/test/java/com/example/ericliu/rostermanager/ExampleUnitTest.java
411
package com.example.ericliu.rostermanager; import org.junit.Test; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
apache-2.0
chanil1218/elasticsearch
src/main/java/org/elasticsearch/index/query/NestedFilterParser.java
7225
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.search.*; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.cache.filter.support.CacheKeyFilter; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.search.nested.BlockJoinQuery; import org.elasticsearch.index.search.nested.NonNestedDocsFilter; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; public class NestedFilterParser implements FilterParser { public static final String NAME = "nested"; @Inject public NestedFilterParser() { } @Override public String[] names() { return new String[]{NAME, Strings.toCamelCase(NAME)}; } @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); Query query = null; Filter filter = null; float boost = 1.0f; String scope = null; String path = null; boolean cache = false; CacheKeyFilter.Key cacheKey = null; String filterName = null; // we need a late binding 
filter so we can inject a parent nested filter inner nested queries NestedQueryParser.LateBindingParentFilter currentParentFilterContext = NestedQueryParser.parentFilterContext.get(); NestedQueryParser.LateBindingParentFilter usAsParentFilter = new NestedQueryParser.LateBindingParentFilter(); NestedQueryParser.parentFilterContext.set(usAsParentFilter); try { String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("query".equals(currentFieldName)) { query = parseContext.parseInnerQuery(); } else if ("filter".equals(currentFieldName)) { filter = parseContext.parseInnerFilter(); } else { throw new QueryParsingException(parseContext.index(), "[nested] filter does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("path".equals(currentFieldName)) { path = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_scope".equals(currentFieldName)) { scope = parser.text(); } else if ("_name".equals(currentFieldName)) { filterName = parser.text(); } else if ("_cache".equals(currentFieldName)) { cache = parser.booleanValue(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new CacheKeyFilter.Key(parser.text()); } else { throw new QueryParsingException(parseContext.index(), "[nested] filter does not support [" + currentFieldName + "]"); } } } if (query == null && filter == null) { throw new QueryParsingException(parseContext.index(), "[nested] requires either 'query' or 'filter' field"); } if (path == null) { throw new QueryParsingException(parseContext.index(), "[nested] requires 'path' field"); } if (filter != null) { query = new DeletionAwareConstantScoreQuery(filter); } query.setBoost(boost); 
MapperService.SmartNameObjectMapper mapper = parseContext.smartObjectMapper(path); if (mapper == null) { throw new QueryParsingException(parseContext.index(), "[nested] failed to find nested object under path [" + path + "]"); } ObjectMapper objectMapper = mapper.mapper(); if (objectMapper == null) { throw new QueryParsingException(parseContext.index(), "[nested] failed to find nested object under path [" + path + "]"); } if (!objectMapper.nested().isNested()) { throw new QueryParsingException(parseContext.index(), "[nested] nested object under path [" + path + "] is not of nested type"); } Filter childFilter = parseContext.cacheFilter(objectMapper.nestedTypeFilter(), null); usAsParentFilter.filter = childFilter; // wrap the child query to only work on the nested path type query = new FilteredQuery(query, childFilter); Filter parentFilter = currentParentFilterContext; if (parentFilter == null) { parentFilter = NonNestedDocsFilter.INSTANCE; // don't do special parent filtering, since we might have same nested mapping on two different types //if (mapper.hasDocMapper()) { // // filter based on the type... // parentFilter = mapper.docMapper().typeFilter(); //} parentFilter = parseContext.cacheFilter(parentFilter, null); } BlockJoinQuery joinQuery = new BlockJoinQuery(query, parentFilter, BlockJoinQuery.ScoreMode.None); if (scope != null) { SearchContext.current().addNestedQuery(scope, joinQuery); } Filter joinFilter = new QueryWrapperFilter(joinQuery); if (cache) { joinFilter = parseContext.cacheFilter(joinFilter, cacheKey); } if (filterName != null) { parseContext.addNamedFilter(filterName, joinFilter); } return joinFilter; } finally { // restore the thread local one... NestedQueryParser.parentFilterContext.set(currentParentFilterContext); } } }
apache-2.0
pacozaa/BoofCV
main/ip/src/boofcv/core/image/border/FactoryImageBorder.java
9861
/* * Copyright (c) 2011-2015, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.core.image.border; import boofcv.struct.image.*; /** * Contains functions that create classes which handle pixels outside the image border differently. * * @author Peter Abeles */ @SuppressWarnings({"unchecked"}) public class FactoryImageBorder { public static <T extends ImageSingleBand> ImageBorder<T> single(T image, BorderType borderType) { ImageBorder<T> ret = single((Class) image.getClass(), borderType); ret.setImage(image); return ret; } public static <T extends ImageInterleaved> ImageBorder<T> interleaved(T image, BorderType borderType) { ImageBorder<T> ret = interleaved((Class) image.getClass(), borderType); ret.setImage(image); return ret; } /** * Given an image type return the appropriate {@link ImageBorder} class type. * * @param imageType Type of image which is being processed. * @return The ImageBorder for processing the image type. 
*/ public static Class<ImageBorder> lookupBorderClassType( Class<ImageSingleBand> imageType ) { if( (Class)imageType == ImageFloat32.class ) return (Class)ImageBorder1D_F32.class; if( (Class)imageType == ImageFloat64.class ) return (Class)ImageBorder1D_F64.class; else if( ImageInteger.class.isAssignableFrom(imageType) ) return (Class)ImageBorder1D_S32.class; else if( (Class)imageType == ImageSInt64.class ) return (Class)ImageBorder1D_S64.class; else throw new IllegalArgumentException("Unknown image type"); } public static <T extends ImageBase> ImageBorder<T> generic( BorderType borderType, ImageType<T> imageType ) { switch( imageType.getFamily() ) { case SINGLE_BAND: return single(imageType.getImageClass(),borderType); case MULTI_SPECTRAL: return single(imageType.getImageClass(),borderType); case INTERLEAVED: return interleaved(imageType.getImageClass(),borderType); default: throw new IllegalArgumentException("Unknown family"); } } public static <T extends ImageBase> ImageBorder<T> genericValue( double value, ImageType<T> imageType ) { switch( imageType.getFamily() ) { case SINGLE_BAND: return singleValue(imageType.getImageClass(), value); case MULTI_SPECTRAL: return singleValue(imageType.getImageClass(),value); case INTERLEAVED: return interleavedValue(imageType.getImageClass(),value); default: throw new IllegalArgumentException("Unknown family"); } } /** * Creates an instance of the requested algorithms for handling borders pixels on {@link ImageSingleBand}. If type * {@link BorderType#VALUE} is passed in then the value will be set to 0. Alternatively you could * use {@link #singleValue(Class, double)} instead. * * @param imageType Type of image being processed. * @param borderType Which border algorithm should it use. * @return The requested {@link ImageBorder). 
*/
    // Maps the requested BorderType onto a 1-D border-index rule class, then wraps it in the
    // image-type specific ImageBorder1D_* implementation for single-band images.
    public static <T extends ImageSingleBand> ImageBorder<T> single(Class<T> imageType, BorderType borderType) {
        Class<?> borderClass;
        switch(borderType) {
            case SKIP:
                // SKIP cannot be expressed as a border lookup; callers must skip pixels themselves.
                throw new IllegalArgumentException("Skip border can't be implemented here and has to be done " +
                        "externally. Call this might be a bug. Instead pass in EXTENDED and manually skip over the " +
                        "pixel in a loop some place.");
//                borderClass = BorderIndex1D_Exception.class;
//                break;
            case NORMALIZED:
                throw new IllegalArgumentException("Normalized can't be supported by this border interface");
            case REFLECT:
                borderClass = BorderIndex1D_Reflect.class;
                break;
            case EXTENDED:
                borderClass = BorderIndex1D_Extend.class;
                break;
            case WRAP:
                borderClass = BorderIndex1D_Wrap.class;
                break;
            case VALUE:
                // VALUE borders use a dedicated implementation; the fill value defaults to 0.
                return FactoryImageBorder.singleValue(imageType, 0);
            default:
                throw new IllegalArgumentException("Border type not supported: "+borderType);
        }

        // Dispatch on the concrete single-band image type.
        // NOTE(review): the first branch is missing an 'else'; harmless because it returns,
        // but inconsistent with the rest of the chain.
        if( imageType == ImageFloat32.class )
            return (ImageBorder<T>)new ImageBorder1D_F32(borderClass);
        if( imageType == ImageFloat64.class )
            return (ImageBorder<T>)new ImageBorder1D_F64(borderClass);
        else if( ImageInteger.class.isAssignableFrom(imageType) )
            return (ImageBorder<T>)new ImageBorder1D_S32((Class)borderClass);
        else if( imageType == ImageSInt64.class )
            return (ImageBorder<T>)new ImageBorder1D_S64(borderClass);
        else
            throw new IllegalArgumentException("Unknown image type: "+imageType.getSimpleName());
    }

    /**
     * Creates an instance of the requested algorithms for handling borders pixels on
     * {@link ImageInterleaved}. If type {@link BorderType#VALUE} is passed in then the value
     * will be set to 0. Alternatively you could use {@link #interleavedValue(Class, double)}
     * instead.
     *
     * @param imageType Type of image being processed.
     * @param borderType Which border algorithm should it use.
     * @return The requested {@link ImageBorder}.
     */
    public static <T extends ImageInterleaved> ImageBorder<T> interleaved(Class<T> imageType, BorderType borderType) {
        Class<?> borderClass;
        switch(borderType) {
            case SKIP:
                // Same restriction as in single(): SKIP must be handled by the caller.
                throw new IllegalArgumentException("Skip border can't be implemented here and has to be done " +
                        "externally. Call this might be a bug. Instead pass in EXTENDED and manually skip over the " +
                        "pixel in a loop some place.");
//                borderClass = BorderIndex1D_Exception.class;
//                break;
            case NORMALIZED:
                throw new IllegalArgumentException("Normalized can't be supported by this border interface");
            case REFLECT:
                borderClass = BorderIndex1D_Reflect.class;
                break;
            case EXTENDED:
                borderClass = BorderIndex1D_Extend.class;
                break;
            case WRAP:
                borderClass = BorderIndex1D_Wrap.class;
                break;
            case VALUE:
                return FactoryImageBorder.interleavedValue(imageType, 0);
            default:
                throw new IllegalArgumentException("Border type not supported: "+borderType);
        }

        // Dispatch on the concrete interleaved image type.
        if( imageType == InterleavedF32.class )
            return (ImageBorder<T>)new ImageBorder1D_IL_F32(borderClass);
        else if( imageType == InterleavedF64.class )
            return (ImageBorder<T>)new ImageBorder1D_IL_F64(borderClass);
        else if( InterleavedInteger.class.isAssignableFrom(imageType) )
            return (ImageBorder<T>)new ImageBorder1D_IL_S32(borderClass);
        else if( imageType == InterleavedS64.class )
            return (ImageBorder<T>)new ImageBorder1D_IL_S64(borderClass);
        else
            throw new IllegalArgumentException("Unknown image type: "+imageType.getSimpleName());
    }

    /**
     * Creates an {@link ImageBorder} that returns the specified value always.
     *
     * @see ImageBorderValue
     *
     * @param image The image the border is being created for.
     * @param value The value which will be returned.
     * @return An {@link ImageBorder}
     */
    public static <T extends ImageSingleBand> ImageBorder<T> singleValue(T image, double value) {
        // Delegates to the Class-based factory, then attaches the image instance.
        ImageBorder border = singleValue(image.getClass(), value);
        border.setImage(image);
        return border;
    }

    /**
     * Creates an {@link ImageBorder} that returns the specified value always.
     *
     * @see ImageBorderValue
     *
     * @param imageType The image type the border is being created for.
     * @param value The value which will be returned.
     * @return An {@link ImageBorder}
     */
    public static <T extends ImageSingleBand> ImageBorder<T> singleValue(Class<T> imageType, double value) {
        // NOTE(review): unlike single(), there is no explicit ImageSInt64 branch here.
        // If ImageSInt64 is not a subtype of ImageInteger this falls through to the
        // "Unknown image type" error — TODO confirm against the image type hierarchy.
        if( imageType == ImageFloat32.class ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_F32((float)value);
        } else if( imageType == ImageFloat64.class ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_F64(value);
        } else if( ImageInteger.class.isAssignableFrom(imageType) ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_I((int)value);
        } else {
            throw new IllegalArgumentException("Unknown image type: "+imageType.getSimpleName());
        }
    }

    /**
     * Creates an {@link ImageBorder} that returns the specified value always.
     *
     * @see ImageBorderValue
     *
     * @param image The image the border is being created for.
     * @param value The value which will be returned.
     * @return An {@link ImageBorder}
     */
    public static <T extends ImageInterleaved> ImageBorder<T> interleavedValue(T image, double value) {
        // Delegates to the Class-based factory, then attaches the image instance.
        ImageBorder border = interleavedValue(image.getClass(), value);
        border.setImage(image);
        return border;
    }

    /**
     * Creates an {@link ImageBorder} that returns the specified value always.
     *
     * @see ImageBorderValue
     *
     * @param imageType The image type the border is being created for.
     * @param value The value which will be returned.
     * @return An {@link ImageBorder}
     */
    public static <T extends ImageInterleaved> ImageBorder<T> interleavedValue(Class<T> imageType, double value) {
        if( imageType == InterleavedF32.class ) {
            return (ImageBorder<T>) new ImageBorderValue.Value_IL_F32((float) value);
        } else if( imageType == InterleavedF64.class ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_IL_F64(value);
        } else if( InterleavedInteger.class.isAssignableFrom(imageType) ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_IL_S32((int)value);
        } else if( imageType == InterleavedS64.class ) {
            return (ImageBorder<T>)new ImageBorderValue.Value_IL_S64((long)value);
        } else {
            throw new IllegalArgumentException("Unknown image type: "+imageType.getSimpleName());
        }
    }
}
apache-2.0
lxxbluesea/QingGong
app/src/main/java/com/example/qinggong/model/QingGongEntity.java
717
package com.example.qinggong.model; /** * Created by ZJGJK03 on 2014/11/25. */ public class QingGongEntity { public QingGongEntity() { } public QingGongEntity(String age, String month, String sex) { this.age = age; this.month = month; this.sex = sex; } public String age,month,sex; public String getAge() { return age; } public void setAge(String age) { this.age = age; } public String getMonth() { return month; } public void setMonth(String month) { this.month = month; } public String getSex() { return sex; } public void setSex(String sex) { this.sex = sex; } }
apache-2.0
xasx/camunda-bpm-platform
engine/src/test/java/org/camunda/bpm/engine/test/api/authorization/externaltask/GetErrorDetailsAuthorizationTest.java
5028
/*
 * Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.test.api.authorization.externaltask;

import org.camunda.bpm.engine.authorization.Permissions;
import org.camunda.bpm.engine.authorization.Resources;
import org.camunda.bpm.engine.externaltask.LockedExternalTask;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.test.Deployment;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.api.authorization.util.AuthorizationScenario;
import org.camunda.bpm.engine.test.api.authorization.util.AuthorizationTestRule;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.junit.*;
import org.junit.rules.RuleChain;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Collection;
import java.util.List;

import static org.camunda.bpm.engine.test.api.authorization.util.AuthorizationScenario.scenario;
import static org.camunda.bpm.engine.test.api.authorization.util.AuthorizationSpec.grant;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.junit.Assert.assertThat;

/**
 * Parameterized authorization test: verifies which permission grants allow a user
 * to read the error details of a failed external task.
 *
 * Please note that if you want to reuse Rule and other fields you should create abstract class
 * and pack it there.
 *
 * @see HandleExternalTaskAuthorizationTest
 *
 * @author Askar Akhmerov
 */
@RunWith(Parameterized.class)
public class GetErrorDetailsAuthorizationTest {
  // Error details recorded via handleFailure() and expected back from the query.
  private static final String ERROR_DETAILS = "theDetails";
  // NOTE(review): never assigned in this class — presumably for subclass use; confirm.
  protected String deploymentId;
  // Holds the details fetched in the "when" phase so the "then" phase can assert on them.
  private String currentDetails;

  public ProcessEngineRule engineRule = new ProvidedProcessEngineRule();
  public AuthorizationTestRule authRule = new AuthorizationTestRule(engineRule);

  // Engine rule must wrap the auth rule so the engine exists before auth setup runs.
  @Rule
  public RuleChain chain = RuleChain.outerRule(engineRule).around(authRule);

  // The scenario under test, injected by the Parameterized runner.
  @Parameterized.Parameter
  public AuthorizationScenario scenario;

  /**
   * The grant combinations to test: no grants must fail; READ on the process instance
   * (specific or wildcard) or READ_INSTANCE on the process definition (specific or
   * wildcard) must each succeed.
   */
  @Parameterized.Parameters(name = "Scenario {index}")
  public static Collection<AuthorizationScenario[]> scenarios() {
    return AuthorizationTestRule.asParameters(
        scenario()
            .withoutAuthorizations()
            .failsDueToRequired(
                grant(Resources.PROCESS_INSTANCE, "processInstanceId", "userId", Permissions.READ),
                grant(Resources.PROCESS_DEFINITION, "oneExternalTaskProcess", "userId", Permissions.READ_INSTANCE)),
        scenario()
            .withAuthorizations(
                grant(Resources.PROCESS_INSTANCE, "processInstanceId", "userId", Permissions.READ))
            .succeeds(),
        scenario()
            .withAuthorizations(
                grant(Resources.PROCESS_INSTANCE, "*", "userId", Permissions.READ))
            .succeeds(),
        scenario()
            .withAuthorizations(
                grant(Resources.PROCESS_DEFINITION, "processDefinitionKey", "userId", Permissions.READ_INSTANCE))
            .succeeds(),
        scenario()
            .withAuthorizations(
                grant(Resources.PROCESS_DEFINITION, "*", "userId", Permissions.READ_INSTANCE))
            .succeeds()
    );
  }

  @Before
  public void setUp() {
    authRule.createUserAndGroup("userId", "groupId");
  }

  @After
  public void tearDown() {
    authRule.deleteUsersAndGroups();
  }

  @Test
  @Deployment(resources = "org/camunda/bpm/engine/test/api/externaltask/oneExternalTaskProcess.bpmn20.xml")
  public void testCompleteExternalTask() {
    // given: a started process with one locked external task
    ProcessInstance processInstance = engineRule.getRuntimeService().startProcessInstanceByKey("oneExternalTaskProcess");
    List<LockedExternalTask> tasks = engineRule.getExternalTaskService()
        .fetchAndLock(5, "workerId")
        .topic("externalTaskTopic", 5000L)
        .execute();
    LockedExternalTask task = tasks.get(0);

    //preconditions method: record a failure so error details exist to be queried
    engineRule.getExternalTaskService().handleFailure(task.getId(),task.getWorkerId(),"anError",ERROR_DETAILS,1,1000L);

    // when: switch to the scenario's user and grants
    authRule
      .init(scenario)
      .withUser("userId")
      .bindResource("processInstanceId", processInstance.getId())
      .bindResource("processDefinitionKey", "oneExternalTaskProcess")
      .start();

    //execution method
    currentDetails = engineRule.getExternalTaskService().getExternalTaskErrorDetails(task.getId());

    // then: only assert the payload when the scenario expected success
    if (authRule.assertScenario(scenario)) {
      //assertion method
      assertThat(currentDetails,is(ERROR_DETAILS));
    }
  }
}
apache-2.0
myshzzx/mlib
core/src/test/java/mysh/codegen/CodeUtilTest.java
4645
package mysh.codegen;

import mysh.util.Encodings;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.nio.file.Files;
import java.nio.file.Paths;

import static mysh.codegen.CodeUtil.camel2underline;
import static mysh.codegen.CodeUtil.field2MethodSign;
import static mysh.codegen.CodeUtil.isLowerCase;
import static mysh.codegen.CodeUtil.isUpperCase;
import static mysh.codegen.CodeUtil.method2FieldSign;
import static mysh.codegen.CodeUtil.toUpperCase;
import static mysh.codegen.CodeUtil.underline2FieldCamel;
import static mysh.codegen.CodeUtil.underline2camel;

/**
 * Unit tests for {@link CodeUtil}'s naming-convention helpers
 * (underscore/camel conversion, case predicates, accessor-name mapping).
 *
 * @author Mysh
 * @since 2014/4/17 16:13
 */
public class CodeUtilTest {

	/**
	 * Manual helper, not a real test: generates property-copy code from a field
	 * definition file on disk. Disabled because it depends on a local path.
	 */
	@Test
	@Disabled
	public void genPropCopy() throws Exception {
		byte[] bytes = Files.readAllBytes(Paths.get("e:/temp/code.txt"));
		// Pick the charset by sniffing the file content.
		String fieldsDefine = Encodings.isUTF8Bytes(bytes) ?
				new String(bytes, Encodings.UTF_8) : new String(bytes, Encodings.GBK);
		String code = CodeUtil.genPropCopy("status", "bean", fieldsDefine);
		System.out.println(code);
	}

	/** underscore_name -> UpperCamel type name. */
	@Test
	public void testUnderline2camel() {
		Assertions.assertEquals("MyshZzx", underline2camel("mysh_zzx"));
		Assertions.assertEquals("MyshZZX", underline2camel("mysh_z_z_x"));
		Assertions.assertEquals("Mysh", underline2camel("MYSH"));
		Assertions.assertEquals("MZZx", underline2camel("M_Z_ZX"));
	}

	/** underscore_name -> lowerCamel field name. */
	@Test
	public void testUnderline2FieldCamel() {
		Assertions.assertEquals("myshZzx", underline2FieldCamel("mysh_zzx"));
		Assertions.assertEquals("myshZZX", underline2FieldCamel("mysh_z_z_x"));
		Assertions.assertEquals("mysh", underline2FieldCamel("MYSH"));
		Assertions.assertEquals("mZZx", underline2FieldCamel("M_Z_ZX"));
	}

	/** camelCase -> UPPER_UNDERSCORE, including runs of capitals. */
	@Test
	public void testCamel2underline() {
		Assertions.assertEquals("MYSH_ZZX", camel2underline("MyshZzx"));
		Assertions.assertEquals("MYSH_Z_Z_X", camel2underline("MyshZZX"));
		Assertions.assertEquals("M_Y_S_H_ZZX", camel2underline("MYSHZzx"));
		Assertions.assertEquals("M_YSH_Z_ZX", camel2underline("MYshZZx"));
		Assertions.assertEquals("MYSH_Z_ZX", camel2underline("myshZZx"));
		Assertions.assertEquals("MY_Z_ZX", camel2underline("myZZx"));
		Assertions.assertEquals("M_Z_ZX", camel2underline("mZZx"));
		Assertions.assertEquals("M_Z_X", camel2underline("mZX"));
		Assertions.assertEquals("MY_Z", camel2underline("myZ"));
	}

	@Test
	public void testIsUpperCase() {
		Assertions.assertTrue(isUpperCase('A'));
		Assertions.assertTrue(isUpperCase('R'));
		Assertions.assertTrue(isUpperCase('Z'));
		Assertions.assertFalse(isUpperCase('a'));
		Assertions.assertFalse(isUpperCase('k'));
		Assertions.assertFalse(isUpperCase('z'));
		Assertions.assertFalse(isUpperCase('\r'));
	}

	@Test
	public void testIsLowerCase() {
		Assertions.assertTrue(isLowerCase('a'));
		Assertions.assertTrue(isLowerCase('d'));
		Assertions.assertTrue(isLowerCase('z'));
		Assertions.assertFalse(isLowerCase('A'));
		Assertions.assertFalse(isLowerCase('R'));
		Assertions.assertFalse(isLowerCase('Z'));
		// FIX: this previously asserted isUpperCase('\r') (copy-paste from the test
		// above), which never exercised isLowerCase on a non-letter character.
		Assertions.assertFalse(isLowerCase('\r'));
	}

	@Test
	public void testToUpperCase() {
		Assertions.assertEquals('A', toUpperCase('a'));
		Assertions.assertEquals('H', toUpperCase('h'));
		Assertions.assertEquals('Z', toUpperCase('z'));
		// Non-lowercase input is returned unchanged.
		Assertions.assertEquals('B', toUpperCase('B'));
		Assertions.assertEquals('\n', toUpperCase('\n'));
	}

	/** Method-name suffix ("Field") -> field name ("field"); lowercase input is untouched. */
	@Test
	public void testMethod2FieldSign() throws Exception {
		Assertions.assertEquals("field", method2FieldSign("Field"));
		Assertions.assertEquals("array", method2FieldSign("Array"));
		Assertions.assertEquals("zzx", method2FieldSign("Zzx"));
		Assertions.assertEquals("field", method2FieldSign("field"));
		Assertions.assertEquals("array", method2FieldSign("array"));
		Assertions.assertEquals("zzx", method2FieldSign("zzx"));
	}

	/** Field name ("method") -> method-name suffix ("Method"); capitalized input is untouched. */
	@Test
	public void testField2MethodSign() throws Exception {
		Assertions.assertEquals("Method", field2MethodSign("method"));
		Assertions.assertEquals("Array", field2MethodSign("array"));
		Assertions.assertEquals("Zzx", field2MethodSign("zzx"));
		Assertions.assertEquals("Method", field2MethodSign("Method"));
		Assertions.assertEquals("Array", field2MethodSign("Array"));
		Assertions.assertEquals("Zzx", field2MethodSign("Zzx"));
	}
}
apache-2.0
uglycustard/buildergenerator
src/test/java/uk/co/buildergenerator/testmodel/Builder.java
91
package uk.co.buildergenerator.testmodel; public interface Builder<T> { T build(); }
apache-2.0
maugomez77/RxJava
src/main/java/rx/internal/operators/CachedObservable.java
16423
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.internal.operators;

import java.util.concurrent.atomic.*;

import rx.*;
import rx.exceptions.*;
import rx.internal.util.LinkedArrayList;
import rx.subscriptions.SerialSubscription;

/**
 * An observable which auto-connects to another observable, caches the elements
 * from that observable but allows terminating the connection and completing the cache.
 *
 * @param <T> the source element type
 */
public final class CachedObservable<T> extends Observable<T> {

    /** The cache and replay state. NOTE(review): could be declared final — it is only set in the constructor. */
    private CacheState<T> state;

    /**
     * Creates a cached Observable with a default capacity hint of 16.
     * @param <T> the value type
     * @param source the source Observable to cache
     * @return the CachedObservable instance
     */
    public static <T> CachedObservable<T> from(Observable<? extends T> source) {
        return from(source, 16);
    }

    /**
     * Creates a cached Observable with the given capacity hint.
     * @param <T> the value type
     * @param source the source Observable to cache
     * @param capacityHint the hint for the internal buffer size
     * @return the CachedObservable instance
     */
    public static <T> CachedObservable<T> from(Observable<? extends T> source, int capacityHint) {
        if (capacityHint < 1) {
            throw new IllegalArgumentException("capacityHint > 0 required");
        }
        CacheState<T> state = new CacheState<T>(source, capacityHint);
        CachedSubscribe<T> onSubscribe = new CachedSubscribe<T>(state);
        return new CachedObservable<T>(onSubscribe, state);
    }

    /**
     * Private constructor because state needs to be shared between the Observable body and
     * the onSubscribe function.
     * @param onSubscribe the subscription handler that registers replay producers
     * @param state the shared cache/replay state
     */
    private CachedObservable(OnSubscribe<T> onSubscribe, CacheState<T> state) {
        super(onSubscribe);
        this.state = state;
    }

    /**
     * Check if this cached observable is connected to its source.
     * @return true if already connected
     */
    /* public */boolean isConnected() {
        return state.isConnected;
    }

    /**
     * Returns true if there are observers subscribed to this observable.
     * @return true if at least one replay producer is registered
     */
    /* public */ boolean hasObservers() {
        return state.producers.length != 0;
    }

    /**
     * Returns the number of events currently cached.
     * @return the cached event count (values plus any terminal notification)
     */
    /* public */ int cachedEventCount() {
        return state.size();
    }

    /**
     * Contains the active child producers and the values to replay.
     * Extends LinkedArrayList so the cached notifications are stored inline.
     *
     * @param <T> the cached value type
     */
    static final class CacheState<T> extends LinkedArrayList implements Observer<T> {
        /** The source observable to connect to. */
        final Observable<? extends T> source;
        /** Holds onto the subscriber connected to source. */
        final SerialSubscription connection;
        /** Guarded by connection (not this). */
        volatile ReplayProducer<?>[] producers;
        /** The default empty array of producers. */
        static final ReplayProducer<?>[] EMPTY = new ReplayProducer<?>[0];

        final NotificationLite<T> nl;

        /** Set to true after connection. */
        volatile boolean isConnected;
        /**
         * Indicates that the source has completed emitting values or the
         * Observable was forcefully terminated.
         */
        boolean sourceDone;

        public CacheState(Observable<? extends T> source, int capacityHint) {
            super(capacityHint);
            this.source = source;
            this.producers = EMPTY;
            this.nl = NotificationLite.instance();
            this.connection = new SerialSubscription();
        }

        /**
         * Adds a ReplayProducer to the producers array atomically
         * (copy-on-write under the connection lock).
         * @param p the producer to add
         */
        public void addProducer(ReplayProducer<T> p) {
            // guarding by connection to save on allocating another object
            // thus there are two distinct locks guarding the value-addition and child come-and-go
            synchronized (connection) {
                ReplayProducer<?>[] a = producers;
                int n = a.length;
                ReplayProducer<?>[] b = new ReplayProducer<?>[n + 1];
                System.arraycopy(a, 0, b, 0, n);
                b[n] = p;
                producers = b;
            }
        }

        /**
         * Removes the ReplayProducer (if present) from the producers array atomically.
         * @param p the producer to remove
         */
        public void removeProducer(ReplayProducer<T> p) {
            synchronized (connection) {
                ReplayProducer<?>[] a = producers;
                int n = a.length;
                int j = -1;
                // Locate p; equals() is not overridden on ReplayProducer, so this is identity-based.
                for (int i = 0; i < n; i++) {
                    if (a[i].equals(p)) {
                        j = i;
                        break;
                    }
                }
                if (j < 0) {
                    return;
                }
                if (n == 1) {
                    producers = EMPTY;
                    return;
                }
                ReplayProducer<?>[] b = new ReplayProducer<?>[n - 1];
                System.arraycopy(a, 0, b, 0, j);
                System.arraycopy(a, j + 1, b, j, n - j - 1);
                producers = b;
            }
        }

        /**
         * Connects the cache to the source.
         * Make sure this is called only once.
         */
        public void connect() {
            Subscriber<T> subscriber = new Subscriber<T>() {
                @Override
                public void onNext(T t) {
                    CacheState.this.onNext(t);
                }
                @Override
                public void onError(Throwable e) {
                    CacheState.this.onError(e);
                }
                @Override
                public void onCompleted() {
                    CacheState.this.onCompleted();
                }
            };
            connection.set(subscriber);
            source.unsafeSubscribe(subscriber);
            isConnected = true;
        }

        @Override
        public void onNext(T t) {
            if (!sourceDone) {
                Object o = nl.next(t);
                add(o);
                dispatch();
            }
        }

        @Override
        public void onError(Throwable e) {
            if (!sourceDone) {
                sourceDone = true;
                Object o = nl.error(e);
                add(o);
                // Terminal event: the upstream connection is no longer needed.
                connection.unsubscribe();
                dispatch();
            }
        }

        @Override
        public void onCompleted() {
            if (!sourceDone) {
                sourceDone = true;
                Object o = nl.completed();
                add(o);
                // Terminal event: the upstream connection is no longer needed.
                connection.unsubscribe();
                dispatch();
            }
        }

        /**
         * Signals all known children there is work to do.
         */
        void dispatch() {
            ReplayProducer<?>[] a = producers;
            for (ReplayProducer<?> rp : a) {
                rp.replay();
            }
        }
    }

    /**
     * Manages the subscription of child subscribers by setting up a replay producer and
     * performs auto-connection of the very first subscription.
     * Extends AtomicBoolean to store the "connected yet?" flag without an extra field.
     * @param <T> the value type emitted
     */
    static final class CachedSubscribe<T> extends AtomicBoolean implements OnSubscribe<T> {
        /** */
        private static final long serialVersionUID = -2817751667698696782L;
        final CacheState<T> state;
        public CachedSubscribe(CacheState<T> state) {
            this.state = state;
        }
        @Override
        public void call(Subscriber<? super T> t) {
            // we can connect first because we replay everything anyway
            ReplayProducer<T> rp = new ReplayProducer<T>(t, state);
            state.addProducer(rp);

            t.add(rp);
            t.setProducer(rp);

            // we ensure a single connection here to save an instance field of AtomicBoolean in state.
            if (!get() && compareAndSet(false, true)) {
                state.connect();
            }

            // no need to call rp.replay() here because the very first request will trigger it anyway
        }
    }

    /**
     * Keeps track of the current request amount and the replay position for a child Subscriber.
     * Extends AtomicLong: the atomic long value is the outstanding request count, with a
     * negative value meaning "unsubscribed".
     *
     * @param <T> the replayed value type
     */
    static final class ReplayProducer<T> extends AtomicLong implements Producer, Subscription {
        /** */
        private static final long serialVersionUID = -2557562030197141021L;
        /** The actual child subscriber. */
        final Subscriber<? super T> child;
        /** The cache state object. */
        final CacheState<T> state;

        /**
         * Contains the reference to the buffer segment in replay.
         * Accessed after reading state.size() and when emitting == true.
         */
        Object[] currentBuffer;
        /**
         * Contains the index into the currentBuffer where the next value is expected.
         * Accessed after reading state.size() and when emitting == true.
         */
        int currentIndexInBuffer;
        /**
         * Contains the absolute index up until the values have been replayed so far.
         */
        int index;

        /** Indicates there is a replay going on; guarded by this. */
        boolean emitting;
        /** Indicates there were some state changes/replay attempts; guarded by this. */
        boolean missed;

        public ReplayProducer(Subscriber<? super T> child, CacheState<T> state) {
            this.child = child;
            this.state = state;
        }

        @Override
        public void request(long n) {
            // CAS loop: add n to the outstanding request count, capping at Long.MAX_VALUE,
            // unless already unsubscribed (negative).
            for (;;) {
                long r = get();
                if (r < 0) {
                    return;
                }
                long u = r + n;
                if (u < 0) {
                    u = Long.MAX_VALUE;
                }
                if (compareAndSet(r, u)) {
                    replay();
                    return;
                }
            }
        }

        /**
         * Updates the request count to reflect values have been produced.
         * @param n the number of values emitted
         * @return the remaining outstanding request count
         */
        public long produced(long n) {
            return addAndGet(-n);
        }

        @Override
        public boolean isUnsubscribed() {
            return get() < 0;
        }

        @Override
        public void unsubscribe() {
            long r = get();
            if (r >= 0) {
                r = getAndSet(-1L); // unsubscribed state is negative
                if (r >= 0) {
                    state.removeProducer(this);
                }
            }
        }

        /**
         * Continue replaying available values if there are requests for them.
         * Uses the emitting/missed flag pair so that at most one thread emits at a
         * time; concurrent callers just mark 'missed' and leave.
         */
        public void replay() {
            // make sure there is only a single thread emitting
            synchronized (this) {
                if (emitting) {
                    missed = true;
                    return;
                }
                emitting = true;
            }
            boolean skipFinal = false;
            try {
                final NotificationLite<T> nl = state.nl;
                final Subscriber<? super T> child = this.child;

                for (;;) {

                    long r = get();

                    if (r < 0L) {
                        skipFinal = true;
                        return;
                    }

                    // read the size, if it is non-zero, we can safely read the head and
                    // read values up to the given absolute index
                    int s = state.size();
                    if (s != 0) {
                        Object[] b = currentBuffer;

                        // latch onto the very first buffer now that it is available.
                        if (b == null) {
                            b = state.head();
                            currentBuffer = b;
                        }
                        final int n = b.length - 1;
                        int j = index;
                        int k = currentIndexInBuffer;

                        // eagerly emit any terminal event
                        if (r == 0) {
                            Object o = b[k];
                            if (nl.isCompleted(o)) {
                                child.onCompleted();
                                skipFinal = true;
                                unsubscribe();
                                return;
                            } else
                            if (nl.isError(o)) {
                                child.onError(nl.getError(o));
                                skipFinal = true;
                                unsubscribe();
                                return;
                            }
                        } else
                        if (r > 0) {
                            int valuesProduced = 0;

                            while (j < s && r > 0) {
                                if (child.isUnsubscribed()) {
                                    skipFinal = true;
                                    return;
                                }
                                // The last slot of each segment links to the next segment.
                                if (k == n) {
                                    b = (Object[])b[n];
                                    k = 0;
                                }
                                Object o = b[k];

                                try {
                                    if (nl.accept(child, o)) {
                                        skipFinal = true;
                                        unsubscribe();
                                        return;
                                    }
                                } catch (Throwable err) {
                                    Exceptions.throwIfFatal(err);
                                    skipFinal = true;
                                    unsubscribe();
                                    // Only forward the failure if it wasn't a terminal notification itself.
                                    if (!nl.isError(o) && !nl.isCompleted(o)) {
                                        child.onError(OnErrorThrowable.addValueAsLastCause(err, nl.getValue(o)));
                                    }
                                    return;
                                }

                                k++;
                                j++;
                                r--;
                                valuesProduced++;
                            }

                            if (child.isUnsubscribed()) {
                                skipFinal = true;
                                return;
                            }

                            // Persist the replay position and settle the request count.
                            index = j;
                            currentIndexInBuffer = k;
                            currentBuffer = b;

                            produced(valuesProduced);
                        }
                    }

                    synchronized (this) {
                        if (!missed) {
                            emitting = false;
                            skipFinal = true;
                            return;
                        }
                        missed = false;
                    }
                }
            } finally {
                if (!skipFinal) {
                    synchronized (this) {
                        emitting = false;
                    }
                }
            }
        }
    }
}
apache-2.0
jexp/idea2
platform/lang-impl/src/com/intellij/codeInsight/lookup/LookupValueWithUIHint.java
994
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.lookup; import java.awt.*; /** * Created by IntelliJ IDEA. * User: maxim * Date: 10.12.2004 * Time: 13:25:56 * To change this template use File | Settings | File Templates. * @deprecated use InsertHandler */ public interface LookupValueWithUIHint extends PresentableLookupValue { String getTypeHint(); Color getColorHint(); boolean isBold(); }
apache-2.0
yoking-zhang/demo
src/main/java/org/yoking/pattern/builder/BuilderTest.java
311
package org.yoking.pattern.builder; import java.util.Date; import org.yoking.pattern.builder.Car.Builder; public class BuilderTest { public static void main(String[] args) { Car c = new Builder().id(123l).color(Color.WHITE).manufacturer("SAIC-GM").date(new Date()).build(); System.out.println(c); } }
apache-2.0
gnewton/tuapait
src/main/java/ca/gnewton/tuapait/CacheType.java
90
package ca.gnewton.tuapait; public enum CacheType{ CACHE, STORE, IMMUTABLE_STORE }
apache-2.0
bhavanki/moving-hadoop-to-the-cloud
ch12/src/test/java/com/mh2c/ApacheLogRecordTest.java
2036
/* Copyright 2017 William A. Havanki, Jr. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.mh2c; import static org.junit.Assert.assertEquals; import java.time.ZonedDateTime; import java.time.temporal.ChronoUnit; import org.junit.Before; import org.junit.Test; public class ApacheLogRecordTest { private ZonedDateTime now; private ApacheLogRecord record; @Before public void setUp() { now = ZonedDateTime.now().truncatedTo(ChronoUnit.SECONDS); record = new ApacheLogRecord("203.0.113.101", now, "GET", "/index.html", 200, 123L, "MyBrowser"); } @Test public void testParsing() { String logLine = "203.0.113.101 - - [" + ApacheLogRecord.TIMESTAMP_FORMATTER.format(now) + "] \"GET /index.html HTTP/1.0\" 200 123 \"http://example.com\" \"MyBrowser\""; record = new ApacheLogRecord(logLine); assertEquals("203.0.113.101", record.getIpAddress()); assertEquals(now.toInstant(), record.getDateTime().toInstant()); assertEquals("GET", record.getMethod()); assertEquals("/index.html", record.getResource()); assertEquals(200, record.getStatus()); assertEquals(123L, record.getBytes()); assertEquals("MyBrowser", record.getUserAgent()); } @Test public void testWithIpAddress() { record = record.withIpAddress("203.0.113.102"); assertEquals("203.0.113.102", record.getIpAddress()); } @Test public void testWithUserAgent() { record = record.withUserAgent("Opera"); assertEquals("Opera", record.getUserAgent()); } }
apache-2.0
nao20010128nao/show-java
app/src/main/java/org/jf/dexlib2/iface/instruction/formats/Instruction21lh.java
1754
/* * Copyright 2012, Google Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jf.dexlib2.iface.instruction.formats; import org.jf.dexlib2.iface.instruction.*; public interface Instruction21lh extends OneRegisterInstruction, LongHatLiteralInstruction { }
apache-2.0
inter6/smtp-sender
src/main/java/com/inter6/mail/job/thread/JobExecuteException.java
284
package com.inter6.mail.job.thread; public class JobExecuteException extends RuntimeException { private static final long serialVersionUID = -8197551587299842462L; public JobExecuteException(String message, Throwable cause) { super(message, cause); } }
apache-2.0
jqno/equalsverifier
equalsverifier-test-core/src/test/java/nl/jqno/equalsverifier/integration/extra_features/GetClassInEqualityComparisonTest.java
1604
package nl.jqno.equalsverifier.integration.extra_features; import nl.jqno.equalsverifier.EqualsVerifier; import org.junit.jupiter.api.Test; public class GetClassInEqualityComparisonTest { @Test public void succeed_whenGetClassIsPartOfEqualityComparison_givenAnAbstractSuperclassAndUsingGetClassIsUsed() { EqualsVerifier.forClass(Identifiable.class).usingGetClass().verify(); } @Test public void succeed_whenGetClassIsPartOfEqualityComparison_givenAConcreteImplementationAndUsingGetClassIsUsed() { EqualsVerifier.forClass(Person.class).usingGetClass().verify(); } @Test public void succeed_whenGetClassIsPartOfEqualityComparison_givenAnotherConcreteImplementationAndUsingGetClassIsUsed() { EqualsVerifier.forClass(Account.class).usingGetClass().verify(); } abstract static class Identifiable { private final int id; public Identifiable(int id) { this.id = id; } @Override public final boolean equals(Object obj) { if (!(obj instanceof Identifiable)) { return false; } Identifiable other = (Identifiable) obj; return id == other.id && getClass() == other.getClass(); } @Override public final int hashCode() { return id; } } static class Person extends Identifiable { public Person(int id) { super(id); } } static class Account extends Identifiable { public Account(int id) { super(id); } } }
apache-2.0
firzhan/webapp_logging
src/main/java/org/wso2/carbon/bpmn/rest/api/runtime/variable/DoubleRestVariableConverter.java
1720
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.bpmn.rest.api.runtime.variable; import org.activiti.engine.ActivitiIllegalArgumentException; import org.wso2.carbon.bpmn.rest.api.engine.variable.RestVariable; /** * @author Frederik Heremans */ public class DoubleRestVariableConverter implements RestVariableConverter { @Override public String getRestTypeName() { return "double"; } @Override public Class< ? > getVariableType() { return Double.class; } @Override public Object getVariableValue(RestVariable result) { if(result.getValue() != null) { if(!(result.getValue() instanceof Number)) { throw new ActivitiIllegalArgumentException("Converter can only convert doubles"); } return ((Number) result.getValue()).doubleValue(); } return null; } @Override public void convertVariableValue(Object variableValue, RestVariable result) { if(variableValue != null) { if(!(variableValue instanceof Double)) { throw new ActivitiIllegalArgumentException("Converter can only convert doubles"); } result.setValue(variableValue); } else { result.setValue(null); } } }
apache-2.0
brianchen2012/syncope
core/src/main/java/org/apache/syncope/core/persistence/validation/attrvalue/AlwaysTrueValidator.java
1569
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.core.persistence.validation.attrvalue; import org.apache.syncope.core.persistence.beans.AbstractAttrValue; import org.apache.syncope.core.persistence.beans.AbstractSchema; public class AlwaysTrueValidator extends AbstractValidator { private static final long serialVersionUID = 872107345555773183L; public AlwaysTrueValidator(final AbstractSchema schema) { super(schema); } @Override protected void doValidate(final AbstractAttrValue attributeValue) throws InvalidAttrValueException { Boolean value = attributeValue.getValue(); if (!value) { String error = "This attribute must be set to \"true\""; throw new InvalidAttrValueException(error); } } }
apache-2.0
Activiti/Activiti
activiti-core/activiti-api-impl/activiti-api-process-model-impl/src/main/java/org/activiti/api/runtime/model/impl/ObjectValueToStringConverter.java
1933
/* * Copyright 2010-2020 Alfresco Software, Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.api.runtime.model.impl; import static org.activiti.api.runtime.model.impl.ProcessVariablesMapTypeRegistry.OBJECT_TYPE_KEY; import java.util.Map; import org.springframework.core.convert.converter.Converter; import com.fasterxml.jackson.databind.ObjectMapper; @ProcessVariableTypeConverter public class ObjectValueToStringConverter implements Converter<ObjectValue, String> { private static final String CLASS = "@class"; private final ObjectMapper objectMapper; public ObjectValueToStringConverter(ObjectMapper objectMapper) { this.objectMapper = objectMapper; } @SuppressWarnings("unchecked") @Override public String convert(ObjectValue source) { try { Map<String, Object> value = objectMapper.convertValue(source, Map.class); if (Map.class.isInstance(value.get(OBJECT_TYPE_KEY))) { Map<String, Object> object = objectMapper.convertValue(source.getObject(), Map.class); if (object.containsKey(CLASS)) { Map.class.cast(value.get(OBJECT_TYPE_KEY)).put(CLASS, object.get(CLASS)); } } return objectMapper.writeValueAsString(value); } catch (Exception cause) { throw new RuntimeException(cause); } } }
apache-2.0
arhimondr/presto
presto-main/src/main/java/com/facebook/presto/sql/planner/optimizations/StreamPropertyDerivations.java
34920
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner.optimizations; import com.facebook.presto.Session; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.LocalProperty; import com.facebook.presto.spi.plan.AggregationNode; import com.facebook.presto.spi.plan.DistinctLimitNode; import com.facebook.presto.spi.plan.FilterNode; import com.facebook.presto.spi.plan.LimitNode; import com.facebook.presto.spi.plan.MarkDistinctNode; import com.facebook.presto.spi.plan.PlanNode; import com.facebook.presto.spi.plan.ProjectNode; import com.facebook.presto.spi.plan.TableScanNode; import com.facebook.presto.spi.plan.TopNNode; import com.facebook.presto.spi.plan.UnionNode; import com.facebook.presto.spi.plan.ValuesNode; import com.facebook.presto.spi.relation.RowExpression; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.planner.TypeProvider; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.AssignUniqueId; import com.facebook.presto.sql.planner.plan.DeleteNode; import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode; import com.facebook.presto.sql.planner.plan.ExchangeNode; import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode; import 
com.facebook.presto.sql.planner.plan.GroupIdNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.InternalPlanVisitor; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LateralJoinNode; import com.facebook.presto.sql.planner.plan.OutputNode; import com.facebook.presto.sql.planner.plan.RowNumberNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.SpatialJoinNode; import com.facebook.presto.sql.planner.plan.StatisticsWriterNode; import com.facebook.presto.sql.planner.plan.TableFinishNode; import com.facebook.presto.sql.planner.plan.TableWriterMergeNode; import com.facebook.presto.sql.planner.plan.TableWriterNode; import com.facebook.presto.sql.planner.plan.TopNRowNumberNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.WindowNode; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import javax.annotation.concurrent.Immutable; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION; import static com.facebook.presto.sql.planner.optimizations.PropertyDerivations.extractFixedValuesToConstantExpressions; import static com.facebook.presto.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.FIXED; 
import static com.facebook.presto.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.MULTIPLE; import static com.facebook.presto.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.SINGLE; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public final class StreamPropertyDerivations { private StreamPropertyDerivations() {} public static StreamProperties derivePropertiesRecursively(PlanNode node, Metadata metadata, Session session, TypeProvider types, SqlParser parser) { List<StreamProperties> inputProperties = node.getSources().stream() .map(source -> derivePropertiesRecursively(source, metadata, session, types, parser)) .collect(toImmutableList()); return StreamPropertyDerivations.deriveProperties(node, inputProperties, metadata, session, types, parser); } public static StreamProperties deriveProperties(PlanNode node, StreamProperties inputProperties, Metadata metadata, Session session, TypeProvider types, SqlParser parser) { return deriveProperties(node, ImmutableList.of(inputProperties), metadata, session, types, parser); } public static StreamProperties deriveProperties(PlanNode node, List<StreamProperties> inputProperties, Metadata metadata, Session session, TypeProvider types, SqlParser parser) { requireNonNull(node, "node is null"); requireNonNull(inputProperties, "inputProperties is null"); requireNonNull(metadata, "metadata is null"); requireNonNull(session, "session is null"); requireNonNull(types, "types is null"); requireNonNull(parser, "parser is null"); // 
properties.otherActualProperties will never be null here because the only way // an external caller should obtain StreamProperties is from this method, and the // last line of this method assures otherActualProperties is set. ActualProperties otherProperties = PropertyDerivations.streamBackdoorDeriveProperties( node, inputProperties.stream() .map(properties -> properties.otherActualProperties) .collect(toImmutableList()), metadata, session, types, parser); StreamProperties result = node.accept(new Visitor(metadata, session, types), inputProperties) .withOtherActualProperties(otherProperties); result.getPartitioningColumns().ifPresent(columns -> verify(node.getOutputVariables().containsAll(columns), "Stream-level partitioning properties contain columns not present in node's output")); Set<VariableReferenceExpression> localPropertyColumns = result.getLocalProperties().stream() .flatMap(property -> property.getColumns().stream()) .collect(Collectors.toSet()); verify(node.getOutputVariables().containsAll(localPropertyColumns), "Stream-level local properties contain columns not present in node's output"); return result; } private static class Visitor extends InternalPlanVisitor<StreamProperties, List<StreamProperties>> { private final Metadata metadata; private final Session session; private final TypeProvider types; private Visitor(Metadata metadata, Session session, TypeProvider types) { this.metadata = metadata; this.session = session; this.types = types; } @Override public StreamProperties visitPlan(PlanNode node, List<StreamProperties> inputProperties) { throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName()); } // // Joins // @Override public StreamProperties visitJoin(JoinNode node, List<StreamProperties> inputProperties) { StreamProperties leftProperties = inputProperties.get(0); List<VariableReferenceExpression> outputs = node.getOutputVariables(); boolean unordered = PropertyDerivations.spillPossible(session, 
node.getType()); switch (node.getType()) { case INNER: return leftProperties .translate(column -> PropertyDerivations.filterOrRewrite(outputs, node.getCriteria(), column)) .unordered(unordered); case LEFT: return leftProperties .translate(column -> PropertyDerivations.filterIfMissing(outputs, column)) .unordered(unordered); case RIGHT: // since this is a right join, none of the matched output rows will contain nulls // in the left partitioning columns, and all of the unmatched rows will have // null for all left columns. therefore, the output is still partitioned on the // left columns. the only change is there will be at least two streams so the // output is multiple // There is one exception to this. If the left is partitioned on empty set, we // we can't say that the output is partitioned on empty set, but we can say that // it is partitioned on the left join symbols // todo do something smarter after https://github.com/prestodb/presto/pull/5877 is merged return new StreamProperties(MULTIPLE, Optional.empty(), false); case FULL: // the left can contain nulls in any stream so we can't say anything about the // partitioning, and nulls from the right are produced from a extra new stream // so we will always have multiple streams. 
return new StreamProperties(MULTIPLE, Optional.empty(), false); default: throw new UnsupportedOperationException("Unsupported join type: " + node.getType()); } } @Override public StreamProperties visitSpatialJoin(SpatialJoinNode node, List<StreamProperties> inputProperties) { StreamProperties leftProperties = inputProperties.get(0); switch (node.getType()) { case INNER: case LEFT: return leftProperties.translate(column -> PropertyDerivations.filterIfMissing(node.getOutputVariables(), column)); default: throw new IllegalArgumentException("Unsupported spatial join type: " + node.getType()); } } @Override public StreamProperties visitIndexJoin(IndexJoinNode node, List<StreamProperties> inputProperties) { StreamProperties probeProperties = inputProperties.get(0); switch (node.getType()) { case INNER: return probeProperties; case SOURCE_OUTER: // the probe can contain nulls in any stream so we can't say anything about the // partitioning but the other properties of the probe will be maintained. 
return probeProperties.withUnspecifiedPartitioning(); default: throw new UnsupportedOperationException("Unsupported join type: " + node.getType()); } } // // Source nodes // @Override public StreamProperties visitValues(ValuesNode node, List<StreamProperties> context) { // values always produces a single stream return StreamProperties.singleStream(); } @Override public StreamProperties visitTableScan(TableScanNode node, List<StreamProperties> inputProperties) { TableLayout layout = metadata.getLayout(session, node.getTable()); Map<ColumnHandle, VariableReferenceExpression> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse(); // Globally constant assignments Set<ColumnHandle> constants = new HashSet<>(); extractFixedValuesToConstantExpressions(node.getCurrentConstraint()).orElse(ImmutableMap.of()) .entrySet().stream() .filter(entry -> !entry.getValue().isNull()) // TODO consider allowing nulls .forEach(entry -> constants.add(entry.getKey())); Optional<Set<VariableReferenceExpression>> streamPartitionSymbols = layout.getStreamPartitioningColumns() .flatMap(columns -> getNonConstantVariables(columns, assignments, constants)); // if we are partitioned on empty set, we must say multiple of unknown partitioning, because // the connector does not guarantee a single split in this case (since it might not understand // that the value is a constant). 
if (streamPartitionSymbols.isPresent() && streamPartitionSymbols.get().isEmpty()) { return new StreamProperties(MULTIPLE, Optional.empty(), false); } return new StreamProperties(MULTIPLE, streamPartitionSymbols, false); } private Optional<Set<VariableReferenceExpression>> getNonConstantVariables(Set<ColumnHandle> columnHandles, Map<ColumnHandle, VariableReferenceExpression> assignments, Set<ColumnHandle> globalConstants) { // Strip off the constants from the partitioning columns (since those are not required for translation) Set<ColumnHandle> constantsStrippedPartitionColumns = columnHandles.stream() .filter(column -> !globalConstants.contains(column)) .collect(toImmutableSet()); ImmutableSet.Builder<VariableReferenceExpression> builder = ImmutableSet.builder(); for (ColumnHandle column : constantsStrippedPartitionColumns) { VariableReferenceExpression translated = assignments.get(column); if (translated == null) { return Optional.empty(); } builder.add(translated); } return Optional.of(builder.build()); } @Override public StreamProperties visitExchange(ExchangeNode node, List<StreamProperties> inputProperties) { if (node.isEnsureSourceOrdering() || node.getOrderingScheme().isPresent()) { return StreamProperties.ordered(); } if (node.getScope().isRemote()) { // TODO: correctly determine if stream is parallelised // based on session properties return StreamProperties.fixedStreams(); } switch (node.getType()) { case GATHER: return StreamProperties.singleStream(); case REPARTITION: if (node.getPartitioningScheme().getPartitioning().getHandle().equals(FIXED_ARBITRARY_DISTRIBUTION)) { return new StreamProperties(FIXED, Optional.empty(), false); } checkArgument( node.getPartitioningScheme().getPartitioning().getArguments().stream().allMatch(VariableReferenceExpression.class::isInstance), format("Expect all partitioning arguments to be VariableReferenceExpression, but get %s", node.getPartitioningScheme().getPartitioning().getArguments())); return new StreamProperties( 
FIXED, Optional.of(node.getPartitioningScheme().getPartitioning().getArguments().stream() .map(VariableReferenceExpression.class::cast) .collect(toImmutableList())), false); case REPLICATE: return new StreamProperties(MULTIPLE, Optional.empty(), false); } throw new UnsupportedOperationException("not yet implemented"); } // // Nodes that rewrite and/or drop symbols // @Override public StreamProperties visitProject(ProjectNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // We can describe properties in terms of inputs that are projected unmodified (i.e., identity projections) Map<VariableReferenceExpression, VariableReferenceExpression> identities = computeIdentityTranslations(node.getAssignments().getMap(), types); return properties.translate(column -> Optional.ofNullable(identities.get(column))); } private static Map<VariableReferenceExpression, VariableReferenceExpression> computeIdentityTranslations(Map<VariableReferenceExpression, RowExpression> assignments, TypeProvider types) { Map<VariableReferenceExpression, VariableReferenceExpression> inputToOutput = new HashMap<>(); for (Map.Entry<VariableReferenceExpression, RowExpression> assignment : assignments.entrySet()) { RowExpression expression = assignment.getValue(); if (expression instanceof VariableReferenceExpression) { inputToOutput.put((VariableReferenceExpression) expression, assignment.getKey()); } } return inputToOutput; } @Override public StreamProperties visitGroupId(GroupIdNode node, List<StreamProperties> inputProperties) { Map<VariableReferenceExpression, VariableReferenceExpression> inputToOutputMappings = new HashMap<>(); for (Map.Entry<VariableReferenceExpression, VariableReferenceExpression> setMapping : node.getGroupingColumns().entrySet()) { if (node.getCommonGroupingColumns().contains(setMapping.getKey())) { // TODO: Add support for translating a property on a single column to multiple columns // when GroupIdNode is 
copying a single input grouping column into multiple output grouping columns (i.e. aliases), this is basically picking one arbitrarily inputToOutputMappings.putIfAbsent(setMapping.getValue(), setMapping.getKey()); } } // TODO: Add support for translating a property on a single column to multiple columns // this is deliberately placed after the grouping columns, because preserving properties has a bigger perf impact for (VariableReferenceExpression argument : node.getAggregationArguments()) { inputToOutputMappings.putIfAbsent(argument, argument); } return Iterables.getOnlyElement(inputProperties).translate(column -> Optional.ofNullable(inputToOutputMappings.get(column))); } @Override public StreamProperties visitAggregation(AggregationNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // Only grouped symbols projected symbols are passed through return properties.translate(variable -> node.getGroupingKeys().contains(variable) ? 
Optional.of(variable) : Optional.empty()); } @Override public StreamProperties visitStatisticsWriterNode(StatisticsWriterNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // analyze finish only outputs row count return properties.withUnspecifiedPartitioning(); } @Override public StreamProperties visitTableFinish(TableFinishNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // table finish only outputs the row count return properties.withUnspecifiedPartitioning(); } @Override public StreamProperties visitDelete(DeleteNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // delete only outputs the row count return properties.withUnspecifiedPartitioning(); } @Override public StreamProperties visitTableWriter(TableWriterNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // table writer only outputs the row count return properties.withUnspecifiedPartitioning(); } @Override public StreamProperties visitTableWriteMerge(TableWriterMergeNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitUnnest(UnnestNode node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // We can describe properties in terms of inputs that are projected unmodified (i.e., not the unnested symbols) Set<VariableReferenceExpression> passThroughInputs = ImmutableSet.copyOf(node.getReplicateVariables()); return properties.translate(column -> { if (passThroughInputs.contains(column)) { return Optional.of(column); } return Optional.empty(); }); } @Override public StreamProperties visitExplainAnalyze(ExplainAnalyzeNode node, List<StreamProperties> 
inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); // explain only outputs the plan string return properties.withUnspecifiedPartitioning(); } // // Nodes that gather data into a single stream // @Override public StreamProperties visitIndexSource(IndexSourceNode node, List<StreamProperties> context) { return StreamProperties.singleStream(); } @Override public StreamProperties visitUnion(UnionNode node, List<StreamProperties> context) { // union is implemented using a local gather exchange return StreamProperties.singleStream(); } @Override public StreamProperties visitEnforceSingleRow(EnforceSingleRowNode node, List<StreamProperties> context) { return StreamProperties.singleStream(); } @Override public StreamProperties visitAssignUniqueId(AssignUniqueId node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); if (properties.getPartitioningColumns().isPresent()) { // preserve input (possibly preferred) partitioning return properties; } return new StreamProperties(properties.getDistribution(), Optional.of(ImmutableList.of(node.getIdVariable())), properties.isOrdered()); } // // Simple nodes that pass through stream properties // @Override public StreamProperties visitOutput(OutputNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties) .translate(column -> PropertyDerivations.filterIfMissing(node.getOutputVariables(), column)); } @Override public StreamProperties visitMarkDistinct(MarkDistinctNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitWindow(WindowNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitRowNumber(RowNumberNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override 
public StreamProperties visitTopNRowNumber(TopNRowNumberNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitTopN(TopNNode node, List<StreamProperties> inputProperties) { // Partial TopN doesn't guarantee that stream is ordered if (node.getStep().equals(TopNNode.Step.PARTIAL)) { return Iterables.getOnlyElement(inputProperties); } return StreamProperties.ordered(); } @Override public StreamProperties visitSort(SortNode node, List<StreamProperties> inputProperties) { StreamProperties sourceProperties = Iterables.getOnlyElement(inputProperties); if (sourceProperties.isSingleStream()) { // stream is only sorted if sort operator is executed without parallelism return StreamProperties.ordered(); } return sourceProperties; } @Override public StreamProperties visitLimit(LimitNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitDistinctLimit(DistinctLimitNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitSemiJoin(SemiJoinNode node, List<StreamProperties> inputProperties) { return inputProperties.get(0); } @Override public StreamProperties visitApply(ApplyNode node, List<StreamProperties> inputProperties) { throw new IllegalStateException("Unexpected node: " + node.getClass()); } @Override public StreamProperties visitLateralJoin(LateralJoinNode node, List<StreamProperties> inputProperties) { throw new IllegalStateException("Unexpected node: " + node.getClass()); } @Override public StreamProperties visitFilter(FilterNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } @Override public StreamProperties visitSample(SampleNode node, List<StreamProperties> inputProperties) { return Iterables.getOnlyElement(inputProperties); } } @Immutable public static final 
class StreamProperties { public enum StreamDistribution { SINGLE, MULTIPLE, FIXED } private final StreamDistribution distribution; private final Optional<List<VariableReferenceExpression>> partitioningColumns; // if missing => partitioned with some unknown scheme private final boolean ordered; // We are only interested in the local properties, but PropertyDerivations requires input // ActualProperties, so we hold on to the whole object private final ActualProperties otherActualProperties; // NOTE: Partitioning on zero columns (or effectively zero columns if the columns are constant) indicates that all // the rows will be partitioned into a single stream. private StreamProperties(StreamDistribution distribution, Optional<? extends Iterable<VariableReferenceExpression>> partitioningColumns, boolean ordered) { this(distribution, partitioningColumns, ordered, null); } private StreamProperties( StreamDistribution distribution, Optional<? extends Iterable<VariableReferenceExpression>> partitioningColumns, boolean ordered, ActualProperties otherActualProperties) { this.distribution = requireNonNull(distribution, "distribution is null"); this.partitioningColumns = requireNonNull(partitioningColumns, "partitioningProperties is null") .map(ImmutableList::copyOf); checkArgument(distribution != SINGLE || this.partitioningColumns.equals(Optional.of(ImmutableList.of())), "Single stream must be partitioned on empty set"); checkArgument(distribution == SINGLE || !this.partitioningColumns.equals(Optional.of(ImmutableList.of())), "Multiple streams must not be partitioned on empty set"); this.ordered = ordered; checkArgument(!ordered || distribution == SINGLE, "Ordered must be a single stream"); this.otherActualProperties = otherActualProperties; } public List<LocalProperty<VariableReferenceExpression>> getLocalProperties() { checkState(otherActualProperties != null, "otherActualProperties not set"); return otherActualProperties.getLocalProperties(); } private static StreamProperties 
singleStream() { return new StreamProperties(SINGLE, Optional.of(ImmutableSet.of()), false); } private static StreamProperties fixedStreams() { return new StreamProperties(FIXED, Optional.empty(), false); } private static StreamProperties ordered() { return new StreamProperties(SINGLE, Optional.of(ImmutableSet.of()), true); } private StreamProperties unordered(boolean unordered) { if (unordered) { ActualProperties updatedProperies = null; if (otherActualProperties != null) { updatedProperies = ActualProperties.builderFrom(otherActualProperties) .unordered(true) .build(); } return new StreamProperties( distribution, partitioningColumns, false, updatedProperies); } return this; } public boolean isSingleStream() { return distribution == SINGLE; } public StreamDistribution getDistribution() { return distribution; } public boolean isExactlyPartitionedOn(Iterable<VariableReferenceExpression> columns) { return partitioningColumns.isPresent() && columns.equals(ImmutableList.copyOf(partitioningColumns.get())); } public boolean isPartitionedOn(Iterable<VariableReferenceExpression> columns) { if (!partitioningColumns.isPresent()) { return false; } // partitioned on (k_1, k_2, ..., k_n) => partitioned on (k_1, k_2, ..., k_n, k_n+1, ...) 
// can safely ignore all constant columns when comparing partition properties return ImmutableSet.copyOf(columns).containsAll(partitioningColumns.get()); } public boolean isOrdered() { return ordered; } private StreamProperties withUnspecifiedPartitioning() { // a single stream has no symbols if (isSingleStream()) { return this; } // otherwise we are distributed on some symbols, but since we are trying to remove all symbols, // just say we have multiple partitions with an unknown scheme return new StreamProperties(distribution, Optional.empty(), ordered); } private StreamProperties withOtherActualProperties(ActualProperties actualProperties) { return new StreamProperties(distribution, partitioningColumns, ordered, actualProperties); } public StreamProperties translate(Function<VariableReferenceExpression, Optional<VariableReferenceExpression>> translator) { return new StreamProperties( distribution, partitioningColumns.flatMap(partitioning -> { ImmutableList.Builder<VariableReferenceExpression> newPartitioningColumns = ImmutableList.builder(); for (VariableReferenceExpression partitioningColumn : partitioning) { Optional<VariableReferenceExpression> translated = translator.apply(partitioningColumn); if (!translated.isPresent()) { return Optional.empty(); } newPartitioningColumns.add(translated.get()); } return Optional.of(newPartitioningColumns.build()); }), ordered, otherActualProperties.translateVariable(translator)); } public Optional<List<VariableReferenceExpression>> getPartitioningColumns() { return partitioningColumns; } @Override public int hashCode() { return Objects.hash(distribution, partitioningColumns); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } StreamProperties other = (StreamProperties) obj; return Objects.equals(this.distribution, other.distribution) && Objects.equals(this.partitioningColumns, other.partitioningColumns); } @Override public 
String toString() { return toStringHelper(this) .add("distribution", distribution) .add("partitioningColumns", partitioningColumns) .toString(); } } }
apache-2.0
ConsecroMUD/ConsecroMUD
com/suscipio_solutions/consecro_mud/Commands/AutoGuard.java
1260
package com.suscipio_solutions.consecro_mud.Commands; import java.util.Vector; import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB; import com.suscipio_solutions.consecro_mud.core.CMLib; @SuppressWarnings("rawtypes") public class AutoGuard extends StdCommand { public AutoGuard(){} private final String[] access=I(new String[]{"AUTOGUARD","GUARD"}); @Override public String[] getAccessWords(){return access;} @Override public boolean execute(MOB mob, Vector commands, int metaFlags) throws java.io.IOException { if((!mob.isAttribute(MOB.Attrib.AUTOGUARD)) ||((commands.size()>0)&&(((String)commands.firstElement()).toUpperCase().startsWith("G")))) { mob.setAttribute(MOB.Attrib.AUTOGUARD,true); mob.tell(L("You are now on guard. You will no longer follow group leaders.")); if(mob.isMonster()) CMLib.commands().postSay(mob,null,L("I am now on guard."),false,false); } else { mob.setAttribute(MOB.Attrib.AUTOGUARD,false); mob.tell(L("You are no longer on guard. You will now follow group leaders.")); if(mob.isMonster()) CMLib.commands().postSay(mob,null,L("I will now follow my group leader."),false,false); } return false; } @Override public boolean canBeOrdered(){return true;} }
apache-2.0
stikbomb/amolodid
chapter_001/src/test/java/ru/job4j/loop/PaintTest.java
1076
package ru.job4j.loop; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; /** * Test. * * @author Alexander Molodid (a.molodid@gmail.com) * @version $Id$ * @since 0.1 */ public class PaintTest { /** Test added. */ @Test public void whenPiramidWithHeightTwoThenStringWithTwoRows() { Paint paint = new Paint(); String result = paint.piramid(2); String expected = String.format(" ^ %s^^^%s", System.getProperty("line.separator"), System.getProperty("line.separator")); assertThat(result, is(expected)); } /** Test added. */ @Test public void whenPiramidWithHeightThreeThenStringWithThreeRows() { Paint paint = new Paint(); String result = paint.piramid(3); String expected = String.format(" ^ %s ^^^ %s^^^^^%s", System.getProperty("line.separator"), System.getProperty("line.separator"), System.getProperty("line.separator")); assertThat(result, is(expected)); } }
apache-2.0
kingargyle/turmeric-bot
camel-core/src/main/java/org/apache/camel/impl/DefaultPackageScanClassResolver.java
19518
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.net.URLDecoder; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.LinkedHashSet; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; import org.apache.camel.impl.scan.AnnotatedWithAnyPackageScanFilter; import org.apache.camel.impl.scan.AnnotatedWithPackageScanFilter; import org.apache.camel.impl.scan.AssignableToPackageScanFilter; import org.apache.camel.impl.scan.CompositePackageScanFilter; import org.apache.camel.spi.PackageScanClassResolver; import org.apache.camel.spi.PackageScanFilter; import org.apache.camel.util.IOHelper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Default implement of {@link org.apache.camel.spi.PackageScanClassResolver} */ public class DefaultPackageScanClassResolver implements PackageScanClassResolver { protected final transient Log log = 
LogFactory.getLog(getClass()); private final Set<ClassLoader> classLoaders = new LinkedHashSet<ClassLoader>(); private Set<PackageScanFilter> scanFilters; private String[] acceptableSchemes = {}; public DefaultPackageScanClassResolver() { try { ClassLoader ccl = Thread.currentThread().getContextClassLoader(); if (ccl != null) { if (log.isTraceEnabled()) { log.trace("Adding ContextClassLoader from current thread: " + ccl); } classLoaders.add(ccl); } } catch (Exception e) { // Ignore this exception log.warn("Cannot add ContextClassLoader from current thread due " + e.getMessage() + ". This exception will be ignored."); } classLoaders.add(DefaultPackageScanClassResolver.class.getClassLoader()); } public void addClassLoader(ClassLoader classLoader) { classLoaders.add(classLoader); } public void addFilter(PackageScanFilter filter) { if (scanFilters == null) { scanFilters = new LinkedHashSet<PackageScanFilter>(); } scanFilters.add(filter); } public void removeFilter(PackageScanFilter filter) { if (scanFilters != null) { scanFilters.remove(filter); } } public void setAcceptableSchemes(String schemes) { if (schemes != null) { acceptableSchemes = schemes.split(";"); } } public boolean isAcceptableScheme(String urlPath) { if (urlPath != null) { for (String scheme : acceptableSchemes) { if (urlPath.startsWith(scheme)) { return true; } } } return false; } public Set<ClassLoader> getClassLoaders() { // return a new set to avoid any concurrency issues in other runtimes such as OSGi return Collections.unmodifiableSet(new LinkedHashSet<ClassLoader>(classLoaders)); } public void setClassLoaders(Set<ClassLoader> classLoaders) { // add all the class loaders this.classLoaders.addAll(classLoaders); } @SuppressWarnings("unchecked") public Set<Class<?>> findAnnotated(Class<? extends Annotation> annotation, String... 
packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } if (log.isDebugEnabled()) { log.debug("Searching for annotations of " + annotation.getName() + " in packages: " + Arrays.asList(packageNames)); } PackageScanFilter test = getCompositeFilter(new AnnotatedWithPackageScanFilter(annotation, true)); Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(test, pkg, classes); } if (log.isDebugEnabled()) { log.debug("Found: " + classes); } return classes; } @SuppressWarnings("unchecked") public Set<Class<?>> findAnnotated(Set<Class<? extends Annotation>> annotations, String... packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } if (log.isDebugEnabled()) { log.debug("Searching for annotations of " + annotations + " in packages: " + Arrays.asList(packageNames)); } PackageScanFilter test = getCompositeFilter(new AnnotatedWithAnyPackageScanFilter(annotations, true)); Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(test, pkg, classes); } if (log.isDebugEnabled()) { log.debug("Found: " + classes); } return classes; } @SuppressWarnings("unchecked") public Set<Class<?>> findImplementations(Class parent, String... packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } if (log.isDebugEnabled()) { log.debug("Searching for implementations of " + parent.getName() + " in packages: " + Arrays.asList(packageNames)); } PackageScanFilter test = getCompositeFilter(new AssignableToPackageScanFilter(parent)); Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(test, pkg, classes); } if (log.isDebugEnabled()) { log.debug("Found: " + classes); } return classes; } @SuppressWarnings("unchecked") public Set<Class<?>> findByFilter(PackageScanFilter filter, String... 
packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(filter, pkg, classes); } if (log.isDebugEnabled()) { log.debug("Found: " + classes); } return classes; } protected void find(PackageScanFilter test, String packageName, Set<Class<?>> classes) { packageName = packageName.replace('.', '/'); Set<ClassLoader> set = getClassLoaders(); for (ClassLoader classLoader : set) { find(test, packageName, classLoader, classes); } } protected void find(PackageScanFilter test, String packageName, ClassLoader loader, Set<Class<?>> classes) { if (log.isTraceEnabled()) { log.trace("Searching for: " + test + " in package: " + packageName + " using classloader: " + loader.getClass().getName()); } Enumeration<URL> urls; try { urls = getResources(loader, packageName); if (!urls.hasMoreElements()) { log.trace("No URLs returned by classloader"); } } catch (IOException ioe) { log.warn("Cannot read package: " + packageName, ioe); return; } while (urls.hasMoreElements()) { URL url = null; try { url = urls.nextElement(); if (log.isTraceEnabled()) { log.trace("URL from classloader: " + url); } url = customResourceLocator(url); String urlPath = url.getFile(); urlPath = URLDecoder.decode(urlPath, "UTF-8"); if (log.isTraceEnabled()) { log.trace("Decoded urlPath: " + urlPath + " with protocol: " + url.getProtocol()); } // If it's a file in a directory, trim the stupid file: spec if (urlPath.startsWith("file:")) { // file path can be temporary folder which uses characters that the URLDecoder decodes wrong // for example + being decoded to something else (+ can be used in temp folders on Mac OS) // to remedy this then create new path without using the URLDecoder try { urlPath = new URI(url.getFile()).getPath(); } catch (URISyntaxException e) { // fallback to use as it was given from the URLDecoder // this allows us to work on Windows if users have spaces in paths } if 
(urlPath.startsWith("file:")) { urlPath = urlPath.substring(5); } } // osgi bundles should be skipped if (url.toString().startsWith("bundle:") || urlPath.startsWith("bundle:")) { log.trace("It's a virtual osgi bundle, skipping"); continue; } // Else it's in a JAR, grab the path to the jar if (urlPath.indexOf('!') > 0) { urlPath = urlPath.substring(0, urlPath.indexOf('!')); } if (log.isTraceEnabled()) { log.trace("Scanning for classes in [" + urlPath + "] matching criteria: " + test); } File file = new File(urlPath); if (file.isDirectory()) { if (log.isTraceEnabled()) { log.trace("Loading from directory using file: " + file); } loadImplementationsInDirectory(test, packageName, file, classes); } else { InputStream stream; if (urlPath.startsWith("http:") || urlPath.startsWith("https:") || urlPath.startsWith("sonicfs:") || isAcceptableScheme(urlPath)) { // load resources using http/https, sonicfs and other acceptable scheme // sonic ESB requires to be loaded using a regular URLConnection if (log.isTraceEnabled()) { log.trace("Loading from jar using url: " + urlPath); } URL urlStream = new URL(urlPath); URLConnection con = urlStream.openConnection(); // disable cache mainly to avoid jar file locking on Windows con.setUseCaches(false); stream = con.getInputStream(); } else { if (log.isTraceEnabled()) { log.trace("Loading from jar using file: " + file); } stream = new FileInputStream(file); } loadImplementationsInJar(test, packageName, stream, urlPath, classes); } } catch (IOException e) { // use debug logging to avoid being to noisy in logs log.debug("Cannot read entries in url: " + url, e); } } } // We can override this method to support the custom ResourceLocator protected URL customResourceLocator(URL url) throws IOException { // Do nothing here return url; } /** * Strategy to get the resources by the given classloader. 
* <p/> * Notice that in WebSphere platforms there is a {@link WebSpherePackageScanClassResolver} * to take care of WebSphere's odditiy of resource loading. * * @param loader the classloader * @param packageName the packagename for the package to load * @return URL's for the given package * @throws IOException is thrown by the classloader */ protected Enumeration<URL> getResources(ClassLoader loader, String packageName) throws IOException { if (log.isTraceEnabled()) { log.trace("Getting resource URL for package: " + packageName + " with classloader: " + loader); } // If the URL is a jar, the URLClassloader.getResources() seems to require a trailing slash. The // trailing slash is harmless for other URLs if (!packageName.endsWith("/")) { packageName = packageName + "/"; } return loader.getResources(packageName); } private PackageScanFilter getCompositeFilter(PackageScanFilter filter) { if (scanFilters != null) { CompositePackageScanFilter composite = new CompositePackageScanFilter(scanFilters); composite.addFilter(filter); return composite; } return filter; } /** * Finds matches in a physical directory on a filesystem. Examines all files * within a directory - if the File object is not a directory, and ends with * <i>.class</i> the file is loaded and tested to see if it is acceptable * according to the Test. Operates recursively to find classes within a * folder structure matching the package structure. * * @param test a Test used to filter the classes that are discovered * @param parent the package name up to this directory in the package * hierarchy. E.g. 
if /classes is in the classpath and we wish to * examine files in /classes/org/apache then the values of * <i>parent</i> would be <i>org/apache</i> * @param location a File object representing a directory */ private void loadImplementationsInDirectory(PackageScanFilter test, String parent, File location, Set<Class<?>> classes) { File[] files = location.listFiles(); StringBuilder builder = null; for (File file : files) { builder = new StringBuilder(100); String name = file.getName(); if (name != null) { name = name.trim(); builder.append(parent).append("/").append(name); String packageOrClass = parent == null ? name : builder.toString(); if (file.isDirectory()) { loadImplementationsInDirectory(test, packageOrClass, file, classes); } else if (name.endsWith(".class")) { addIfMatching(test, packageOrClass, classes); } } } } /** * Finds matching classes within a jar files that contains a folder * structure matching the package structure. If the File is not a JarFile or * does not exist a warning will be logged, but no error will be raised. 
* * @param test a Test used to filter the classes that are discovered * @param parent the parent package under which classes must be in order to * be considered * @param stream the inputstream of the jar file to be examined for classes * @param urlPath the url of the jar file to be examined for classes */ private void loadImplementationsInJar(PackageScanFilter test, String parent, InputStream stream, String urlPath, Set<Class<?>> classes) { JarInputStream jarStream = null; try { jarStream = new JarInputStream(stream); JarEntry entry; while ((entry = jarStream.getNextJarEntry()) != null) { String name = entry.getName(); if (name != null) { name = name.trim(); if (!entry.isDirectory() && name.startsWith(parent) && name.endsWith(".class")) { addIfMatching(test, name, classes); } } } } catch (IOException ioe) { log.warn("Cannot search jar file '" + urlPath + "' for classes matching criteria: " + test + " due to an IOException: " + ioe.getMessage(), ioe); } finally { IOHelper.close(jarStream, urlPath, log); } } /** * Add the class designated by the fully qualified class name provided to * the set of resolved classes if and only if it is approved by the Test * supplied. 
* * @param test the test used to determine if the class matches * @param fqn the fully qualified name of a class */ protected void addIfMatching(PackageScanFilter test, String fqn, Set<Class<?>> classes) { try { String externalName = fqn.substring(0, fqn.indexOf('.')).replace('/', '.'); Set<ClassLoader> set = getClassLoaders(); boolean found = false; for (ClassLoader classLoader : set) { if (log.isTraceEnabled()) { log.trace("Testing for class " + externalName + " matches criteria [" + test + "] using classloader:" + classLoader); } try { Class<?> type = classLoader.loadClass(externalName); if (log.isTraceEnabled()) { log.trace("Loaded the class: " + type + " in classloader: " + classLoader); } if (test.matches(type)) { if (log.isTraceEnabled()) { log.trace("Found class: " + type + " which matches the filter in classloader: " + classLoader); } classes.add(type); } found = true; break; } catch (ClassNotFoundException e) { if (log.isTraceEnabled()) { log.trace("Cannot find class '" + fqn + "' in classloader: " + classLoader + ". Reason: " + e, e); } } catch (NoClassDefFoundError e) { if (log.isTraceEnabled()) { log.trace("Cannot find the class definition '" + fqn + "' in classloader: " + classLoader + ". Reason: " + e, e); } } } if (!found) { if (log.isDebugEnabled()) { // use debug to avoid being noisy in logs log.debug("Cannot find class '" + fqn + "' in any classloaders: " + set); } } } catch (Exception e) { if (log.isWarnEnabled()) { log.warn("Cannot examine class '" + fqn + "' due to a " + e.getClass().getName() + " with message: " + e.getMessage(), e); } } } }
apache-2.0
lievendoclo/Valkyrie-RCP
valkyrie-rcp-core/src/main/java/org/valkyriercp/application/exceptionhandling/ExceptionPurger.java
1116
/** * Copyright (C) 2015 Valkyrie RCP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.valkyriercp.application.exceptionhandling; /** * Purges a throwable, ussually by looking into it's chain. * Usefull for unwrapping WrapEverythingException etc. * * @see DefaultExceptionPurger */ public interface ExceptionPurger { /** * Purges the throwable to unwrap it to find the most suitable throwable to evaluate or handle. * * @param e the root exception or error * @return e or a chained Throwable which is part of e's chain */ Throwable purge(Throwable e); }
apache-2.0
consulo/consulo
modules/base/vcs-log-impl/src/main/java/com/intellij/vcs/log/util/StopWatch.java
3234
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.vcs.log.util; import consulo.logging.Logger; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.ContainerUtil; import javax.annotation.Nonnull; import java.util.Collection; import java.util.Map; public class StopWatch { private static final Logger LOG = Logger.getInstance(StopWatch.class); private static final String[] UNIT_NAMES = new String[]{"s", "m", "h"}; private static final long[] UNITS = new long[]{1, 60, 60 * 60}; private static final String MSEC_FORMAT = "%03d"; private final long myStartTime; @Nonnull private final String myOperation; @Nonnull private final Map<VirtualFile, Long> myDurationPerRoot; private StopWatch(@Nonnull String operation) { myOperation = operation; myStartTime = System.currentTimeMillis(); myDurationPerRoot = ContainerUtil.newHashMap(); } @Nonnull public static StopWatch start(@Nonnull String operation) { return new StopWatch(operation); } public void rootCompleted(@Nonnull VirtualFile root) { long totalDuration = System.currentTimeMillis() - myStartTime; long duration = totalDuration - sum(myDurationPerRoot.values()); myDurationPerRoot.put(root, duration); } private static long sum(@Nonnull Collection<Long> durations) { long sum = 0; for (Long duration : durations) { sum += duration; } return sum; } public void report() { String message = myOperation + " 
took " + formatTime(System.currentTimeMillis() - myStartTime); if (myDurationPerRoot.size() > 1) { message += "\n" + StringUtil.join(myDurationPerRoot.entrySet(), entry -> " " + entry.getKey().getName() + ": " + formatTime(entry.getValue()), "\n"); } LOG.debug(message); } /** * 1h 1m 1.001s */ @Nonnull public static String formatTime(long time) { if (time < 1000 * UNITS[0]) { return time + "ms"; } String result = ""; long remainder = time / 1000; long msec = time % 1000; for (int i = UNITS.length - 1; i >= 0; i--) { if (remainder < UNITS[i]) continue; long quotient = remainder / UNITS[i]; remainder = remainder % UNITS[i]; if (i == 0) { result += quotient + (msec == 0 ? "" : "." + String.format(MSEC_FORMAT, msec)) + UNIT_NAMES[i]; } else { result += quotient + UNIT_NAMES[i] + " "; if (remainder == 0 && msec != 0) { result += "0." + String.format(MSEC_FORMAT, msec) + UNIT_NAMES[0]; } } } return result; } }
apache-2.0
icemagno/mclm
src/main/java/br/mil/mar/casnav/mclm/action/GetCapabilitiesAction.java
1003
package br.mil.mar.casnav.mclm.action; import org.apache.struts2.convention.annotation.Action; import br.mil.mar.casnav.mclm.service.GetCapabilitiesService; import br.mil.mar.casnav.mclm.util.CodeUtils; @Action("getCapabilities") public class GetCapabilitiesAction extends BaseAction { private String url; private String version; private final GetCapabilitiesService getCapabilitiesService; public GetCapabilitiesAction() { getCapabilitiesService = new GetCapabilitiesService(); } @Override public String execute() throws Exception { String jsonResult = getCapabilitiesService.getAsJson(url, version); return CodeUtils.sendHttpResponse(jsonResult, 200); } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } }
apache-2.0
PengYechang/coolweather
src/com/myweather/app/db/CoolWeatherDB.java
3780
package com.myweather.app.db; import java.util.ArrayList; import java.util.List; import com.myweather.app.model.City; import com.myweather.app.model.County; import com.myweather.app.model.Province; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; public class CoolWeatherDB { public static final String DB_NAME = "cool_weather"; public static final int VERSION = 1; private static CoolWeatherDB coolweatherDB; private SQLiteDatabase db; private CoolWeatherDB(Context context) { CoolWeatherOpenHelper dbHelper = new CoolWeatherOpenHelper(context, DB_NAME, null, VERSION); db = dbHelper.getWritableDatabase(); } //»ñÈ¡CoolWeatherDBʵÀý public synchronized static CoolWeatherDB getInstance(Context context){ if(coolweatherDB == null){ coolweatherDB = new CoolWeatherDB(context); } return coolweatherDB; } //Province public void saveProvince(Province province){ if(province != null){ ContentValues values = new ContentValues(); values.put("province_name", province.getProvinceName()); values.put("province_code", province.getProvinceCode()); db.insert("Province", null, values); } } public List<Province> loadProvinces(){ List<Province> list = new ArrayList<Province>(); Cursor cursor = db.query("Province", null, null, null, null, null, null); if(cursor.moveToFirst()){ do{ Province province = new Province(); province.setId(cursor.getInt(cursor.getColumnIndex("id"))); province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code"))); province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name"))); list.add(province); }while(cursor.moveToNext()); } if(cursor != null){ cursor.close(); } return list; } //City public void saveCity(City city){ if(city != null){ ContentValues values = new ContentValues(); values.put("city_name", city.getCityName()); values.put("city_code", city.getCityCode()); values.put("province_id", city.getProvinceId()); db.insert("City", 
null, values); } } public List<City> loadCities(int provinceId){ List<City> list = new ArrayList<City>(); Cursor cursor = db.query("City", null, "province_id = ?", new String[]{String.valueOf(provinceId)}, null, null, null); if(cursor.moveToFirst()){ do{ City city = new City(); city.setId(cursor.getInt(cursor.getColumnIndex("id"))); city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code"))); city.setCityName(cursor.getString(cursor.getColumnIndex("city_name"))); city.setProvinceId(provinceId); list.add(city); }while(cursor.moveToNext()); } if(cursor != null){ cursor.close(); } return list; } //County public void saveCounty(County county){ if(county != null){ ContentValues values = new ContentValues(); values.put("county_name", county.getCountyName()); values.put("county_code", county.getCountyCode()); values.put("city_id", county.getCityId()); db.insert("County", null, values); } } public List<County> loadCounties(int cityId){ List<County> list = new ArrayList<County>(); Cursor cursor = db.query("County", null, "city_id = ?", new String[]{String.valueOf(cityId)}, null, null, null); if(cursor.moveToFirst()){ do{ County county = new County(); county.setId(cursor.getInt(cursor.getColumnIndex("id"))); county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code"))); county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name"))); county.setCityId(cityId); list.add(county); }while(cursor.moveToNext()); } if(cursor != null){ cursor.close(); } return list; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-cognitosync/src/main/java/com/amazonaws/services/cognitosync/model/GetBulkPublishDetailsResult.java
18384
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cognitosync.model; import java.io.Serializable; import javax.annotation.Generated; /** * The output for the GetBulkPublishDetails operation. * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-sync-2014-06-30/GetBulkPublishDetails" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GetBulkPublishDetailsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID * generation is unique within a region. */ private String identityPoolId; /** The date/time at which the last bulk publish was initiated. */ private java.util.Date bulkPublishStartTime; /** If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. */ private java.util.Date bulkPublishCompleteTime; /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. 
* </p> */ private String bulkPublishStatus; /** If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to fail. */ private String failureMessage; /** * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID * generation is unique within a region. * * @param identityPoolId * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon * Cognito. GUID generation is unique within a region. */ public void setIdentityPoolId(String identityPoolId) { this.identityPoolId = identityPoolId; } /** * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID * generation is unique within a region. * * @return A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon * Cognito. GUID generation is unique within a region. */ public String getIdentityPoolId() { return this.identityPoolId; } /** * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID * generation is unique within a region. * * @param identityPoolId * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon * Cognito. GUID generation is unique within a region. * @return Returns a reference to this object so that method calls can be chained together. */ public GetBulkPublishDetailsResult withIdentityPoolId(String identityPoolId) { setIdentityPoolId(identityPoolId); return this; } /** * The date/time at which the last bulk publish was initiated. * * @param bulkPublishStartTime * The date/time at which the last bulk publish was initiated. */ public void setBulkPublishStartTime(java.util.Date bulkPublishStartTime) { this.bulkPublishStartTime = bulkPublishStartTime; } /** * The date/time at which the last bulk publish was initiated. 
* * @return The date/time at which the last bulk publish was initiated. */ public java.util.Date getBulkPublishStartTime() { return this.bulkPublishStartTime; } /** * The date/time at which the last bulk publish was initiated. * * @param bulkPublishStartTime * The date/time at which the last bulk publish was initiated. * @return Returns a reference to this object so that method calls can be chained together. */ public GetBulkPublishDetailsResult withBulkPublishStartTime(java.util.Date bulkPublishStartTime) { setBulkPublishStartTime(bulkPublishStartTime); return this; } /** * If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. * * @param bulkPublishCompleteTime * If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. */ public void setBulkPublishCompleteTime(java.util.Date bulkPublishCompleteTime) { this.bulkPublishCompleteTime = bulkPublishCompleteTime; } /** * If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. * * @return If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. */ public java.util.Date getBulkPublishCompleteTime() { return this.bulkPublishCompleteTime; } /** * If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. * * @param bulkPublishCompleteTime * If BulkPublishStatus is SUCCEEDED, the time the last bulk publish operation completed. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public GetBulkPublishDetailsResult withBulkPublishCompleteTime(java.util.Date bulkPublishCompleteTime) { setBulkPublishCompleteTime(bulkPublishCompleteTime); return this; } /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * * @param bulkPublishStatus * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * @see BulkPublishStatus */ public void setBulkPublishStatus(String bulkPublishStatus) { this.bulkPublishStatus = bulkPublishStatus; } /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. 
* </p> * * @return Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * @see BulkPublishStatus */ public String getBulkPublishStatus() { return this.bulkPublishStatus; } /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * * @param bulkPublishStatus * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * @return Returns a reference to this object so that method calls can be chained together. 
* @see BulkPublishStatus */ public GetBulkPublishDetailsResult withBulkPublishStatus(String bulkPublishStatus) { setBulkPublishStatus(bulkPublishStatus); return this; } /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * * @param bulkPublishStatus * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * @see BulkPublishStatus */ public void setBulkPublishStatus(BulkPublishStatus bulkPublishStatus) { withBulkPublishStatus(bulkPublishStatus); } /** * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. 
* </p> * * @param bulkPublishStatus * Status of the last bulk publish operation, valid values are: * <p> * NOT_STARTED - No bulk publish has been requested for this identity pool * </p> * <p> * IN_PROGRESS - Data is being published to the configured stream * </p> * <p> * SUCCEEDED - All data for the identity pool has been published to the configured stream * </p> * <p> * FAILED - Some portion of the data has failed to publish, check FailureMessage for the cause. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see BulkPublishStatus */ public GetBulkPublishDetailsResult withBulkPublishStatus(BulkPublishStatus bulkPublishStatus) { this.bulkPublishStatus = bulkPublishStatus.toString(); return this; } /** * If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to fail. * * @param failureMessage * If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to * fail. */ public void setFailureMessage(String failureMessage) { this.failureMessage = failureMessage; } /** * If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to fail. * * @return If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to * fail. */ public String getFailureMessage() { return this.failureMessage; } /** * If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to fail. * * @param failureMessage * If BulkPublishStatus is FAILED this field will contain the error message that caused the bulk publish to * fail. * @return Returns a reference to this object so that method calls can be chained together. */ public GetBulkPublishDetailsResult withFailureMessage(String failureMessage) { setFailureMessage(failureMessage); return this; } /** * Returns a string representation of this object. 
This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getIdentityPoolId() != null) sb.append("IdentityPoolId: ").append(getIdentityPoolId()).append(","); if (getBulkPublishStartTime() != null) sb.append("BulkPublishStartTime: ").append(getBulkPublishStartTime()).append(","); if (getBulkPublishCompleteTime() != null) sb.append("BulkPublishCompleteTime: ").append(getBulkPublishCompleteTime()).append(","); if (getBulkPublishStatus() != null) sb.append("BulkPublishStatus: ").append(getBulkPublishStatus()).append(","); if (getFailureMessage() != null) sb.append("FailureMessage: ").append(getFailureMessage()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GetBulkPublishDetailsResult == false) return false; GetBulkPublishDetailsResult other = (GetBulkPublishDetailsResult) obj; if (other.getIdentityPoolId() == null ^ this.getIdentityPoolId() == null) return false; if (other.getIdentityPoolId() != null && other.getIdentityPoolId().equals(this.getIdentityPoolId()) == false) return false; if (other.getBulkPublishStartTime() == null ^ this.getBulkPublishStartTime() == null) return false; if (other.getBulkPublishStartTime() != null && other.getBulkPublishStartTime().equals(this.getBulkPublishStartTime()) == false) return false; if (other.getBulkPublishCompleteTime() == null ^ this.getBulkPublishCompleteTime() == null) return false; if (other.getBulkPublishCompleteTime() != null && other.getBulkPublishCompleteTime().equals(this.getBulkPublishCompleteTime()) == false) return false; if (other.getBulkPublishStatus() == null ^ this.getBulkPublishStatus() == null) return false; if (other.getBulkPublishStatus() != 
null && other.getBulkPublishStatus().equals(this.getBulkPublishStatus()) == false) return false; if (other.getFailureMessage() == null ^ this.getFailureMessage() == null) return false; if (other.getFailureMessage() != null && other.getFailureMessage().equals(this.getFailureMessage()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getIdentityPoolId() == null) ? 0 : getIdentityPoolId().hashCode()); hashCode = prime * hashCode + ((getBulkPublishStartTime() == null) ? 0 : getBulkPublishStartTime().hashCode()); hashCode = prime * hashCode + ((getBulkPublishCompleteTime() == null) ? 0 : getBulkPublishCompleteTime().hashCode()); hashCode = prime * hashCode + ((getBulkPublishStatus() == null) ? 0 : getBulkPublishStatus().hashCode()); hashCode = prime * hashCode + ((getFailureMessage() == null) ? 0 : getFailureMessage().hashCode()); return hashCode; } @Override public GetBulkPublishDetailsResult clone() { try { return (GetBulkPublishDetailsResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
alexmao86/swing-rcp
src/main/java/net/sf/anpr/rcp/MessageBus.java
170
package net.sf.anpr.rcp;

/**
 * Minimal publish/subscribe message bus contract.
 *
 * <p>Publishers push {@link Message} instances onto the bus; consumers register a
 * {@code Mountable} against a subject string and receive messages for that subject.
 * (NOTE(review): exact delivery/threading semantics are defined by the implementation,
 * not by this interface — confirm against the concrete bus before relying on them.)
 */
public interface MessageBus {

    /**
     * Publishes the given message to the bus.
     *
     * @param message the message to deliver; semantics of routing are implementation-defined
     */
    void publish(Message message);

    /**
     * Registers a subscriber for messages on the given subject.
     *
     * @param subject   the subject (topic) key the subscriber is interested in
     * @param mountable the component to notify — presumably receives matching messages;
     *                  verify against the bus implementation
     */
    void subscribe(String subject, Mountable mountable);
}
apache-2.0
scorpionvicky/elasticsearch
server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java
20905
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.snapshots.blobstore;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.index.store.StoreFileMetadata;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.IntStream;

/**
 * Shard snapshot metadata.
 *
 * <p>Describes one snapshot of one shard: the snapshot name, timing information, and the list
 * of {@link FileInfo} entries for the Lucene files captured by the snapshot. Instances are
 * immutable and can be round-tripped through XContent via {@link #toXContent} /
 * {@link #fromXContent}.
 */
public class BlobStoreIndexShardSnapshot implements ToXContentFragment {

    /**
     * Information about snapshotted file.
     *
     * <p>A large file may be split into several equally sized "parts" in the blob store
     * (except the last part, which holds the remainder); {@code partSize} controls the split.
     */
    public static class FileInfo {

        private final String name;
        private final ByteSizeValue partSize;
        // Effective size of a single part in bytes; Long.MAX_VALUE means "no splitting"
        // (partSize was null or non-positive), so the whole file is one part.
        private final long partBytes;
        private final int numberOfParts;
        private final StoreFileMetadata metadata;

        /**
         * Constructs a new instance of file info
         *
         * @param name         file name as stored in the blob store
         * @param metadata     the files meta data
         * @param partSize     size of the single chunk
         */
        public FileInfo(String name, StoreFileMetadata metadata, ByteSizeValue partSize) {
            this.name = name;
            this.metadata = metadata;

            long partBytes = Long.MAX_VALUE;
            if (partSize != null && partSize.getBytes() > 0) {
                partBytes = partSize.getBytes();
            }

            if (metadata.length() == 0) {
                // Zero-length files still occupy exactly one (empty) part.
                numberOfParts = 1;
            } else {
                long longNumberOfParts = 1L + (metadata.length() - 1L) / partBytes; // ceil(len/partBytes), but beware of long overflow
                numberOfParts = (int)longNumberOfParts;
                if (numberOfParts != longNumberOfParts) { // also beware of int overflow, although 2^32 parts is already ludicrous
                    throw new IllegalArgumentException("part size [" + partSize + "] too small for file [" + metadata + "]");
                }
            }

            this.partSize = partSize;
            this.partBytes = partBytes;
            // Sanity check: the per-part sizes must add up exactly to the file length.
            assert IntStream.range(0, numberOfParts).mapToLong(this::partBytes).sum() == metadata.length();
        }

        /**
         * Returns the base file name
         *
         * @return file name
         */
        public String name() {
            return name;
        }

        /**
         * Returns part name if file is stored as multiple parts
         *
         * @param part part number
         * @return part name
         */
        public String partName(int part) {
            if (numberOfParts > 1) {
                return name + ".part" + part;
            } else {
                return name;
            }
        }

        /**
         * Returns base file name from part name
         *
         * @param blobName part name
         * @return base file name
         */
        public static String canonicalName(String blobName) {
            if (blobName.contains(".part")) {
                return blobName.substring(0, blobName.indexOf(".part"));
            }
            return blobName;
        }

        /**
         * Returns original file name
         *
         * @return original file name
         */
        public String physicalName() {
            return metadata.name();
        }

        /**
         * File length
         *
         * @return file length
         */
        public long length() {
            return metadata.length();
        }

        /**
         * Returns part size
         *
         * @return part size
         */
        public ByteSizeValue partSize() {
            return partSize;
        }

        /**
         * Returns the size (in bytes) of a given part
         *
         * @return the size (in bytes) of a given part
         */
        public long partBytes(int part) {
            assert 0 <= part && part < numberOfParts : part + " vs " + numberOfParts;
            if (numberOfParts == 1) {
                return length();
            }
            // First and last-but-one parts have a size equal to partBytes
            if (part < (numberOfParts - 1)) {
                return partBytes;
            }
            // Last part size is deducted from the length and the number of parts
            final long lastPartBytes = length() - (this.partBytes * (numberOfParts - 1));
            assert 0 < lastPartBytes && lastPartBytes <= partBytes : lastPartBytes + " vs " + partBytes;
            return lastPartBytes;
        }

        /**
         * Returns number of parts
         *
         * @return number of parts
         */
        public int numberOfParts() {
            return numberOfParts;
        }

        /**
         * Returns file md5 checksum provided by {@link org.elasticsearch.index.store.Store}
         *
         * @return file checksum
         */
        public String checksum() {
            return metadata.checksum();
        }

        /**
         * Returns the StoreFileMetadata for this file info.
         */
        public StoreFileMetadata metadata() {
            return metadata;
        }

        /**
         * Checks if a file in a store is the same file
         *
         * @param md file in a store
         * @return true if file in a store this this file have the same checksum and length
         */
        public boolean isSame(StoreFileMetadata md) {
            return metadata.isSame(md);
        }

        /**
         * Checks if a file in a store is the same file
         *
         * @param fileInfo file in a store
         * @return true if file in a store this this file have the same checksum and length
         */
        public boolean isSame(FileInfo fileInfo) {
            if (numberOfParts != fileInfo.numberOfParts) {
                return false;
            }
            if (partBytes != fileInfo.partBytes) {
                return false;
            }
            if (!name.equals(fileInfo.name)) {
                return false;
            }
            // partSize may legitimately be null (no splitting) — compare null-safely.
            if (partSize != null) {
                if (!partSize.equals(fileInfo.partSize)) {
                    return false;
                }
            } else {
                if (fileInfo.partSize != null) {
                    return false;
                }
            }
            return metadata.isSame(fileInfo.metadata);
        }

        // XContent field names used by toXContent/fromXContent below.
        static final String NAME = "name";
        static final String PHYSICAL_NAME = "physical_name";
        static final String LENGTH = "length";
        static final String CHECKSUM = "checksum";
        static final String PART_SIZE = "part_size";
        static final String WRITTEN_BY = "written_by";
        static final String META_HASH = "meta_hash";

        /**
         * Serializes file info into JSON
         *
         * @param file    file info
         * @param builder XContent builder
         */
        public static void toXContent(FileInfo file, XContentBuilder builder) throws IOException {
            builder.startObject();
            builder.field(NAME, file.name);
            builder.field(PHYSICAL_NAME, file.metadata.name());
            builder.field(LENGTH, file.metadata.length());
            builder.field(CHECKSUM, file.metadata.checksum());
            if (file.partSize != null) {
                builder.field(PART_SIZE, file.partSize.getBytes());
            }

            if (file.metadata.writtenBy() != null) {
                builder.field(WRITTEN_BY, file.metadata.writtenBy());
            }

            if (file.metadata.hash() != null && file.metadata().hash().length > 0) {
                BytesRef br = file.metadata.hash();
                builder.field(META_HASH, br.bytes, br.offset, br.length);
            }
            builder.endObject();
        }

        /**
         * Parses JSON that represents file info
         *
         * @param parser parser
         * @return file info
         */
        public static FileInfo fromXContent(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.currentToken();
            String name = null;
            String physicalName = null;
            long length = -1;
            String checksum = null;
            ByteSizeValue partSize = null;
            Version writtenBy = null;
            String writtenByStr = null;
            BytesRef metaHash = new BytesRef();
            if (token == XContentParser.Token.START_OBJECT) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        String currentFieldName = parser.currentName();
                        token = parser.nextToken();
                        if (token.isValue()) {
                            if (NAME.equals(currentFieldName)) {
                                name = parser.text();
                            } else if (PHYSICAL_NAME.equals(currentFieldName)) {
                                physicalName = parser.text();
                            } else if (LENGTH.equals(currentFieldName)) {
                                length = parser.longValue();
                            } else if (CHECKSUM.equals(currentFieldName)) {
                                checksum = parser.text();
                            } else if (PART_SIZE.equals(currentFieldName)) {
                                partSize = new ByteSizeValue(parser.longValue());
                            } else if (WRITTEN_BY.equals(currentFieldName)) {
                                writtenByStr = parser.text();
                                // Lenient parse: unknown version strings yield null, caught below.
                                writtenBy = Lucene.parseVersionLenient(writtenByStr, null);
                            } else if (META_HASH.equals(currentFieldName)) {
                                metaHash.bytes = parser.binaryValue();
                                metaHash.offset = 0;
                                metaHash.length = metaHash.bytes.length;
                            } else {
                                throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                            }
                        } else {
                            throw new ElasticsearchParseException("unexpected token [{}]", token);
                        }
                    } else {
                        throw new ElasticsearchParseException("unexpected token [{}]", token);
                    }
                }
            }

            // Verify that file information is complete
            if (name == null || Strings.validFileName(name) == false) {
                throw new ElasticsearchParseException("missing or invalid file name [" + name + "]");
            } else if (physicalName == null || Strings.validFileName(physicalName) == false) {
                throw new ElasticsearchParseException("missing or invalid physical file name [" + physicalName + "]");
            } else if (length < 0) {
                throw new ElasticsearchParseException("missing or invalid file length");
            } else if (writtenBy == null) {
                throw new ElasticsearchParseException("missing or invalid written_by [" + writtenByStr + "]");
            } else if (checksum == null) {
                throw new ElasticsearchParseException("missing checksum for name [" + name + "]");
            }
            return new FileInfo(name, new StoreFileMetadata(physicalName, length, checksum, writtenBy, metaHash), partSize);
        }

        @Override
        public String toString() {
            return "[name: " + name +
                       ", numberOfParts: " + numberOfParts +
                       ", partSize: " + partSize +
                       ", partBytes: " + partBytes +
                       ", metadata: " + metadata + "]";
        }
    }

    /**
     * Snapshot name
     */
    private final String snapshot;

    private final long indexVersion;
    private final long startTime;
    private final long time;
    private final int incrementalFileCount;
    private final long incrementalSize;

    private final List<FileInfo> indexFiles;

    /**
     * Constructs new shard snapshot metadata from snapshot metadata
     *
     * @param snapshot              snapshot name
     * @param indexVersion          index version
     * @param indexFiles            list of files in the shard
     * @param startTime             snapshot start time
     * @param time                  snapshot running time
     * @param incrementalFileCount  incremental of files that were snapshotted
     * @param incrementalSize       incremental size of snapshot
     */
    public BlobStoreIndexShardSnapshot(String snapshot, long indexVersion, List<FileInfo> indexFiles,
                                       long startTime, long time,
                                       int incrementalFileCount,
                                       long incrementalSize) {
        assert snapshot != null;
        assert indexVersion >= 0;
        this.snapshot = snapshot;
        this.indexVersion = indexVersion;
        // Defensive immutable copy — callers cannot mutate the file list afterwards.
        this.indexFiles = List.copyOf(indexFiles);
        this.startTime = startTime;
        this.time = time;
        this.incrementalFileCount = incrementalFileCount;
        this.incrementalSize = incrementalSize;
    }

    /**
     * Returns snapshot name
     *
     * @return snapshot name
     */
    public String snapshot() {
        return snapshot;
    }

    /**
     * Returns list of files in the shard
     *
     * @return list of files
     */
    public List<FileInfo> indexFiles() {
        return indexFiles;
    }

    /**
     * Returns snapshot start time
     */
    public long startTime() {
        return startTime;
    }

    /**
     * Returns snapshot running time
     */
    public long time() {
        return time;
    }

    /**
     * Returns incremental of files that were snapshotted
     */
    public int incrementalFileCount() {
        return incrementalFileCount;
    }

    /**
     * Returns total number of files that are referenced by this snapshot
     */
    public int totalFileCount() {
        return indexFiles.size();
    }

    /**
     * Returns incremental of files size that were snapshotted
     */
    public long incrementalSize() {
        return incrementalSize;
    }

    /**
     * Returns total size of all files that where snapshotted
     */
    public long totalSize() {
        return indexFiles.stream().mapToLong(fi -> fi.metadata().length()).sum();
    }

    private static final String NAME = "name";
    private static final String INDEX_VERSION = "index_version";
    private static final String START_TIME = "start_time";
    private static final String TIME = "time";
    private static final String FILES = "files";
    // for the sake of BWC keep the actual property names as in 6.x
    // + there is a constraint in #fromXContent() that leads to ElasticsearchParseException("unknown parameter [incremental_file_count]");
    private static final String INCREMENTAL_FILE_COUNT = "number_of_files";
    private static final String INCREMENTAL_SIZE = "total_size";

    private static final ParseField PARSE_NAME = new ParseField(NAME);
    private static final ParseField PARSE_INDEX_VERSION = new ParseField(INDEX_VERSION, "index-version");
    private static final ParseField PARSE_START_TIME = new ParseField(START_TIME);
    private static final ParseField PARSE_TIME = new ParseField(TIME);
    private static final ParseField PARSE_INCREMENTAL_FILE_COUNT = new ParseField(INCREMENTAL_FILE_COUNT);
    private static final ParseField PARSE_INCREMENTAL_SIZE = new ParseField(INCREMENTAL_SIZE);
    private static final ParseField PARSE_FILES = new ParseField(FILES);

    /**
     * Serializes shard snapshot metadata info into JSON
     *
     * @param builder  XContent builder
     * @param params   parameters
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(NAME, snapshot);
        builder.field(INDEX_VERSION, indexVersion);
        builder.field(START_TIME, startTime);
        builder.field(TIME, time);
        builder.field(INCREMENTAL_FILE_COUNT, incrementalFileCount);
        builder.field(INCREMENTAL_SIZE, incrementalSize);
        builder.startArray(FILES);
        for (FileInfo fileInfo : indexFiles) {
            FileInfo.toXContent(fileInfo, builder);
        }
        builder.endArray();
        return builder;
    }

    /**
     * Parses shard snapshot metadata
     *
     * @param parser parser
     * @return shard snapshot metadata
     */
    public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser) throws IOException {
        String snapshot = null;
        long indexVersion = -1;
        long startTime = 0;
        long time = 0;
        int incrementalFileCount = 0;
        long incrementalSize = 0;

        List<FileInfo> indexFiles = new ArrayList<>();
        if (parser.currentToken() == null) { // fresh parser? move to the first token
            parser.nextToken();
        }
        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.START_OBJECT) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
                final String currentFieldName = parser.currentName();
                token = parser.nextToken();
                if (token.isValue()) {
                    if (PARSE_NAME.match(currentFieldName, parser.getDeprecationHandler())) {
                        snapshot = parser.text();
                    } else if (PARSE_INDEX_VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
                        // The index-version is needed for backward compatibility with v 1.0
                        indexVersion = parser.longValue();
                    } else if (PARSE_START_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
                        startTime = parser.longValue();
                    } else if (PARSE_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
                        time = parser.longValue();
                    } else if (PARSE_INCREMENTAL_FILE_COUNT.match(currentFieldName, parser.getDeprecationHandler())) {
                        incrementalFileCount = parser.intValue();
                    } else if (PARSE_INCREMENTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
                        incrementalSize = parser.longValue();
                    } else {
                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (PARSE_FILES.match(currentFieldName, parser.getDeprecationHandler())) {
                        while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            indexFiles.add(FileInfo.fromXContent(parser));
                        }
                    } else {
                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                    }
                } else {
                    throw new ElasticsearchParseException("unexpected token [{}]", token);
                }
            }
        }
        return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, Collections.unmodifiableList(indexFiles),
                                               startTime, time, incrementalFileCount, incrementalSize);
    }
}
apache-2.0
guai/HikariCP
hikaricp-java6/src/test/java/ru/programpark/hikari/TestMetrics.java
4850
/*
 * Copyright (C) 2013, 2014 Brett Wooldridge
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ru.programpark.hikari;

import com.codahale.metrics.*;
import com.codahale.metrics.health.HealthCheck.Result;
import com.codahale.metrics.health.HealthCheckRegistry;
import org.junit.Assert;
import org.junit.Test;
import ru.programpark.hikari.util.UtilityElf;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;

/**
 * Test HikariCP/CodaHale metrics integration.
 *
 * @author Brett Wooldridge
 */
public class TestMetrics
{
   /**
    * Verifies that acquiring a connection records a sample in the
    * "test.pool.Wait" timer registered by the pool.
    */
   @Test
   public void testMetricWait() throws SQLException
   {
      MetricRegistry metricRegistry = new MetricRegistry();

      HikariConfig config = new HikariConfig();
      config.setMinimumIdle(1);
      config.setMaximumPoolSize(1);
      config.setMetricRegistry(metricRegistry);
      config.setInitializationFailFast(false);
      config.setPoolName("test");
      config.setDataSourceClassName("ru.programpark.hikari.mocks.StubDataSource");

      HikariDataSource ds = new HikariDataSource(config);
      try {
         ds.getConnection().close();

         Timer timer = metricRegistry.getTimers(new MetricFilter() {
            /** {@inheritDoc} */
            @Override
            public boolean matches(String name, Metric metric)
            {
               // BUGFIX: compare the expected metric name against the actual registered
               // name. The previous code compared the expected constant to itself, which
               // is always true and would have matched every timer in the registry.
               return MetricRegistry.name("test", "pool", "Wait").equals(name);
            }
         }).values().iterator().next();

         Assert.assertEquals(1, timer.getCount());
         Assert.assertTrue(timer.getMeanRate() > 0.0);
      }
      finally {
         ds.close();
      }
   }

   /**
    * Verifies that a connection held for ~250ms contributes a usage sample of
    * at least 250ms to the "test.pool.Usage" histogram.
    */
   @Test
   public void testMetricUsage() throws SQLException
   {
      MetricRegistry metricRegistry = new MetricRegistry();

      HikariConfig config = new HikariConfig();
      config.setMinimumIdle(1);
      config.setMaximumPoolSize(1);
      config.setMetricRegistry(metricRegistry);
      config.setInitializationFailFast(false);
      config.setPoolName("test");
      config.setDataSourceClassName("ru.programpark.hikari.mocks.StubDataSource");

      HikariDataSource ds = new HikariDataSource(config);
      try {
         Connection connection = ds.getConnection();
         UtilityElf.quietlySleep(250L);
         connection.close();

         Histogram histo = metricRegistry.getHistograms(new MetricFilter() {
            /** {@inheritDoc} */
            @Override
            public boolean matches(String name, Metric metric)
            {
               // BUGFIX: match on the actual metric name (see testMetricWait).
               return MetricRegistry.name("test", "pool", "Usage").equals(name);
            }
         }).values().iterator().next();

         Assert.assertEquals(1, histo.getCount());
         double seventyFifth = histo.getSnapshot().get75thPercentile();
         Assert.assertTrue("Seventy-fith percentile less than 250ms: " + seventyFifth, seventyFifth >= 250.0);
      }
      finally {
         ds.close();
      }
   }

   /**
    * Verifies that the pool registers its connectivity and 99th-percentile SLA
    * health checks and that both report healthy after normal use.
    */
   @Test
   public void testHealthChecks() throws Exception
   {
      MetricRegistry metricRegistry = new MetricRegistry();
      HealthCheckRegistry healthRegistry = new HealthCheckRegistry();

      HikariConfig config = new HikariConfig();
      config.setMaximumPoolSize(10);
      config.setMetricRegistry(metricRegistry);
      config.setHealthCheckRegistry(healthRegistry);
      config.setPoolName("test");
      config.setDataSourceClassName("ru.programpark.hikari.mocks.StubDataSource");
      config.addHealthCheckProperty("connectivityCheckTimeoutMs", "1000");
      config.addHealthCheckProperty("expected99thPercentileMs", "10");

      HikariDataSource ds = new HikariDataSource(config);
      try {
         // Let the pool settle before sampling health state.
         UtilityElf.quietlySleep(TimeUnit.SECONDS.toMillis(2));

         Connection connection = ds.getConnection();
         connection.close();

         connection = ds.getConnection();
         connection.close();

         SortedMap<String, Result> healthChecks = healthRegistry.runHealthChecks();

         Result connectivityResult = healthChecks.get("test.pool.ConnectivityCheck");
         Assert.assertTrue(connectivityResult.isHealthy());

         Result slaResult = healthChecks.get("test.pool.Connection99Percent");
         Assert.assertTrue(slaResult.isHealthy());
      }
      finally {
         ds.close();
      }
   }
}
apache-2.0
googleads/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202202/Role.java
3932
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v202202; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; /** * * Each {@code Role} provides a user with permissions to perform specific operations in the system. * * * <p>Java class for Role complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="Role"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="status" type="{https://www.google.com/apis/ads/publisher/v202202}RoleStatus" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Role", propOrder = { "id", "name", "description", "status" }) public class Role { protected Long id; protected String name; protected String description; @XmlSchemaType(name = "string") protected RoleStatus status; /** * Gets the value of the id property. * * @return * possible object is * {@link Long } * */ public Long getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link Long } * */ public void setId(Long value) { this.id = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the description property. * * @return * possible object is * {@link String } * */ public String getDescription() { return description; } /** * Sets the value of the description property. * * @param value * allowed object is * {@link String } * */ public void setDescription(String value) { this.description = value; } /** * Gets the value of the status property. 
* * @return * possible object is * {@link RoleStatus } * */ public RoleStatus getStatus() { return status; } /** * Sets the value of the status property. * * @param value * allowed object is * {@link RoleStatus } * */ public void setStatus(RoleStatus value) { this.status = value; } }
apache-2.0
SowaLabs/OpenNLP
opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java
1758
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package opennlp.tools.doccat;

import java.util.ArrayList;
import java.util.Collection;

import opennlp.tools.util.featuregen.StringPattern;

/**
 * Produces one {@code "bow=<token>"} feature per token of a document,
 * optionally restricting the bag to tokens made up entirely of letters.
 */
public class BagOfWordsFeatureGenerator implements FeatureGenerator {

  // When true, tokens containing anything but letters are left out of the bag.
  private boolean useOnlyAllLetterTokens = false;

  public BagOfWordsFeatureGenerator() {
  }

  BagOfWordsFeatureGenerator(boolean useOnlyAllLetterTokens) {
    this.useOnlyAllLetterTokens = useOnlyAllLetterTokens;
  }

  public Collection<String> extractFeatures(String[] text) {
    Collection<String> features = new ArrayList<String>(text.length);

    for (String token : text) {
      // Keep every token, unless the generator is restricted to
      // all-letter tokens and this one contains other characters.
      boolean keep = !useOnlyAllLetterTokens
          || StringPattern.recognize(token).isAllLetter();

      if (keep) {
        features.add("bow=" + token);
      }
    }

    return features;
  }
}
apache-2.0
LithidSoftware/android_Findex
src/com/lithidsw/findex/ef/DirectoryManager.java
3653
package com.lithidsw.findex.ef;

import android.app.ActionBar;
import android.app.Activity;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.GridView;
import android.widget.TextView;

import com.lithidsw.findex.R;
import com.lithidsw.findex.info.DirPickerInfo;
import com.lithidsw.findex.utils.C;

import java.util.ArrayList;

/**
 * Screen listing the folders currently excluded from indexing. The list is
 * persisted as a single "::"-separated string in shared preferences; tapping
 * an entry removes that folder from the exclusion list.
 */
public class DirectoryManager extends Activity {

    private SharedPreferences mPrefs;
    private GridView mGridView;
    private DirectoryAdapter mAdapter;
    private TextView mTextView;

    // Backing data for mAdapter; one entry per excluded directory.
    ArrayList<DirPickerInfo> mDirs = new ArrayList<DirPickerInfo>();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mPrefs = getSharedPreferences(C.PREF, MODE_PRIVATE);
        // Apply the user-selected theme before any views are inflated.
        setTheme(getResources().getIdentifier(
                mPrefs.getString(C.PREF_THEME, C.DEFAULT_THEME), "style", C.THIS)
        );
        setContentView(R.layout.directory_manage);
        mTextView = (TextView) findViewById(R.id.no_content_list);
        mGridView = (GridView) findViewById(R.id.dir_list);
        if (mPrefs.getBoolean(C.PREF_TOGGLE_GRID, false)) {
            mGridView.setNumColumns(getResources().getInteger(R.integer.grid_items));
        } else {
            mGridView.setNumColumns(1);
        }

        // Tapping an entry removes that directory from the exclusion list.
        mGridView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                removeDir(mDirs.get(i).dir);
            }
        });

        mAdapter = new DirectoryAdapter(this, mDirs);
        mGridView.setAdapter(mAdapter);

        setupDirs();
        setupActionBar();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                finish();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /** Enables the action bar's "up" affordance so the screen can be left. */
    private void setupActionBar() {
        ActionBar actionBar = getActionBar();
        if (actionBar != null) {
            actionBar.setDisplayHomeAsUpEnabled(true);
            actionBar.setHomeButtonEnabled(true);
        }
    }

    /** Refreshes the grid and toggles the empty-list placeholder text. */
    private void update() {
        mAdapter.notifyDataSetChanged();
        if (mDirs.size() > 0) {
            mGridView.setVisibility(View.VISIBLE);
            mTextView.setVisibility(View.GONE);
        } else {
            mGridView.setVisibility(View.GONE);
            mTextView.setVisibility(View.VISIBLE);
        }
    }

    /** Reloads mDirs from the "::"-separated preference string. */
    private void setupDirs() {
        String[] folders = mPrefs.getString(C.PREF_EXCLUDE_FOLDERS, "").split("::");
        mDirs.clear();
        for (String string : folders) {
            if (string.length() > 0) {
                DirPickerInfo info = new DirPickerInfo();
                info.dir = string;
                info.name = string;
                mDirs.add(info);
            }
        }
        update();
    }

    /**
     * Removes {@code dir} from the persisted exclusion list and refreshes
     * the view.
     */
    private void removeDir(String dir) {
        String[] folders = mPrefs.getString(C.PREF_EXCLUDE_FOLDERS, "").split("::");
        // Bug fix: the original assigned `directories = string + "::"` inside
        // the loop, overwriting the accumulator on every iteration, so removing
        // one directory silently discarded all but the last kept entry.
        // Accumulate the kept entries instead.
        StringBuilder directories = new StringBuilder();
        for (String string : folders) {
            if (string.length() > 0 && !string.equals(dir)) {
                directories.append(string).append("::");
            }
        }
        mPrefs.edit().putString(C.PREF_EXCLUDE_FOLDERS, directories.toString()).commit();
        setupDirs();
    }
}
apache-2.0
peterhoeltschi/AzureStorage
microsoft-azure-storage-test/src/com/microsoft/azure/storage/ServicePropertiesTests.java
36936
/** * Copyright Microsoft Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoft.azure.storage; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.List; import junit.framework.TestCase; import com.microsoft.azure.storage.blob.CloudBlobClient; import com.microsoft.azure.storage.core.SR; import com.microsoft.azure.storage.queue.CloudQueueClient; import com.microsoft.azure.storage.table.CloudTableClient; public class ServicePropertiesTests extends TestCase { /** * Test Analytics Disable Service Properties * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsDisable() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsDisable(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsDisable(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsDisable(client, props); } private void testAnalyticsDisable(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { props.getLogging().setLogOperationTypes(EnumSet.noneOf(LoggingOperations.class)); props.getLogging().setRetentionIntervalInDays(null); props.getLogging().setVersion("1.0"); 
props.getHourMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getHourMetrics().setRetentionIntervalInDays(null); props.getHourMetrics().setVersion("1.0"); props.getMinuteMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getMinuteMetrics().setRetentionIntervalInDays(null); props.getMinuteMetrics().setVersion("1.0"); props.getCors().getCorsRules().clear(); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Test Analytics Default Service Version * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsDefaultServiceVersion() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsDefaultServiceVersion(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsDefaultServiceVersion(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsDefaultServiceVersion(client, props); } private void testAnalyticsDefaultServiceVersion(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { if (client.getClass().equals(CloudBlobClient.class)) { props.setDefaultServiceVersion("2009-09-19"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); props.setDefaultServiceVersion("2011-08-18"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); props.setDefaultServiceVersion("2012-02-12"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); 
callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } else { try { props.setDefaultServiceVersion("2009-09-19"); callUploadServiceProps(client, props); fail("Should not be able to set default Service Version for non Blob Client"); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), SR.DEFAULT_SERVICE_VERSION_ONLY_SET_FOR_BLOB_SERVICE); } catch (Exception e) { fail(); } } } /** * Test Analytics Logging Operations * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsLoggingOperations() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsLoggingOperations(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsLoggingOperations(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsLoggingOperations(client, props); } private void testAnalyticsLoggingOperations(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { // None props.getLogging().setLogOperationTypes(EnumSet.noneOf(LoggingOperations.class)); props.getLogging().setRetentionIntervalInDays(null); props.getLogging().setVersion("1.0"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // None props.getLogging().setLogOperationTypes(EnumSet.allOf(LoggingOperations.class)); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Test Analytics Hour Metrics Level * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsMetricsLevel() throws StorageException, InterruptedException { ServiceClient 
client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsMetricsLevel(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsMetricsLevel(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsMetricsLevel(client, props); } private void testAnalyticsMetricsLevel(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { // None props.getHourMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getHourMetrics().setRetentionIntervalInDays(null); props.getHourMetrics().setVersion("1.0"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Service props.getHourMetrics().setMetricsLevel(MetricsLevel.SERVICE); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // ServiceAndAPI props.getHourMetrics().setMetricsLevel(MetricsLevel.SERVICE_AND_API); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Test Analytics Minute Metrics Level * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsMinuteMetricsLevel() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsMinuteMetricsLevel(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsMinuteMetricsLevel(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsMinuteMetricsLevel(client, props); } private void 
testAnalyticsMinuteMetricsLevel(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { // None props.getMinuteMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getMinuteMetrics().setRetentionIntervalInDays(null); props.getMinuteMetrics().setVersion("1.0"); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Service props.getMinuteMetrics().setMetricsLevel(MetricsLevel.SERVICE); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // ServiceAndAPI props.getMinuteMetrics().setMetricsLevel(MetricsLevel.SERVICE_AND_API); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Test Analytics Retention Policies * * @throws StorageException * @throws InterruptedException */ public void testAnalyticsRetentionPolicies() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testAnalyticsRetentionPolicies(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testAnalyticsRetentionPolicies(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testAnalyticsRetentionPolicies(client, props); } private void testAnalyticsRetentionPolicies(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { // Set retention policy null with metrics disabled. 
props.getHourMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getHourMetrics().setRetentionIntervalInDays(null); props.getMinuteMetrics().setMetricsLevel(MetricsLevel.DISABLED); props.getMinuteMetrics().setRetentionIntervalInDays(null); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy not null with metrics enabled. props.getHourMetrics().setRetentionIntervalInDays(1); props.getHourMetrics().setMetricsLevel(MetricsLevel.SERVICE); props.getMinuteMetrics().setRetentionIntervalInDays(1); props.getMinuteMetrics().setMetricsLevel(MetricsLevel.SERVICE); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy not null with metrics enabled. props.getHourMetrics().setRetentionIntervalInDays(2); props.getHourMetrics().setMetricsLevel(MetricsLevel.SERVICE_AND_API); props.getMinuteMetrics().setRetentionIntervalInDays(2); props.getMinuteMetrics().setMetricsLevel(MetricsLevel.SERVICE_AND_API); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy null with logging disabled. props.getLogging().setRetentionIntervalInDays(null); props.getLogging().setLogOperationTypes(EnumSet.noneOf(LoggingOperations.class)); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy not null with logging disabled. props.getLogging().setRetentionIntervalInDays(3); props.getLogging().setLogOperationTypes(EnumSet.noneOf(LoggingOperations.class)); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy null with logging enabled. 
props.getLogging().setRetentionIntervalInDays(null); props.getLogging().setLogOperationTypes(EnumSet.allOf(LoggingOperations.class)); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); // Set retention policy not null with logging enabled. props.getLogging().setRetentionIntervalInDays(4); props.getLogging().setLogOperationTypes(EnumSet.allOf(LoggingOperations.class)); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Test CORS with different rules. * * @throws StorageException * @throws InterruptedException */ public void testCloudValidCorsRules() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testCloudValidCorsRules(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testCloudValidCorsRules(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testCloudValidCorsRules(client, props); } private void testCloudValidCorsRules(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { CorsRule ruleMinRequired = new CorsRule(); ruleMinRequired.getAllowedOrigins().add("www.xyz.com"); ruleMinRequired.getAllowedMethods().add(CorsHttpMethods.GET); final CorsRule ruleBasic = new CorsRule(); ruleBasic.getAllowedOrigins().addAll(Arrays.asList("www.ab.com", "www.bc.com")); ruleBasic.getAllowedMethods().addAll(EnumSet.of(CorsHttpMethods.GET, CorsHttpMethods.PUT)); ruleBasic.getAllowedHeaders().addAll( Arrays.asList("x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo")); ruleBasic.getExposedHeaders().addAll( Arrays.asList("x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd")); 
ruleBasic.setMaxAgeInSeconds(500); CorsRule ruleAllMethods = new CorsRule(); ruleAllMethods.getAllowedOrigins().addAll(Arrays.asList("www.ab.com", "www.bc.com")); ruleAllMethods.getAllowedMethods().addAll(EnumSet.allOf(CorsHttpMethods.class)); CorsRule ruleSingleExposedHeader = new CorsRule(); ruleSingleExposedHeader.getAllowedOrigins().add("www.ab.com"); ruleSingleExposedHeader.getAllowedMethods().add(CorsHttpMethods.GET); ruleSingleExposedHeader.getExposedHeaders().add("x-ms-meta-bcd"); CorsRule ruleSingleExposedPrefixHeader = new CorsRule(); ruleSingleExposedPrefixHeader.getAllowedOrigins().add("www.ab.com"); ruleSingleExposedPrefixHeader.getAllowedMethods().add(CorsHttpMethods.GET); ruleSingleExposedPrefixHeader.getExposedHeaders().add("x-ms-meta-data*"); CorsRule ruleSingleAllowedHeader = new CorsRule(); ruleSingleAllowedHeader.getAllowedOrigins().add("www.ab.com"); ruleSingleAllowedHeader.getAllowedMethods().add(CorsHttpMethods.GET); ruleSingleAllowedHeader.getAllowedHeaders().add("x-ms-meta-xyz"); CorsRule ruleSingleAllowedPrefixHeader = new CorsRule(); ruleSingleAllowedPrefixHeader.getAllowedOrigins().add("www.ab.com"); ruleSingleAllowedPrefixHeader.getAllowedMethods().add(CorsHttpMethods.GET); ruleSingleAllowedPrefixHeader.getAllowedHeaders().add("x-ms-meta-target*"); CorsRule ruleAllowAll = new CorsRule(); ruleAllowAll.getAllowedOrigins().add("*"); ruleAllowAll.getAllowedMethods().add(CorsHttpMethods.GET); ruleAllowAll.getAllowedHeaders().add("*"); ruleAllowAll.getExposedHeaders().add("*"); this.testCorsRules(ruleBasic, client, props); this.testCorsRules(ruleMinRequired, client, props); this.testCorsRules(ruleAllMethods, client, props); this.testCorsRules(ruleSingleExposedHeader, client, props); this.testCorsRules(ruleSingleExposedPrefixHeader, client, props); this.testCorsRules(ruleSingleAllowedHeader, client, props); this.testCorsRules(ruleSingleAllowedPrefixHeader, client, props); this.testCorsRules(ruleAllowAll, client, props); List<CorsRule> testList 
= new ArrayList<CorsRule>(); // Empty rule set should delete all rules this.testCorsRules(testList, client, props); // Test duplicate rules testList.add(ruleBasic); testList.add(ruleBasic); this.testCorsRules(testList, client, props); // Test max number of rules (five) testList.clear(); testList.add(ruleBasic); testList.add(ruleMinRequired); testList.add(ruleAllMethods); testList.add(ruleSingleExposedHeader); testList.add(ruleSingleExposedPrefixHeader); this.testCorsRules(testList, client, props); // Test max number of rules (six) testList.clear(); testList.add(ruleBasic); testList.add(ruleMinRequired); testList.add(ruleAllMethods); testList.add(ruleSingleExposedHeader); testList.add(ruleSingleExposedPrefixHeader); testList.add(ruleSingleAllowedHeader); try { this.testCorsRules(testList, client, props); fail("Expecting exception but no exception received. Services are limited to a maximum of five CORS rules."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } } /** * Test CORS with invalid values. 
*/ public void testCorsExpectedExceptions() throws StorageException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testCorsExpectedExceptions(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testCorsExpectedExceptions(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testCorsExpectedExceptions(client, props); } private void testCorsExpectedExceptions(ServiceClient client, ServiceProperties props) { CorsRule ruleEmpty = new CorsRule(); CorsRule ruleInvalidMaxAge = new CorsRule(); ruleInvalidMaxAge.getAllowedOrigins().add("www.xyz.com"); ruleInvalidMaxAge.getAllowedMethods().add(CorsHttpMethods.GET); ruleInvalidMaxAge.setMaxAgeInSeconds(-1); try { this.testCorsRules(ruleEmpty, client, props); fail("No exception received. A CORS rule must contain at least one allowed origin and allowed method."); } catch (StorageException e) { assertEquals(e.getCause().getClass(), IllegalArgumentException.class); assertEquals(e.getCause().getMessage(), "A CORS rule must contain at least one allowed origin and allowed method, and " + "MaxAgeInSeconds cannot have a value less than zero."); } catch (Exception e) { fail("Invalid exception " + e.getClass() + " received when expecting StorageException"); } try { this.testCorsRules(ruleInvalidMaxAge, client, props); fail("No exception received. 
MaxAgeInSeconds cannot have a value less than 0."); } catch (StorageException e) { assertEquals(e.getCause().getClass(), IllegalArgumentException.class); assertEquals(e.getCause().getMessage(), "A CORS rule must contain at least one allowed origin and allowed method, and " + "MaxAgeInSeconds cannot have a value less than zero."); } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } } /** * Test CORS with a valid and invalid number of origin values sent to server. * * @throws StorageException * @throws InterruptedException */ public void testCorsMaxOrigins() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testCorsMaxOrigins(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testCorsMaxOrigins(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testCorsMaxOrigins(client, props); } private void testCorsMaxOrigins(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { CorsRule ruleManyOrigins = new CorsRule(); ruleManyOrigins.getAllowedMethods().add(CorsHttpMethods.GET); // Add maximum number of allowed origins for (int i = 0; i < 64; i++) { ruleManyOrigins.getAllowedOrigins().add("www.xyz" + i + ".com"); } this.testCorsRules(ruleManyOrigins, client, props); ruleManyOrigins.getAllowedOrigins().add("www.xyz64.com"); try { this.testCorsRules(ruleManyOrigins, client, props); fail("No exception received. A maximum of 64 origins are allowed."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } } /** * Test CORS with a valid and invalid number of header values sent to server. 
* * @throws StorageException * @throws InterruptedException */ public void testCorsMaxHeaders() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testCorsMaxHeaders(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testCorsMaxHeaders(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testCorsMaxHeaders(client, props); } private void testCorsMaxHeaders(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { CorsRule ruleManyHeaders = new CorsRule(); ruleManyHeaders.getAllowedOrigins().add("www.xyz.com"); ruleManyHeaders.getAllowedMethods().add(CorsHttpMethods.GET); ruleManyHeaders.getAllowedHeaders().addAll(Arrays.asList("x-ms-meta-target*", "x-ms-meta-other*")); ruleManyHeaders.getExposedHeaders().addAll(Arrays.asList("x-ms-meta-data*", "x-ms-meta-source*")); // Add maximum number of non-prefixed headers for (int i = 0; i < 64; i++) { ruleManyHeaders.getAllowedHeaders().add("x-ms-meta-" + i); ruleManyHeaders.getExposedHeaders().add("x-ms-meta-" + i); } this.testCorsRules(ruleManyHeaders, client, props); // Test with too many Exposed Headers (65) ruleManyHeaders.getExposedHeaders().add("x-ms-meta-toomany"); try { this.testCorsRules(ruleManyHeaders, client, props); fail("No exception received. A maximum of 64 exposed headers are allowed."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } ruleManyHeaders.getExposedHeaders().remove("x-ms-meta-toomany"); // Test with too many Allowed Headers (65) ruleManyHeaders.getAllowedHeaders().add("x-ms-meta-toomany"); try { this.testCorsRules(ruleManyHeaders, client, props); fail("No exception received. 
A maximum of 64 allowed headers are allowed."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } ruleManyHeaders.getExposedHeaders().remove("x-ms-meta-toomany"); // Test with too many Exposed Prefixed Headers (three) ruleManyHeaders.getExposedHeaders().add("x-ms-meta-toomany*"); try { this.testCorsRules(ruleManyHeaders, client, props); fail("No exception received. A maximum of 2 exposed headers are allowed."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } ruleManyHeaders.getExposedHeaders().remove("x-ms-meta-toomany*"); // Test with too many Allowed Prefixed Headers (three) ruleManyHeaders.getAllowedHeaders().add("x-ms-meta-toomany*"); try { this.testCorsRules(ruleManyHeaders, client, props); fail("No exception received. A maximum of 64 allowed headers are allowed."); } catch (StorageException e) { } catch (Exception e) { fail("Invalid exception " + e + " received when expecting StorageException"); } ruleManyHeaders.getExposedHeaders().remove("x-ms-meta-toomany*"); } /** * Test Optional Service Properties * * @throws StorageException * @throws InterruptedException */ public void testOptionalServiceProperties() throws StorageException, InterruptedException { ServiceClient client = TestHelper.createCloudBlobClient(); ServiceProperties props = new ServiceProperties(); props.setDefaultServiceVersion(Constants.HeaderConstants.TARGET_STORAGE_VERSION); testOptionalServiceProperties(client, props); client = TestHelper.createCloudQueueClient(); props = new ServiceProperties(); testOptionalServiceProperties(client, props); client = TestHelper.createCloudTableClient(); props = new ServiceProperties(); testOptionalServiceProperties(client, props); } private void testOptionalServiceProperties(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { // None 
props.getLogging().setLogOperationTypes(EnumSet.of(LoggingOperations.READ, LoggingOperations.WRITE)); props.getLogging().setRetentionIntervalInDays(5); props.getLogging().setVersion("1.0"); // None props.getHourMetrics().setMetricsLevel(MetricsLevel.SERVICE); props.getHourMetrics().setRetentionIntervalInDays(6); props.getHourMetrics().setVersion("1.0"); // None props.getMinuteMetrics().setMetricsLevel(MetricsLevel.SERVICE); props.getMinuteMetrics().setRetentionIntervalInDays(6); props.getMinuteMetrics().setVersion("1.0"); props.getCors().getCorsRules().clear(); callUploadServiceProps(client, props); ServiceProperties newProps = new ServiceProperties(); newProps.setLogging(null); newProps.setHourMetrics(null); newProps.setMinuteMetrics(null); final CorsRule ruleBasic = new CorsRule(); ruleBasic.getAllowedOrigins().addAll(Arrays.asList("www.ab.com", "www.bc.com")); ruleBasic.getAllowedMethods().addAll(EnumSet.of(CorsHttpMethods.GET, CorsHttpMethods.PUT)); ruleBasic.getAllowedHeaders().addAll( Arrays.asList("x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo")); ruleBasic.getExposedHeaders().addAll( Arrays.asList("x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd")); ruleBasic.setMaxAgeInSeconds(500); newProps.getCors().getCorsRules().add(ruleBasic); callUploadServiceProps(client, newProps); props.setCors(newProps.getCors()); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); newProps.setLogging(props.getLogging()); newProps.setHourMetrics(props.getHourMetrics()); newProps.setMinuteMetrics(props.getMinuteMetrics()); newProps.setCors(null); callUploadServiceProps(client, newProps); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } private void callUploadServiceProps(ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { if (client.getClass().equals(CloudBlobClient.class)) { CloudBlobClient blobClient = (CloudBlobClient) client; 
blobClient.uploadServiceProperties(props); Thread.sleep(30000); } else if (client.getClass().equals(CloudTableClient.class)) { CloudTableClient tableClient = (CloudTableClient) client; tableClient.uploadServiceProperties(props); Thread.sleep(30000); } else if (client.getClass().equals(CloudQueueClient.class)) { CloudQueueClient queueClient = (CloudQueueClient) client; queueClient.uploadServiceProperties(props); Thread.sleep(30000); } else { fail(); } } private ServiceProperties callDownloadServiceProperties(ServiceClient client) throws StorageException { if (client.getClass().equals(CloudBlobClient.class)) { CloudBlobClient blobClient = (CloudBlobClient) client; return blobClient.downloadServiceProperties(); } else if (client.getClass().equals(CloudTableClient.class)) { CloudTableClient tableClient = (CloudTableClient) client; return tableClient.downloadServiceProperties(); } else if (client.getClass().equals(CloudQueueClient.class)) { CloudQueueClient queueClient = (CloudQueueClient) client; return queueClient.downloadServiceProperties(); } else { fail(); } return null; } /** * Takes a CorsRule and tries to upload it. Then tries to download it and compares it to the initial CorsRule. */ private void testCorsRules(CorsRule rule, ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { props.getCors().getCorsRules().clear(); props.getCors().getCorsRules().add(rule); callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Takes a List of CorsRules and tries to upload them. Then tries to download them and compares the list to the * initial CorsRule List. 
*/ private void testCorsRules(List<CorsRule> corsRules, ServiceClient client, ServiceProperties props) throws StorageException, InterruptedException { props.getCors().getCorsRules().clear(); for (CorsRule rule : corsRules) { props.getCors().getCorsRules().add(rule); } callUploadServiceProps(client, props); assertServicePropertiesAreEqual(props, callDownloadServiceProperties(client)); } /** * Checks two ServiceProperties for equality */ private static void assertServicePropertiesAreEqual(ServiceProperties propsA, ServiceProperties propsB) { if (propsA.getLogging() != null && propsB.getLogging() != null) { assertTrue(propsA.getLogging().getLogOperationTypes().equals(propsB.getLogging().getLogOperationTypes())); assertEquals(propsA.getLogging().getRetentionIntervalInDays(), propsB.getLogging() .getRetentionIntervalInDays()); assertEquals(propsA.getLogging().getVersion(), propsB.getLogging().getVersion()); } else { assertNull(propsA.getLogging()); assertNull(propsB.getLogging()); } if (propsA.getHourMetrics() != null && propsB.getHourMetrics() != null) { assertTrue(propsA.getHourMetrics().getMetricsLevel().equals(propsB.getHourMetrics().getMetricsLevel())); assertEquals(propsA.getHourMetrics().getRetentionIntervalInDays(), propsB.getHourMetrics() .getRetentionIntervalInDays()); assertEquals(propsA.getHourMetrics().getVersion(), propsB.getHourMetrics().getVersion()); } else { assertNull(propsA.getHourMetrics()); assertNull(propsB.getHourMetrics()); } if (propsA.getMinuteMetrics() != null && propsB.getMinuteMetrics() != null) { assertTrue(propsA.getMinuteMetrics().getMetricsLevel().equals(propsB.getMinuteMetrics().getMetricsLevel())); assertEquals(propsA.getMinuteMetrics().getRetentionIntervalInDays(), propsB.getMinuteMetrics() .getRetentionIntervalInDays()); assertEquals(propsA.getMinuteMetrics().getVersion(), propsB.getMinuteMetrics().getVersion()); } else { assertNull(propsA.getMinuteMetrics()); assertNull(propsB.getMinuteMetrics()); } if 
(propsA.getDefaultServiceVersion() != null && propsB.getDefaultServiceVersion() != null) { assertEquals(propsA.getDefaultServiceVersion(), propsB.getDefaultServiceVersion()); } else { assertNull(propsA.getDefaultServiceVersion()); assertNull(propsB.getDefaultServiceVersion()); } if (propsA.getCors() != null && propsB.getCors() != null) { assertEquals(propsA.getCors().getCorsRules().size(), propsB.getCors().getCorsRules().size()); // Check that rules are equal and in the same order. for (int i = 0; i < propsA.getCors().getCorsRules().size(); i++) { CorsRule ruleA = propsA.getCors().getCorsRules().get(i); CorsRule ruleB = propsB.getCors().getCorsRules().get(i); assertTrue(ruleA.getAllowedOrigins().size() == ruleB.getAllowedOrigins().size() && ruleA.getAllowedOrigins().containsAll(ruleB.getAllowedOrigins())); assertTrue(ruleA.getExposedHeaders().size() == ruleB.getExposedHeaders().size() && ruleA.getExposedHeaders().containsAll(ruleB.getExposedHeaders())); assertTrue(ruleA.getAllowedHeaders().size() == ruleB.getAllowedHeaders().size() && ruleA.getAllowedHeaders().containsAll(ruleB.getAllowedHeaders())); assertTrue(ruleA.getAllowedMethods().equals(ruleB.getAllowedMethods())); assertTrue(ruleA.getMaxAgeInSeconds() == ruleB.getMaxAgeInSeconds()); } } else { assertNull(propsA.getCors()); assertNull(propsB.getCors()); } } }
apache-2.0
kettas/commons-dbutils
src/main/java/org/apache/commons/kettas/JDBCPaginRunner.java
34875
package org.apache.commons.kettas; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.ParameterMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.sql.DataSource; import org.apache.commons.dbutils.DbUtils; import org.apache.commons.dbutils.Pagin; import org.apache.commons.dbutils.ResultSetHandler; import org.apache.commons.dbutils.RowProcessor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * JDBC分页扩展 可支持的数据库类型包括MSsql,MySql,Oracle * * @author 杨伦亮 上午1:13:40 */ public class JDBCPaginRunner { private static Long count = 0l; /** * Is {@link ParameterMetaData#getParameterType(int)} broken (have we tried * it yet)? */ private volatile boolean pmdKnownBroken = true; private static Log log=LogFactory.getLog(JDBCPaginRunner.class); /** * The DataSource to retrieve connections from. */ protected final DataSource ds; /** * Constructor for JDBCPaginRunner. */ public JDBCPaginRunner() { super(); ds = null; } /** * Constructor for JDBCPaginRunner, allows workaround for Oracle drivers * * @param pmdKnownBroken * Oracle drivers don't support * {@link ParameterMetaData#getParameterType(int) }; if * <code>pmdKnownBroken</code> is set to true, we won't even * try it; if false, we'll try it, and if it breaks, we'll * remember not to use it again. */ public JDBCPaginRunner(boolean pmdKnownBroken) { super(); this.pmdKnownBroken = pmdKnownBroken; ds = null; } /** * Constructor for JDBCPaginRunner, allows workaround for Oracle drivers. * Methods that do not take a <code>Connection</code> parameter will * retrieve connections from this <code>DataSource</code>. 
* * @param ds * The <code>DataSource</code> to retrieve connections from. */ public JDBCPaginRunner(DataSource ds) { super(); this.ds = ds; } /** * Constructor for JDBCPaginRunner, allows workaround for Oracle drivers. * Methods that do not take a <code>Connection</code> parameter will * retrieve connections from this <code>DataSource</code>. * * @param ds * The <code>DataSource</code> to retrieve connections from. * @param pmdKnownBroken * Oracle drivers don't support * {@link ParameterMetaData#getParameterType(int) }; if * <code>pmdKnownBroken</code> is set to true, we won't even * try it; if false, we'll try it, and if it breaks, we'll * remember not to use it again. */ public JDBCPaginRunner(DataSource ds, boolean pmdKnownBroken) { super(); this.pmdKnownBroken = pmdKnownBroken; this.ds = ds; } /** * Execute a batch of SQL INSERT, UPDATE, or DELETE queries. * * @param conn * The Connection to use to run the query. The caller is * responsible for closing this Connection. * @param sql * The SQL to execute. * @param params * An array of query replacement parameters. Each row in this * array is one set of batch replacement values. * @return The number of rows updated per statement. * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ public int[] batch(Connection conn, String sql, Object[][] params) throws SQLException { PreparedStatement stmt = null; int[] rows = null; try { if(log.isDebugEnabled()){ log.debug(sql); } stmt = this.prepareStatement(conn, sql); for (int i = 0; i < params.length; i++) { this.fillStatement(stmt, params[i]); stmt.addBatch(); } rows = stmt.executeBatch(); } catch (SQLException e) { this.rethrow(e, sql, (Object[]) params); } finally { close(stmt); } return rows; } /** * Execute a batch of SQL INSERT, UPDATE, or DELETE queries. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. 
This <code>Connection</code> must be in * auto-commit mode or the update will not be saved. * * @param sql * The SQL to execute. * @param params * An array of query replacement parameters. Each row in this * array is one set of batch replacement values. * @return The number of rows updated per statement. * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ public int[] batch(String sql, Object[][] params) throws SQLException { Connection conn = this.prepareConnection(); try { if(log.isDebugEnabled()){ log.debug(sql); } return this.batch(conn, sql, params); } finally { close(conn); } } /** * Fill the <code>PreparedStatement</code> replacement parameters with the * given objects. * * @param stmt * PreparedStatement to fill * @param params * Query replacement parameters; <code>null</code> is a valid * value to pass in. * @throws SQLException * if a database access error occurs */ public void fillStatement(PreparedStatement stmt, Object... params) throws SQLException { if (params == null) { return; } ParameterMetaData pmd = null; if (!pmdKnownBroken) { pmd = stmt.getParameterMetaData(); if (pmd.getParameterCount() < params.length) { throw new SQLException("Too many parameters: expected " + pmd.getParameterCount() + ", was given " + params.length); } } for (int i = 0; i < params.length; i++) { if (params[i] != null) { if (params[i] instanceof java.lang.String && params[i].toString().length() < 1) { int sqlType = Types.VARCHAR; if (!pmdKnownBroken) { try { sqlType = pmd.getParameterType(i + 1); } catch (SQLException e) { pmdKnownBroken = true; } } stmt.setNull(i + 1, sqlType); } else { stmt.setObject(i + 1, params[i]); } } else { // VARCHAR works with many drivers regardless // of the actual column type. Oddly, NULL and // OTHER don't work with Oracle's drivers. 
int sqlType = Types.VARCHAR; if (!pmdKnownBroken) { try { sqlType = pmd.getParameterType(i + 1); } catch (SQLException e) { pmdKnownBroken = true; } } stmt.setNull(i + 1, sqlType); } } } /** * Fill the <code>PreparedStatement</code> replacement parameters with the * given object's bean property values. * * @param stmt * PreparedStatement to fill * @param bean * a JavaBean object * @param properties * an ordered array of properties; this gives the order to insert * values in the statement * @throws SQLException * if a database access error occurs */ public void fillStatementWithBean(PreparedStatement stmt, Object bean, PropertyDescriptor[] properties) throws SQLException { Object[] params = new Object[properties.length]; for (int i = 0; i < properties.length; i++) { PropertyDescriptor property = properties[i]; Object value = null; Method method = property.getReadMethod(); if (method == null) { throw new RuntimeException("No read method for bean property " + bean.getClass() + " " + property.getName()); } try { value = method.invoke(bean, new Object[0]); } catch (InvocationTargetException e) { throw new RuntimeException("Couldn't invoke method: " + method, e); } catch (IllegalArgumentException e) { throw new RuntimeException( "Couldn't invoke method with 0 arguments: " + method, e); } catch (IllegalAccessException e) { throw new RuntimeException("Couldn't invoke method: " + method, e); } params[i] = value; } fillStatement(stmt, params); } /** * Fill the <code>PreparedStatement</code> replacement parameters with the * given object's bean property values. * * @param stmt * PreparedStatement to fill * @param bean * a JavaBean object * @param propertyNames * an ordered array of property names (these should match the * getters/setters); this gives the order to insert values in the * statement * @throws SQLException * if a database access error occurs */ public void fillStatementWithBean(PreparedStatement stmt, Object bean, String... 
propertyNames) throws SQLException { PropertyDescriptor[] descriptors; try { descriptors = Introspector.getBeanInfo(bean.getClass()) .getPropertyDescriptors(); } catch (IntrospectionException e) { throw new RuntimeException("Couldn't introspect bean " + bean.getClass().toString(), e); } PropertyDescriptor[] sorted = new PropertyDescriptor[propertyNames.length]; for (int i = 0; i < propertyNames.length; i++) { String propertyName = propertyNames[i]; if (propertyName == null) { throw new NullPointerException("propertyName can't be null: " + i); } boolean found = false; for (int j = 0; j < descriptors.length; j++) { PropertyDescriptor descriptor = descriptors[j]; if (propertyName.equals(descriptor.getName())) { sorted[i] = descriptor; found = true; break; } } if (!found) { throw new RuntimeException("Couldn't find bean property: " + bean.getClass() + " " + propertyName); } } fillStatementWithBean(stmt, bean, sorted); } /** * Returns the <code>DataSource</code> this runner is using. * <code>JDBCPaginRunner</code> methods always call this method to get the * <code>DataSource</code> so subclasses can provide specialized behavior. * * @return DataSource the runner is using */ public DataSource getDataSource() { return this.ds; } /** * Factory method that creates and initializes a * <code>PreparedStatement</code> object for the given SQL. * <code>JDBCPaginRunner</code> methods always call this method to prepare * statements for them. Subclasses can override this method to provide * special PreparedStatement configuration if needed. This implementation * simply calls <code>conn.prepareStatement(sql)</code>. * * @param conn * The <code>Connection</code> used to create the * <code>PreparedStatement</code> * @param sql * The SQL statement to prepare. * @return An initialized <code>PreparedStatement</code>. 
* @throws SQLException * if a database access error occurs */ protected PreparedStatement prepareStatement(Connection conn, String sql) throws SQLException { return conn.prepareStatement(sql); } /** * 提供优化查询接口-自定义查询方式 */ protected PreparedStatement prepareStatement(Connection conn, String sql,int resultSetType, int resultSetConcurrency) throws SQLException { return conn.prepareStatement(sql,resultSetType,resultSetConcurrency); } /** * Factory method that creates and initializes a <code>Connection</code> * object. <code>JDBCPaginRunner</code> methods always call this method to * retrieve connections from its DataSource. Subclasses can override this * method to provide special <code>Connection</code> configuration if * needed. This implementation simply calls <code>ds.getConnection()</code>. * * @return An initialized <code>Connection</code>. * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ protected Connection prepareConnection() throws SQLException { if (this.getDataSource() == null) { throw new SQLException( "JDBCPaginRunner requires a DataSource to be " + "invoked in this way, or a Connection should be passed in"); } return this.getDataSource().getConnection(); } /** * Execute an SQL SELECT query with a single replacement parameter. The * caller is responsible for closing the connection. * * @param <T> * The type of object that the handler returns * @param conn * The connection to execute the query in. * @param sql * The query to execute. * @param param * The replacement parameter. * @param rsh * The handler that converts the results into an object. * @return The object returned by the handler. 
* @throws SQLException * if a database access error occurs * @deprecated Use * {@link #query(Connection, String, ResultSetHandler, Object...)} */ public <T> T query(Connection conn, String sql, Object param, ResultSetHandler<T> rsh) throws SQLException { return this.<T> query(conn, sql, rsh, new Object[] { param }); } /** * Execute an SQL SELECT query with replacement parameters. The caller is * responsible for closing the connection. * * @param <T> * The type of object that the handler returns * @param conn * The connection to execute the query in. * @param sql * The query to execute. * @param params * The replacement parameters. * @param rsh * The handler that converts the results into an object. * @return The object returned by the handler. * @throws SQLException * if a database access error occurs * @deprecated Use * {@link #query(Connection,String,ResultSetHandler,Object...)} * instead */ public <T> T query(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh) throws SQLException { return query(conn, sql, rsh, params); } /** * Execute an SQL SELECT query with replacement parameters. The caller is * responsible for closing the connection. * * @param <T> * The type of object that the handler returns * @param conn * The connection to execute the query in. * @param sql * The query to execute. * @param rsh * The handler that converts the results into an object. * @param params * The replacement parameters. * @return The object returned by the handler. * @throws SQLException * if a database access error occurs */ public <T> T query(Connection conn, String sql, ResultSetHandler<T> rsh, Object... 
params) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; T result = null; try { // if (dataBaseVersion == null) { SQLVersion.VersionName dataBaseVersion = SQLVersion.getVersionName(conn.getMetaData()); // } if(log.isDebugEnabled()){ log.debug(sql); } if(dataBaseVersion != SQLVersion.VersionName.Sqlite){ stmt = this.prepareStatement(conn,sql,ResultSet.TYPE_SCROLL_INSENSITIVE,ResultSet.CONCUR_READ_ONLY); }else{ stmt=this.prepareStatement(conn, sql); } this.fillStatement(stmt, params); rs = this.wrap(stmt.executeQuery()); result = rsh.handle(rs); } catch (SQLException e) { this.rethrow(e, sql, params); } finally { try { close(rs); } finally { close(stmt); } } return result; } /** * Execute an SQL SELECT query without any replacement parameters. The * caller is responsible for closing the connection. * * @param <T> * The type of object that the handler returns * @param conn * The connection to execute the query in. * @param sql * The query to execute. * @param rsh * The handler that converts the results into an object. * @return The object returned by the handler. * @throws SQLException * if a database access error occurs */ public <T> T query(Connection conn, String sql, ResultSetHandler<T> rsh) throws SQLException { return this.query(conn, sql, rsh, (Object[]) null); } /** * Executes the given SELECT SQL with a single replacement parameter. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. * * @param <T> * The type of object that the handler returns * @param sql * The SQL statement to execute. * @param param * The replacement parameter. * @param rsh * The handler used to create the result object from the * <code>ResultSet</code>. * * @return An object generated by the handler. 
* @throws SQLException * if a database access error occurs * @deprecated Use {@link #query(String, ResultSetHandler, Object...)} */ public <T> T query(String sql, Object param, ResultSetHandler<T> rsh) throws SQLException { return this.query(sql, rsh, new Object[] { param }); } /** * Executes the given SELECT SQL query and returns a result object. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. * * @param <T> * The type of object that the handler returns * @param sql * The SQL statement to execute. * @param params * Initialize the PreparedStatement's IN parameters with this * array. * * @param rsh * The handler used to create the result object from the * <code>ResultSet</code>. * * @return An object generated by the handler. * @throws SQLException * if a database access error occurs * @deprecated Use {@link #query(String, ResultSetHandler, Object...)} */ public <T> T query(String sql, Object[] params, ResultSetHandler<T> rsh) throws SQLException { return query(sql, rsh, params); } /** * Executes the given SELECT SQL query and returns a result object. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. * * @param <T> * The type of object that the handler returns * @param sql * The SQL statement to execute. * @param rsh * The handler used to create the result object from the * <code>ResultSet</code>. * @param params * Initialize the PreparedStatement's IN parameters with this * array. * @return An object generated by the handler. * @throws SQLException * if a database access error occurs */ public <T> T query(String sql, ResultSetHandler<T> rsh, Object... 
params) throws SQLException { Connection conn = this.prepareConnection(); try { return this.query(conn, sql, rsh, params); } finally { close(conn); } } public java.sql.ResultSet executeQuery(Connection connection,String sql,Object...obj) throws SQLException { java.sql.ResultSet rs=null; java.sql.PreparedStatement ps=null; try { if(log.isDebugEnabled()){ log.debug(sql); } ps=connection.prepareStatement(sql,ResultSet.TYPE_SCROLL_INSENSITIVE,ResultSet.CONCUR_READ_ONLY); for(int i=0;obj!=null&&i<obj.length;i++){ ps.setObject(i+1, obj[i]); } rs=ps.executeQuery(); javax.sql.rowset.CachedRowSet rowSet=new com.sun.rowset.CachedRowSetImpl(); rowSet.populate(rs); return rowSet; }catch (SQLException e) { e.printStackTrace(); this.rethrow(e, sql, obj); } catch (Exception e) { throw new SQLException("查询出错(" + e.getMessage() + ")!", e); }finally { try{ if(rs!=null){ close(rs); } }finally{ if(ps!=null){ close(ps); } } } return null; } /** * Executes the given SELECT SQL without any replacement parameters. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. * * @param <T> * The type of object that the handler returns * @param sql * The SQL statement to execute. * @param rsh * The handler used to create the result object from the * <code>ResultSet</code>. * * @return An object generated by the handler. * @throws SQLException * if a database access error occurs */ public <T> T query(String sql, ResultSetHandler<T> rsh) throws SQLException { return this.query(sql, rsh, (Object[]) null); } /** * Throws a new exception with a more informative error message. * * @param cause * The original exception that will be chained to the new * exception when it's rethrown. * * @param sql * The query that was executing when the exception happened. * * @param params * The query replacement parameters; <code>null</code> is a * valid value to pass in. 
* * @throws SQLException * if a database access error occurs */ protected void rethrow(SQLException cause, String sql, Object... params) throws SQLException { String causeMessage = cause.getMessage(); if (causeMessage == null) { causeMessage = ""; } StringBuilder msg = new StringBuilder(causeMessage); msg.append(" Query: "); msg.append(sql); if (params != null) { msg.append(" Parameters: "); msg.append(Arrays.deepToString(params)); } SQLException e = new SQLException(msg.toString(), cause.getSQLState(), cause.getErrorCode()); e.setNextException(cause); throw e; } /** * Execute an SQL INSERT, UPDATE, or DELETE query without replacement * parameters. * * @param conn * The connection to use to run the query. * @param sql * The SQL to execute. * @return The number of rows updated. * @throws SQLException * if a database access error occurs */ public int update(Connection conn, String sql) throws SQLException { return this.update(conn, sql, (Object[]) null); } /** * Execute an SQL INSERT, UPDATE, or DELETE query with a single replacement * parameter. * * @param conn * The connection to use to run the query. * @param sql * The SQL to execute. * @param param * The replacement parameter. * @return The number of rows updated. * @throws SQLException * if a database access error occurs */ public int update(Connection conn, String sql, Object param) throws SQLException { return this.update(conn, sql, new Object[] { param }); } /** * Execute an SQL INSERT, UPDATE, or DELETE query. * * @param conn * The connection to use to run the query. * @param sql * The SQL to execute. * @param params * The query replacement parameters. * @return The number of rows updated. * @throws SQLException * if a database access error occurs */ public int update(Connection conn, String sql, Object... 
params) throws SQLException { PreparedStatement stmt = null; int rows = 0; try { if(log.isDebugEnabled()){ log.debug(sql); } stmt = this.prepareStatement(conn, sql); this.fillStatement(stmt, params); rows = stmt.executeUpdate(); } catch (SQLException e) { this.rethrow(e, sql, params); } finally { close(stmt); } return rows; } /** * Executes the given INSERT, UPDATE, or DELETE SQL statement without any * replacement parameters. The <code>Connection</code> is retrieved from * the <code>DataSource</code> set in the constructor. This * <code>Connection</code> must be in auto-commit mode or the update will * not be saved. * * @param sql * The SQL statement to execute. * @throws SQLException * if a database access error occurs * @return The number of rows updated. */ public int update(String sql) throws SQLException { return this.update(sql, (Object[]) null); } /** * Executes the given INSERT, UPDATE, or DELETE SQL statement with a single * replacement parameter. The <code>Connection</code> is retrieved from * the <code>DataSource</code> set in the constructor. This * <code>Connection</code> must be in auto-commit mode or the update will * not be saved. * * @param sql * The SQL statement to execute. * @param param * The replacement parameter. * @throws SQLException * if a database access error occurs * @return The number of rows updated. */ public int update(String sql, Object param) throws SQLException { return this.update(sql, new Object[] { param }); } /** * Executes the given INSERT, UPDATE, or DELETE SQL statement. The * <code>Connection</code> is retrieved from the <code>DataSource</code> * set in the constructor. This <code>Connection</code> must be in * auto-commit mode or the update will not be saved. * * @param sql * The SQL statement to execute. * @param params * Initializes the PreparedStatement's IN (i.e. '?') parameters. * @throws SQLException * if a database access error occurs * @return The number of rows updated. 
*/ public int update(String sql, Object... params) throws SQLException { Connection conn = this.prepareConnection(); try { return this.update(conn, sql, params); } finally { close(conn); } } /** * Wrap the <code>ResultSet</code> in a decorator before processing it. * This implementation returns the <code>ResultSet</code> it is given * without any decoration. * * <p> * Often, the implementation of this method can be done in an anonymous * inner class like this: * </p> * * <pre> * JDBCPaginRunner run = new JDBCPaginRunner() { * protected ResultSet wrap(ResultSet rs) { * return StringTrimmedResultSet.wrap(rs); * } * }; * </pre> * * @param rs * The <code>ResultSet</code> to decorate; never * <code>null</code>. * @return The <code>ResultSet</code> wrapped in some decorator. */ protected ResultSet wrap(ResultSet rs) { return rs; } /** * Close a <code>Connection</code>. This implementation avoids closing if * null and does <strong>not</strong> suppress any exceptions. Subclasses * can override to provide special handling like logging. * * @param conn * Connection to close * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ protected void close(Connection conn) throws SQLException { DbUtils.close(conn); } /** * Close a <code>Statement</code>. This implementation avoids closing if * null and does <strong>not</strong> suppress any exceptions. Subclasses * can override to provide special handling like logging. * * @param stmt * Statement to close * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ protected void close(Statement stmt) throws SQLException { DbUtils.close(stmt); } /** * Close a <code>ResultSet</code>. This implementation avoids closing if * null and does <strong>not</strong> suppress any exceptions. Subclasses * can override to provide special handling like logging. 
* * @param rs * ResultSet to close * @throws SQLException * if a database access error occurs * @since DbUtils 1.1 */ protected void close(ResultSet rs) throws SQLException { DbUtils.close(rs); } /** * Oracle 分页 * * @param <T> * @param conn * @param sql * @param params * @param rsh * @param start * @param end * @return * @throws SQLException */ public <T> T limitOracle(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh, int start, int end) throws SQLException { T result = null; ResultSet rs = null; PreparedStatement ps = null; try { sql = "select * from (select row_.*,rownum rownum_ from ( " + sql + ")row_ where rownum <= " + end + ") where rownum_ > " + start; if(log.isDebugEnabled()){ log.debug(sql); } ps = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, java.sql.ResultSet.CONCUR_READ_ONLY); if (params != null && params.length > 0) { this.fillStatement(ps, params); } rs = ps.executeQuery(); if (rs.next()) { rs.beforeFirst(); return rsh.handle(rs); } } finally { if (rs != null) { DbUtils.closeQuietly(rs); } if (ps != null) { DbUtils.closeQuietly(ps); } } return result; } public <T> T limitSqlLit(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh, int start, int end) throws SQLException { T result = null; ResultSet rs = null; PreparedStatement ps = null; try { sql = sql + " limit " + (start > 0 ? 
(start + "," + end) : end); if(log.isDebugEnabled()){ log.debug(sql); } ps = conn.prepareStatement(sql); if (params != null && params.length > 0) { this.fillStatement(ps, params); } rs = ps.executeQuery(); return rsh.handle(rs); } finally { if (rs != null) { DbUtils.closeQuietly(rs); } if (ps != null) { DbUtils.closeQuietly(ps); } } } /** * Mysql方式分页处理 * * @param <T> * @param conn * @param sql * @param params * @param rsh * @param start * @param end * @return * @throws SQLException */ public <T> T limitMysql(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh, int start, int end) throws SQLException { T result = null; ResultSet rs = null; PreparedStatement ps = null; try { sql = sql + " limit " + (start > 0 ? (start + "," + end) : end); if(log.isDebugEnabled()){ log.debug(sql); } ps = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE,java.sql.ResultSet.CONCUR_READ_ONLY); if (params != null && params.length > 0) { this.fillStatement(ps, params); } rs = ps.executeQuery(); if (rs.next()) { rs.beforeFirst(); return rsh.handle(rs); } } finally { if (rs != null) { DbUtils.closeQuietly(rs); } if (ps != null) { DbUtils.closeQuietly(ps); } } return result; } /** * Mysql方式分页处理 * * @param <T> * @param conn * @param sql * @param params * @param rsh * @param start * @param end * @return * @throws SQLException */ public <T> T limitMSsql(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh, int start, int end) throws SQLException { /* * * 2)、MSSQL: @sql limit @start,@end */ T result = null; ResultSet rs = null; PreparedStatement ps = null; try { sql = sql.replaceFirst("select", "select top " + end + " "); if(log.isDebugEnabled()){ log.debug(sql); } ps = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, java.sql.ResultSet.CONCUR_READ_ONLY); if (params != null && params.length > 0) { this.fillStatement(ps, params); } ps.setMaxRows(end); rs = ps.executeQuery(); if (rs.next()) { rs.absolute(start); result = rsh.handle(rs); } } 
finally { if (rs != null) { rs.close(); } if (ps != null) { ps.close(); } // if(conn!=null){ // conn.close(); // } } return result; } /** * 指针分页 * * @param <T> * @param conn * 数据库的连接 * @param sql * 执行的sql * @param params * 执行sql补充的参数 * @param rsh * 数据处理方法(接口) * @param start * 指针开始的位置 * @param count * 指针返回的最大记录数量(用此便可实现sqlserver 分页) * @throws Exception */ public <T> T limit(Connection conn, String sql, Object[] params, ResultSetHandler<T> rsh, int start, int count) throws SQLException { // if (dataBaseVersion == null) { SQLVersion.VersionName dataBaseVersion = SQLVersion.getVersionName(conn.getMetaData()); // } if (dataBaseVersion == SQLVersion.VersionName.SqlServer) {// 采用Sqlserver分页 jdts1.2.5+sql2005测试 start,count成功 return limitMSsql(conn, sql, params, rsh, start, start+count); } else if (dataBaseVersion == SQLVersion.VersionName.MySql) {// 采用Mysql方式分页 return limitMysql(conn, sql, params, rsh, start, count); } else if(dataBaseVersion == SQLVersion.VersionName.Sqlite){ return limitSqlLit(conn, sql, params, rsh, start, count); }else if (dataBaseVersion == SQLVersion.VersionName.Oracle) {// 采用Oracle方式分页 return limitOracle(conn, sql, params, rsh, start, start + count); } else { throw new SQLException("un support DataBase version to paginQuery!"); } } /** * 分页查询 * * @param dataSource * 数据源JNDI * @param countSql * 查询数据总记录的sql * @param queryAllSql * 查询当前页内容的sql * @param params * 查询时的参数 * @param rsh * 结果集 * @param processor * 行处理类 * @param pageNum * 当前页页码 * @param maxRow * 每页显示总数 * @return Pagin * @throws Exception */ public Pagin pagin(Connection connection, final String countSql, final String queryAllSql, final Object[] params, final ResultSetHandler rsh, final RowProcessor processor, final int pageNum, final int maxRow) throws SQLException { ResultSet rSet = null; Statement statement = null; PreparedStatement pStatement = null; try { int maxCount = 0; if(log.isDebugEnabled()){ log.debug(countSql); } // 不需要传参数 if (countSql.indexOf("?") == -1 || (params == null || 
params.length < 1)) { statement = connection.createStatement(); rSet = statement.executeQuery(countSql); rSet.next(); maxCount = rSet.getInt(1); rSet.close(); } else { pStatement = connection.prepareStatement(countSql, ResultSet.TYPE_SCROLL_INSENSITIVE, java.sql.ResultSet.CONCUR_READ_ONLY); this.fillStatement(pStatement, params); rSet = pStatement.executeQuery(); rSet.next(); maxCount = rSet.getInt(1); rSet.close(); } List list = (List) limit(connection, queryAllSql, params, rsh, (pageNum - 1) * maxRow, maxRow); return new Pagin(pageNum, maxCount, maxRow, list); } catch (SQLException e) { e.printStackTrace(); this.rethrow(e, countSql + "\n" + queryAllSql, params); } catch (Exception e) { throw new SQLException("分页出错(" + e.getMessage() + ")!", e); } finally { if (rSet != null) { try { rSet.close(); } catch (Exception x) { } } if (statement != null) { try { statement.close(); } catch (Exception x) { } } if (pStatement != null) { try { pStatement.close(); } catch (Exception x) { } } } return new Pagin(pageNum, maxRow, maxRow, new ArrayList(0)); } public static Long getCount() { return count; } }
apache-2.0
googleapis/java-dialogflow-cx
proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/DtmfInput.java
24423
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/session.proto package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * Represents the input for dtmf event. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.DtmfInput} */ public final class DtmfInput extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.DtmfInput) DtmfInputOrBuilder { private static final long serialVersionUID = 0L; // Use DtmfInput.newBuilder() to construct. 
private DtmfInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DtmfInput() { digits_ = ""; finishDigit_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DtmfInput(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DtmfInput( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); digits_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); finishDigit_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.SessionProto .internal_static_google_cloud_dialogflow_cx_v3_DtmfInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.SessionProto .internal_static_google_cloud_dialogflow_cx_v3_DtmfInput_fieldAccessorTable 
.ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.DtmfInput.class, com.google.cloud.dialogflow.cx.v3.DtmfInput.Builder.class); } public static final int DIGITS_FIELD_NUMBER = 1; private volatile java.lang.Object digits_; /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @return The digits. */ @java.lang.Override public java.lang.String getDigits() { java.lang.Object ref = digits_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); digits_ = s; return s; } } /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @return The bytes for digits. */ @java.lang.Override public com.google.protobuf.ByteString getDigitsBytes() { java.lang.Object ref = digits_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); digits_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FINISH_DIGIT_FIELD_NUMBER = 2; private volatile java.lang.Object finishDigit_; /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @return The finishDigit. */ @java.lang.Override public java.lang.String getFinishDigit() { java.lang.Object ref = finishDigit_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); finishDigit_ = s; return s; } } /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @return The bytes for finishDigit. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFinishDigitBytes() { java.lang.Object ref = finishDigit_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); finishDigit_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(digits_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, digits_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(finishDigit_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, finishDigit_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(digits_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, digits_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(finishDigit_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, finishDigit_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.DtmfInput)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.DtmfInput other = (com.google.cloud.dialogflow.cx.v3.DtmfInput) obj; if (!getDigits().equals(other.getDigits())) return false; if (!getFinishDigit().equals(other.getFinishDigit())) return false; if 
(!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DIGITS_FIELD_NUMBER; hash = (53 * hash) + getDigits().hashCode(); hash = (37 * hash) + FINISH_DIGIT_FIELD_NUMBER; hash = (53 * hash) + getFinishDigit().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom(java.io.InputStream input) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3.DtmfInput prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents the input for dtmf event. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.DtmfInput} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.DtmfInput) com.google.cloud.dialogflow.cx.v3.DtmfInputOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.SessionProto .internal_static_google_cloud_dialogflow_cx_v3_DtmfInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.SessionProto .internal_static_google_cloud_dialogflow_cx_v3_DtmfInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.DtmfInput.class, com.google.cloud.dialogflow.cx.v3.DtmfInput.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.DtmfInput.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); digits_ = ""; finishDigit_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.SessionProto .internal_static_google_cloud_dialogflow_cx_v3_DtmfInput_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.DtmfInput 
getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.DtmfInput.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.DtmfInput build() { com.google.cloud.dialogflow.cx.v3.DtmfInput result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.DtmfInput buildPartial() { com.google.cloud.dialogflow.cx.v3.DtmfInput result = new com.google.cloud.dialogflow.cx.v3.DtmfInput(this); result.digits_ = digits_; result.finishDigit_ = finishDigit_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.DtmfInput) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.DtmfInput) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.DtmfInput other) { if (other == com.google.cloud.dialogflow.cx.v3.DtmfInput.getDefaultInstance()) return this; if 
(!other.getDigits().isEmpty()) { digits_ = other.digits_; onChanged(); } if (!other.getFinishDigit().isEmpty()) { finishDigit_ = other.finishDigit_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.cx.v3.DtmfInput parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.cx.v3.DtmfInput) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object digits_ = ""; /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @return The digits. */ public java.lang.String getDigits() { java.lang.Object ref = digits_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); digits_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @return The bytes for digits. */ public com.google.protobuf.ByteString getDigitsBytes() { java.lang.Object ref = digits_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); digits_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @param value The digits to set. * @return This builder for chaining. 
*/ public Builder setDigits(java.lang.String value) { if (value == null) { throw new NullPointerException(); } digits_ = value; onChanged(); return this; } /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @return This builder for chaining. */ public Builder clearDigits() { digits_ = getDefaultInstance().getDigits(); onChanged(); return this; } /** * * * <pre> * The dtmf digits. * </pre> * * <code>string digits = 1;</code> * * @param value The bytes for digits to set. * @return This builder for chaining. */ public Builder setDigitsBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); digits_ = value; onChanged(); return this; } private java.lang.Object finishDigit_ = ""; /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @return The finishDigit. */ public java.lang.String getFinishDigit() { java.lang.Object ref = finishDigit_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); finishDigit_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @return The bytes for finishDigit. */ public com.google.protobuf.ByteString getFinishDigitBytes() { java.lang.Object ref = finishDigit_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); finishDigit_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @param value The finishDigit to set. * @return This builder for chaining. 
*/ public Builder setFinishDigit(java.lang.String value) { if (value == null) { throw new NullPointerException(); } finishDigit_ = value; onChanged(); return this; } /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @return This builder for chaining. */ public Builder clearFinishDigit() { finishDigit_ = getDefaultInstance().getFinishDigit(); onChanged(); return this; } /** * * * <pre> * The finish digit (if any). * </pre> * * <code>string finish_digit = 2;</code> * * @param value The bytes for finishDigit to set. * @return This builder for chaining. */ public Builder setFinishDigitBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); finishDigit_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.DtmfInput) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.DtmfInput) private static final com.google.cloud.dialogflow.cx.v3.DtmfInput DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.DtmfInput(); } public static com.google.cloud.dialogflow.cx.v3.DtmfInput getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DtmfInput> PARSER = new com.google.protobuf.AbstractParser<DtmfInput>() { @java.lang.Override public DtmfInput parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DtmfInput(input, extensionRegistry); } }; public 
static com.google.protobuf.Parser<DtmfInput> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DtmfInput> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.DtmfInput getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
MegatronKing/SVG-Android
svg-generator/src/main/java/com/github/megatronking/svg/generator/svg/parser/element/SymbolElementParser.java
1205
/*
 * Copyright (C) 2017, Megatron King
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.github.megatronking.svg.generator.svg.parser.element;

import com.github.megatronking.svg.generator.svg.model.Symbol;
import com.github.megatronking.svg.generator.svg.parser.SvgGroupNodeAbstractElementParser;
import com.github.megatronking.svg.generator.svg.parser.attribute.SymbolAttributeParser;

/**
 * Parse the svg's symbol element.
 *
 * @author Megatron King
 * @since 2016/11/23 10:46
 */
public class SymbolElementParser extends SvgGroupNodeAbstractElementParser<Symbol> {

    /**
     * Creates a symbol-element parser whose attribute handling is delegated
     * to a {@link SymbolAttributeParser}; everything else is inherited from
     * {@link SvgGroupNodeAbstractElementParser}.
     */
    public SymbolElementParser() {
        super(new SymbolAttributeParser());
    }
}
apache-2.0
Jackygq1982/hbase_src
hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java
1867
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.types;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order;

/**
 * A {@code DataType} that encodes fixed-length values encoded using
 * {@link Bytes#toBytes(String)}. Intended to make it easier to transition
 * away from direct use of {@link Bytes}.
 * @see Bytes#toBytes(String)
 * @see Bytes#toString(byte[], int, int)
 * @see RawString
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RawStringFixedLength extends FixedLengthWrapper<String> {

  /**
   * Create a {@code RawStringFixedLength} using the specified
   * {@code order} and {@code length}.
   *
   * @param order the sort {@link Order} applied to encoded values.
   * @param length the fixed length of encoded values.
   */
  public RawStringFixedLength(Order order, int length) {
    super(new RawString(order), length);
  }

  /**
   * Create a {@code RawStringFixedLength} of the specified {@code length},
   * using the default sort order of {@link RawString}.
   *
   * @param length the fixed length of encoded values.
   */
  public RawStringFixedLength(int length) {
    super(new RawString(), length);
  }
}
apache-2.0
OpenTOSCA/container
org.opentosca.bus/org.opentosca.bus.management.api.soaphttp/src/main/java/org/opentosca/bus/management/api/soaphttp/model/ReceiveNotifyPartners.java
5060
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference // Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2018.07.16 at 01:55:00 PM CEST // package org.opentosca.bus.management.api.soaphttp.model; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * Java class for ReceiveNotifyPartners complex type. */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "receiveNotifyPartners", propOrder = {"planCorrelation", "planChorCorrelation", "csarID", "serviceTemplateIDNamespaceURI", "serviceTemplateIDLocalPart", "messageID", "params", "doc"}) public class ReceiveNotifyPartners { @XmlElement(name = "PlanCorrelationID") protected String planCorrelation; @XmlElement(name = "PlanChorCorrelationID") protected String planChorCorrelation; @XmlElement(name = "CsarID", required = true) protected String csarID; @XmlElement(name = "ServiceTemplateIDNamespaceURI", required = true) protected String serviceTemplateIDNamespaceURI; @XmlElement(name = "ServiceTemplateIDLocalPart", required = true) protected String serviceTemplateIDLocalPart; @XmlElement(name = "MessageID", required = true) protected String messageID; @XmlElement(name = "Params") protected ParamsMap params; @XmlElement(name = "Doc") protected Doc doc; /** * Gets the value of the PlanCorrelationID property. * * @return possible object is {@link String } */ public String getPlanCorrelationID() { return this.planCorrelation; } /** * Sets the value of the PlanCorrelationID property. * * @param value allowed object is {@link String } */ public void setPlanCorrelationID(final String value) { this.planCorrelation = value; } /** * Gets the value of the csarID property. 
* * @return possible object is {@link String } */ public String getCsarID() { return this.csarID; } /** * Sets the value of the csarID property. * * @param value allowed object is {@link String } */ public void setCsarID(final String value) { this.csarID = value; } /** * Gets the value of the serviceTemplateIDNamespaceURI property. * * @return possible object is {@link String } */ public String getServiceTemplateIDNamespaceURI() { return this.serviceTemplateIDNamespaceURI; } /** * Sets the value of the serviceTemplateIDNamespaceURI property. * * @param value allowed object is {@link String } */ public void setServiceTemplateIDNamespaceURI(final String value) { this.serviceTemplateIDNamespaceURI = value; } /** * Gets the value of the serviceTemplateIDLocalPart property. * * @return possible object is {@link String } */ public String getServiceTemplateIDLocalPart() { return this.serviceTemplateIDLocalPart; } /** * Sets the value of the serviceTemplateIDLocalPart property. * * @param value allowed object is {@link String } */ public void setServiceTemplateIDLocalPart(final String value) { this.serviceTemplateIDLocalPart = value; } /** * Gets the value of the messageID property. * * @return possible object is {@link String } */ public String getMessageID() { return this.messageID; } /** * Sets the value of the messageID property. * * @param value allowed object is {@link String } */ public void setMessageID(final String value) { this.messageID = value; } /** * Gets the value of the params property. * * @return possible object is {@link ParamsMap } */ public ParamsMap getParams() { return this.params; } /** * Sets the value of the params property. * * @param value allowed object is {@link ParamsMap } */ public void setParams(final ParamsMap value) { this.params = value; } /** * Gets the value of the doc property. * * @return possible object is {@link Doc } */ public Doc getDoc() { return this.doc; } /** * Sets the value of the doc property. 
* * @param value allowed object is {@link Doc } */ public void setDoc(final Doc value) { this.doc = value; } /** * @return the planChorCorrelation */ public String getPlanChorCorrelation() { return planChorCorrelation; } /** * @param planChorCorrelation the planChorCorrelation to set */ public void setPlanChorCorrelation(String planChorCorrelation) { this.planChorCorrelation = planChorCorrelation; } }
apache-2.0
barnyard/pi
freepastry/src/rice/pastry/direct/DirectPastryNode.java
3714
/******************************************************************************* "FreePastry" Peer-to-Peer Application Development Substrate Copyright 2002-2007, Rice University. Copyright 2006-2007, Max Planck Institute for Software Systems. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Rice University (RICE), Max Planck Institute for Software Systems (MPI-SWS) nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. This software is provided by RICE, MPI-SWS and the contributors on an "as is" basis, without any representations or warranties of any kind, express or implied including, but not limited to, representations or warranties of non-infringement, merchantability or fitness for a particular purpose. In no event shall RICE, MPI-SWS or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. 
*******************************************************************************/ package rice.pastry.direct; import java.util.Hashtable; import java.util.Map; import org.mpisws.p2p.transport.SocketRequestHandle; import rice.environment.Environment; import rice.environment.logging.Logger; import rice.p2p.commonapi.appsocket.AppSocketReceiver; import rice.p2p.commonapi.rawserialization.InputBuffer; import rice.pastry.Id; import rice.pastry.NodeHandle; import rice.pastry.PastryNode; import rice.pastry.ReadyStrategy; import rice.pastry.ScheduledMessage; import rice.pastry.client.PastryAppl; import rice.pastry.join.InitiateJoin; import rice.pastry.messaging.Message; import rice.pastry.routing.RouteMessage; import rice.pastry.transport.PMessageNotification; import rice.pastry.transport.PMessageReceipt; import rice.selector.SelectorManager; import rice.selector.Timer; /** * Direct pastry node. Subclasses PastryNode, and does about nothing else. * * @version $Id: DirectPastryNode.java 4221 2008-05-19 16:41:19Z jeffh $ * * @author Sitaram Iyer */ public class DirectPastryNode { /** * Used for proximity calculation of DirectNodeHandle. This will probably go * away when we switch to a byte-level protocol. */ static private Hashtable<Thread, PastryNode> currentNode = new Hashtable<Thread, PastryNode>(); /** * Returns the previous one. * * @param dnh * @return */ public static synchronized PastryNode setCurrentNode(PastryNode dpn) { Thread current = Thread.currentThread(); PastryNode ret = currentNode.get(current); if (dpn == null) { currentNode.remove(current); } else { currentNode.put(current, dpn); } return ret; } public static synchronized PastryNode getCurrentNode() { Thread current = Thread.currentThread(); PastryNode ret = currentNode.get(current); return ret; } }
apache-2.0
javers/javers
javers-persistence-sql/src/main/java/org/javers/repository/sql/DialectName.java
551
package org.javers.repository.sql;

import org.polyjdbc.core.dialect.Dialect;
import org.polyjdbc.core.dialect.DialectRegistry;

/**
 * Supported SQL dialects. A proper JDBC driver .jar should be provided on the
 * classpath for the chosen dialect.
 *
 * @author bartosz walacik
 */
public enum DialectName {
    H2,
    POSTGRES,
    ORACLE,
    MYSQL,

    /** Microsoft SQL Server */
    MSSQL;

    /** Maps this dialect onto the PolyJDBC registry entry with the same name. */
    public DialectRegistry getPolyDialectName() {
        return DialectRegistry.valueOf(name());
    }

    /** Resolves the concrete PolyJDBC {@link Dialect} for this dialect name. */
    public Dialect getPolyDialect() {
        return getPolyDialectName().getDialect();
    }
}
apache-2.0
nms-htc/soccer-data
src/main/java/com/nms/football/model/Team.java
2684
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.nms.football.model; /** * Contain data of TEAM table. * * @author Cuong */ public class Team { private int id; private String code; private String name; private String country; private String stadium; private String homeLinkUrl; private String wikiLink; public Team() { } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getCode() { return code; } public void setCode(String code) { this.code = code; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getCountry() { return country; } public void setCountry(String country) { this.country = country; } public String getStadium() { return stadium; } public void setStadium(String stadium) { this.stadium = stadium; } public String getHomeLinkUrl() { return homeLinkUrl; } public void setHomeLinkUrl(String homeLinkUrl) { this.homeLinkUrl = homeLinkUrl; } public String getWikiLink() { return wikiLink; } public void setWikiLink(String wikiLink) { this.wikiLink = wikiLink; } @Override public int hashCode() { int hash = 0; if (id != 0) { hash += id; } if (code != null) { hash += code.hashCode(); } if (name != null) { hash += name.hashCode(); } if (country != null) { hash += country.hashCode(); } return hash; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Team other = (Team) obj; if (this.id != other.id) { return false; } if ((this.code == null) ? (other.code != null) : !this.code.equals(other.code)) { return false; } if ((this.name == null) ? (other.name != null) : !this.name.equals(other.name)) { return false; } return !((this.country == null) ? (other.country != null) : !this.country.equals(other.country)); } }
apache-2.0
mans2singh/nifi
nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/main/java/org/apache/nifi/atlas/reporting/ReportLineageToAtlas.java
44609
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.atlas.reporting; import static org.apache.commons.lang3.StringUtils.isEmpty; import static org.apache.nifi.reporting.util.provenance.ProvenanceEventConsumer.PROVENANCE_BATCH_SIZE; import static org.apache.nifi.reporting.util.provenance.ProvenanceEventConsumer.PROVENANCE_START_POSITION; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.time.Instant; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.ServiceLoader; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Stream; import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasServiceException; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.clients.producer.ProducerConfig; import 
org.apache.kafka.common.config.SslConfigs; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading; import org.apache.nifi.annotation.behavior.Stateful; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.annotation.lifecycle.OnUnscheduled; import org.apache.nifi.atlas.hook.NiFiAtlasHook; import org.apache.nifi.atlas.NiFiAtlasClient; import org.apache.nifi.atlas.NiFiFlow; import org.apache.nifi.atlas.NiFiFlowAnalyzer; import org.apache.nifi.atlas.provenance.AnalysisContext; import org.apache.nifi.atlas.provenance.StandardAnalysisContext; import org.apache.nifi.atlas.provenance.lineage.CompleteFlowPathLineage; import org.apache.nifi.atlas.provenance.lineage.LineageStrategy; import org.apache.nifi.atlas.provenance.lineage.SimpleFlowPathLineage; import org.apache.nifi.atlas.resolver.ClusterResolver; import org.apache.nifi.atlas.resolver.ClusterResolvers; import org.apache.nifi.atlas.resolver.RegexClusterResolver; import org.apache.nifi.atlas.security.AtlasAuthN; import org.apache.nifi.atlas.security.Basic; import org.apache.nifi.atlas.security.Kerberos; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.PropertyValue; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.controller.status.ProcessGroupStatus; import org.apache.nifi.kerberos.KerberosCredentialsService; import org.apache.nifi.expression.ExpressionLanguageScope; import 
org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.provenance.ProvenanceEventRecord; import org.apache.nifi.provenance.ProvenanceRepository; import org.apache.nifi.reporting.AbstractReportingTask; import org.apache.nifi.reporting.EventAccess; import org.apache.nifi.reporting.ReportingContext; import org.apache.nifi.reporting.util.provenance.ProvenanceEventConsumer; import org.apache.nifi.ssl.SSLContextService; import com.sun.jersey.api.client.ClientResponse; @Tags({"atlas", "lineage"}) @CapabilityDescription("Report NiFi flow data set level lineage to Apache Atlas." + " End-to-end lineages across NiFi environments and other systems can be reported if those are" + " connected by different protocols and data set, such as NiFi Site-to-Site, Kafka topic or Hive tables ... etc." + " Atlas lineage reported by this reporting task can be useful to grasp the high level relationships between processes and data sets," + " in addition to NiFi provenance events providing detailed event level lineage." + " See 'Additional Details' for further description and limitations.") @Stateful(scopes = Scope.LOCAL, description = "Stores the Reporting Task's last event Id so that on restart the task knows where it left off.") @DynamicProperty(name = "hostnamePattern.<ClusterName>", value = "hostname Regex patterns", description = RegexClusterResolver.PATTERN_PROPERTY_PREFIX_DESC, expressionLanguageScope = ExpressionLanguageScope.VARIABLE_REGISTRY) // In order for each reporting task instance to have its own static objects such as KafkaNotification. @RequiresInstanceClassLoading public class ReportLineageToAtlas extends AbstractReportingTask { static final PropertyDescriptor ATLAS_URLS = new PropertyDescriptor.Builder() .name("atlas-urls") .displayName("Atlas URLs") .description("Comma separated URL of Atlas Servers" + " (e.g. http://atlas-server-hostname:21000 or https://atlas-server-hostname:21443)." 
+ " For accessing Atlas behind Knox gateway, specify Knox gateway URL" + " (e.g. https://knox-hostname:8443/gateway/{topology-name}/atlas).") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); public static final PropertyDescriptor ATLAS_CONNECT_TIMEOUT = new PropertyDescriptor.Builder() .name("atlas-connect-timeout") .displayName("Atlas Connect Timeout") .description("Max wait time for connection to Atlas.") .required(true) .defaultValue("60 sec") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); public static final PropertyDescriptor ATLAS_READ_TIMEOUT = new PropertyDescriptor.Builder() .name("atlas-read-timeout") .displayName("Atlas Read Timeout") .description("Max wait time for response from Atlas.") .required(true) .defaultValue("60 sec") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); static final AllowableValue ATLAS_AUTHN_BASIC = new AllowableValue("basic", "Basic", "Use username and password."); static final AllowableValue ATLAS_AUTHN_KERBEROS = new AllowableValue("kerberos", "Kerberos", "Use Kerberos keytab file."); static final PropertyDescriptor ATLAS_AUTHN_METHOD = new PropertyDescriptor.Builder() .name("atlas-authentication-method") .displayName("Atlas Authentication Method") .description("Specify how to authenticate this reporting task to Atlas server.") .required(true) .allowableValues(ATLAS_AUTHN_BASIC, ATLAS_AUTHN_KERBEROS) .defaultValue(ATLAS_AUTHN_BASIC.getValue()) .build(); public static final PropertyDescriptor ATLAS_USER = new PropertyDescriptor.Builder() .name("atlas-username") .displayName("Atlas Username") .description("User name to communicate with Atlas.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); public static final PropertyDescriptor ATLAS_PASSWORD = new PropertyDescriptor.Builder() 
.name("atlas-password") .displayName("Atlas Password") .description("Password to communicate with Atlas.") .required(false) .sensitive(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); static final PropertyDescriptor ATLAS_CONF_DIR = new PropertyDescriptor.Builder() .name("atlas-conf-dir") .displayName("Atlas Configuration Directory") .description("Directory path that contains 'atlas-application.properties' file." + " If not specified and 'Create Atlas Configuration File' is disabled," + " then, 'atlas-application.properties' file under root classpath is used.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); public static final PropertyDescriptor ATLAS_NIFI_URL = new PropertyDescriptor.Builder() .name("atlas-nifi-url") .displayName("NiFi URL for Atlas") .description("NiFi URL is used in Atlas to represent this NiFi cluster (or standalone instance)." + " It is recommended to use one that can be accessible remotely instead of using 'localhost'.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.URL_VALIDATOR) .build(); public static final PropertyDescriptor ATLAS_DEFAULT_CLUSTER_NAME = new PropertyDescriptor.Builder() .name("atlas-default-cluster-name") .displayName("Atlas Default Cluster Name") .description("Cluster name for Atlas entities reported by this ReportingTask." + " If not specified, 'atlas.cluster.name' in Atlas Configuration File is used." + " Cluster name mappings can be configured by user defined properties." 
+ " See additional detail for detail.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); static final PropertyDescriptor ATLAS_CONF_CREATE = new PropertyDescriptor.Builder() .name("atlas-conf-create") .displayName("Create Atlas Configuration File") .description("If enabled, 'atlas-application.properties' file will be created in 'Atlas Configuration Directory'" + " automatically when this Reporting Task starts." + " Note that the existing configuration file will be overwritten.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues("true", "false") .defaultValue("false") .build(); static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder() .name("ssl-context-service") .displayName("SSL Context Service") .description("Specifies the SSL Context Service to use for communicating with Atlas and Kafka.") .required(false) .identifiesControllerService(SSLContextService.class) .build(); static final PropertyDescriptor KAFKA_BOOTSTRAP_SERVERS = new PropertyDescriptor.Builder() .name("kafka-bootstrap-servers") .displayName("Kafka Bootstrap Servers") .description("Kafka Bootstrap Servers to send Atlas hook notification messages based on NiFi provenance events." + " E.g. 
'localhost:9092'" + " NOTE: Once this reporting task has started, restarting NiFi is required to changed this property" + " as Atlas library holds a unmodifiable static reference to Kafka client.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .build(); static final AllowableValue SEC_PLAINTEXT = new AllowableValue("PLAINTEXT", "PLAINTEXT", "PLAINTEXT"); static final AllowableValue SEC_SSL = new AllowableValue("SSL", "SSL", "SSL"); static final AllowableValue SEC_SASL_PLAINTEXT = new AllowableValue("SASL_PLAINTEXT", "SASL_PLAINTEXT", "SASL_PLAINTEXT"); static final AllowableValue SEC_SASL_SSL = new AllowableValue("SASL_SSL", "SASL_SSL", "SASL_SSL"); static final PropertyDescriptor KAFKA_SECURITY_PROTOCOL = new PropertyDescriptor.Builder() .name("kafka-security-protocol") .displayName("Kafka Security Protocol") .description("Protocol used to communicate with Kafka brokers to send Atlas hook notification messages." + " Corresponds to Kafka's 'security.protocol' property.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues(SEC_PLAINTEXT, SEC_SSL, SEC_SASL_PLAINTEXT, SEC_SASL_SSL) .defaultValue(SEC_PLAINTEXT.getValue()) .build(); public static final PropertyDescriptor NIFI_KERBEROS_PRINCIPAL = new PropertyDescriptor.Builder() .name("nifi-kerberos-principal") .displayName("NiFi Kerberos Principal") .description("The Kerberos principal for this NiFi instance to access Atlas API and Kafka brokers." + " If not set, it is expected to set a JAAS configuration file in the JVM properties defined in the bootstrap.conf file." 
+ " This principal will be set into 'sasl.jaas.config' Kafka's property.") .required(false) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .build(); public static final PropertyDescriptor NIFI_KERBEROS_KEYTAB = new PropertyDescriptor.Builder() .name("nifi-kerberos-keytab") .displayName("NiFi Kerberos Keytab") .description("The Kerberos keytab for this NiFi instance to access Atlas API and Kafka brokers." + " If not set, it is expected to set a JAAS configuration file in the JVM properties defined in the bootstrap.conf file." + " This principal will be set into 'sasl.jaas.config' Kafka's property.") .required(false) .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .build(); public static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder() .name("kerberos-credentials-service") .displayName("Kerberos Credentials Service") .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos") .identifiesControllerService(KerberosCredentialsService.class) .required(false) .build(); static final PropertyDescriptor KAFKA_KERBEROS_SERVICE_NAME = new PropertyDescriptor.Builder() .name("kafka-kerberos-service-name") .displayName("Kafka Kerberos Service Name") .description("The service name that matches the primary name of the Kafka server configured in the broker JAAS file." + " This can be defined either in Kafka's JAAS config or in Kafka's config." + " Corresponds to Kafka's 'security.protocol' property." 
+ " It is ignored unless one of the SASL options of the <Security Protocol> are selected.") .required(false) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .defaultValue("kafka") .build(); static final AllowableValue LINEAGE_STRATEGY_SIMPLE_PATH = new AllowableValue("SimplePath", "Simple Path", "Map NiFi provenance events and target Atlas DataSets to statically created 'nifi_flow_path' Atlas Processes." + " See also 'Additional Details'."); static final AllowableValue LINEAGE_STRATEGY_COMPLETE_PATH = new AllowableValue("CompletePath", "Complete Path", "Create separate 'nifi_flow_path' Atlas Processes for each distinct input and output DataSet combinations" + " by looking at the complete route for a given FlowFile. See also 'Additional Details."); static final PropertyDescriptor NIFI_LINEAGE_STRATEGY = new PropertyDescriptor.Builder() .name("nifi-lineage-strategy") .displayName("NiFi Lineage Strategy") .description("Specifies granularity on how NiFi data flow should be reported to Atlas." + " NOTE: It is strongly recommended to keep using the same strategy once this reporting task started to keep Atlas data clean." + " Switching strategies will not delete Atlas entities created by the old strategy." + " Having mixed entities created by different strategies makes Atlas lineage graph noisy." 
+ " For more detailed description on each strategy and differences, refer 'NiFi Lineage Strategy' section in Additional Details.") .required(true) .allowableValues(LINEAGE_STRATEGY_SIMPLE_PATH, LINEAGE_STRATEGY_COMPLETE_PATH) .defaultValue(LINEAGE_STRATEGY_SIMPLE_PATH.getValue()) .build(); private static final String ATLAS_PROPERTIES_FILENAME = "atlas-application.properties"; private static final String ATLAS_PROPERTY_CLIENT_CONNECT_TIMEOUT_MS = "atlas.client.connectTimeoutMSecs"; private static final String ATLAS_PROPERTY_CLIENT_READ_TIMEOUT_MS = "atlas.client.readTimeoutMSecs"; private static final String ATLAS_PROPERTY_CLUSTER_NAME = "atlas.cluster.name"; private static final String ATLAS_PROPERTY_ENABLE_TLS = "atlas.enableTLS"; private static final String ATLAS_KAFKA_PREFIX = "atlas.kafka."; private static final String ATLAS_PROPERTY_KAFKA_BOOTSTRAP_SERVERS = ATLAS_KAFKA_PREFIX + "bootstrap.servers"; private static final String ATLAS_PROPERTY_KAFKA_CLIENT_ID = ATLAS_KAFKA_PREFIX + ProducerConfig.CLIENT_ID_CONFIG; private final ServiceLoader<ClusterResolver> clusterResolverLoader = ServiceLoader.load(ClusterResolver.class); private volatile AtlasAuthN atlasAuthN; private volatile Properties atlasProperties; private volatile boolean isTypeDefCreated = false; private volatile String defaultClusterName; private volatile ProvenanceEventConsumer consumer; private volatile ClusterResolvers clusterResolvers; private volatile NiFiAtlasHook nifiAtlasHook; private volatile LineageStrategy lineageStrategy; @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { final List<PropertyDescriptor> properties = new ArrayList<>(); properties.add(ATLAS_URLS); properties.add(ATLAS_CONNECT_TIMEOUT); properties.add(ATLAS_READ_TIMEOUT); properties.add(ATLAS_AUTHN_METHOD); properties.add(ATLAS_USER); properties.add(ATLAS_PASSWORD); properties.add(ATLAS_CONF_DIR); properties.add(ATLAS_NIFI_URL); properties.add(ATLAS_DEFAULT_CLUSTER_NAME); 
properties.add(NIFI_LINEAGE_STRATEGY); properties.add(PROVENANCE_START_POSITION); properties.add(PROVENANCE_BATCH_SIZE); properties.add(SSL_CONTEXT_SERVICE); // Following properties are required if ATLAS_CONF_CREATE is enabled. // Otherwise should be left blank. properties.add(ATLAS_CONF_CREATE); properties.add(KERBEROS_CREDENTIALS_SERVICE); properties.add(NIFI_KERBEROS_PRINCIPAL); properties.add(NIFI_KERBEROS_KEYTAB); properties.add(KAFKA_KERBEROS_SERVICE_NAME); properties.add(KAFKA_BOOTSTRAP_SERVERS); properties.add(KAFKA_SECURITY_PROTOCOL); return properties; } @Override protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String propertyDescriptorName) { for (ClusterResolver resolver : clusterResolverLoader) { final PropertyDescriptor propertyDescriptor = resolver.getSupportedDynamicPropertyDescriptor(propertyDescriptorName); if(propertyDescriptor != null) { return propertyDescriptor; } } return null; } private void parseAtlasUrls(final PropertyValue atlasUrlsProp, final Consumer<String> urlStrConsumer) { final String atlasUrlsStr = atlasUrlsProp.evaluateAttributeExpressions().getValue(); if (atlasUrlsStr != null && !atlasUrlsStr.isEmpty()) { Arrays.stream(atlasUrlsStr.split(",")) .map(String::trim) .forEach(urlStrConsumer); } } @Override protected Collection<ValidationResult> customValidate(ValidationContext context) { final Collection<ValidationResult> results = new ArrayList<>(); final boolean isSSLContextServiceSet = context.getProperty(SSL_CONTEXT_SERVICE).isSet(); final ValidationResult.Builder invalidSSLService = new ValidationResult.Builder() .subject(SSL_CONTEXT_SERVICE.getDisplayName()).valid(false); parseAtlasUrls(context.getProperty(ATLAS_URLS), input -> { final ValidationResult.Builder builder = new ValidationResult.Builder().subject(ATLAS_URLS.getDisplayName()).input(input); try { final URL url = new URL(input); if ("https".equalsIgnoreCase(url.getProtocol()) && !isSSLContextServiceSet) { 
results.add(invalidSSLService.explanation("required by HTTPS Atlas access").build()); } else { results.add(builder.explanation("Valid URI").valid(true).build()); } } catch (Exception e) { results.add(builder.explanation("Contains invalid URI: " + e).valid(false).build()); } }); final String atlasAuthNMethod = context.getProperty(ATLAS_AUTHN_METHOD).getValue(); final AtlasAuthN atlasAuthN = getAtlasAuthN(atlasAuthNMethod); results.addAll(atlasAuthN.validate(context)); clusterResolverLoader.forEach(resolver -> results.addAll(resolver.validate(context))); if (context.getProperty(ATLAS_CONF_CREATE).asBoolean()) { Stream.of(ATLAS_CONF_DIR, ATLAS_DEFAULT_CLUSTER_NAME, KAFKA_BOOTSTRAP_SERVERS) .filter(p -> !context.getProperty(p).isSet()) .forEach(p -> results.add(new ValidationResult.Builder() .subject(p.getDisplayName()) .explanation("required to create Atlas configuration file.") .valid(false).build())); validateKafkaProperties(context, results, isSSLContextServiceSet, invalidSSLService); } return results; } private void validateKafkaProperties(ValidationContext context, Collection<ValidationResult> results, boolean isSSLContextServiceSet, ValidationResult.Builder invalidSSLService) { final String kafkaSecurityProtocol = context.getProperty(KAFKA_SECURITY_PROTOCOL).getValue(); if ((SEC_SSL.equals(kafkaSecurityProtocol) || SEC_SASL_SSL.equals(kafkaSecurityProtocol)) && !isSSLContextServiceSet) { results.add(invalidSSLService.explanation("required by SSL Kafka connection").build()); } final String explicitPrincipal = context.getProperty(NIFI_KERBEROS_PRINCIPAL).evaluateAttributeExpressions().getValue(); final String explicitKeytab = context.getProperty(NIFI_KERBEROS_KEYTAB).evaluateAttributeExpressions().getValue(); final KerberosCredentialsService credentialsService = context.getProperty(ReportLineageToAtlas.KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class); String principal; String keytab; if (credentialsService == null) { principal = 
explicitPrincipal; keytab = explicitKeytab; } else { principal = credentialsService.getPrincipal(); keytab = credentialsService.getKeytab(); } if (SEC_SASL_PLAINTEXT.equals(kafkaSecurityProtocol) || SEC_SASL_SSL.equals(kafkaSecurityProtocol)) { if (!context.getProperty(KAFKA_KERBEROS_SERVICE_NAME).isSet()) { results.add(new ValidationResult.Builder() .subject(KAFKA_KERBEROS_SERVICE_NAME.getDisplayName()) .explanation("Required by Kafka SASL authentication.") .valid(false) .build()); } if (keytab == null || principal == null) { results.add(new ValidationResult.Builder() .subject("Kerberos Authentication") .explanation("Keytab and Principal are required for Kerberos authentication with Apache Kafka.") .valid(false) .build()); } } } @OnScheduled public void setup(ConfigurationContext context) throws IOException { // initAtlasClient has to be done first as it loads AtlasProperty. initAtlasProperties(context); initLineageStrategy(context); initClusterResolvers(context); } private void initLineageStrategy(ConfigurationContext context) throws IOException { nifiAtlasHook = new NiFiAtlasHook(); final String strategy = context.getProperty(NIFI_LINEAGE_STRATEGY).getValue(); if (LINEAGE_STRATEGY_SIMPLE_PATH.equals(strategy)) { lineageStrategy = new SimpleFlowPathLineage(); } else if (LINEAGE_STRATEGY_COMPLETE_PATH.equals(strategy)) { lineageStrategy = new CompleteFlowPathLineage(); } lineageStrategy.setLineageContext(nifiAtlasHook); initProvenanceConsumer(context); } private void initClusterResolvers(ConfigurationContext context) { final Set<ClusterResolver> loadedClusterResolvers = new LinkedHashSet<>(); clusterResolverLoader.forEach(resolver -> { resolver.configure(context); loadedClusterResolvers.add(resolver); }); clusterResolvers = new ClusterResolvers(Collections.unmodifiableSet(loadedClusterResolvers), defaultClusterName); } private void initAtlasProperties(ConfigurationContext context) throws IOException { List<String> urls = new ArrayList<>(); 
parseAtlasUrls(context.getProperty(ATLAS_URLS), urls::add); final boolean isAtlasApiSecure = urls.stream().anyMatch(url -> url.toLowerCase().startsWith("https")); final String atlasAuthNMethod = context.getProperty(ATLAS_AUTHN_METHOD).getValue(); final String confDirStr = context.getProperty(ATLAS_CONF_DIR).evaluateAttributeExpressions().getValue(); final File confDir = confDirStr != null && !confDirStr.isEmpty() ? new File(confDirStr) : null; atlasProperties = new Properties(); final File atlasPropertiesFile = new File(confDir, ATLAS_PROPERTIES_FILENAME); final Boolean createAtlasConf = context.getProperty(ATLAS_CONF_CREATE).asBoolean(); if (!createAtlasConf) { // Load existing properties file. if (atlasPropertiesFile.isFile()) { getLogger().info("Loading {}", new Object[]{atlasPropertiesFile}); try (InputStream in = new FileInputStream(atlasPropertiesFile)) { atlasProperties.load(in); } } else { final String fileInClasspath = "/" + ATLAS_PROPERTIES_FILENAME; try (InputStream in = ReportLineageToAtlas.class.getResourceAsStream(fileInClasspath)) { getLogger().info("Loading {} from classpath", new Object[]{fileInClasspath}); if (in == null) { throw new ProcessException(String.format("Could not find %s in classpath." + " Please add it to classpath," + " or specify %s a directory containing Atlas properties file," + " or enable %s to generate it.", fileInClasspath, ATLAS_CONF_DIR.getDisplayName(), ATLAS_CONF_CREATE.getDisplayName())); } atlasProperties.load(in); } } } // Resolve default cluster name. defaultClusterName = context.getProperty(ATLAS_DEFAULT_CLUSTER_NAME).evaluateAttributeExpressions().getValue(); if (defaultClusterName == null || defaultClusterName.isEmpty()) { // If default cluster name is not specified by processor configuration, then load it from Atlas config. defaultClusterName = atlasProperties.getProperty(ATLAS_PROPERTY_CLUSTER_NAME); } // If default cluster name is still not defined, processor should not be able to start. 
if (defaultClusterName == null || defaultClusterName.isEmpty()) { throw new ProcessException("Default cluster name is not defined."); } String atlasConnectTimeoutMs = context.getProperty(ATLAS_CONNECT_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue() + ""; String atlasReadTimeoutMs = context.getProperty(ATLAS_READ_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue() + ""; atlasAuthN = getAtlasAuthN(atlasAuthNMethod); atlasAuthN.configure(context); // Create Atlas configuration file if necessary. if (createAtlasConf) { atlasProperties.put(ATLAS_PROPERTY_CLIENT_CONNECT_TIMEOUT_MS, atlasConnectTimeoutMs); atlasProperties.put(ATLAS_PROPERTY_CLIENT_READ_TIMEOUT_MS, atlasReadTimeoutMs); atlasProperties.put(ATLAS_PROPERTY_CLUSTER_NAME, defaultClusterName); atlasProperties.put(ATLAS_PROPERTY_ENABLE_TLS, String.valueOf(isAtlasApiSecure)); setKafkaConfig(atlasProperties, context); atlasAuthN.populateProperties(atlasProperties); try (FileOutputStream fos = new FileOutputStream(atlasPropertiesFile)) { String ts = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX") .withZone(ZoneOffset.UTC) .format(Instant.now()); atlasProperties.store(fos, "Generated by Apache NiFi ReportLineageToAtlas ReportingTask at " + ts); } } getLogger().debug("Force reloading Atlas application properties."); ApplicationProperties.forceReload(); if (confDir != null) { // If atlasConfDir is not set, atlas-application.properties will be searched under classpath. Properties props = System.getProperties(); final String atlasConfProp = "atlas.conf"; props.setProperty(atlasConfProp, confDir.getAbsolutePath()); getLogger().debug("{} has been set to: {}", new Object[]{atlasConfProp, props.getProperty(atlasConfProp)}); } } /** * In order to avoid authentication expiration issues (i.e. Kerberos ticket and DelegationToken expiration), * create Atlas client instance at every onTrigger execution. 
*/ protected NiFiAtlasClient createNiFiAtlasClient(ReportingContext context) { List<String> urls = new ArrayList<>(); parseAtlasUrls(context.getProperty(ATLAS_URLS), urls::add); try { return new NiFiAtlasClient(atlasAuthN.createClient(urls.toArray(new String[]{}))); } catch (final NullPointerException e) { throw new ProcessException(String.format("Failed to initialize Atlas client due to %s." + " Make sure 'atlas-application.properties' is in the directory specified with %s" + " or under root classpath if not specified.", e, ATLAS_CONF_DIR.getDisplayName()), e); } } private AtlasAuthN getAtlasAuthN(String atlasAuthNMethod) { final AtlasAuthN atlasAuthN; switch (atlasAuthNMethod) { case "basic" : atlasAuthN = new Basic(); break; case "kerberos" : atlasAuthN = new Kerberos(); break; default: throw new IllegalArgumentException(atlasAuthNMethod + " is not supported as an Atlas authentication method."); } return atlasAuthN; } private void initProvenanceConsumer(final ConfigurationContext context) throws IOException { consumer = new ProvenanceEventConsumer(); consumer.setStartPositionValue(context.getProperty(PROVENANCE_START_POSITION).getValue()); consumer.setBatchSize(context.getProperty(PROVENANCE_BATCH_SIZE).asInteger()); consumer.addTargetEventType(lineageStrategy.getTargetEventTypes()); consumer.setLogger(getLogger()); consumer.setScheduled(true); } @OnUnscheduled public void onUnscheduled() { if (consumer != null) { // Tell provenance consumer to stop pulling more provenance events. // This should be called from @OnUnscheduled to stop the loop in the thread called from onTrigger. 
consumer.setScheduled(false); } } @OnStopped public void onStopped() { if (nifiAtlasHook != null) { nifiAtlasHook.close(); nifiAtlasHook = null; } } @Override public void onTrigger(ReportingContext context) { final String clusterNodeId = context.getClusterNodeIdentifier(); final boolean isClustered = context.isClustered(); if (isClustered && isEmpty(clusterNodeId)) { // Clustered, but this node's ID is unknown. Not ready for processing yet. return; } // If standalone or being primary node in a NiFi cluster, this node is responsible for doing primary tasks. final boolean isResponsibleForPrimaryTasks = !isClustered || getNodeTypeProvider().isPrimary(); final NiFiAtlasClient atlasClient = createNiFiAtlasClient(context); // Create Entity defs in Atlas if there's none yet. if (!isTypeDefCreated) { try { if (isResponsibleForPrimaryTasks) { // Create NiFi type definitions in Atlas type system. atlasClient.registerNiFiTypeDefs(false); } else { // Otherwise, just check existence of NiFi type definitions. if (!atlasClient.isNiFiTypeDefsRegistered()) { getLogger().debug("NiFi type definitions are not ready in Atlas type system yet."); return; } } isTypeDefCreated = true; } catch (AtlasServiceException e) { throw new RuntimeException("Failed to check and create NiFi flow type definitions in Atlas due to " + e, e); } } // Regardless of whether being a primary task node, each node has to analyse NiFiFlow. // Assuming each node has the same flow definition, that is guaranteed by NiFi cluster management mechanism. final NiFiFlow nifiFlow = createNiFiFlow(context, atlasClient); if (isResponsibleForPrimaryTasks) { try { atlasClient.registerNiFiFlow(nifiFlow); } catch (AtlasServiceException e) { throw new RuntimeException("Failed to register NiFI flow. " + e, e); } } // NOTE: There is a race condition between the primary node and other nodes. 
// If a node notifies an event related to a NiFi component which is not yet created by NiFi primary node, // then the notification message will fail due to having a reference to a non-existing entity. nifiAtlasHook.setAtlasClient(atlasClient); consumeNiFiProvenanceEvents(context, nifiFlow); } private NiFiFlow createNiFiFlow(ReportingContext context, NiFiAtlasClient atlasClient) { final ProcessGroupStatus rootProcessGroup = context.getEventAccess().getGroupStatus("root"); final String flowName = rootProcessGroup.getName(); final String nifiUrl = context.getProperty(ATLAS_NIFI_URL).evaluateAttributeExpressions().getValue(); final String clusterName; try { final String nifiHostName = new URL(nifiUrl).getHost(); clusterName = clusterResolvers.fromHostNames(nifiHostName); } catch (MalformedURLException e) { throw new IllegalArgumentException("Failed to parse NiFi URL, " + e.getMessage(), e); } NiFiFlow existingNiFiFlow = null; try { // Retrieve Existing NiFiFlow from Atlas. existingNiFiFlow = atlasClient.fetchNiFiFlow(rootProcessGroup.getId(), clusterName); } catch (AtlasServiceException e) { if (ClientResponse.Status.NOT_FOUND.equals(e.getStatus())){ getLogger().debug("Existing flow was not found for {}@{}", new Object[]{rootProcessGroup.getId(), clusterName}); } else { throw new RuntimeException("Failed to fetch existing NiFI flow. " + e, e); } } final NiFiFlow nifiFlow = existingNiFiFlow != null ? 
existingNiFiFlow : new NiFiFlow(rootProcessGroup.getId()); nifiFlow.setFlowName(flowName); nifiFlow.setUrl(nifiUrl); nifiFlow.setClusterName(clusterName); final NiFiFlowAnalyzer flowAnalyzer = new NiFiFlowAnalyzer(); flowAnalyzer.analyzeProcessGroup(nifiFlow, rootProcessGroup); flowAnalyzer.analyzePaths(nifiFlow); return nifiFlow; } private void consumeNiFiProvenanceEvents(ReportingContext context, NiFiFlow nifiFlow) { final EventAccess eventAccess = context.getEventAccess(); final AnalysisContext analysisContext = new StandardAnalysisContext(nifiFlow, clusterResolvers, // FIXME: This class cast shouldn't be necessary to query lineage. Possible refactor target in next major update. (ProvenanceRepository)eventAccess.getProvenanceRepository()); consumer.consumeEvents(context, (componentMapHolder, events) -> { for (ProvenanceEventRecord event : events) { try { lineageStrategy.processEvent(analysisContext, nifiFlow, event); } catch (Exception e) { // If something went wrong, log it and continue with other records. 
getLogger().error("Skipping failed analyzing event {} due to {}.", new Object[]{event, e, e}); } } nifiAtlasHook.commitMessages(); }); } private void setKafkaConfig(Map<Object, Object> mapToPopulate, PropertyContext context) { final String kafkaBootStrapServers = context.getProperty(KAFKA_BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue(); mapToPopulate.put(ATLAS_PROPERTY_KAFKA_BOOTSTRAP_SERVERS, kafkaBootStrapServers); mapToPopulate.put(ATLAS_PROPERTY_KAFKA_CLIENT_ID, String.format("%s.%s", getName(), getIdentifier())); final String kafkaSecurityProtocol = context.getProperty(KAFKA_SECURITY_PROTOCOL).getValue(); mapToPopulate.put(ATLAS_KAFKA_PREFIX + "security.protocol", kafkaSecurityProtocol); // Translate SSLContext Service configuration into Kafka properties final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class); if (sslContextService != null && sslContextService.isKeyStoreConfigured()) { mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, sslContextService.getKeyStoreFile()); mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, sslContextService.getKeyStorePassword()); final String keyPass = sslContextService.getKeyPassword() == null ? 
sslContextService.getKeyStorePassword() : sslContextService.getKeyPassword(); mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPass); mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, sslContextService.getKeyStoreType()); } if (sslContextService != null && sslContextService.isTrustStoreConfigured()) { mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, sslContextService.getTrustStoreFile()); mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, sslContextService.getTrustStorePassword()); mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, sslContextService.getTrustStoreType()); } if (SEC_SASL_PLAINTEXT.equals(kafkaSecurityProtocol) || SEC_SASL_SSL.equals(kafkaSecurityProtocol)) { setKafkaJaasConfig(mapToPopulate, context); } } /** * Populate Kafka JAAS properties for Atlas notification. * Since Atlas 0.8.1 uses Kafka client 0.10.0.0, we can not use 'sasl.jaas.config' property * as it is available since 0.10.2, implemented by KAFKA-4259. * Instead, this method uses old property names. 
* @param mapToPopulate Map of configuration properties * @param context Context */ private void setKafkaJaasConfig(Map<Object, Object> mapToPopulate, PropertyContext context) { String keytab; String principal; final String explicitPrincipal = context.getProperty(NIFI_KERBEROS_PRINCIPAL).evaluateAttributeExpressions().getValue(); final String explicitKeytab = context.getProperty(NIFI_KERBEROS_KEYTAB).evaluateAttributeExpressions().getValue(); final KerberosCredentialsService credentialsService = context.getProperty(ReportLineageToAtlas.KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class); if (credentialsService == null) { principal = explicitPrincipal; keytab = explicitKeytab; } else { principal = credentialsService.getPrincipal(); keytab = credentialsService.getKeytab(); } String serviceName = context.getProperty(KAFKA_KERBEROS_SERVICE_NAME).evaluateAttributeExpressions().getValue(); if(StringUtils.isNotBlank(keytab) && StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(serviceName)) { mapToPopulate.put("atlas.jaas.KafkaClient.loginModuleControlFlag", "required"); mapToPopulate.put("atlas.jaas.KafkaClient.loginModuleName", "com.sun.security.auth.module.Krb5LoginModule"); mapToPopulate.put("atlas.jaas.KafkaClient.option.keyTab", keytab); mapToPopulate.put("atlas.jaas.KafkaClient.option.principal", principal); mapToPopulate.put("atlas.jaas.KafkaClient.option.serviceName", serviceName); mapToPopulate.put("atlas.jaas.KafkaClient.option.storeKey", "True"); mapToPopulate.put("atlas.jaas.KafkaClient.option.useKeyTab", "True"); mapToPopulate.put("atlas.jaas.ticketBased-KafkaClient.loginModuleControlFlag", "required"); mapToPopulate.put("atlas.jaas.ticketBased-KafkaClient.loginModuleName", "com.sun.security.auth.module.Krb5LoginModule"); mapToPopulate.put("atlas.jaas.ticketBased-KafkaClient.option.useTicketCache", "true"); mapToPopulate.put(ATLAS_KAFKA_PREFIX + "sasl.kerberos.service.name", serviceName); } } }
apache-2.0
PublicHealthEngland/animal-welfare-assessment-grid
code/server/src/main/java/uk/gov/phe/erdst/sc/awag/businesslogic/StudyController.java
14707
package uk.gov.phe.erdst.sc.awag.businesslogic;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import uk.gov.phe.erdst.sc.awag.dao.ImportHeaderDao;
import uk.gov.phe.erdst.sc.awag.dao.StudyDao;
import uk.gov.phe.erdst.sc.awag.dao.StudyGroupDao;
import uk.gov.phe.erdst.sc.awag.datamodel.Animal;
import uk.gov.phe.erdst.sc.awag.datamodel.ImportHeader;
import uk.gov.phe.erdst.sc.awag.datamodel.ImportStudy;
import uk.gov.phe.erdst.sc.awag.datamodel.Study;
import uk.gov.phe.erdst.sc.awag.exceptions.AWInputValidationException;
import uk.gov.phe.erdst.sc.awag.exceptions.AWMultipleResultException;
import uk.gov.phe.erdst.sc.awag.exceptions.AWNoSuchEntityException;
import uk.gov.phe.erdst.sc.awag.exceptions.AWNonUniqueException;
import uk.gov.phe.erdst.sc.awag.exceptions.AWSeriousException;
import uk.gov.phe.erdst.sc.awag.service.ImportHeaderFactory;
import uk.gov.phe.erdst.sc.awag.service.activitylogging.LoggedActions;
import uk.gov.phe.erdst.sc.awag.service.activitylogging.LoggedActivity;
import uk.gov.phe.erdst.sc.awag.service.activitylogging.LoggedUser;
import uk.gov.phe.erdst.sc.awag.service.factory.study.ImportStudyFactory;
import uk.gov.phe.erdst.sc.awag.service.factory.study.StudyDtoFactory;
import uk.gov.phe.erdst.sc.awag.service.factory.study.StudyFactory;
import uk.gov.phe.erdst.sc.awag.service.validation.utils.ValidationConstants;
import uk.gov.phe.erdst.sc.awag.service.validation.utils.ValidatorUtils;
import uk.gov.phe.erdst.sc.awag.utils.Constants;
import uk.gov.phe.erdst.sc.awag.utils.UploadUtils;
import uk.gov.phe.erdst.sc.awag.webapi.request.LikeFilterParam;
import uk.gov.phe.erdst.sc.awag.webapi.request.PagingQueryParams;
import uk.gov.phe.erdst.sc.awag.webapi.request.StudyClientData;
import uk.gov.phe.erdst.sc.awag.webapi.response.EntityCreateResponseDto;
import uk.gov.phe.erdst.sc.awag.webapi.response.ResponseDto;
import uk.gov.phe.erdst.sc.awag.webapi.response.UploadResponseDto;
import uk.gov.phe.erdst.sc.awag.webapi.response.paged.ResponsePager;
import uk.gov.phe.erdst.sc.awag.webapi.response.study.StudiesDto;
import uk.gov.phe.erdst.sc.awag.webapi.response.study.StudyDto;
import uk.gov.phe.erdst.sc.awag.webapi.response.study.StudySimpleDto;

/**
 * Stateless EJB holding the business logic for Study entities: create, update,
 * paged/filtered retrieval, lookup by animal, and CSV bulk upload.
 *
 * <p>Bean Validation ({@link Validator}) is applied to client payloads before any
 * persistence call; on violation the request is rejected with
 * {@link AWInputValidationException}. Methods annotated with
 * {@link LoggedActivity} are recorded by the activity-logging interceptor.</p>
 *
 * <p>NOTE(review): private field naming mixes an {@code m}-prefix convention
 * ({@code mValidator}, {@code mStudyDtoFactory}) with plain camelCase
 * ({@code studyDao}, {@code studyFactory}) — presumably a partial migration;
 * left untouched here.</p>
 */
@Stateless
public class StudyController
{
    private static final Logger LOGGER = LogManager.getLogger(StudyController.class.getName());

    // Collaborators are container-injected; this bean holds no conversational state.
    @Inject
    private AnimalController mAnimalController;
    @Inject
    private StudyDao studyDao;
    @Inject
    private Validator mValidator;
    @Inject
    private StudyFactory studyFactory;
    @Inject
    private StudyDtoFactory mStudyDtoFactory;
    @Inject
    private StudyGroupDao mStudyGroupDao;
    @Inject
    private ImportHeaderFactory importHeaderFactory;
    @Inject
    private ImportStudyFactory importStudyFactory;
    @Inject
    private ImportHeaderDao importHeaderDao;

    /**
     * Validates and persists a new study.
     *
     * @param clientData study payload from the web API
     * @param loggedUser acting user, consumed by the activity-logging interceptor
     * @return the new entity's id and study number
     * @throws AWInputValidationException if the payload violates Bean Validation constraints
     * @throws AWNonUniqueException if the study number already exists
     */
    @LoggedActivity(actionName = LoggedActions.CREATE_STUDY)
    public EntityCreateResponseDto createStudy(StudyClientData clientData, LoggedUser loggedUser)
        throws AWInputValidationException, AWNonUniqueException, AWSeriousException
    {
        Set<ConstraintViolation<StudyClientData>> constraintViolations = mValidator.validate(clientData);

        if (constraintViolations.isEmpty())
        {
            EntityCreateResponseDto response = new EntityCreateResponseDto();

            Study study = studyFactory.create(clientData);
            study = studyDao.store(study);

            response.id = study.getId();
            response.value = study.getStudyNumber();

            return response;
        }
        else
        {
            // Note: the violation Set is wrapped as a single-element list here
            // (Arrays.asList of the Set itself) — presumably what
            // throwInputValidationExceptionWith expects; verify against its signature.
            ValidatorUtils.throwInputValidationExceptionWith(Arrays.asList(constraintViolations));
            return null; // unreachable: the call above always throws
        }
    }

    /**
     * Validates and applies an update to an existing study.
     *
     * @param studyId id from the URL path; must match {@code clientData.studyId}
     * @param clientData updated study payload
     * @param loggedUser acting user, consumed by the activity-logging interceptor
     * @return DTO of the updated study
     * @throws AWInputValidationException on constraint violations or path/body id mismatch
     * @throws AWNoSuchEntityException if no study exists with {@code studyId}
     * @throws AWNonUniqueException if the update would duplicate a study number
     */
    @LoggedActivity(actionName = LoggedActions.UPDATE_STUDY)
    public StudyDto updateStudy(Long studyId, StudyClientData clientData, LoggedUser loggedUser)
        throws AWInputValidationException, AWNoSuchEntityException, AWNonUniqueException, AWSeriousException
    {
        Set<ConstraintViolation<StudyClientData>> constraintViolations = mValidator.validate(clientData);

        if (constraintViolations.isEmpty())
        {
            // Guard against the path id and the payload id disagreeing.
            ValidatorUtils.validateUpdateId(clientData.studyId, studyId, ValidationConstants.UPDATE_ID_MISMATCH);

            Study study = studyDao.getEntityById(studyId);
            studyFactory.update(study, clientData);
            study = studyDao.update(study);

            return mStudyDtoFactory.createStudyDto(study);
        }
        else
        {
            ValidatorUtils.throwInputValidationExceptionWith(Arrays.asList(constraintViolations));
            return null; // unreachable: the call above always throws
        }
    }

    /**
     * Returns all studies, optionally paged.
     *
     * <p>Paging parameters are only validated (and applied) when both are set;
     * otherwise the full, unpaged collection is returned and no paging info is
     * attached to the response.</p>
     *
     * @param pagingParams optional offset/limit pair
     * @throws AWInputValidationException if set paging parameters are invalid
     */
    public StudiesDto getAllStudies(PagingQueryParams pagingParams) throws AWInputValidationException
    {
        Set<ConstraintViolation<PagingQueryParams>> pagingParamsViolations = new HashSet<>(0);
        boolean isPagingParamsSet = pagingParams.isParamsSet();

        if (isPagingParamsSet)
        {
            pagingParamsViolations = mValidator.validate(pagingParams);
        }

        if (pagingParamsViolations.isEmpty())
        {
            // null offset/limit means "no paging" at the DAO level.
            Integer offset = isPagingParamsSet ? pagingParams.offset : null;
            Integer limit = isPagingParamsSet ? pagingParams.limit : null;

            Collection<Study> studies = studyDao.getEntities(offset, limit);

            StudiesDto response = new StudiesDto();
            response.studies = mStudyDtoFactory.createStudyDtos(studies);

            if (pagingParams.isParamsSet())
            {
                Long studiesCount = studyDao.getEntityCount();
                response.pagingInfo = ResponsePager.getPagingInfo(pagingParams, studiesCount);
            }

            return response;
        }
        else
        {
            ValidatorUtils.throwInputValidationExceptionWith(Arrays.asList(pagingParamsViolations));
            return null; // unreachable: the call above always throws
        }
    }

    /**
     * Returns studies whose name matches a LIKE filter, optionally paged.
     *
     * @param likeFilterParam substring filter (always validated)
     * @param pagingParams optional offset/limit pair (validated only when set)
     * @throws AWInputValidationException if the filter or set paging parameters are invalid
     */
    public StudiesDto getStudiesLike(LikeFilterParam likeFilterParam, PagingQueryParams pagingParams)
        throws AWInputValidationException
    {
        Set<ConstraintViolation<PagingQueryParams>> pagingParamsViolations = new HashSet<>(0);
        Set<ConstraintViolation<LikeFilterParam>> likeParamViolations = mValidator.validate(likeFilterParam);
        boolean isPagingParamsSet = pagingParams.isParamsSet();

        if (isPagingParamsSet)
        {
            pagingParamsViolations = mValidator.validate(pagingParams);
        }

        if (pagingParamsViolations.isEmpty() && likeParamViolations.isEmpty())
        {
            Integer offset = isPagingParamsSet ? pagingParams.offset : null;
            Integer limit = isPagingParamsSet ? pagingParams.limit : null;

            Collection<Study> studies = studyDao.getEntitiesLike(likeFilterParam.value, offset, limit);

            StudiesDto response = new StudiesDto();
            response.studies = mStudyDtoFactory.createStudyDtos(studies);

            if (pagingParams.isParamsSet())
            {
                // Count is filtered with the same LIKE value so page math stays consistent.
                Long studiesCount = studyDao.getEntityCountLike(likeFilterParam.value);
                response.pagingInfo = ResponsePager.getPagingInfo(pagingParams, studiesCount);
            }

            return response;
        }
        else
        {
            ValidatorUtils
                .throwInputValidationExceptionWith(Arrays.asList(pagingParamsViolations, likeParamViolations));
            return null; // unreachable: the call above always throws
        }
    }

    /**
     * Fetches a single study by id.
     *
     * @throws AWNoSuchEntityException if no study exists with that id
     * @throws AWInputValidationException if the id fails basic validation
     */
    public StudyDto getStudyById(Long studyId) throws AWNoSuchEntityException, AWInputValidationException
    {
        ValidatorUtils.validateEntityId(studyId);
        Study study = studyDao.getEntityById(studyId);
        return mStudyDtoFactory.createStudyDto(study);
    }

    /**
     * Finds the study an animal belongs to.
     *
     * <p>Multiple matching studies indicate an inconsistent database, so
     * {@link AWMultipleResultException} is logged and rethrown wrapped in
     * {@link AWSeriousException}.</p>
     */
    public StudySimpleDto getStudyWithAnimal(Long animalId)
        throws AWNoSuchEntityException, AWSeriousException, AWInputValidationException
    {
        ValidatorUtils.validateEntityId(animalId);
        try
        {
            Study study = getStudyWithAnimalNonApiMethod(animalId);
            return mStudyDtoFactory.createStudySimpleDto(study);
        }
        catch (AWMultipleResultException e)
        {
            LOGGER.error(e);
            throw new AWSeriousException(e);
        }
    }

    /**
     * Internal (non-REST) variant returning the entity rather than a DTO.
     *
     * @see uk.gov.phe.erdst.sc.awag.dao.StudyDao.getStudyWithAnimal
     */
    public Study getStudyWithAnimalNonApiMethod(Long animalId)
        throws AWNoSuchEntityException, AWMultipleResultException
    {
        Animal animal = mAnimalController.getAnimalNonApiMethod(animalId);
        return studyDao.getStudyWithAnimal(animal);
    }

    /**
     * Parses an uploaded CSV of studies and stages each row into the
     * import tables (ImportHeader/ImportStudy) for later processing.
     *
     * <p>Note that this method only stages the rows — the commented-out calls
     * below show the intended follow-up (materialise studies then delete the
     * import header), which per the TODO is to be driven by the client.</p>
     *
     * @param uploadFile raw CSV stream
     * @throws AWInputValidationException on malformed CSV or non-unique data
     */
    @LoggedActivity(actionName = LoggedActions.UPLOAD_STUDY)
    public ResponseDto uploadStudy(InputStream uploadFile, LoggedUser loggedUser) throws AWInputValidationException
    {
        try
        {
            final ArrayList<String[]> csvLinesData = UploadUtils.retrieveCSVLines(uploadFile,
                Constants.Upload.UPLOAD_HEADER_STUDY_COLUMNS);

            ImportHeader importHeader = importHeaderFactory.createWithImportStudies(loggedUser);
            for (String[] uploadCSVLineData : csvLinesData)
            {
                ImportStudy importStudy = importStudyFactory.create(uploadCSVLineData);
                importHeader.addImportStudy(importStudy);
            }
            importHeader = importHeaderDao.store(importHeader);

            /*
            uploadScalesFromImport(importHeader);
            // TODO this code to be moved in the client (test for now)
            ImportHeader importHeader = uploadToImportUploadStudyTables(csvLines, loggedUser);
            upload(importHeader);
            importHeaderDao.realDelete(importHeader.getImportheaderid());
            */
        }
        /*
        catch (AWNoSuchEntityException ex)
        {
            throw new AWInputValidationException(ex.getMessage());
        }
        */
        catch (AWNonUniqueException ex)
        {
            throw new AWInputValidationException(ex.getMessage());
        }
        catch (IOException ex)
        {
            throw new AWInputValidationException(Constants.Upload.ERR_IMPORT_INVALID_FORMAT_ABORT);
        }
        return new UploadResponseDto();
    }

    /**
     * Materialises staged import rows into real Study entities, then removes
     * the processed import header.
     *
     * <p>NOTE(review): currently has no caller in this class — presumably the
     * second phase of the upload flow referenced by the TODO in
     * {@link #uploadStudy}.</p>
     */
    // TODO convert for REST API
    private void uploadStudiesFromImport(ImportHeader importHeader) throws AWNonUniqueException
    {
        Collection<Study> studies = new ArrayList<>();
        for (ImportStudy importStudy : importHeader.getImportStudies())
        {
            Study study = studyFactory.create(importStudy);
            studies.add(study);
        }
        studyDao.upload(studies);
        importHeaderDao.realDelete(importHeader.getImportheaderid());
    }

    /*
    Commented-out legacy implementation of the two-phase CSV import kept for
    reference while the flow is migrated to the REST API (see TODOs above).

    private ImportHeader uploadToImportUploadStudyTables(ArrayList<String[]> csvLines, LoggedUser loggedUser)
        throws AWInputValidationException, AWNonUniqueException
    {
        ImportHeader importHeader = new ImportHeader();
        Set<ImportStudy> importStudies = new HashSet<>();
        importHeader.setUserName(loggedUser.username);
        importHeader.setDateImport(new java.util.Date());
        importHeader.setImportStudies(importStudies);

        for (String[] csvLine : csvLines)
        {
            final String studyNumber = csvLine[0];
            final Boolean isStudyOpen = UploadUtils.convertToBoolean(csvLine[2]);
            final String studyGroupNumbersList = csvLine[2];
            final List<String> studyGroupNumbers = Arrays.asList(studyGroupNumbersList.split(","));
            final Long studyId = getStudyGroup(studyNumber);

            ImportStudy importStudy = new ImportStudy();
            importStudy.setStudynumber(studyNumber);
            importStudy.setStudynumberid(studyId);
            importStudy.setIsstudyopen(isStudyOpen);
            importStudy.setStudystudygroupnumbers(studyGroupNumbersList);
            importStudy.setImportStudyStudyGroups(new HashSet<ImportStudyStudyGroup>());

            for (String studyGroup : studyGroupNumbers)
            {
                final String trimmedStudyGroup = studyGroup.trim();
                final Long studyGroupId = getStudyGroup(trimmedStudyGroup);

                // TODO duplicates in the table
                ImportStudyStudyGroup importStudyStudyGroup = new ImportStudyStudyGroup();
                importStudyStudyGroup.setStudystudygroupnumber(trimmedStudyGroup);
                importStudyStudyGroup.setStudystudygroupnumberid(studyGroupId);
                importStudy.addImportStudyStudyGroup(importStudyStudyGroup);
            }

            importHeader.addImportStudy(importStudy);
        }

        importHeader = mImportHeaderDao.store(importHeader);
        return importHeader;
    }

    // TODO validate existing
    // TODO Collection not Set
    private void upload(ImportHeader importHeader) throws AWNoSuchEntityException, AWNonUniqueException
    {
        List<Study> studies = new ArrayList<>();
        for (ImportStudy importStudy : importHeader.getImportStudies())
        {
            Study study = new Study();
            study.setStudyNumber(importStudy.getStudynumber());

            Set<StudyGroup> studyGroups = new HashSet<>();
            for (ImportStudyStudyGroup importStudyStudyGroup : importStudy.getImportStudyStudyGroups())
            {
                if (importStudyStudyGroup.getStudystudygroupnumberid() != null)
                {
                    StudyGroup studyGroup = mStudyGroupDao
                        .getStudyGroup(importStudyStudyGroup.getStudystudygroupnumberid());
                    studyGroups.add(studyGroup);
                }
            }
            study.setStudyGroups(studyGroups);
            studies.add(study);
        }
        mStudyDao.upload(studies);
    }
    */

    /**
     * Looks up a study id by study number, returning {@code null} instead of
     * throwing when the study does not exist (internal, non-REST helper).
     */
    public final Long getStudyNumberNonApiMethod(String studyNumber)
    {
        try
        {
            return studyDao.getEntityByNameField(studyNumber).getId();
        }
        catch (AWNoSuchEntityException ex)
        {
            return null;
        }
    }

    /**
     * Looks up a study-group id by number, returning {@code null} when absent.
     *
     * <p>NOTE(review): operates on study groups, not studies — the original
     * "TODO: Here?" suggests it may belong in a study-group controller.</p>
     */
    // TODO: Here?
    public final Long getStudyGroupNumberNonApiMethod(String studyGroupNumber)
    {
        try
        {
            return mStudyGroupDao.getStudyGroup(studyGroupNumber).getId();
        }
        catch (AWNoSuchEntityException ex)
        {
            return null;
        }
    }
}
apache-2.0
Thinker-BO/coolweather
app/src/main/java/com/coolweather/android/db/County.java
814
package com.coolweather.android.db;

import org.litepal.crud.DataSupport;

/**
 * LitePal-persisted entity describing a county.
 *
 * <p>Holds the primary key, the county's display name, the identifier used to
 * query the weather service, and the id of the city the county belongs to.</p>
 *
 * <p>NOTE(review): {@code weatherId} is declared {@code public} while the other
 * fields are private; callers may access it directly, so its visibility is
 * intentionally left unchanged.</p>
 */
public class County extends DataSupport {

    /** Primary key. */
    private int id;

    /** Human-readable county name. */
    private String countyName;

    /** Weather-service identifier; public for direct field access. */
    public String weatherId;

    /** Id of the owning city. */
    private int cityId;

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCountyName() {
        return this.countyName;
    }

    public void setCountyName(String countyName) {
        this.countyName = countyName;
    }

    public String getWeatherId() {
        return this.weatherId;
    }

    public void setWeatherId(String weatherId) {
        this.weatherId = weatherId;
    }

    public int getCityId() {
        return this.cityId;
    }

    public void setCityId(int cityId) {
        this.cityId = cityId;
    }
}
apache-2.0
aravindc/databenecommons
src/main/java/org/databene/commons/validator/domain/AbstractValidationDomainDescriptor.java
1159
/*
 * Copyright (C) 2004-2015 Volker Bergmann (volker.bergmann@bergmann-it.de).
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.databene.commons.validator.domain;

import org.databene.commons.ValidationDomainDescriptor;

/**
 * Abstract base implementation of the {@link ValidationDomainDescriptor} interface.
 *
 * <p>Deliberately empty: it exists as an insulation layer so that concrete
 * descriptors extending this class remain source-compatible if methods are
 * later added to the interface with default behavior supplied here.
 * Whenever possible, inherit from this class rather than implementing
 * {@link ValidationDomainDescriptor} directly.</p>
 *
 * Created: 20.12.2011 17:25:47
 * @since 0.5.14
 * @author Volker Bergmann
 */
public abstract class AbstractValidationDomainDescriptor implements ValidationDomainDescriptor
{
}
apache-2.0
onders86/camel
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/directmessage/DirectMessageProducer.java
2356
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.twitter.directmessage; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.component.twitter.TwitterConstants; import org.apache.camel.component.twitter.TwitterEndpoint; import org.apache.camel.impl.DefaultProducer; import org.apache.camel.util.ObjectHelper; import twitter4j.User; /** * Produces text as a direct message. 
*/ public class DirectMessageProducer extends DefaultProducer { private TwitterEndpoint endpoint; private String user; public DirectMessageProducer(TwitterEndpoint endpoint, String user) { super(endpoint); this.endpoint = endpoint; this.user = user; } public void process(Exchange exchange) throws Exception { // send direct message String toUsername = user; if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(TwitterConstants.TWITTER_USER, String.class))) { toUsername = exchange.getIn().getHeader(TwitterConstants.TWITTER_USER, String.class); } String text = exchange.getIn().getBody(String.class); if (toUsername.isEmpty()) { throw new CamelExchangeException("Username not configured on TwitterEndpoint", exchange); } else { log.debug("Sending to: {} message: {}", toUsername, text); User userStatus = endpoint.getProperties().getTwitter().showUser(toUsername); endpoint.getProperties().getTwitter().sendDirectMessage(userStatus.getId(), text); } } }
apache-2.0
minhdt/vasman
src/com/unified/provisioning/ws/package-info.java
127
/**
 * JAXB package-level binding: maps all schema-derived classes in this package
 * to the {@code http://ws.provisioning.unified.com/} XML namespace.
 */
@javax.xml.bind.annotation.XmlSchema(namespace = "http://ws.provisioning.unified.com/")
package com.unified.provisioning.ws;
apache-2.0
haiger/nsq-client
src/main/java/com/github/haiger/nsq/client/Producer.java
4435
package com.github.haiger.nsq.client;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.haiger.nsq.client.exception.NSQException;
import com.github.haiger.nsq.client.lookup.NSQNode;
import com.github.haiger.nsq.client.protocol.Request;
import com.github.haiger.nsq.client.protocol.RequestBuilder;
import com.github.haiger.nsq.client.protocol.Response;
import com.github.haiger.nsq.client.remoting.NSQConnector;
import com.github.haiger.nsq.client.util.ConnectorUtils;

/**
 * NSQ message producer: discovers nsqd nodes through a lookupd instance and
 * publishes messages round-robin across the live connectors.
 *
 * @author haiger
 * @since 2017-01-05 06:39:24
 */
public class Producer {
    private static final Logger log = LoggerFactory.getLogger(Producer.class);

    /** Maximum attempts to find a connected connector before giving up. */
    private static final int DEFAULT_RETRY = 3;

    private final String host; // lookupd ip
    private final int port;    // lookupd port
    /** Active connectors keyed by "ip:port". */
    private final ConcurrentHashMap<String, Connector> connectorMap;
    /** Monotonic counter used for round-robin connector selection. */
    private final AtomicLong index;

    public Producer(String host, int port) {
        this.host = host;
        this.port = port;
        this.connectorMap = new ConcurrentHashMap<String, Connector>();
        this.index = new AtomicLong(0);
    }

    /**
     * Returns the live connector map. Note: this exposes the internal mutable map;
     * it is consumed by ConnectorMonitor which adds/removes connectors directly.
     */
    public ConcurrentHashMap<String, Connector> getConnectorMap() {
        return connectorMap;
    }

    /**
     * Queries lookupd for nsqd nodes, opens a connector to each non-excluded node,
     * and registers this producer with the connector monitor.
     */
    public void connect() {
        List<NSQNode> nodes = ConnectorUtils.lookupNode(host, port);
        if (null == nodes || nodes.isEmpty()) {
            log.error("producer start fail !! could not find any nsqd from lookupd {}:{}", host, port);
            return;
        }

        for (NSQNode nsqNode : nodes) {
            if (ConnectorUtils.isExcluded(nsqNode))
                continue;
            Connector connector = null;
            try {
                connector = new NSQConnector(nsqNode.getHost(), nsqNode.getPort(), null, 0);
                connectorMap.put(ConnectorUtils.getConnectorKey(nsqNode), connector);
            } catch (NSQException e) {
                // Best-effort: a single bad node must not prevent the others from connecting.
                log.error("Producer: connector to {} goes wrong at:{}", ConnectorUtils.getConnectorKey(nsqNode), e);
            }
        }

        ConnectorMonitor.getInstance().setLookup(host, port);
        ConnectorMonitor.getInstance().registerProducer(this);
    }

    /**
     * Publishes a string message to the given topic.
     * The message is encoded as UTF-8 explicitly; the previous bare getBytes()
     * used the platform default charset, making the wire bytes machine-dependent.
     */
    public boolean put(String topic, String msg) throws NSQException, InterruptedException {
        return put(topic, msg.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Publishes a raw message to the given topic.
     *
     * @return true if nsqd acknowledged the publish
     * @throws NSQException if no connector is available or nsqd returns an error
     */
    public boolean put(String topic, byte[] msgData) throws NSQException, InterruptedException {
        Connector connector = getConnector();
        if (connector == null)
            throw new NSQException("No active connector to be used.");

        Request request = RequestBuilder.buildPub(topic, msgData);
        Response response = connector.writeAndWait(request);
        if (response.isOK()) {
            return true;
        }
        throw new NSQException(response.decodeString());
    }

    /**
     * Picks a connected connector, discarding dead ones up to DEFAULT_RETRY times.
     * Returns null when no connected connector can be found.
     */
    private Connector getConnector() {
        Connector connector = nextConnector();
        int retry = 0;
        // Guard against null inside the loop as well: nextConnector() can return null
        // if removeConnector() empties the map mid-retry, which previously caused an NPE
        // on the isConnected() call.
        while (connector != null && !connector.isConnected()) {
            if (retry >= DEFAULT_RETRY) {
                connector = null;
                break;
            }
            removeConnector(connector);
            connector = nextConnector();
            retry++;
        }
        return connector;
    }

    /** Round-robin selection over the current snapshot of connectors. */
    private Connector nextConnector() {
        // toArray(new Connector[0]) sizes the array atomically and avoids the latent
        // ArrayStoreException of allocating an NSQConnector[] for arbitrary Connector impls.
        Connector[] connectors = connectorMap.values().toArray(new Connector[0]);
        if (connectors.length < 1)
            return null;

        // Manual wrap instead of Math.abs: stays correct even if the counter wraps,
        // and avoids boxing the index into a Long.
        long i = index.incrementAndGet() % connectors.length;
        if (i < 0) {
            i += connectors.length;
        }
        return connectors[(int) i];
    }

    /**
     * Closes and removes the given connector from the pool.
     *
     * @return true if the connector was removed (or was already null)
     */
    public boolean removeConnector(Connector connector) {
        if (connector == null)
            return true;
        log.info("Producer: removeConnector({})", ConnectorUtils.getConnectorKey(connector));
        connector.close();
        return connectorMap.remove(ConnectorUtils.getConnectorKey(connector), connector);
    }

    /** Adds a (re)discovered connector to the pool. Called by ConnectorMonitor. */
    public void addConnector(Connector connector) {
        log.info("Producer: addConnector({})", ConnectorUtils.getConnectorKey(connector));
        connectorMap.put(ConnectorUtils.getConnectorKey(connector), connector);
    }

    /** Closes every connector. The producer is unusable afterwards. */
    public void close() {
        for (Connector connector : connectorMap.values()) {
            connector.close();
        }
    }
}
apache-2.0
lgoldstein/communitychest
apps/tools/svnkit/svnsync/src/main/java/net/community/apps/tools/svn/svnsync/Main.java
3268
/*
 *
 */
package net.community.apps.tools.svn.svnsync;

import javax.swing.SwingUtilities;

import net.community.apps.tools.svn.SVNBaseMain;
import net.community.apps.tools.svn.svnsync.resources.ResourcesAnchor;
import net.community.chest.CoVariantReturn;
import net.community.chest.dom.proxy.AbstractXmlProxyConverter;
import net.community.chest.lang.SysPropsEnum;
import net.community.chest.resources.SystemPropertiesResolver;

/**
 * Command-line entry point for the SVN synchronization tool: parses the
 * tool-specific options and launches the Swing main frame.
 *
 * <P>Copyright as per GPLv2</P>
 * @author Lyor G.
 * @since Aug 19, 2010 11:31:15 AM
 *
 */
public final class Main extends SVNBaseMain<ResourcesAnchor,SVNSyncMainFrame> {
    public Main (String... args)
    {
        super(args);
    }

    /*
     * Handles the svnsync-specific options, delegating anything unrecognized
     * to the base class. Returns the index of the last argument consumed.
     * Note: "-t"/"-s" values go through SystemPropertiesResolver so they may
     * contain ${...} system-property references.
     *
     * @see net.community.apps.tools.svn.SVNBaseMain#processArgument(net.community.apps.tools.svn.SVNBaseMainFrame, java.lang.String, int, int, java.lang.String[])
     */
    @Override
    protected int processArgument (SVNSyncMainFrame f, String a, int oIndex, int numArgs, String... args)
    {
        int aIndex=oIndex;
        if ("-t".equals(a) || "--target".equals(a))
        {
            // target working-copy location (falls back to the frame's current value)
            aIndex++;
            final String loc=resolveStringArg(a, args, numArgs, aIndex, f.getWCLocation()),
                         eff=SystemPropertiesResolver.SYSTEM.format(loc);
            f.setWCLocation(null, eff, false);
        }
        else if ("-s".equals(a) || "--source".equals(a))
        {
            // synchronization source location
            aIndex++;
            final String loc=resolveStringArg(a, args, numArgs, aIndex, f.getSynchronizationSource()),
                         eff=SystemPropertiesResolver.SYSTEM.format(loc);
            f.setSynchronizationSource(eff);
        }
        else if ("-c".equals(a) || "--confirm".equals(a))
        {
            // location requiring interactive confirmation; may appear multiple
            // times but each location only once
            aIndex++;
            final String loc=resolveStringArg(a, args, numArgs, aIndex, null);
            if (!f.addConfirmLocation(loc))
                throw new IllegalStateException("Re-specified " + a + " value: " + loc);
        }
        else if ("--show-skipped".equals(a))
            f.setShowSkippedTargetsEnabled(true);
        else if ("--skip-props".equals(a))
            f.setPropertiesSyncAllowed(false);
        else if ("--use-merge".equals(a))
            f.setUseMergeForUpdate(true);
        else
            aIndex = super.processArgument(f, a, oIndex, numArgs, args);
        return aIndex;
    }

    /*
     * Creates the frame, applies the command-line arguments, and defaults the
     * synchronization source to the current user directory when unset.
     *
     * @see net.community.apps.common.BaseMain#createMainFrameInstance()
     */
    @Override
    @CoVariantReturn
    protected SVNSyncMainFrame createMainFrameInstance () throws Exception
    {
        final SVNSyncMainFrame f=processMainArgs(new SVNSyncMainFrame(), getMainArguments());
        final String wcLoc=f.getSynchronizationSource();
        if ((wcLoc == null) || (wcLoc.length() <= 0))
            f.setSynchronizationSource(SysPropsEnum.USERDIR.getPropertyValue());
        return f;
    }
    //////////////////////////////////////////////////////////////////////////

    public static void main (final String[] args)
    {
        // 1st thing we do before any UI startup
        AbstractXmlProxyConverter.setDefaultLoader(ResourcesAnchor.getInstance());
        SwingUtilities.invokeLater(new Main(args));
    }
}
apache-2.0
tdiesler/fabric8poc
fabric/spi/src/main/java/io/fabric8/spi/ClusterDataStore.java
933
/*
 * #%L
 * Fabric8 :: SPI
 * %%
 * Copyright (C) 2014 Red Hat
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package io.fabric8.spi;

import io.fabric8.api.ContainerIdentity;

/**
 * A cluster wide data store
 *
 * @author thomas.diesler@jboss.com
 * @since 18-Apr-2014
 */
public interface ClusterDataStore {

    /**
     * Creates a new container identity scoped under the given parent.
     *
     * @param parentId the identity of the parent container
     *                 (NOTE(review): whether {@code null} is allowed for root
     *                 containers is not visible here — confirm with implementations)
     * @param prefix   the name prefix used when generating the new identity
     * @return the newly created container identity
     */
    ContainerIdentity createContainerIdentity(ContainerIdentity parentId, String prefix);
}
apache-2.0
terrancesnyder/solr-analytics
lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingTermVectorsFormat.java
2746
package org.apache.lucene.codecs.asserting;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.codecs.TermVectorsWriter;
import org.apache.lucene.codecs.lucene40.Lucene40TermVectorsFormat;
import org.apache.lucene.index.AssertingAtomicReader;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

/**
 * Just like {@link Lucene40TermVectorsFormat}, except the reader side is
 * wrapped so that extra {@code assert}-based invariant checks run on every
 * term-vector access.
 */
public class AssertingTermVectorsFormat extends TermVectorsFormat {
  private final TermVectorsFormat delegate = new Lucene40TermVectorsFormat();

  @Override
  public TermVectorsReader vectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) throws IOException {
    // Open the real reader first, then layer the asserting wrapper on top.
    TermVectorsReader reader = delegate.vectorsReader(directory, segmentInfo, fieldInfos, context);
    return new AssertingTermVectorsReader(reader);
  }

  @Override
  public TermVectorsWriter vectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context) throws IOException {
    // Writing is passed straight through; only reads are asserted.
    return delegate.vectorsWriter(directory, segmentInfo, context);
  }

  /** Reader wrapper that returns asserting {@link Fields} views. */
  static class AssertingTermVectorsReader extends TermVectorsReader {
    private final TermVectorsReader delegate;

    AssertingTermVectorsReader(TermVectorsReader delegate) {
      this.delegate = delegate;
    }

    @Override
    public void close() throws IOException {
      delegate.close();
    }

    @Override
    public Fields get(int doc) throws IOException {
      Fields fields = delegate.get(doc);
      if (fields == null) {
        // A document without term vectors stays null, exactly as the delegate reports.
        return null;
      }
      return new AssertingAtomicReader.AssertingFields(fields);
    }

    @Override
    public TermVectorsReader clone() {
      return new AssertingTermVectorsReader(delegate.clone());
    }
  }
}
apache-2.0
googleads/google-ads-java
google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/services/AccountLinkOperationOrBuilder.java
3061
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/services/account_link_service.proto

package com.google.ads.googleads.v9.services;

// NOTE(review): machine-generated file — do not hand-edit; regenerate from the
// .proto source named above if the service definition changes.
public interface AccountLinkOperationOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.ads.googleads.v9.services.AccountLinkOperation)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 4;</code>
   * @return Whether the updateMask field is set.
   */
  boolean hasUpdateMask();
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 4;</code>
   * @return The updateMask.
   */
  com.google.protobuf.FieldMask getUpdateMask();
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 4;</code>
   */
  com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder();

  /**
   * <pre>
   * Update operation: The account link is expected to have
   * a valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v9.resources.AccountLink update = 2;</code>
   * @return Whether the update field is set.
   */
  boolean hasUpdate();
  /**
   * <pre>
   * Update operation: The account link is expected to have
   * a valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v9.resources.AccountLink update = 2;</code>
   * @return The update.
   */
  com.google.ads.googleads.v9.resources.AccountLink getUpdate();
  /**
   * <pre>
   * Update operation: The account link is expected to have
   * a valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v9.resources.AccountLink update = 2;</code>
   */
  com.google.ads.googleads.v9.resources.AccountLinkOrBuilder getUpdateOrBuilder();

  /**
   * <pre>
   * Remove operation: A resource name for the account link to remove is
   * expected, in this format:
   * `customers/{customer_id}/accountLinks/{account_link_id}`
   * </pre>
   *
   * <code>string remove = 3;</code>
   * @return Whether the remove field is set.
   */
  boolean hasRemove();
  /**
   * <pre>
   * Remove operation: A resource name for the account link to remove is
   * expected, in this format:
   * `customers/{customer_id}/accountLinks/{account_link_id}`
   * </pre>
   *
   * <code>string remove = 3;</code>
   * @return The remove.
   */
  java.lang.String getRemove();
  /**
   * <pre>
   * Remove operation: A resource name for the account link to remove is
   * expected, in this format:
   * `customers/{customer_id}/accountLinks/{account_link_id}`
   * </pre>
   *
   * <code>string remove = 3;</code>
   * @return The bytes for remove.
   */
  com.google.protobuf.ByteString getRemoveBytes();

  public com.google.ads.googleads.v9.services.AccountLinkOperation.OperationCase getOperationCase();
}
apache-2.0
talshani/flux4j
example-todo/src/main/java/example/todo/TodoStore.java
1669
package example.todo;

import com.google.common.collect.ImmutableList;
import io.tals.flux4j.shared.ActionHandler;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Flux store holding the collection of TODO items and the action handlers
 * that mutate it.
 *
 * @author Tal Shani
 */
@Singleton
public class TodoStore {

    // Item id -> item; ids are timestamp-derived (see create()).
    private final Map<Long, TodoItem> items = new HashMap<Long, TodoItem>();

    @Inject
    public TodoStore() {
    }

    /**
     * Get the entire collection of TODOs.
     *
     * @return an immutable snapshot of all current items
     */
    public List<TodoItem> getAll() {
        return ImmutableList.copyOf(items.values());
    }

    /** Creates a new item from the action's text; blank/whitespace input is ignored. */
    @ActionHandler
    boolean handleTodoCreate(TodoCreateAction action) {
        String text = action.getText().trim();
        if (!text.isEmpty()) {
            create(text);
            return true;
        }
        return false;
    }

    /** Removes the item identified by the action's id, if it exists. */
    @ActionHandler
    boolean handleTodoDestroyMoo(TodoDestroyAction action) {
        if (items.containsKey(action.getId())) {
            items.remove(action.getId());
            return true;
        }
        return false;
    }

    /** Sets the completed flag on the item identified by the action's id, if it exists. */
    @ActionHandler
    boolean handle(TodoCompletedAction action) {
        if (items.containsKey(action.getId())) {
            items.get(action.getId()).setComplete(action.isCompleted());
            return true;
        }
        return false;
    }

    /**
     * Create a TO-DO item.
     *
     * @param text The content of the TO-DO
     */
    private void create(String text) {
        // Using the current timestamp in place of a real id, as before — but bump
        // until the id is unused: two creates within the same millisecond previously
        // produced the same key and silently overwrote the first item.
        long id = new Date().getTime();
        while (items.containsKey(id)) {
            id++;
        }
        items.put(id, new TodoItem(id, false, text));
    }
}
apache-2.0
adam-roughton/Concentus
Core/src/test/java/com/adamroughton/concentus/messaging/zmq/MessengerTestUtil.java
4477
package com.adamroughton.concentus.messaging.zmq;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Objects;
import java.util.UUID;

import com.adamroughton.concentus.data.ArrayBackedResizingBuffer;
import com.adamroughton.concentus.data.BytesUtil;
import com.adamroughton.concentus.data.ResizingBuffer;
import com.adamroughton.concentus.messaging.EventHeader;
import com.adamroughton.concentus.messaging.IncomingEventHeader;
import com.adamroughton.concentus.messaging.OutgoingEventHeader;

/**
 * Test helpers for the ZMQ messaging layer: deterministic content generation,
 * message-header construction, and reading/writing protocol-formatted messages.
 */
public final class MessengerTestUtil {

    /** Static utility class — not instantiable. */
    private MessengerTestUtil() {
    }

    /** Equivalent to {@link #genContent(int, int)} with a seed of 0. */
    public static byte[] genContent(int length) {
        return genContent(length, 0);
    }

    /**
     * Generates deterministic content: consecutive ints (offset by {@code seed})
     * packed into every 4-byte slot; any trailing bytes remain zero.
     */
    public static byte[] genContent(int length, int seed) {
        byte[] content = new byte[length];
        // Stop while a full int still fits: the previous `i < length` bound made
        // BytesUtil.writeInt write past the end of the array whenever length was
        // not a multiple of 4.
        for (int i = 0; i + 4 <= length; i += 4) {
            BytesUtil.writeInt(content, i, i / 4 + seed);
        }
        return content;
    }

    /** Serializes a UUID into a fresh 16-byte array. */
    public static byte[] genIdBytes(UUID id) {
        byte[] idBytes = new byte[16];
        BytesUtil.writeUUID(idBytes, 0, id);
        return idBytes;
    }

    /**
     * Fills the given segment slots with generated content, incrementing the seed
     * per slot so each segment is distinct.
     *
     * @throws IllegalArgumentException if any index is outside the segments array
     */
    public static void genSegmentData(int length, int seed, byte[][] segments, int...indices) {
        for (int index : indices) {
            if (index < 0 || index >= segments.length)
                throw new IllegalArgumentException(String.format("Provided index '%d' out of bounds [%d, %d]",
                        index, 0, segments.length));
            segments[index] = genContent(length, seed++);
        }
    }

    /** Header for a single message: just its length. */
    public static byte[] makeMsgHeader(byte[] msg) {
        return makeMsgHeader(msg.length);
    }

    /** Header for several messages: the combined length. */
    public static byte[] makeMsgHeader(byte[]... msgs) {
        int total = 0;
        for (byte[] msg : msgs) {
            total += msg.length;
        }
        return makeMsgHeader(total);
    }

    /** Header layout: [int msgLength]. */
    public static byte[] makeMsgHeader(int msgLength) {
        byte[] header = new byte[ResizingBuffer.INT_SIZE];
        BytesUtil.writeInt(header, 0, msgLength);
        return header;
    }

    /** Sequenced header for several messages: the combined length plus a sequence number. */
    public static byte[] makeMsgHeader(long seq, byte[]... msgs) {
        int total = 0;
        for (byte[] msg : msgs) {
            total += msg.length;
        }
        return makeMsgHeader(seq, total);
    }

    /** Sequenced header for a single message. */
    public static byte[] makeMsgHeader(long seq, byte[] msg) {
        return makeMsgHeader(seq, msg.length);
    }

    /** Header layout: [long seq][int msgLength]. */
    public static byte[] makeMsgHeader(long seq, int msgLength) {
        byte[] header = new byte[ResizingBuffer.LONG_SIZE + ResizingBuffer.INT_SIZE];
        BytesUtil.writeLong(header, 0, seq);
        BytesUtil.writeInt(header, ResizingBuffer.LONG_SIZE, msgLength);
        return header;
    }

    /**
     * Extracts all segments from a received buffer using the header's per-segment
     * metadata. Asserts that the header marks the buffer as valid.
     */
    public static byte[][] readMessageParts(ArrayBackedResizingBuffer buffer, IncomingEventHeader header) {
        Objects.requireNonNull(buffer);
        assertTrue(header.isValid(buffer));

        int segmentCount = header.getSegmentCount();
        byte[][] segments = new byte[segmentCount][];
        for (int segmentIndex = 0; segmentIndex < segmentCount; segmentIndex++) {
            int segmentMetaData = header.getSegmentMetaData(buffer, segmentIndex);
            int segmentOffset = EventHeader.getSegmentOffset(segmentMetaData);
            int segmentLength = EventHeader.getSegmentLength(segmentMetaData);

            byte[] segment = new byte[segmentLength];
            buffer.copyTo(segment, 0, segmentOffset, segmentLength);
            segments[segmentIndex] = segment;
        }
        return segments;
    }

    /**
     * Writes the given segments into the provided buffer with the correct protocol
     * format.
     * @param segments the segments to write
     * @param outgoingBuffer the buffer to write into
     * @param header the processing header to use for writing
     * @return the segment offsets in the outgoing buffer (saves having to use header to extract this data)
     */
    public static int[] writeMessage(byte[][] segments, ArrayBackedResizingBuffer outgoingBuffer,
            OutgoingEventHeader header) {
        int[] offsets = new int[segments.length];
        Objects.requireNonNull(outgoingBuffer);
        if (segments.length > header.getSegmentCount())
            fail(String.format("Too many segments for the given header (%d > %d)",
                    segments.length, header.getSegmentCount()));
        else if (segments.length < header.getSegmentCount())
            fail(String.format("Not enough segments for the given header (%d < %d)",
                    segments.length, header.getSegmentCount()));

        // Lay the segments out back-to-back after the header's event offset,
        // recording each segment's position in the header metadata.
        int contentCursor = header.getEventOffset();
        for (int segmentIndex = 0; segmentIndex < segments.length; segmentIndex++) {
            byte[] segment = segments[segmentIndex];
            header.setSegmentMetaData(outgoingBuffer, segmentIndex, contentCursor, segment.length);
            offsets[segmentIndex] = contentCursor;
            outgoingBuffer.copyFrom(segment, 0, contentCursor, segment.length);
            contentCursor += segment.length;
        }
        header.setIsValid(outgoingBuffer, true);
        return offsets;
    }
}
apache-2.0
carlosgilsaenz/ud851-Exercises
Lesson05b-Smarter-GitHub-Repo-Search/T05b.01-Exercise-SaveResults/app/src/main/java/com/example/android/asynctaskloader/MainActivity.java
6834
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.asynctaskloader;

import android.os.AsyncTask;
import android.os.Bundle;
import android.os.PersistableBundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.example.android.asynctaskloader.utilities.NetworkUtils;

import java.io.IOException;
import java.net.URL;

/**
 * Exercise activity: searches GitHub repositories via an AsyncTask and
 * preserves the query URL and raw JSON results across configuration changes
 * using the saved-instance-state Bundle.
 */
public class MainActivity extends AppCompatActivity {

    // complete (1) Create a static final key to store the query's URL
    private static final String URL_KEY = "QUERY_URL";

    // complete (2) Create a static final key to store the search's raw JSON
    private static final String JSON_KEY = "JSON_RAW";

    private EditText mSearchBoxEditText;
    private TextView mUrlDisplayTextView;
    private TextView mSearchResultsTextView;
    private TextView mErrorMessageDisplay;
    private ProgressBar mLoadingIndicator;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mSearchBoxEditText = (EditText) findViewById(R.id.et_search_box);
        mUrlDisplayTextView = (TextView) findViewById(R.id.tv_url_display);
        mSearchResultsTextView = (TextView) findViewById(R.id.tv_github_search_results_json);
        mErrorMessageDisplay = (TextView) findViewById(R.id.tv_error_message_display);
        mLoadingIndicator = (ProgressBar) findViewById(R.id.pb_loading_indicator);

        // complete (9) If the savedInstanceState bundle is not null, set the text of the URL and search results TextView respectively
        // Restores the previous query URL and results after rotation/recreation,
        // avoiding a redundant network request.
        if (savedInstanceState != null) {
            mUrlDisplayTextView.setText(savedInstanceState.getString(URL_KEY));
            mSearchResultsTextView.setText(savedInstanceState.getString(JSON_KEY));
        }
    }

    /**
     * This method retrieves the search text from the EditText, constructs the
     * URL (using {@link NetworkUtils}) for the github repository you'd like to find, displays
     * that URL in a TextView, and finally fires off an AsyncTask to perform the GET request using
     * our {@link GithubQueryTask}
     */
    private void makeGithubSearchQuery() {
        String githubQuery = mSearchBoxEditText.getText().toString();
        URL githubSearchUrl = NetworkUtils.buildUrl(githubQuery);
        mUrlDisplayTextView.setText(githubSearchUrl.toString());
        new GithubQueryTask().execute(githubSearchUrl);
    }

    /**
     * This method will make the View for the JSON data visible and
     * hide the error message.
     * <p>
     * Since it is okay to redundantly set the visibility of a View, we don't
     * need to check whether each view is currently visible or invisible.
     */
    private void showJsonDataView() {
        /* First, make sure the error is invisible */
        mErrorMessageDisplay.setVisibility(View.INVISIBLE);
        /* Then, make sure the JSON data is visible */
        mSearchResultsTextView.setVisibility(View.VISIBLE);
    }

    /**
     * This method will make the error message visible and hide the JSON
     * View.
     * <p>
     * Since it is okay to redundantly set the visibility of a View, we don't
     * need to check whether each view is currently visible or invisible.
     */
    private void showErrorMessage() {
        /* First, hide the currently visible data */
        mSearchResultsTextView.setVisibility(View.INVISIBLE);
        /* Then, show the error */
        mErrorMessageDisplay.setVisibility(View.VISIBLE);
    }

    // NOTE(review): non-static inner AsyncTask holds an implicit reference to the
    // Activity for the duration of the request — acceptable for this exercise,
    // but a leak risk in production code.
    public class GithubQueryTask extends AsyncTask<URL, Void, String> {

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            // Show the spinner while the request is in flight.
            mLoadingIndicator.setVisibility(View.VISIBLE);
        }

        @Override
        protected String doInBackground(URL... params) {
            URL searchUrl = params[0];
            String githubSearchResults = null;
            try {
                githubSearchResults = NetworkUtils.getResponseFromHttpUrl(searchUrl);
            } catch (IOException e) {
                // Network failure is reported to the user via the null result
                // handled in onPostExecute.
                e.printStackTrace();
            }
            return githubSearchResults;
        }

        @Override
        protected void onPostExecute(String githubSearchResults) {
            mLoadingIndicator.setVisibility(View.INVISIBLE);
            if (githubSearchResults != null && !githubSearchResults.equals("")) {
                showJsonDataView();
                mSearchResultsTextView.setText(githubSearchResults);
            } else {
                showErrorMessage();
            }
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemThatWasClickedId = item.getItemId();
        if (itemThatWasClickedId == R.id.action_search) {
            makeGithubSearchQuery();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // complete (3) Override onSaveInstanceState to persist data across Activity recreation
    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);

        String stringURL = mUrlDisplayTextView.getText().toString().trim();
        outState.putString(URL_KEY, stringURL);

        String stringResults = mSearchResultsTextView.getText().toString().trim();
        outState.putString(JSON_KEY, stringResults);
    }
    // Do the following steps within onSaveInstanceState
    // complete (4) Make sure super.onSaveInstanceState is called before doing anything else
    // complete (5) Put the contents of the TextView that contains our URL into a variable
    // complete (6) Using the key for the query URL, put the string in the outState Bundle
    // complete (7) Put the contents of the TextView that contains our raw JSON search results into a variable
    // complete (8) Using the key for the raw JSON search results, put the search results into the outState Bundle
}
apache-2.0
nayuan/properties-maven-plugin
src/main/java/com/dplugin/maven/plugins/properties/source/Entry.java
1417
package com.dplugin.maven.plugins.properties.source; /** * 属性项 2016-07-23 22:44:01 * @author nayuan */ public class Entry { /** * 属性ID */ private Object id; /** * 描述/标题 */ private String title = ""; /** * 属性key */ private String key; /** * 属性值 */ private String value = ""; public Entry(String key, String value) { this("", key, value); } public Entry(String title, String key, String value) { this(null, title, key, value); } public Entry(Object id, String title, String key, String value) { this.id = id; this.title = title; this.key = key; this.value = value; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public String toString() { if(key == null) { return title; }else{ return key + "=" + value; } } public Object getId() { return id; } public void setId(Object id) { this.id = id; } }
apache-2.0
holenzh/babyDiary
src/com/holen/babygrowth/BabyDataEdit.java
5900
package com.holen.babygrowth;

import java.util.Calendar;

import org.joda.time.Interval;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.app.DatePickerDialog.OnDateSetListener;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.database.Cursor;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.RadioGroup;

import com.holen.babygrowth.Constant.SqlConstant;
import com.holen.babygrowth.DB.DBHelper;

/**
 * Entry screen for a baby's basic data: name, gender, birthday and several
 * measurements. Validates the form and inserts one row into the baby table.
 *
 * NOTE(review): the original comments and several string literals are
 * GBK-encoded Chinese that was mojibake'd in this copy. Comment translations
 * below are best-effort reconstructions; string literals are kept byte-for-byte
 * because they are runtime values — confirm their intended characters against
 * the original repository before re-encoding.
 */
public class BabyDataEdit extends Activity {

    private Button saveBtn;
    private EditText babyNameText;
    private RadioGroup gender;
    private EditText babyBirthText;
    private EditText babyWeightText;
    private EditText babyHeightText;
    private EditText babyPregnancyText;
    private EditText babyHeadText;
    private EditText babyBustText;

    private Calendar calendar;
    // Currently selected birthday (month is zero-based, as returned by the picker).
    private int m_year, m_month, m_day;
    // Timestamp of the last birthday-field click, used to debounce the dialog.
    private long intervalTime = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.baby_info);

        babyBirthText = (EditText) findViewById(R.id.babyBirth);
        saveBtn = (Button) findViewById(R.id.btnSave);
        gender = (RadioGroup) findViewById(R.id.genderGroup);
        babyNameText = (EditText) findViewById(R.id.babyName);
        babyWeightText = (EditText) findViewById(R.id.babyWeight);
        babyHeightText = (EditText) findViewById(R.id.babyHeight);
        babyPregnancyText = (EditText) findViewById(R.id.babyPregnancy);
        babyHeadText = (EditText) findViewById(R.id.babyHead);
        babyBustText = (EditText) findViewById(R.id.babyBust);

        // (translated) Set the birthday field's click listener to open the date picker.
        babyBirthText.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                // Debounce: ignore clicks within 1 second of the previous one
                // so the dialog is not opened twice.
                if ((System.currentTimeMillis() - intervalTime) > 1000) {
                    showDatePickerDialog();
                    intervalTime = System.currentTimeMillis();
                }
            }
        });

        // (translated) Save-button click handler: validate, reject duplicate
        // names, then insert the record and close the screen.
        saveBtn.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                if (!validate()) {
                    return;
                }
                // do some thing
                DBHelper helper = new DBHelper(getApplicationContext());
                String[] columns = {"baby_name"};
                String[] selectionArgs = {babyNameText.getText().toString()};
                // Duplicate check: look up an existing row with the same name.
                Cursor c = helper.query(false, SqlConstant.babyTableName, columns,
                        "baby_name=?", selectionArgs, null, null, null, null);
                //Log.v("database", c.getCount());
                if (c.getCount() > 0) {
                    // Name already registered: warn and clear the name field.
                    showAlert(getString(R.string.register_msg));
                    babyNameText.setText("");
                    return;
                };

                ContentValues values = new ContentValues();
                values.put("baby_name", babyNameText.getText().toString());
                // Gender literals are mojibake'd GBK (presumably 男/女 — TODO confirm).
                values.put("gender", gender.getCheckedRadioButtonId() == R.id.boy ? "ÄÐ" : "Å®");
                // Picker months are zero-based, hence the +1 for storage.
                values.put("birthday", m_year + "-" + (m_month + 1) + "-" + m_day);
                values.put("weight", babyWeightText.getText().toString());
                values.put("height", babyHeightText.getText().toString());
                values.put("head_size", babyHeadText.getText().toString());
                values.put("bust", babyBustText.getText().toString());
                values.put("pregnancy", babyPregnancyText.getText().toString());
                helper.insert(SqlConstant.babyTableName, values);
                finish();
            }
        });
    }

    /**
     * (translated) Checks for missing or invalid inputs. If any required field
     * is empty, or any numeric field is negative or non-numeric, an alert is
     * shown and {@code false} is returned so nothing is written to the database.
     */
    private boolean validate() {
        String weight = babyWeightText.getText().toString();
        String height = babyHeightText.getText().toString();
        String head = babyHeadText.getText().toString();
        String bust = babyBustText.getText().toString();
        String pregnancy = babyPregnancyText.getText().toString();

        // (translated) When no RadioButton is selected, getCheckedRadioButtonId() is -1.
        if (babyNameText.getText().toString().trim().equals("")
                || gender.getCheckedRadioButtonId() == -1
                || babyBirthText.getText().toString().equals("")
                || weight.equals("") || height.equals("")
                || pregnancy.equals("") || bust.equals("") || head.equals("")) {
            showAlert(getString(R.string.info_msg));
            return false;
        }

        try {
            // All measurements must parse as non-negative numbers.
            if (Double.parseDouble(weight) < 0
                    || Double.parseDouble(height) < 0
                    || Double.parseDouble(head) < 0
                    || Double.parseDouble(bust) < 0
                    || Double.parseDouble(pregnancy) < 0) {
                showAlert(getString(R.string.warn_msg));
                return false;
            }
        } catch (NumberFormatException ex) {
            showAlert(getString(R.string.warn_msg));
            return false;
        }
        return true;
    }

    /**
     * (translated) Shows the date picker dialog, pre-set to today's date.
     */
    private void showDatePickerDialog() {
        calendar = Calendar.getInstance();
        m_year = calendar.get(Calendar.YEAR);
        m_month = calendar.get(Calendar.MONTH);
        m_day = calendar.get(Calendar.DAY_OF_MONTH);
        // Log label text is mojibake'd GBK (year/month markers) — kept as-is.
        Log.v("holen", m_year + "Äê" + m_month + "ÔÂ" + m_day);
        //showAlert(m_year + "");
        DatePickerDialog datePicker = new DatePickerDialog(BabyDataEdit.this,
                dateSetListener, m_year, m_month, m_day);
        datePicker.show();
    }

    /**
     * (translated) Date-picker listener: stores the chosen date and echoes it
     * into the birthday field. The literals between the numbers are
     * mojibake'd GBK (presumably 年/月/日 — TODO confirm) and kept byte-for-byte.
     */
    private OnDateSetListener dateSetListener = new OnDateSetListener() {
        public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
            m_year = year;
            m_month = monthOfYear;
            m_day = dayOfMonth;
            babyBirthText.setText(m_year + "Äê" + (m_month + 1) + "ÔÂ" + m_day + "ÈÕ");
        }
    };

    /**
     * (translated) Shows a simple, non-cancelable alert dialog with an OK button.
     */
    private void showAlert(String message) {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setMessage(message)
                .setCancelable(false)
                .setPositiveButton(R.string.OK, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        // Nothing to do: the dialog simply dismisses.
                    }
                });
        AlertDialog alertDialog = builder.create();
        alertDialog.show();
    }
}
apache-2.0
klenkes74/piracc
piracc-backend/src/main/java/de/kaiserpfalzEdv/piracc/backend/organization/organization/OrganizationPredicateBuilder.java
2953
/* * Copyright 2015 Kaiserpfalz EDV-Service, Roland T. Lichti * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package de.kaiserpfalzEdv.piracc.backend.organization.organization; import com.mysema.query.types.Ops; import com.mysema.query.types.Predicate; import com.mysema.query.types.expr.BooleanOperation; import de.kaiserpfalzEdv.piracc.backend.db.auth.User; import de.kaiserpfalzEdv.piracc.backend.db.master.Organization; import de.kaiserpfalzEdv.vaadin.backend.query.PredicateBuilder; import de.kaiserpfalzEdv.vaadin.backend.query.PredicateBuilderException; import java.io.Serializable; import java.sql.Timestamp; import static de.kaiserpfalzEdv.piracc.backend.db.master.QOrganization.organization; /** * @author klenkes * @version 2015Q1 * @since 16.09.15 20:42 */ public class OrganizationPredicateBuilder implements PredicateBuilder<Organization> { private Long id; private Timestamp created; private Timestamp modified; @Override public Predicate build() { Predicate result = null; if (id != null) result = addToResult(result, organization.id.eq(id)); if (created != null) result = addToResult(result, organization.created.eq(created)); if (modified != null) result = addToResult(result, organization.lastModified.eq(modified)); if (result == null) throw new PredicateBuilderException(this, "No query data given!"); return result; } private Predicate addToResult(Predicate input, Predicate predicate) { if (input != null) { return BooleanOperation.create(Ops.AND, input, predicate); } return 
predicate; } public OrganizationPredicateBuilder withQuery(OrganizationQuery query) { if (query == null) return this; withId(query.getId()); withCreated(query.getCreated()); withModified(query.getLastModified()); return this; } public OrganizationPredicateBuilder withId(Long id) { this.id = id; return this; } public OrganizationPredicateBuilder withCreated(Timestamp created) { this.created = created; return this; } public OrganizationPredicateBuilder withModified(Timestamp modified) { this.modified = modified; return this; } @Override public Class<? extends Serializable> getEntityClass() { return User.class; } }
apache-2.0