index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/AbstractFormatValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.io.Serializable;
import java.text.Format;
import java.text.ParsePosition;
import java.util.Locale;
/**
* <p>Abstract class for <i>Format</i> based Validation.</p>
*
* <p>This is a <i>base</i> class for building Date and Number
* Validators using format parsing.</p>
*
* @since 1.3.0
*/
public abstract class AbstractFormatValidator implements Serializable {

    private static final long serialVersionUID = -4690687565200568258L;

    /** Whether parsing must consume the whole input for a value to be valid. */
    private final boolean strict;

    /**
     * Constructs an instance with the specified strict setting.
     *
     * @param strict {@code true} if strict
     *        <code>Format</code> parsing should be used.
     */
    public AbstractFormatValidator(final boolean strict) {
        this.strict = strict;
    }

    /**
     * <p>Indicates whether validated values should adhere
     * strictly to the <code>Format</code> used.</p>
     *
     * <p><code>Format</code> implementations typically stop
     * parsing at the first invalid character and ignore the
     * remainder of the value. For example parsing a date
     * value of <code>01/01/20x0</code> with the pattern
     * <code>dd/MM/yyyy</code> yields a year of <code>20</code>
     * when <code>strict</code> is {@code false}, whereas with
     * <code>strict</code> set to {@code true} the same value
     * fails validation.</p>
     *
     * @return {@code true} if strict <code>Format</code>
     *         parsing should be used.
     */
    public boolean isStrict() {
        return strict;
    }

    /**
     * <p>Validate using the default <code>Locale</code>.
     *
     * @param value The value validation is being performed on.
     * @return {@code true} if the value is valid.
     */
    public boolean isValid(final String value) {
        // no pattern and no locale - delegates to the abstract three-argument form
        return isValid(value, null, null);
    }

    /**
     * <p>Validate using the specified <i>pattern</i>.
     *
     * @param value The value validation is being performed on.
     * @param pattern The pattern used to validate the value against.
     * @return {@code true} if the value is valid.
     */
    public boolean isValid(final String value, final String pattern) {
        return isValid(value, pattern, null);
    }

    /**
     * <p>Validate using the specified <code>Locale</code>.
     *
     * @param value The value validation is being performed on.
     * @param locale The locale to use for the Format, defaults to the default
     * @return {@code true} if the value is valid.
     */
    public boolean isValid(final String value, final Locale locale) {
        return isValid(value, null, locale);
    }

    /**
     * <p>Validate using the specified pattern and/or <code>Locale</code>.
     *
     * @param value The value validation is being performed on.
     * @param pattern The pattern used to format the value.
     * @param locale The locale to use for the Format, defaults to the default
     * @return {@code true} if the value is valid.
     */
    public abstract boolean isValid(String value, String pattern, Locale locale);

    /**
     * <p>Format an object into a <code>String</code> using
     * the default Locale.</p>
     *
     * @param value The value validation is being performed on.
     * @return The value formatted as a <code>String</code>.
     */
    public String format(final Object value) {
        return format(value, null, null);
    }

    /**
     * <p>Format an object into a <code>String</code> using
     * the specified pattern.</p>
     *
     * @param value The value validation is being performed on.
     * @param pattern The pattern used to format the value.
     * @return The value formatted as a <code>String</code>.
     */
    public String format(final Object value, final String pattern) {
        return format(value, pattern, null);
    }

    /**
     * <p>Format an object into a <code>String</code> using
     * the specified Locale.</p>
     *
     * @param value The value validation is being performed on.
     * @param locale The locale to use for the Format.
     * @return The value formatted as a <code>String</code>.
     */
    public String format(final Object value, final Locale locale) {
        return format(value, null, locale);
    }

    /**
     * <p>Format an object using the specified pattern and/or
     * <code>Locale</code>.
     *
     * @param value The value validation is being performed on.
     * @param pattern The pattern used to format the value.
     * @param locale The locale to use for the Format.
     * @return The value formatted as a <code>String</code>.
     */
    public String format(final Object value, final String pattern, final Locale locale) {
        // obtain the appropriate Format from the subclass and delegate
        return format(value, getFormat(pattern, locale));
    }

    /**
     * <p>Format a value with the specified <code>Format</code>.</p>
     *
     * @param value The value to be formatted.
     * @param formatter The Format to use.
     * @return The formatted value.
     */
    protected String format(final Object value, final Format formatter) {
        return formatter.format(value);
    }

    /**
     * <p>Parse the value with the specified <code>Format</code>.</p>
     *
     * @param value The value to be parsed.
     * @param formatter The Format to parse the value with.
     * @return The parsed value if valid or <code>null</code> if invalid.
     */
    protected Object parse(final String value, final Format formatter) {
        final ParsePosition position = new ParsePosition(0);
        final Object parsed = formatter.parseObject(value, position);
        // Reject on a parse error, or - in strict mode - when trailing
        // characters were left unconsumed by the formatter.
        if (position.getErrorIndex() > -1 || isStrict() && position.getIndex() < value.length()) {
            return null;
        }
        // give the subclass a chance to convert/validate the raw parse result
        return parsed == null ? null : processParsedValue(parsed, formatter);
    }

    /**
     * <p>Process the parsed value, performing any further validation
     * and type conversion required.</p>
     *
     * @param value The parsed object created.
     * @param formatter The Format used to parse the value with.
     * @return The parsed value converted to the appropriate type
     *         if valid or <code>null</code> if invalid.
     */
    protected abstract Object processParsedValue(Object value, Format formatter);

    /**
     * <p>Returns a <code>Format</code> for the specified <i>pattern</i>
     * and/or <code>Locale</code>.</p>
     *
     * @param pattern The pattern used to validate the value against or
     *        <code>null</code> to use the default for the <code>Locale</code>.
     * @param locale The locale to use for the currency format, system default if null.
     * @return The <code>NumberFormat</code> to created.
     */
    protected abstract Format getFormat(String pattern, Locale locale);
}
| 8,000 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/CreditCardValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.validator.routines.checkdigit.CheckDigit;
import org.apache.commons.validator.routines.checkdigit.LuhnCheckDigit;
/**
* Perform credit card validations.
*
* <p>
* By default, AMEX + VISA + MASTERCARD + DISCOVER card types are allowed. You can specify which
* cards should pass validation by configuring the validation options. For
* example,
* </p>
*
* <pre>
* <code>CreditCardValidator ccv = new CreditCardValidator(CreditCardValidator.AMEX + CreditCardValidator.VISA);</code>
* </pre>
*
* <p>
* configures the validator to only pass American Express and Visa cards.
* If a card type is not directly supported by this class, you can create an
* instance of the {@link CodeValidator} class and pass it to a {@link CreditCardValidator}
* constructor along with any existing validators. For example:
* </p>
*
* <pre>
* <code>CreditCardValidator ccv = new CreditCardValidator(
* new CodeValidator[] {
* CreditCardValidator.AMEX_VALIDATOR,
* CreditCardValidator.VISA_VALIDATOR,
* new CodeValidator("^(4)(\\d{12,18})$", LUHN_VALIDATOR) // add VPAY
* };</code>
* </pre>
*
* <p>
* Alternatively you can define a validator using the {@link CreditCardRange} class.
* For example:
* </p>
*
* <pre>
* <code>CreditCardValidator ccv = new CreditCardValidator(
* new CreditCardRange[]{
* new CreditCardRange("300", "305", 14, 14), // Diners
* new CreditCardRange("3095", null, 14, 14), // Diners
* new CreditCardRange("36", null, 14, 14), // Diners
* new CreditCardRange("38", "39", 14, 14), // Diners
* new CreditCardRange("4", null, new int[] {13, 16}), // VISA
* }
* );
* </code>
* </pre>
* <p>
* This can be combined with a list of {@code CodeValidator}s
* </p>
* <p>
* More information can be found in Michael Gilleland's essay
* <a href="http://web.archive.org/web/20120614072656/http://www.merriampark.com/anatomycc.htm">Anatomy of Credit Card Numbers</a>.
* </p>
*
* @since 1.4
*/
public class CreditCardValidator implements Serializable {
private static final long serialVersionUID = 5955978921148959496L;
private static final int MIN_CC_LENGTH = 12; // minimum allowed length
private static final int MAX_CC_LENGTH = 19; // maximum allowed length
/**
* Class that represents a credit card range.
* @since 1.6
*/
public static class CreditCardRange {
final String low; // e.g. 34 or 644
final String high; // e.g. 34 or 65
final int minLen; // e.g. 16 or -1
final int maxLen; // e.g. 19 or -1
final int lengths[]; // e.g. 16,18,19
/**
* Create a credit card range specifier for use in validation
* of the number syntax including the IIN range.
* <p>
* The low and high parameters may be shorter than the length
* of an IIN (currently 6 digits) in which case subsequent digits
* are ignored and may range from 0-9.
* <br>
* The low and high parameters may be different lengths.
* e.g. Discover "644" and "65".
* </p>
* @param low the low digits of the IIN range
* @param high the high digits of the IIN range
* @param minLen the minimum length of the entire number
* @param maxLen the maximum length of the entire number
*/
public CreditCardRange(final String low, final String high, final int minLen, final int maxLen) {
this.low = low;
this.high = high;
this.minLen = minLen;
this.maxLen = maxLen;
// a null lengths array tells validLength() to use the minLen/maxLen pair instead
this.lengths = null;
}
/**
* Create a credit card range specifier for use in validation
* of the number syntax including the IIN range.
* <p>
* The low and high parameters may be shorter than the length
* of an IIN (currently 6 digits) in which case subsequent digits
* are ignored and may range from 0-9.
* <br>
* The low and high parameters may be different lengths.
* e.g. Discover "644" and "65".
* </p>
* @param low the low digits of the IIN range
* @param high the high digits of the IIN range
* @param lengths array of valid lengths
*/
public CreditCardRange(final String low, final String high, final int [] lengths) {
this.low = low;
this.high = high;
// -1 marks the min/max pair as unused; the explicit lengths array is authoritative
this.minLen = -1;
this.maxLen = -1;
// defensive copy so later changes to the caller's array cannot affect this range
this.lengths = lengths.clone();
}
}
/**
* Option specifying that no cards are allowed. This is useful if
* you want only custom card types to validate so you turn off the
* default cards with this option.
*
* <pre>
* <code>
* CreditCardValidator v = new CreditCardValidator(CreditCardValidator.NONE);
* v.addAllowedCardType(customType);
* v.isValid(aCardNumber);
* </code>
* </pre>
*/
public static final long NONE = 0;
/**
* Option specifying that American Express cards are allowed.
*/
public static final long AMEX = 1 << 0;
/**
* Option specifying that Visa cards are allowed.
*/
public static final long VISA = 1 << 1;
/**
* Option specifying that Mastercard cards are allowed.
*/
public static final long MASTERCARD = 1 << 2;
/**
* Option specifying that Discover cards are allowed.
*/
public static final long DISCOVER = 1 << 3; // CHECKSTYLE IGNORE MagicNumber
/**
* Option specifying that Diners cards are allowed.
*/
public static final long DINERS = 1 << 4; // CHECKSTYLE IGNORE MagicNumber
/**
* Option specifying that VPay (Visa) cards are allowed.
* @since 1.5.0
*/
public static final long VPAY = 1 << 5; // CHECKSTYLE IGNORE MagicNumber
/**
* Option specifying that Mastercard cards (pre Oct 2016 only) are allowed.
* @deprecated for use until Oct 2016 only
*/
@Deprecated
public static final long MASTERCARD_PRE_OCT2016 = 1 << 6; // CHECKSTYLE IGNORE MagicNumber
/**
* The CreditCardTypes that are allowed to pass validation.
*/
private final List<CodeValidator> cardTypes = new ArrayList<>();
/**
* Luhn checkdigit validator for the card numbers.
*/
private static final CheckDigit LUHN_VALIDATOR = LuhnCheckDigit.LUHN_CHECK_DIGIT;
/**
* American Express (Amex) Card Validator
* <p>
* 34xxxx (15) <br>
* 37xxxx (15) <br>
*/
public static final CodeValidator AMEX_VALIDATOR = new CodeValidator("^(3[47]\\d{13})$", LUHN_VALIDATOR);
/**
* Diners Card Validator
* <p>
* 300xxx - 305xxx (14) <br>
* 3095xx (14) <br>
* 36xxxx (14) <br>
* 38xxxx (14) <br>
* 39xxxx (14) <br>
*/
public static final CodeValidator DINERS_VALIDATOR = new CodeValidator("^(30[0-5]\\d{11}|3095\\d{10}|36\\d{12}|3[8-9]\\d{12})$", LUHN_VALIDATOR);
/**
* Discover Card regular expressions
* <p>
* 6011xx (16) <br>
* 644xxx - 65xxxx (16) <br>
*/
private static final RegexValidator DISCOVER_REGEX = new RegexValidator("^(6011\\d{12,13})$", "^(64[4-9]\\d{13})$", "^(65\\d{14})$", "^(62[2-8]\\d{13})$");
/** Discover Card Validator */
public static final CodeValidator DISCOVER_VALIDATOR = new CodeValidator(DISCOVER_REGEX, LUHN_VALIDATOR);
/**
* Mastercard regular expressions
* <p>
* 2221xx - 2720xx (16) <br>
* 51xxx - 55xxx (16) <br>
*/
private static final RegexValidator MASTERCARD_REGEX = new RegexValidator(
"^(5[1-5]\\d{14})$", // 51 - 55 (pre Oct 2016)
// valid from October 2016
"^(2221\\d{12})$", // 222100 - 222199
"^(222[2-9]\\d{12})$",// 222200 - 222999
"^(22[3-9]\\d{13})$", // 223000 - 229999
"^(2[3-6]\\d{14})$", // 230000 - 269999
"^(27[01]\\d{13})$", // 270000 - 271999
"^(2720\\d{12})$" // 272000 - 272099
);
/** Mastercard Card Validator */
public static final CodeValidator MASTERCARD_VALIDATOR = new CodeValidator(MASTERCARD_REGEX, LUHN_VALIDATOR);
/**
* Mastercard Card Validator (pre Oct 2016)
* @deprecated for use until Oct 2016 only
*/
@Deprecated
public static final CodeValidator MASTERCARD_VALIDATOR_PRE_OCT2016 = new CodeValidator("^(5[1-5]\\d{14})$", LUHN_VALIDATOR);
/**
* Visa Card Validator
* <p>
* 4xxxxx (13 or 16)
*/
public static final CodeValidator VISA_VALIDATOR = new CodeValidator("^(4)(\\d{12}|\\d{15})$", LUHN_VALIDATOR);
/** VPay (Visa) Card Validator
* <p>
* 4xxxxx (13-19)
* @since 1.5.0
*/
public static final CodeValidator VPAY_VALIDATOR = new CodeValidator("^(4)(\\d{12,18})$", LUHN_VALIDATOR);
/**
* Create a new CreditCardValidator with default options.
* The default options are:
* AMEX, VISA, MASTERCARD and DISCOVER
*/
public CreditCardValidator() {
this(AMEX + VISA + MASTERCARD + DISCOVER);
}
/**
* Create a new CreditCardValidator with the specified options.
* @param options Pass in
* CreditCardValidator.VISA + CreditCardValidator.AMEX to specify that
* those are the only valid card types.
*/
public CreditCardValidator(final long options) {
// register one predefined validator for each card-type bit that is set
if (isOn(options, VISA)) {
this.cardTypes.add(VISA_VALIDATOR);
}
if (isOn(options, VPAY)) {
this.cardTypes.add(VPAY_VALIDATOR);
}
if (isOn(options, AMEX)) {
this.cardTypes.add(AMEX_VALIDATOR);
}
if (isOn(options, MASTERCARD)) {
this.cardTypes.add(MASTERCARD_VALIDATOR);
}
if (isOn(options, MASTERCARD_PRE_OCT2016)) {
this.cardTypes.add(MASTERCARD_VALIDATOR_PRE_OCT2016);
}
if (isOn(options, DISCOVER)) {
this.cardTypes.add(DISCOVER_VALIDATOR);
}
if (isOn(options, DINERS)) {
this.cardTypes.add(DINERS_VALIDATOR);
}
}
/**
* Create a new CreditCardValidator with the specified {@link CodeValidator}s.
* @param creditCardValidators Set of valid code validators
* @throws IllegalArgumentException if creditCardValidators is null
*/
public CreditCardValidator(final CodeValidator[] creditCardValidators) {
if (creditCardValidators == null) {
throw new IllegalArgumentException("Card validators are missing");
}
Collections.addAll(cardTypes, creditCardValidators);
}
/**
* Create a new CreditCardValidator with the specified {@link CreditCardRange}s.
* @param creditCardRanges Set of valid code validators
* @throws IllegalArgumentException if creditCardRanges is null
* @since 1.6
*/
public CreditCardValidator(final CreditCardRange[] creditCardRanges) {
if (creditCardRanges == null) {
throw new IllegalArgumentException("Card ranges are missing");
}
// all ranges are folded into a single Luhn-checked validator
Collections.addAll(cardTypes, createRangeValidator(creditCardRanges, LUHN_VALIDATOR));
}
/**
* Create a new CreditCardValidator with the specified {@link CodeValidator}s
* and {@link CreditCardRange}s.
* <p>
* This can be used to combine predefined validators such as {@link #MASTERCARD_VALIDATOR}
* with additional validators using the simpler {@link CreditCardRange}s.
* @param creditCardValidators Set of valid code validators
* @param creditCardRanges Set of valid code validators
* @throws IllegalArgumentException if either argument is null
* @since 1.6
*/
public CreditCardValidator(final CodeValidator[] creditCardValidators, final CreditCardRange[] creditCardRanges) {
if (creditCardValidators == null) {
throw new IllegalArgumentException("Card validators are missing");
}
if (creditCardRanges == null) {
throw new IllegalArgumentException("Card ranges are missing");
}
Collections.addAll(cardTypes, creditCardValidators);
Collections.addAll(cardTypes, createRangeValidator(creditCardRanges, LUHN_VALIDATOR));
}
/**
* Create a new generic CreditCardValidator which validates the syntax and check digit only.
* Does not check the Issuer Identification Number (IIN)
*
* @param minLen minimum allowed length
* @param maxLen maximum allowed length
* @return the validator
* @since 1.6
*/
public static CreditCardValidator genericCreditCardValidator(final int minLen, final int maxLen) {
// any all-digit string of acceptable length that passes the Luhn check
return new CreditCardValidator(new CodeValidator[] {new CodeValidator("(\\d+)", minLen, maxLen, LUHN_VALIDATOR)});
}
/**
* Create a new generic CreditCardValidator which validates the syntax and check digit only.
* Does not check the Issuer Identification Number (IIN)
*
* @param length exact length
* @return the validator
* @since 1.6
*/
public static CreditCardValidator genericCreditCardValidator(final int length) {
return genericCreditCardValidator(length, length);
}
/**
* Create a new generic CreditCardValidator which validates the syntax and check digit only.
* Does not check the Issuer Identification Number (IIN)
*
* @return the validator
* @since 1.6
*/
public static CreditCardValidator genericCreditCardValidator() {
return genericCreditCardValidator(MIN_CC_LENGTH, MAX_CC_LENGTH);
}
/**
* Checks if the field is a valid credit card number.
* @param card The card number to validate.
* @return Whether the card number is valid.
*/
public boolean isValid(final String card) {
if (card == null || card.isEmpty()) {
return false;
}
// valid if ANY of the configured card types accepts the number
for (final CodeValidator cardType : cardTypes) {
if (cardType.isValid(card)) {
return true;
}
}
return false;
}
/**
* Checks if the field is a valid credit card number.
* @param card The card number to validate.
* @return The card number if valid or <code>null</code>
* if invalid.
*/
public Object validate(final String card) {
if (card == null || card.isEmpty()) {
return null;
}
Object result = null;
// return the first non-null result produced by a configured card type
for (final CodeValidator cardType : cardTypes) {
result = cardType.validate(card);
if (result != null) {
return result;
}
}
return null;
}
// package protected for unit test access
// Checks whether the overall number length is acceptable for the given range:
// an explicit lengths array (if present) takes precedence over the min/max pair.
static boolean validLength(final int valueLength, final CreditCardRange range) {
if (range.lengths != null) {
for(final int length : range.lengths) {
if (valueLength == length) {
return true;
}
}
return false;
}
return valueLength >= range.minLen && valueLength <= range.maxLen;
}
// package protected for unit test access
// Builds a single CodeValidator that accepts a number when it matches the
// length and IIN prefix of at least one of the supplied ranges, then applies
// the supplied check digit routine.
static CodeValidator createRangeValidator(final CreditCardRange[] creditCardRanges, final CheckDigit digitCheck ) {
return new CodeValidator(
// must be numeric (rest of validation is done later)
new RegexValidator("(\\d+)") {
private static final long serialVersionUID = 1L;
// defensive copy; later changes to the caller's array cannot affect this validator
private final CreditCardRange[] ccr = creditCardRanges.clone();
@Override
// must return full string
public String validate(final String value) {
if (super.match(value) != null) {
final int length = value.length();
for(final CreditCardRange range : ccr) {
if (validLength(length, range)) {
if (range.high == null) { // single prefix only
if (value.startsWith(range.low)) {
return value;
}
} else if (range.low.compareTo(value) <= 0 // no need to trim value here
&&
// here we have to ignore digits beyond the prefix
range.high.compareTo(value.substring(0, range.high.length())) >= 0) {
return value;
}
}
}
}
return null;
}
@Override
public boolean isValid(final String value) {
return validate(value) != null;
}
@Override
// NOTE: always returns a one-element array; the element is null when the value is invalid
public String[] match(final String value) {
return new String[] { validate(value) };
}
}, digitCheck);
}
/**
* Tests whether the given flag is on. If the flag is not a power of 2
* (ie. 3) this tests whether the combination of flags is on.
*
* @param options The options specified.
* @param flag Flag value to check.
*
* @return whether the specified flag value is on.
*/
private boolean isOn(final long options, final long flag) {
// an unset bit (or the NONE flag, which is 0) yields 0, which is not > 0
return (options & flag) > 0;
}
}
| 8,001 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/PercentValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.text.Format;
/**
* <p><b>Percentage Validation</b> and Conversion routines (<code>java.math.BigDecimal</code>).</p>
*
* <p>This is one implementation of a percent validator that has the following features:</p>
* <ul>
* <li>It is <i>lenient</i> about the presence of the <i>percent symbol</i></li>
* <li>It converts the percent to a <code>java.math.BigDecimal</code></li>
* </ul>
*
* <p>However any of the <i>number</i> validators can be used for <i>percent</i> validation.
* For example, if you wanted a <i>percent</i> validator that converts to a
* <code>java.lang.Float</code> then you can simply instantiate an
* <code>FloatValidator</code> with the appropriate <i>format type</i>:</p>
*
* <p><code>... = new FloatValidator(false, FloatValidator.PERCENT_FORMAT);</code></p>
*
* <p>Pick the appropriate validator, depending on the type (i.e Float, Double or BigDecimal)
* you want the percent converted to. Please note, it makes no sense to use
* one of the validators that doesn't handle fractions (i.e. byte, short, integer, long
* and BigInteger) since percentages are converted to fractions (i.e <code>50%</code> is
* converted to <code>0.5</code>).</p>
*
* @since 1.3.0
*/
public class PercentValidator extends BigDecimalValidator {
private static final long serialVersionUID = -3508241924961535772L;
/** Shared singleton returned by {@link #getInstance()}. */
private static final PercentValidator VALIDATOR = new PercentValidator();
/** DecimalFormat's percent symbol (in a pattern, '%' multiplies the value by 100) */
private static final char PERCENT_SYMBOL = '%';
// factor used to convert a parsed whole-number percentage into its fractional value
private static final BigDecimal POINT_ZERO_ONE = new BigDecimal("0.01");
/**
* Return a singleton instance of this validator.
* @return A singleton instance of the PercentValidator.
*/
public static BigDecimalValidator getInstance() {
return VALIDATOR;
}
/**
* Constructs a <i>strict</i> instance.
*/
public PercentValidator() {
this(true);
}
/**
* Constructs an instance with the specified strict setting.
*
* @param strict {@code true} if strict
* <code>Format</code> parsing should be used.
*/
public PercentValidator(final boolean strict) {
super(strict, PERCENT_FORMAT, true);
}
/**
* <p>Parse the value with the specified <code>Format</code>.</p>
*
* <p>This implementation is lenient whether the percent symbol
* is present or not. The default <code>NumberFormat</code>
* behavior is for the parsing to "fail" if the percent
* symbol is missing. This method re-parses with a format
* without the percent symbol if it fails initially.</p>
*
* @param value The value to be parsed.
* @param formatter The Format to parse the value with.
* @return The parsed value if valid or <code>null</code> if invalid.
*/
@Override
protected Object parse(final String value, final Format formatter) {
// Initial parse of the value
BigDecimal parsedValue = (BigDecimal)super.parse(value, formatter);
if (parsedValue != null || !(formatter instanceof DecimalFormat)) {
return parsedValue;
}
// Re-parse using a pattern without the percent symbol
final DecimalFormat decimalFormat = (DecimalFormat)formatter;
final String pattern = decimalFormat.toPattern();
if (pattern.indexOf(PERCENT_SYMBOL) >= 0) {
// strip every '%' character from the pattern
final StringBuilder buffer = new StringBuilder(pattern.length());
for (int i = 0; i < pattern.length(); i++) {
if (pattern.charAt(i) != PERCENT_SYMBOL) {
buffer.append(pattern.charAt(i));
}
}
// NOTE(review): applyPattern mutates the formatter passed in - the caller's
// DecimalFormat keeps the stripped pattern after this call
decimalFormat.applyPattern(buffer.toString());
parsedValue = (BigDecimal)super.parse(value, decimalFormat);
// If parsed OK, multiply by 0.01 (i.e. divide by 100) to convert to a fraction,
// mirroring what the '%' pattern symbol would have done
if (parsedValue != null) {
parsedValue = parsedValue.multiply(POINT_ZERO_ONE);
}
}
return parsedValue;
}
}
| 8,002 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/DomainValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.io.Serializable;
import java.net.IDN;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
/**
* <p><b>Domain name</b> validation routines.</p>
*
* <p>
* This validator provides methods for validating Internet domain names
* and top-level domains.
* </p>
*
* <p>Domain names are evaluated according
* to the standards <a href="http://www.ietf.org/rfc/rfc1034.txt">RFC1034</a>,
* section 3, and <a href="http://www.ietf.org/rfc/rfc1123.txt">RFC1123</a>,
* section 2.1. No accommodation is provided for the specialized needs of
* other applications; if the domain name has been URL-encoded, for example,
* validation will fail even though the equivalent plaintext version of the
* same name would have passed.
* </p>
*
* <p>
* Validation is also provided for top-level domains (TLDs) as defined and
* maintained by the Internet Assigned Numbers Authority (IANA):
* </p>
*
* <ul>
* <li>{@link #isValidInfrastructureTld} - validates infrastructure TLDs
* (<code>.arpa</code>, etc.)</li>
* <li>{@link #isValidGenericTld} - validates generic TLDs
* (<code>.com, .org</code>, etc.)</li>
* <li>{@link #isValidCountryCodeTld} - validates country code TLDs
* (<code>.us, .uk, .cn</code>, etc.)</li>
* </ul>
*
* <p>
* (<b>NOTE</b>: This class does not provide IP address lookup for domain names or
* methods to ensure that a given domain name matches a specific IP; see
* {@link java.net.InetAddress} for that functionality.)
* </p>
*
* @since 1.4
*/
public class DomainValidator implements Serializable {
/** Maximum allowable length ({@value}) of a domain name */
private static final int MAX_DOMAIN_LENGTH = 253;
private static final String[] EMPTY_STRING_ARRAY = {};
private static final long serialVersionUID = -4407125112880174009L;
// Regular expression strings for hostnames (derived from RFC2396 and RFC 1123)
// RFC2396: domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
// Max 63 characters
private static final String DOMAIN_LABEL_REGEX = "\\p{Alnum}(?>[\\p{Alnum}-]{0,61}\\p{Alnum})?";
// RFC2396 toplabel = alpha | alpha *( alphanum | "-" ) alphanum
// Max 63 characters
private static final String TOP_LABEL_REGEX = "\\p{Alpha}(?>[\\p{Alnum}-]{0,61}\\p{Alnum})?";
// RFC2396 hostname = *( domainlabel "." ) toplabel [ "." ]
// Note that the regex currently requires both a domain label and a top level label, whereas
// the RFC does not. This is because the regex is used to detect if a TLD is present.
// If the match fails, input is checked against DOMAIN_LABEL_REGEX (hostnameRegex)
// RFC1123 sec 2.1 allows hostnames to start with a digit
private static final String DOMAIN_NAME_REGEX =
"^(?:" + DOMAIN_LABEL_REGEX + "\\.)+" + "(" + TOP_LABEL_REGEX + ")\\.?$";
private static final String UNEXPECTED_ENUM_VALUE = "Unexpected enum value: ";
private final boolean allowLocal;
private static class LazyHolder { // IODH
/**
* Singleton instance of this validator, which
* doesn't consider local addresses as valid.
*/
private static final DomainValidator DOMAIN_VALIDATOR = new DomainValidator(false);
/**
* Singleton instance of this validator, which does
* consider local addresses valid.
*/
private static final DomainValidator DOMAIN_VALIDATOR_WITH_LOCAL = new DomainValidator(true);
}
/**
* The above instances must only be returned via the getInstance() methods.
* This is to ensure that the override data arrays are properly protected.
*/
/**
* RegexValidator for matching domains.
*/
private final RegexValidator domainRegex =
new RegexValidator(DOMAIN_NAME_REGEX);
/**
* RegexValidator for matching a local hostname
*/
// RFC1123 sec 2.1 allows hostnames to start with a digit
private final RegexValidator hostnameRegex =
new RegexValidator(DOMAIN_LABEL_REGEX);
/**
* Returns the singleton instance of this validator. It
* will not consider local addresses as valid.
* @return the singleton instance of this validator
*/
public static synchronized DomainValidator getInstance() {
// NOTE(review): inUse is a static flag declared elsewhere in this class;
// it appears to record that an instance has been handed out - confirm its purpose there
inUse = true;
return LazyHolder.DOMAIN_VALIDATOR;
}
/**
* Returns the singleton instance of this validator,
* with local validation as required.
* @param allowLocal Should local addresses be considered valid?
* @return the singleton instance of this validator
*/
public static synchronized DomainValidator getInstance(final boolean allowLocal) {
inUse = true;
if (allowLocal) {
return LazyHolder.DOMAIN_VALIDATOR_WITH_LOCAL;
}
return LazyHolder.DOMAIN_VALIDATOR;
}
    /**
     * Returns a new instance of this validator.
     * The user can provide a list of {@link Item} entries which can
     * be used to override the generic and country code lists.
     * Note that any such entries override values provided by the
     * {@link #updateTLDOverride(ArrayType, String[])} method.
     * If an entry for a particular type is not provided, then
     * the class override (if any) is retained.
     *
     * @param allowLocal Should local addresses be considered valid?
     * @param items - array of {@link Item} entries
     * @return an instance of this validator
     * @since 1.7
     */
    public static synchronized DomainValidator getInstance(final boolean allowLocal, final List<Item> items) {
        // unlike the no-items overloads, this always returns a fresh instance
        inUse = true;
        return new DomainValidator(allowLocal, items);
    }
    // instance variables allowing local overrides
    // (each either aliases the class-level override array or holds a sorted,
    // lower-cased copy supplied via the Item-list constructor)
    final String[] myCountryCodeTLDsMinus;
    final String[] myCountryCodeTLDsPlus;
    final String[] myGenericTLDsPlus;
    final String[] myGenericTLDsMinus;
    final String[] myLocalTLDsPlus;
    final String[] myLocalTLDsMinus;
    /*
     * N.B. It is vital that instances are immutable.
     * This is because the default instances are shared.
     */
    // N.B. The constructors are deliberately private to avoid possible problems with unsafe publication.
    // It is vital that the static override arrays are not mutable once they have been used in an instance
    // The arrays could be copied into the instance variables, however if the static array were changed it could
    // result in different settings for the shared default instances
    /**
     * Private constructor.
     *
     * <p>Links the instance override fields directly to the class-level
     * override arrays (no copies are made — see the publication notes above).</p>
     *
     * @param allowLocal whether local addresses should be considered valid
     */
    private DomainValidator(final boolean allowLocal) {
        this.allowLocal = allowLocal;
        // link to class overrides
        myCountryCodeTLDsMinus = countryCodeTLDsMinus;
        myCountryCodeTLDsPlus = countryCodeTLDsPlus;
        myGenericTLDsPlus = genericTLDsPlus;
        myGenericTLDsMinus = genericTLDsMinus;
        myLocalTLDsPlus = localTLDsPlus;
        myLocalTLDsMinus = localTLDsMinus;
    }
/**
* Private constructor, allowing local overrides
* @since 1.7
*/
private DomainValidator(final boolean allowLocal, final List<Item> items) {
this.allowLocal = allowLocal;
// default to class overrides
String[] ccMinus = countryCodeTLDsMinus;
String[] ccPlus = countryCodeTLDsPlus;
String[] genMinus = genericTLDsMinus;
String[] genPlus = genericTLDsPlus;
String[] localMinus = localTLDsMinus;
String[] localPlus = localTLDsPlus;
// apply the instance overrides
for(final Item item: items) {
final String [] copy = new String[item.values.length];
// Comparisons are always done with lower-case entries
for (int i = 0; i < item.values.length; i++) {
copy[i] = item.values[i].toLowerCase(Locale.ENGLISH);
}
Arrays.sort(copy);
switch(item.type) {
case COUNTRY_CODE_MINUS: {
ccMinus = copy;
break;
}
case COUNTRY_CODE_PLUS: {
ccPlus = copy;
break;
}
case GENERIC_MINUS: {
genMinus = copy;
break;
}
case GENERIC_PLUS: {
genPlus = copy;
break;
}
case LOCAL_MINUS: {
localMinus = copy;
break;
}
case LOCAL_PLUS: {
localPlus = copy;
break;
}
default:
break;
}
}
// init the instance overrides
myCountryCodeTLDsMinus = ccMinus;
myCountryCodeTLDsPlus = ccPlus;
myGenericTLDsMinus = genMinus;
myGenericTLDsPlus = genPlus;
myLocalTLDsMinus = localMinus;
myLocalTLDsPlus = localPlus;
}
/**
* Returns true if the specified <code>String</code> parses
* as a valid domain name with a recognized top-level domain.
* The parsing is case-insensitive.
* @param domain the parameter to check for domain name syntax
* @return true if the parameter is a valid domain name
*/
public boolean isValid(String domain) {
if (domain == null) {
return false;
}
domain = unicodeToASCII(domain);
// hosts must be equally reachable via punycode and Unicode
// Unicode is never shorter than punycode, so check punycode
// if domain did not convert, then it will be caught by ASCII
// checks in the regexes below
if (domain.length() > MAX_DOMAIN_LENGTH) {
return false;
}
final String[] groups = domainRegex.match(domain);
if (groups != null && groups.length > 0) {
return isValidTld(groups[0]);
}
return allowLocal && hostnameRegex.isValid(domain);
}
// package protected for unit test access
// must agree with isValid() above
final boolean isValidDomainSyntax(String domain) {
if (domain == null) {
return false;
}
domain = unicodeToASCII(domain);
// hosts must be equally reachable via punycode and Unicode
// Unicode is never shorter than punycode, so check punycode
// if domain did not convert, then it will be caught by ASCII
// checks in the regexes below
if (domain.length() > MAX_DOMAIN_LENGTH) {
return false;
}
final String[] groups = domainRegex.match(domain);
return groups != null && groups.length > 0
|| hostnameRegex.isValid(domain);
}
/**
* Returns true if the specified <code>String</code> matches any
* IANA-defined top-level domain. Leading dots are ignored if present.
* The search is case-insensitive.
* <p>
* If allowLocal is true, the TLD is checked using {@link #isValidLocalTld(String)}.
* The TLD is then checked against {@link #isValidInfrastructureTld(String)},
* {@link #isValidGenericTld(String)} and {@link #isValidCountryCodeTld(String)}
* @param tld the parameter to check for TLD status, not null
* @return true if the parameter is a TLD
*/
public boolean isValidTld(final String tld) {
if (allowLocal && isValidLocalTld(tld)) {
return true;
}
return isValidInfrastructureTld(tld)
|| isValidGenericTld(tld)
|| isValidCountryCodeTld(tld);
}
/**
* Returns true if the specified <code>String</code> matches any
* IANA-defined infrastructure top-level domain. Leading dots are
* ignored if present. The search is case-insensitive.
* @param iTld the parameter to check for infrastructure TLD status, not null
* @return true if the parameter is an infrastructure TLD
*/
public boolean isValidInfrastructureTld(final String iTld) {
final String key = chompLeadingDot(unicodeToASCII(iTld).toLowerCase(Locale.ENGLISH));
return arrayContains(INFRASTRUCTURE_TLDS, key);
}
/**
* Returns true if the specified <code>String</code> matches any
* IANA-defined generic top-level domain. Leading dots are ignored
* if present. The search is case-insensitive.
* @param gTld the parameter to check for generic TLD status, not null
* @return true if the parameter is a generic TLD
*/
public boolean isValidGenericTld(final String gTld) {
final String key = chompLeadingDot(unicodeToASCII(gTld).toLowerCase(Locale.ENGLISH));
return (arrayContains(GENERIC_TLDS, key) || arrayContains(myGenericTLDsPlus, key))
&& !arrayContains(myGenericTLDsMinus, key);
}
/**
* Returns true if the specified <code>String</code> matches any
* IANA-defined country code top-level domain. Leading dots are
* ignored if present. The search is case-insensitive.
* @param ccTld the parameter to check for country code TLD status, not null
* @return true if the parameter is a country code TLD
*/
public boolean isValidCountryCodeTld(final String ccTld) {
final String key = chompLeadingDot(unicodeToASCII(ccTld).toLowerCase(Locale.ENGLISH));
return (arrayContains(COUNTRY_CODE_TLDS, key) || arrayContains(myCountryCodeTLDsPlus, key))
&& !arrayContains(myCountryCodeTLDsMinus, key);
}
/**
* Returns true if the specified <code>String</code> matches any
* widely used "local" domains (localhost or localdomain). Leading dots are
* ignored if present. The search is case-insensitive.
* @param lTld the parameter to check for local TLD status, not null
* @return true if the parameter is an local TLD
*/
public boolean isValidLocalTld(final String lTld) {
final String key = chompLeadingDot(unicodeToASCII(lTld).toLowerCase(Locale.ENGLISH));
return (arrayContains(LOCAL_TLDS, key) || arrayContains(myLocalTLDsPlus, key))
&& !arrayContains(myLocalTLDsMinus, key);
}
    /**
     * Does this instance allow local addresses?
     *
     * @return true if local addresses are allowed.
     * @since 1.7
     */
    public boolean isAllowLocal() {
        return this.allowLocal;
    }
private String chompLeadingDot(final String str) {
if (str.startsWith(".")) {
return str.substring(1);
}
return str;
}
    // ---------------------------------------------
    // ----- TLDs defined by IANA
    // ----- Authoritative and comprehensive list at:
    // ----- http://data.iana.org/TLD/tlds-alpha-by-domain.txt
    // Note that the above list is in UPPER case.
    // The code currently converts strings to lower case (as per the tables below)
    // IANA also provide an HTML list at http://www.iana.org/domains/root/db
    // Note that this contains several country code entries which are NOT in
    // the text file. These all have the "Not assigned" in the "Sponsoring Organisation" column
    // For example (as of 2015-01-02):
    // .bl country-code Not assigned
    // .um country-code Not assigned
    // WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
    private static final String[] INFRASTRUCTURE_TLDS = {
        "arpa", // internet infrastructure
    };
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static final String[] GENERIC_TLDS = {
// Taken from Version 2023011200, Last Updated Thu Jan 12 07:07:01 2023 UTC
"aaa", // aaa American Automobile Association, Inc.
"aarp", // aarp AARP
"abarth", // abarth Fiat Chrysler Automobiles N.V.
"abb", // abb ABB Ltd
"abbott", // abbott Abbott Laboratories, Inc.
"abbvie", // abbvie AbbVie Inc.
"abc", // abc Disney Enterprises, Inc.
"able", // able Able Inc.
"abogado", // abogado Top Level Domain Holdings Limited
"abudhabi", // abudhabi Abu Dhabi Systems and Information Centre
"academy", // academy Half Oaks, LLC
"accenture", // accenture Accenture plc
"accountant", // accountant dot Accountant Limited
"accountants", // accountants Knob Town, LLC
"aco", // aco ACO Severin Ahlmann GmbH & Co. KG
// "active", // active The Active Network, Inc
"actor", // actor United TLD Holdco Ltd.
// "adac", // adac Allgemeiner Deutscher Automobil-Club e.V. (ADAC)
"ads", // ads Charleston Road Registry Inc.
"adult", // adult ICM Registry AD LLC
"aeg", // aeg Aktiebolaget Electrolux
"aero", // aero Societe Internationale de Telecommunications Aeronautique (SITA INC USA)
"aetna", // aetna Aetna Life Insurance Company
// "afamilycompany", // afamilycompany Johnson Shareholdings, Inc.
"afl", // afl Australian Football League
"africa", // africa ZA Central Registry NPC trading as Registry.Africa
"agakhan", // agakhan Fondation Aga Khan (Aga Khan Foundation)
"agency", // agency Steel Falls, LLC
"aig", // aig American International Group, Inc.
// "aigo", // aigo aigo Digital Technology Co,Ltd. [Not assigned as of Jul 25]
"airbus", // airbus Airbus S.A.S.
"airforce", // airforce United TLD Holdco Ltd.
"airtel", // airtel Bharti Airtel Limited
"akdn", // akdn Fondation Aga Khan (Aga Khan Foundation)
"alfaromeo", // alfaromeo Fiat Chrysler Automobiles N.V.
"alibaba", // alibaba Alibaba Group Holding Limited
"alipay", // alipay Alibaba Group Holding Limited
"allfinanz", // allfinanz Allfinanz Deutsche Vermögensberatung Aktiengesellschaft
"allstate", // allstate Allstate Fire and Casualty Insurance Company
"ally", // ally Ally Financial Inc.
"alsace", // alsace REGION D ALSACE
"alstom", // alstom ALSTOM
"amazon", // amazon Amazon Registry Services, Inc.
"americanexpress", // americanexpress American Express Travel Related Services Company, Inc.
"americanfamily", // americanfamily AmFam, Inc.
"amex", // amex American Express Travel Related Services Company, Inc.
"amfam", // amfam AmFam, Inc.
"amica", // amica Amica Mutual Insurance Company
"amsterdam", // amsterdam Gemeente Amsterdam
"analytics", // analytics Campus IP LLC
"android", // android Charleston Road Registry Inc.
"anquan", // anquan QIHOO 360 TECHNOLOGY CO. LTD.
"anz", // anz Australia and New Zealand Banking Group Limited
"aol", // aol AOL Inc.
"apartments", // apartments June Maple, LLC
"app", // app Charleston Road Registry Inc.
"apple", // apple Apple Inc.
"aquarelle", // aquarelle Aquarelle.com
"arab", // arab League of Arab States
"aramco", // aramco Aramco Services Company
"archi", // archi STARTING DOT LIMITED
"army", // army United TLD Holdco Ltd.
"art", // art UK Creative Ideas Limited
"arte", // arte Association Relative à la Télévision Européenne G.E.I.E.
"asda", // asda Wal-Mart Stores, Inc.
"asia", // asia DotAsia Organisation Ltd.
"associates", // associates Baxter Hill, LLC
"athleta", // athleta The Gap, Inc.
"attorney", // attorney United TLD Holdco, Ltd
"auction", // auction United TLD HoldCo, Ltd.
"audi", // audi AUDI Aktiengesellschaft
"audible", // audible Amazon Registry Services, Inc.
"audio", // audio Uniregistry, Corp.
"auspost", // auspost Australian Postal Corporation
"author", // author Amazon Registry Services, Inc.
"auto", // auto Uniregistry, Corp.
"autos", // autos DERAutos, LLC
"avianca", // avianca Aerovias del Continente Americano S.A. Avianca
"aws", // aws Amazon Registry Services, Inc.
"axa", // axa AXA SA
"azure", // azure Microsoft Corporation
"baby", // baby Johnson & Johnson Services, Inc.
"baidu", // baidu Baidu, Inc.
"banamex", // banamex Citigroup Inc.
"bananarepublic", // bananarepublic The Gap, Inc.
"band", // band United TLD Holdco, Ltd
"bank", // bank fTLD Registry Services, LLC
"bar", // bar Punto 2012 Sociedad Anonima Promotora de Inversion de Capital Variable
"barcelona", // barcelona Municipi de Barcelona
"barclaycard", // barclaycard Barclays Bank PLC
"barclays", // barclays Barclays Bank PLC
"barefoot", // barefoot Gallo Vineyards, Inc.
"bargains", // bargains Half Hallow, LLC
"baseball", // baseball MLB Advanced Media DH, LLC
"basketball", // basketball Fédération Internationale de Basketball (FIBA)
"bauhaus", // bauhaus Werkhaus GmbH
"bayern", // bayern Bayern Connect GmbH
"bbc", // bbc British Broadcasting Corporation
"bbt", // bbt BB&T Corporation
"bbva", // bbva BANCO BILBAO VIZCAYA ARGENTARIA, S.A.
"bcg", // bcg The Boston Consulting Group, Inc.
"bcn", // bcn Municipi de Barcelona
"beats", // beats Beats Electronics, LLC
"beauty", // beauty L'Oréal
"beer", // beer Top Level Domain Holdings Limited
"bentley", // bentley Bentley Motors Limited
"berlin", // berlin dotBERLIN GmbH & Co. KG
"best", // best BestTLD Pty Ltd
"bestbuy", // bestbuy BBY Solutions, Inc.
"bet", // bet Afilias plc
"bharti", // bharti Bharti Enterprises (Holding) Private Limited
"bible", // bible American Bible Society
"bid", // bid dot Bid Limited
"bike", // bike Grand Hollow, LLC
"bing", // bing Microsoft Corporation
"bingo", // bingo Sand Cedar, LLC
"bio", // bio STARTING DOT LIMITED
"biz", // biz Neustar, Inc.
"black", // black Afilias Limited
"blackfriday", // blackfriday Uniregistry, Corp.
// "blanco", // blanco BLANCO GmbH + Co KG
"blockbuster", // blockbuster Dish DBS Corporation
"blog", // blog Knock Knock WHOIS There, LLC
"bloomberg", // bloomberg Bloomberg IP Holdings LLC
"blue", // blue Afilias Limited
"bms", // bms Bristol-Myers Squibb Company
"bmw", // bmw Bayerische Motoren Werke Aktiengesellschaft
// "bnl", // bnl Banca Nazionale del Lavoro
"bnpparibas", // bnpparibas BNP Paribas
"boats", // boats DERBoats, LLC
"boehringer", // boehringer Boehringer Ingelheim International GmbH
"bofa", // bofa NMS Services, Inc.
"bom", // bom Núcleo de Informação e Coordenação do Ponto BR - NIC.br
"bond", // bond Bond University Limited
"boo", // boo Charleston Road Registry Inc.
"book", // book Amazon Registry Services, Inc.
"booking", // booking Booking.com B.V.
// "boots", // boots THE BOOTS COMPANY PLC
"bosch", // bosch Robert Bosch GMBH
"bostik", // bostik Bostik SA
"boston", // boston Boston TLD Management, LLC
"bot", // bot Amazon Registry Services, Inc.
"boutique", // boutique Over Galley, LLC
"box", // box NS1 Limited
"bradesco", // bradesco Banco Bradesco S.A.
"bridgestone", // bridgestone Bridgestone Corporation
"broadway", // broadway Celebrate Broadway, Inc.
"broker", // broker DOTBROKER REGISTRY LTD
"brother", // brother Brother Industries, Ltd.
"brussels", // brussels DNS.be vzw
// "budapest", // budapest Top Level Domain Holdings Limited
// "bugatti", // bugatti Bugatti International SA
"build", // build Plan Bee LLC
"builders", // builders Atomic Madison, LLC
"business", // business Spring Cross, LLC
"buy", // buy Amazon Registry Services, INC
"buzz", // buzz DOTSTRATEGY CO.
"bzh", // bzh Association www.bzh
"cab", // cab Half Sunset, LLC
"cafe", // cafe Pioneer Canyon, LLC
"cal", // cal Charleston Road Registry Inc.
"call", // call Amazon Registry Services, Inc.
"calvinklein", // calvinklein PVH gTLD Holdings LLC
"cam", // cam AC Webconnecting Holding B.V.
"camera", // camera Atomic Maple, LLC
"camp", // camp Delta Dynamite, LLC
// "cancerresearch", // cancerresearch Australian Cancer Research Foundation
"canon", // canon Canon Inc.
"capetown", // capetown ZA Central Registry NPC trading as ZA Central Registry
"capital", // capital Delta Mill, LLC
"capitalone", // capitalone Capital One Financial Corporation
"car", // car Cars Registry Limited
"caravan", // caravan Caravan International, Inc.
"cards", // cards Foggy Hollow, LLC
"care", // care Goose Cross, LLC
"career", // career dotCareer LLC
"careers", // careers Wild Corner, LLC
"cars", // cars Uniregistry, Corp.
// "cartier", // cartier Richemont DNS Inc.
"casa", // casa Top Level Domain Holdings Limited
"case", // case CNH Industrial N.V.
// "caseih", // caseih CNH Industrial N.V.
"cash", // cash Delta Lake, LLC
"casino", // casino Binky Sky, LLC
"cat", // cat Fundacio puntCAT
"catering", // catering New Falls. LLC
"catholic", // catholic Pontificium Consilium de Comunicationibus Socialibus (PCCS) (Pontifical Council for Social Communication)
"cba", // cba COMMONWEALTH BANK OF AUSTRALIA
"cbn", // cbn The Christian Broadcasting Network, Inc.
"cbre", // cbre CBRE, Inc.
"cbs", // cbs CBS Domains Inc.
// "ceb", // ceb The Corporate Executive Board Company
"center", // center Tin Mill, LLC
"ceo", // ceo CEOTLD Pty Ltd
"cern", // cern European Organization for Nuclear Research ("CERN")
"cfa", // cfa CFA Institute
"cfd", // cfd DOTCFD REGISTRY LTD
"chanel", // chanel Chanel International B.V.
"channel", // channel Charleston Road Registry Inc.
"charity", // charity Corn Lake, LLC
"chase", // chase JPMorgan Chase & Co.
"chat", // chat Sand Fields, LLC
"cheap", // cheap Sand Cover, LLC
"chintai", // chintai CHINTAI Corporation
// "chloe", // chloe Richemont DNS Inc. (Not assigned)
"christmas", // christmas Uniregistry, Corp.
"chrome", // chrome Charleston Road Registry Inc.
// "chrysler", // chrysler FCA US LLC.
"church", // church Holly Fileds, LLC
"cipriani", // cipriani Hotel Cipriani Srl
"circle", // circle Amazon Registry Services, Inc.
"cisco", // cisco Cisco Technology, Inc.
"citadel", // citadel Citadel Domain LLC
"citi", // citi Citigroup Inc.
"citic", // citic CITIC Group Corporation
"city", // city Snow Sky, LLC
"cityeats", // cityeats Lifestyle Domain Holdings, Inc.
"claims", // claims Black Corner, LLC
"cleaning", // cleaning Fox Shadow, LLC
"click", // click Uniregistry, Corp.
"clinic", // clinic Goose Park, LLC
"clinique", // clinique The Estée Lauder Companies Inc.
"clothing", // clothing Steel Lake, LLC
"cloud", // cloud ARUBA S.p.A.
"club", // club .CLUB DOMAINS, LLC
"clubmed", // clubmed Club Méditerranée S.A.
"coach", // coach Koko Island, LLC
"codes", // codes Puff Willow, LLC
"coffee", // coffee Trixy Cover, LLC
"college", // college XYZ.COM LLC
"cologne", // cologne NetCologne Gesellschaft für Telekommunikation mbH
"com", // com VeriSign Global Registry Services
"comcast", // comcast Comcast IP Holdings I, LLC
"commbank", // commbank COMMONWEALTH BANK OF AUSTRALIA
"community", // community Fox Orchard, LLC
"company", // company Silver Avenue, LLC
"compare", // compare iSelect Ltd
"computer", // computer Pine Mill, LLC
"comsec", // comsec VeriSign, Inc.
"condos", // condos Pine House, LLC
"construction", // construction Fox Dynamite, LLC
"consulting", // consulting United TLD Holdco, LTD.
"contact", // contact Top Level Spectrum, Inc.
"contractors", // contractors Magic Woods, LLC
"cooking", // cooking Top Level Domain Holdings Limited
"cookingchannel", // cookingchannel Lifestyle Domain Holdings, Inc.
"cool", // cool Koko Lake, LLC
"coop", // coop DotCooperation LLC
"corsica", // corsica Collectivité Territoriale de Corse
"country", // country Top Level Domain Holdings Limited
"coupon", // coupon Amazon Registry Services, Inc.
"coupons", // coupons Black Island, LLC
"courses", // courses OPEN UNIVERSITIES AUSTRALIA PTY LTD
"cpa", // cpa American Institute of Certified Public Accountants
"credit", // credit Snow Shadow, LLC
"creditcard", // creditcard Binky Frostbite, LLC
"creditunion", // creditunion CUNA Performance Resources, LLC
"cricket", // cricket dot Cricket Limited
"crown", // crown Crown Equipment Corporation
"crs", // crs Federated Co-operatives Limited
"cruise", // cruise Viking River Cruises (Bermuda) Ltd.
"cruises", // cruises Spring Way, LLC
// "csc", // csc Alliance-One Services, Inc.
"cuisinella", // cuisinella SALM S.A.S.
"cymru", // cymru Nominet UK
"cyou", // cyou Beijing Gamease Age Digital Technology Co., Ltd.
"dabur", // dabur Dabur India Limited
"dad", // dad Charleston Road Registry Inc.
"dance", // dance United TLD Holdco Ltd.
"data", // data Dish DBS Corporation
"date", // date dot Date Limited
"dating", // dating Pine Fest, LLC
"datsun", // datsun NISSAN MOTOR CO., LTD.
"day", // day Charleston Road Registry Inc.
"dclk", // dclk Charleston Road Registry Inc.
"dds", // dds Minds + Machines Group Limited
"deal", // deal Amazon Registry Services, Inc.
"dealer", // dealer Dealer Dot Com, Inc.
"deals", // deals Sand Sunset, LLC
"degree", // degree United TLD Holdco, Ltd
"delivery", // delivery Steel Station, LLC
"dell", // dell Dell Inc.
"deloitte", // deloitte Deloitte Touche Tohmatsu
"delta", // delta Delta Air Lines, Inc.
"democrat", // democrat United TLD Holdco Ltd.
"dental", // dental Tin Birch, LLC
"dentist", // dentist United TLD Holdco, Ltd
"desi", // desi Desi Networks LLC
"design", // design Top Level Design, LLC
"dev", // dev Charleston Road Registry Inc.
"dhl", // dhl Deutsche Post AG
"diamonds", // diamonds John Edge, LLC
"diet", // diet Uniregistry, Corp.
"digital", // digital Dash Park, LLC
"direct", // direct Half Trail, LLC
"directory", // directory Extra Madison, LLC
"discount", // discount Holly Hill, LLC
"discover", // discover Discover Financial Services
"dish", // dish Dish DBS Corporation
"diy", // diy Lifestyle Domain Holdings, Inc.
"dnp", // dnp Dai Nippon Printing Co., Ltd.
"docs", // docs Charleston Road Registry Inc.
"doctor", // doctor Brice Trail, LLC
// "dodge", // dodge FCA US LLC.
"dog", // dog Koko Mill, LLC
// "doha", // doha Communications Regulatory Authority (CRA)
"domains", // domains Sugar Cross, LLC
// "doosan", // doosan Doosan Corporation (retired)
"dot", // dot Dish DBS Corporation
"download", // download dot Support Limited
"drive", // drive Charleston Road Registry Inc.
"dtv", // dtv Dish DBS Corporation
"dubai", // dubai Dubai Smart Government Department
// "duck", // duck Johnson Shareholdings, Inc.
"dunlop", // dunlop The Goodyear Tire & Rubber Company
// "duns", // duns The Dun & Bradstreet Corporation
"dupont", // dupont E. I. du Pont de Nemours and Company
"durban", // durban ZA Central Registry NPC trading as ZA Central Registry
"dvag", // dvag Deutsche Vermögensberatung Aktiengesellschaft DVAG
"dvr", // dvr Hughes Satellite Systems Corporation
"earth", // earth Interlink Co., Ltd.
"eat", // eat Charleston Road Registry Inc.
"eco", // eco Big Room Inc.
"edeka", // edeka EDEKA Verband kaufmännischer Genossenschaften e.V.
"edu", // edu EDUCAUSE
"education", // education Brice Way, LLC
"email", // email Spring Madison, LLC
"emerck", // emerck Merck KGaA
"energy", // energy Binky Birch, LLC
"engineer", // engineer United TLD Holdco Ltd.
"engineering", // engineering Romeo Canyon
"enterprises", // enterprises Snow Oaks, LLC
// "epost", // epost Deutsche Post AG
"epson", // epson Seiko Epson Corporation
"equipment", // equipment Corn Station, LLC
"ericsson", // ericsson Telefonaktiebolaget L M Ericsson
"erni", // erni ERNI Group Holding AG
"esq", // esq Charleston Road Registry Inc.
"estate", // estate Trixy Park, LLC
// "esurance", // esurance Esurance Insurance Company (not assigned as at Version 2020062100)
"etisalat", // etisalat Emirates Telecommunic
"eurovision", // eurovision European Broadcasting Union (EBU)
"eus", // eus Puntueus Fundazioa
"events", // events Pioneer Maple, LLC
// "everbank", // everbank EverBank
"exchange", // exchange Spring Falls, LLC
"expert", // expert Magic Pass, LLC
"exposed", // exposed Victor Beach, LLC
"express", // express Sea Sunset, LLC
"extraspace", // extraspace Extra Space Storage LLC
"fage", // fage Fage International S.A.
"fail", // fail Atomic Pipe, LLC
"fairwinds", // fairwinds FairWinds Partners, LLC
"faith", // faith dot Faith Limited
"family", // family United TLD Holdco Ltd.
"fan", // fan Asiamix Digital Ltd
"fans", // fans Asiamix Digital Limited
"farm", // farm Just Maple, LLC
"farmers", // farmers Farmers Insurance Exchange
"fashion", // fashion Top Level Domain Holdings Limited
"fast", // fast Amazon Registry Services, Inc.
"fedex", // fedex Federal Express Corporation
"feedback", // feedback Top Level Spectrum, Inc.
"ferrari", // ferrari Fiat Chrysler Automobiles N.V.
"ferrero", // ferrero Ferrero Trading Lux S.A.
"fiat", // fiat Fiat Chrysler Automobiles N.V.
"fidelity", // fidelity Fidelity Brokerage Services LLC
"fido", // fido Rogers Communications Canada Inc.
"film", // film Motion Picture Domain Registry Pty Ltd
"final", // final Núcleo de Informação e Coordenação do Ponto BR - NIC.br
"finance", // finance Cotton Cypress, LLC
"financial", // financial Just Cover, LLC
"fire", // fire Amazon Registry Services, Inc.
"firestone", // firestone Bridgestone Corporation
"firmdale", // firmdale Firmdale Holdings Limited
"fish", // fish Fox Woods, LLC
"fishing", // fishing Top Level Domain Holdings Limited
"fit", // fit Minds + Machines Group Limited
"fitness", // fitness Brice Orchard, LLC
"flickr", // flickr Yahoo! Domain Services Inc.
"flights", // flights Fox Station, LLC
"flir", // flir FLIR Systems, Inc.
"florist", // florist Half Cypress, LLC
"flowers", // flowers Uniregistry, Corp.
// "flsmidth", // flsmidth FLSmidth A/S retired 2016-07-22
"fly", // fly Charleston Road Registry Inc.
"foo", // foo Charleston Road Registry Inc.
"food", // food Lifestyle Domain Holdings, Inc.
"foodnetwork", // foodnetwork Lifestyle Domain Holdings, Inc.
"football", // football Foggy Farms, LLC
"ford", // ford Ford Motor Company
"forex", // forex DOTFOREX REGISTRY LTD
"forsale", // forsale United TLD Holdco, LLC
"forum", // forum Fegistry, LLC
"foundation", // foundation John Dale, LLC
"fox", // fox FOX Registry, LLC
"free", // free Amazon Registry Services, Inc.
"fresenius", // fresenius Fresenius Immobilien-Verwaltungs-GmbH
"frl", // frl FRLregistry B.V.
"frogans", // frogans OP3FT
"frontdoor", // frontdoor Lifestyle Domain Holdings, Inc.
"frontier", // frontier Frontier Communications Corporation
"ftr", // ftr Frontier Communications Corporation
"fujitsu", // fujitsu Fujitsu Limited
// "fujixerox", // fujixerox Xerox DNHC LLC
"fun", // fun DotSpace, Inc.
"fund", // fund John Castle, LLC
"furniture", // furniture Lone Fields, LLC
"futbol", // futbol United TLD Holdco, Ltd.
"fyi", // fyi Silver Tigers, LLC
"gal", // gal Asociación puntoGAL
"gallery", // gallery Sugar House, LLC
"gallo", // gallo Gallo Vineyards, Inc.
"gallup", // gallup Gallup, Inc.
"game", // game Uniregistry, Corp.
"games", // games United TLD Holdco Ltd.
"gap", // gap The Gap, Inc.
"garden", // garden Top Level Domain Holdings Limited
"gay", // gay Top Level Design, LLC
"gbiz", // gbiz Charleston Road Registry Inc.
"gdn", // gdn Joint Stock Company "Navigation-information systems"
"gea", // gea GEA Group Aktiengesellschaft
"gent", // gent COMBELL GROUP NV/SA
"genting", // genting Resorts World Inc. Pte. Ltd.
"george", // george Wal-Mart Stores, Inc.
"ggee", // ggee GMO Internet, Inc.
"gift", // gift Uniregistry, Corp.
"gifts", // gifts Goose Sky, LLC
"gives", // gives United TLD Holdco Ltd.
"giving", // giving Giving Limited
// "glade", // glade Johnson Shareholdings, Inc.
"glass", // glass Black Cover, LLC
"gle", // gle Charleston Road Registry Inc.
"global", // global Dot Global Domain Registry Limited
"globo", // globo Globo Comunicação e Participações S.A
"gmail", // gmail Charleston Road Registry Inc.
"gmbh", // gmbh Extra Dynamite, LLC
"gmo", // gmo GMO Internet, Inc.
"gmx", // gmx 1&1 Mail & Media GmbH
"godaddy", // godaddy Go Daddy East, LLC
"gold", // gold June Edge, LLC
"goldpoint", // goldpoint YODOBASHI CAMERA CO.,LTD.
"golf", // golf Lone Falls, LLC
"goo", // goo NTT Resonant Inc.
// "goodhands", // goodhands Allstate Fire and Casualty Insurance Company
"goodyear", // goodyear The Goodyear Tire & Rubber Company
"goog", // goog Charleston Road Registry Inc.
"google", // google Charleston Road Registry Inc.
"gop", // gop Republican State Leadership Committee, Inc.
"got", // got Amazon Registry Services, Inc.
"gov", // gov General Services Administration Attn: QTDC, 2E08 (.gov Domain Registration)
"grainger", // grainger Grainger Registry Services, LLC
"graphics", // graphics Over Madison, LLC
"gratis", // gratis Pioneer Tigers, LLC
"green", // green Afilias Limited
"gripe", // gripe Corn Sunset, LLC
"grocery", // grocery Wal-Mart Stores, Inc.
"group", // group Romeo Town, LLC
"guardian", // guardian The Guardian Life Insurance Company of America
"gucci", // gucci Guccio Gucci S.p.a.
"guge", // guge Charleston Road Registry Inc.
"guide", // guide Snow Moon, LLC
"guitars", // guitars Uniregistry, Corp.
"guru", // guru Pioneer Cypress, LLC
"hair", // hair L'Oreal
"hamburg", // hamburg Hamburg Top-Level-Domain GmbH
"hangout", // hangout Charleston Road Registry Inc.
"haus", // haus United TLD Holdco, LTD.
"hbo", // hbo HBO Registry Services, Inc.
"hdfc", // hdfc HOUSING DEVELOPMENT FINANCE CORPORATION LIMITED
"hdfcbank", // hdfcbank HDFC Bank Limited
"health", // health DotHealth, LLC
"healthcare", // healthcare Silver Glen, LLC
"help", // help Uniregistry, Corp.
"helsinki", // helsinki City of Helsinki
"here", // here Charleston Road Registry Inc.
"hermes", // hermes Hermes International
"hgtv", // hgtv Lifestyle Domain Holdings, Inc.
"hiphop", // hiphop Uniregistry, Corp.
"hisamitsu", // hisamitsu Hisamitsu Pharmaceutical Co.,Inc.
"hitachi", // hitachi Hitachi, Ltd.
"hiv", // hiv dotHIV gemeinnuetziger e.V.
"hkt", // hkt PCCW-HKT DataCom Services Limited
"hockey", // hockey Half Willow, LLC
"holdings", // holdings John Madison, LLC
"holiday", // holiday Goose Woods, LLC
"homedepot", // homedepot Homer TLC, Inc.
"homegoods", // homegoods The TJX Companies, Inc.
"homes", // homes DERHomes, LLC
"homesense", // homesense The TJX Companies, Inc.
"honda", // honda Honda Motor Co., Ltd.
// "honeywell", // honeywell Honeywell GTLD LLC
"horse", // horse Top Level Domain Holdings Limited
"hospital", // hospital Ruby Pike, LLC
"host", // host DotHost Inc.
"hosting", // hosting Uniregistry, Corp.
"hot", // hot Amazon Registry Services, Inc.
"hoteles", // hoteles Travel Reservations SRL
"hotels", // hotels Booking.com B.V.
"hotmail", // hotmail Microsoft Corporation
"house", // house Sugar Park, LLC
"how", // how Charleston Road Registry Inc.
"hsbc", // hsbc HSBC Holdings PLC
// "htc", // htc HTC corporation (Not assigned)
"hughes", // hughes Hughes Satellite Systems Corporation
"hyatt", // hyatt Hyatt GTLD, L.L.C.
"hyundai", // hyundai Hyundai Motor Company
"ibm", // ibm International Business Machines Corporation
"icbc", // icbc Industrial and Commercial Bank of China Limited
"ice", // ice IntercontinentalExchange, Inc.
"icu", // icu One.com A/S
"ieee", // ieee IEEE Global LLC
"ifm", // ifm ifm electronic gmbh
// "iinet", // iinet Connect West Pty. Ltd. (Retired)
"ikano", // ikano Ikano S.A.
"imamat", // imamat Fondation Aga Khan (Aga Khan Foundation)
"imdb", // imdb Amazon Registry Services, Inc.
"immo", // immo Auburn Bloom, LLC
"immobilien", // immobilien United TLD Holdco Ltd.
"inc", // inc Intercap Holdings Inc.
"industries", // industries Outer House, LLC
"infiniti", // infiniti NISSAN MOTOR CO., LTD.
"info", // info Afilias Limited
"ing", // ing Charleston Road Registry Inc.
"ink", // ink Top Level Design, LLC
"institute", // institute Outer Maple, LLC
"insurance", // insurance fTLD Registry Services LLC
"insure", // insure Pioneer Willow, LLC
"int", // int Internet Assigned Numbers Authority
// "intel", // intel Intel Corporation
"international", // international Wild Way, LLC
"intuit", // intuit Intuit Administrative Services, Inc.
"investments", // investments Holly Glen, LLC
"ipiranga", // ipiranga Ipiranga Produtos de Petroleo S.A.
"irish", // irish Dot-Irish LLC
// "iselect", // iselect iSelect Ltd
"ismaili", // ismaili Fondation Aga Khan (Aga Khan Foundation)
"ist", // ist Istanbul Metropolitan Municipality
"istanbul", // istanbul Istanbul Metropolitan Municipality / Medya A.S.
"itau", // itau Itau Unibanco Holding S.A.
"itv", // itv ITV Services Limited
// "iveco", // iveco CNH Industrial N.V.
// "iwc", // iwc Richemont DNS Inc.
"jaguar", // jaguar Jaguar Land Rover Ltd
"java", // java Oracle Corporation
"jcb", // jcb JCB Co., Ltd.
// "jcp", // jcp JCP Media, Inc.
"jeep", // jeep FCA US LLC.
"jetzt", // jetzt New TLD Company AB
"jewelry", // jewelry Wild Bloom, LLC
"jio", // jio Affinity Names, Inc.
// "jlc", // jlc Richemont DNS Inc.
"jll", // jll Jones Lang LaSalle Incorporated
"jmp", // jmp Matrix IP LLC
"jnj", // jnj Johnson & Johnson Services, Inc.
"jobs", // jobs Employ Media LLC
"joburg", // joburg ZA Central Registry NPC trading as ZA Central Registry
"jot", // jot Amazon Registry Services, Inc.
"joy", // joy Amazon Registry Services, Inc.
"jpmorgan", // jpmorgan JPMorgan Chase & Co.
"jprs", // jprs Japan Registry Services Co., Ltd.
"juegos", // juegos Uniregistry, Corp.
"juniper", // juniper JUNIPER NETWORKS, INC.
"kaufen", // kaufen United TLD Holdco Ltd.
"kddi", // kddi KDDI CORPORATION
"kerryhotels", // kerryhotels Kerry Trading Co. Limited
"kerrylogistics", // kerrylogistics Kerry Trading Co. Limited
"kerryproperties", // kerryproperties Kerry Trading Co. Limited
"kfh", // kfh Kuwait Finance House
"kia", // kia KIA MOTORS CORPORATION
"kids", // kids DotKids Foundation Limited
"kim", // kim Afilias Limited
"kinder", // kinder Ferrero Trading Lux S.A.
"kindle", // kindle Amazon Registry Services, Inc.
"kitchen", // kitchen Just Goodbye, LLC
"kiwi", // kiwi DOT KIWI LIMITED
"koeln", // koeln NetCologne Gesellschaft für Telekommunikation mbH
"komatsu", // komatsu Komatsu Ltd.
"kosher", // kosher Kosher Marketing Assets LLC
"kpmg", // kpmg KPMG International Cooperative (KPMG International Genossenschaft)
"kpn", // kpn Koninklijke KPN N.V.
"krd", // krd KRG Department of Information Technology
"kred", // kred KredTLD Pty Ltd
"kuokgroup", // kuokgroup Kerry Trading Co. Limited
"kyoto", // kyoto Academic Institution: Kyoto Jyoho Gakuen
"lacaixa", // lacaixa CAIXA D'ESTALVIS I PENSIONS DE BARCELONA
// "ladbrokes", // ladbrokes LADBROKES INTERNATIONAL PLC
"lamborghini", // lamborghini Automobili Lamborghini S.p.A.
"lamer", // lamer The Estée Lauder Companies Inc.
"lancaster", // lancaster LANCASTER
"lancia", // lancia Fiat Chrysler Automobiles N.V.
// "lancome", // lancome L'Oréal
"land", // land Pine Moon, LLC
"landrover", // landrover Jaguar Land Rover Ltd
"lanxess", // lanxess LANXESS Corporation
"lasalle", // lasalle Jones Lang LaSalle Incorporated
"lat", // lat ECOM-LAC Federación de Latinoamérica y el Caribe para Internet y el Comercio Electrónico
"latino", // latino Dish DBS Corporation
"latrobe", // latrobe La Trobe University
"law", // law Minds + Machines Group Limited
"lawyer", // lawyer United TLD Holdco, Ltd
"lds", // lds IRI Domain Management, LLC
"lease", // lease Victor Trail, LLC
"leclerc", // leclerc A.C.D. LEC Association des Centres Distributeurs Edouard Leclerc
"lefrak", // lefrak LeFrak Organization, Inc.
"legal", // legal Blue Falls, LLC
"lego", // lego LEGO Juris A/S
"lexus", // lexus TOYOTA MOTOR CORPORATION
"lgbt", // lgbt Afilias Limited
// "liaison", // liaison Liaison Technologies, Incorporated
"lidl", // lidl Schwarz Domains und Services GmbH & Co. KG
"life", // life Trixy Oaks, LLC
"lifeinsurance", // lifeinsurance American Council of Life Insurers
"lifestyle", // lifestyle Lifestyle Domain Holdings, Inc.
"lighting", // lighting John McCook, LLC
"like", // like Amazon Registry Services, Inc.
"lilly", // lilly Eli Lilly and Company
"limited", // limited Big Fest, LLC
"limo", // limo Hidden Frostbite, LLC
"lincoln", // lincoln Ford Motor Company
"linde", // linde Linde Aktiengesellschaft
"link", // link Uniregistry, Corp.
"lipsy", // lipsy Lipsy Ltd
"live", // live United TLD Holdco Ltd.
"living", // living Lifestyle Domain Holdings, Inc.
// "lixil", // lixil LIXIL Group Corporation
"llc", // llc Afilias plc
"llp", // llp Dot Registry LLC
"loan", // loan dot Loan Limited
"loans", // loans June Woods, LLC
"locker", // locker Dish DBS Corporation
"locus", // locus Locus Analytics LLC
// "loft", // loft Annco, Inc.
"lol", // lol Uniregistry, Corp.
"london", // london Dot London Domains Limited
"lotte", // lotte Lotte Holdings Co., Ltd.
"lotto", // lotto Afilias Limited
"love", // love Merchant Law Group LLP
"lpl", // lpl LPL Holdings, Inc.
"lplfinancial", // lplfinancial LPL Holdings, Inc.
"ltd", // ltd Over Corner, LLC
"ltda", // ltda InterNetX Corp.
"lundbeck", // lundbeck H. Lundbeck A/S
// "lupin", // lupin LUPIN LIMITED
"luxe", // luxe Top Level Domain Holdings Limited
"luxury", // luxury Luxury Partners LLC
"macys", // macys Macys, Inc.
"madrid", // madrid Comunidad de Madrid
"maif", // maif Mutuelle Assurance Instituteur France (MAIF)
"maison", // maison Victor Frostbite, LLC
"makeup", // makeup L'Oréal
"man", // man MAN SE
"management", // management John Goodbye, LLC
"mango", // mango PUNTO FA S.L.
"map", // map Charleston Road Registry Inc.
        "market", // market United TLD Holdco, Ltd
"marketing", // marketing Fern Pass, LLC
"markets", // markets DOTMARKETS REGISTRY LTD
"marriott", // marriott Marriott Worldwide Corporation
"marshalls", // marshalls The TJX Companies, Inc.
"maserati", // maserati Fiat Chrysler Automobiles N.V.
"mattel", // mattel Mattel Sites, Inc.
"mba", // mba Lone Hollow, LLC
// "mcd", // mcd McDonald’s Corporation (Not assigned)
// "mcdonalds", // mcdonalds McDonald’s Corporation (Not assigned)
"mckinsey", // mckinsey McKinsey Holdings, Inc.
"med", // med Medistry LLC
"media", // media Grand Glen, LLC
"meet", // meet Afilias Limited
"melbourne", // melbourne The Crown in right of the State of Victoria, represented by its Department of State Development, Business and Innovation
"meme", // meme Charleston Road Registry Inc.
"memorial", // memorial Dog Beach, LLC
"men", // men Exclusive Registry Limited
"menu", // menu Wedding TLD2, LLC
// "meo", // meo PT Comunicacoes S.A.
"merckmsd", // merckmsd MSD Registry Holdings, Inc.
// "metlife", // metlife MetLife Services and Solutions, LLC
"miami", // miami Top Level Domain Holdings Limited
"microsoft", // microsoft Microsoft Corporation
"mil", // mil DoD Network Information Center
"mini", // mini Bayerische Motoren Werke Aktiengesellschaft
"mint", // mint Intuit Administrative Services, Inc.
"mit", // mit Massachusetts Institute of Technology
"mitsubishi", // mitsubishi Mitsubishi Corporation
"mlb", // mlb MLB Advanced Media DH, LLC
"mls", // mls The Canadian Real Estate Association
"mma", // mma MMA IARD
"mobi", // mobi Afilias Technologies Limited dba dotMobi
"mobile", // mobile Dish DBS Corporation
// "mobily", // mobily GreenTech Consultancy Company W.L.L.
"moda", // moda United TLD Holdco Ltd.
"moe", // moe Interlink Co., Ltd.
"moi", // moi Amazon Registry Services, Inc.
"mom", // mom Uniregistry, Corp.
"monash", // monash Monash University
"money", // money Outer McCook, LLC
"monster", // monster Monster Worldwide, Inc.
// "montblanc", // montblanc Richemont DNS Inc. (Not assigned)
// "mopar", // mopar FCA US LLC.
"mormon", // mormon IRI Domain Management, LLC ("Applicant")
"mortgage", // mortgage United TLD Holdco, Ltd
"moscow", // moscow Foundation for Assistance for Internet Technologies and Infrastructure Development (FAITID)
"moto", // moto Motorola Trademark Holdings, LLC
"motorcycles", // motorcycles DERMotorcycles, LLC
"mov", // mov Charleston Road Registry Inc.
"movie", // movie New Frostbite, LLC
// "movistar", // movistar Telefónica S.A.
"msd", // msd MSD Registry Holdings, Inc.
"mtn", // mtn MTN Dubai Limited
// "mtpc", // mtpc Mitsubishi Tanabe Pharma Corporation (Retired)
"mtr", // mtr MTR Corporation Limited
"museum", // museum Museum Domain Management Association
"music", // music DotMusic Limited
"mutual", // mutual Northwestern Mutual MU TLD Registry, LLC
// "mutuelle", // mutuelle Fédération Nationale de la Mutualité Française (Retired)
"nab", // nab National Australia Bank Limited
// "nadex", // nadex Nadex Domains, Inc
"nagoya", // nagoya GMO Registry, Inc.
"name", // name VeriSign Information Services, Inc.
// "nationwide", // nationwide Nationwide Mutual Insurance Company
"natura", // natura NATURA COSMÉTICOS S.A.
"navy", // navy United TLD Holdco Ltd.
"nba", // nba NBA REGISTRY, LLC
"nec", // nec NEC Corporation
"net", // net VeriSign Global Registry Services
"netbank", // netbank COMMONWEALTH BANK OF AUSTRALIA
"netflix", // netflix Netflix, Inc.
"network", // network Trixy Manor, LLC
"neustar", // neustar NeuStar, Inc.
"new", // new Charleston Road Registry Inc.
// "newholland", // newholland CNH Industrial N.V.
"news", // news United TLD Holdco Ltd.
"next", // next Next plc
"nextdirect", // nextdirect Next plc
"nexus", // nexus Charleston Road Registry Inc.
"nfl", // nfl NFL Reg Ops LLC
"ngo", // ngo Public Interest Registry
"nhk", // nhk Japan Broadcasting Corporation (NHK)
"nico", // nico DWANGO Co., Ltd.
"nike", // nike NIKE, Inc.
"nikon", // nikon NIKON CORPORATION
"ninja", // ninja United TLD Holdco Ltd.
"nissan", // nissan NISSAN MOTOR CO., LTD.
"nissay", // nissay Nippon Life Insurance Company
"nokia", // nokia Nokia Corporation
"northwesternmutual", // northwesternmutual Northwestern Mutual Registry, LLC
"norton", // norton Symantec Corporation
"now", // now Amazon Registry Services, Inc.
"nowruz", // nowruz Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
"nowtv", // nowtv Starbucks (HK) Limited
"nra", // nra NRA Holdings Company, INC.
"nrw", // nrw Minds + Machines GmbH
"ntt", // ntt NIPPON TELEGRAPH AND TELEPHONE CORPORATION
"nyc", // nyc The City of New York by and through the New York City Department of Information Technology & Telecommunications
"obi", // obi OBI Group Holding SE & Co. KGaA
"observer", // observer Top Level Spectrum, Inc.
// "off", // off Johnson Shareholdings, Inc.
"office", // office Microsoft Corporation
"okinawa", // okinawa BusinessRalliart inc.
"olayan", // olayan Crescent Holding GmbH
"olayangroup", // olayangroup Crescent Holding GmbH
"oldnavy", // oldnavy The Gap, Inc.
"ollo", // ollo Dish DBS Corporation
"omega", // omega The Swatch Group Ltd
"one", // one One.com A/S
"ong", // ong Public Interest Registry
"onl", // onl I-REGISTRY Ltd., Niederlassung Deutschland
"online", // online DotOnline Inc.
// "onyourside", // onyourside Nationwide Mutual Insurance Company
"ooo", // ooo INFIBEAM INCORPORATION LIMITED
"open", // open American Express Travel Related Services Company, Inc.
"oracle", // oracle Oracle Corporation
"orange", // orange Orange Brand Services Limited
"org", // org Public Interest Registry (PIR)
"organic", // organic Afilias Limited
// "orientexpress", // orientexpress Orient Express (retired 2017-04-11)
"origins", // origins The Estée Lauder Companies Inc.
"osaka", // osaka Interlink Co., Ltd.
"otsuka", // otsuka Otsuka Holdings Co., Ltd.
"ott", // ott Dish DBS Corporation
"ovh", // ovh OVH SAS
"page", // page Charleston Road Registry Inc.
// "pamperedchef", // pamperedchef The Pampered Chef, Ltd. (Not assigned)
"panasonic", // panasonic Panasonic Corporation
// "panerai", // panerai Richemont DNS Inc.
"paris", // paris City of Paris
"pars", // pars Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
"partners", // partners Magic Glen, LLC
"parts", // parts Sea Goodbye, LLC
"party", // party Blue Sky Registry Limited
"passagens", // passagens Travel Reservations SRL
"pay", // pay Amazon Registry Services, Inc.
"pccw", // pccw PCCW Enterprises Limited
"pet", // pet Afilias plc
"pfizer", // pfizer Pfizer Inc.
"pharmacy", // pharmacy National Association of Boards of Pharmacy
"phd", // phd Charleston Road Registry Inc.
"philips", // philips Koninklijke Philips N.V.
"phone", // phone Dish DBS Corporation
"photo", // photo Uniregistry, Corp.
"photography", // photography Sugar Glen, LLC
"photos", // photos Sea Corner, LLC
"physio", // physio PhysBiz Pty Ltd
// "piaget", // piaget Richemont DNS Inc.
"pics", // pics Uniregistry, Corp.
"pictet", // pictet Pictet Europe S.A.
"pictures", // pictures Foggy Sky, LLC
"pid", // pid Top Level Spectrum, Inc.
"pin", // pin Amazon Registry Services, Inc.
"ping", // ping Ping Registry Provider, Inc.
"pink", // pink Afilias Limited
"pioneer", // pioneer Pioneer Corporation
"pizza", // pizza Foggy Moon, LLC
"place", // place Snow Galley, LLC
"play", // play Charleston Road Registry Inc.
"playstation", // playstation Sony Computer Entertainment Inc.
"plumbing", // plumbing Spring Tigers, LLC
"plus", // plus Sugar Mill, LLC
"pnc", // pnc PNC Domain Co., LLC
"pohl", // pohl Deutsche Vermögensberatung Aktiengesellschaft DVAG
"poker", // poker Afilias Domains No. 5 Limited
"politie", // politie Politie Nederland
"porn", // porn ICM Registry PN LLC
"post", // post Universal Postal Union
"pramerica", // pramerica Prudential Financial, Inc.
"praxi", // praxi Praxi S.p.A.
"press", // press DotPress Inc.
"prime", // prime Amazon Registry Services, Inc.
"pro", // pro Registry Services Corporation dba RegistryPro
"prod", // prod Charleston Road Registry Inc.
"productions", // productions Magic Birch, LLC
"prof", // prof Charleston Road Registry Inc.
"progressive", // progressive Progressive Casualty Insurance Company
"promo", // promo Afilias plc
"properties", // properties Big Pass, LLC
"property", // property Uniregistry, Corp.
"protection", // protection XYZ.COM LLC
"pru", // pru Prudential Financial, Inc.
"prudential", // prudential Prudential Financial, Inc.
"pub", // pub United TLD Holdco Ltd.
"pwc", // pwc PricewaterhouseCoopers LLP
"qpon", // qpon dotCOOL, Inc.
"quebec", // quebec PointQuébec Inc
"quest", // quest Quest ION Limited
// "qvc", // qvc QVC, Inc.
"racing", // racing Premier Registry Limited
"radio", // radio European Broadcasting Union (EBU)
// "raid", // raid Johnson Shareholdings, Inc.
"read", // read Amazon Registry Services, Inc.
"realestate", // realestate dotRealEstate LLC
"realtor", // realtor Real Estate Domains LLC
"realty", // realty Fegistry, LLC
"recipes", // recipes Grand Island, LLC
"red", // red Afilias Limited
"redstone", // redstone Redstone Haute Couture Co., Ltd.
"redumbrella", // redumbrella Travelers TLD, LLC
"rehab", // rehab United TLD Holdco Ltd.
"reise", // reise Foggy Way, LLC
"reisen", // reisen New Cypress, LLC
"reit", // reit National Association of Real Estate Investment Trusts, Inc.
"reliance", // reliance Reliance Industries Limited
"ren", // ren Beijing Qianxiang Wangjing Technology Development Co., Ltd.
"rent", // rent XYZ.COM LLC
        "rentals", // rentals Big Hollow, LLC
"repair", // repair Lone Sunset, LLC
"report", // report Binky Glen, LLC
"republican", // republican United TLD Holdco Ltd.
"rest", // rest Punto 2012 Sociedad Anonima Promotora de Inversion de Capital Variable
"restaurant", // restaurant Snow Avenue, LLC
"review", // review dot Review Limited
"reviews", // reviews United TLD Holdco, Ltd.
"rexroth", // rexroth Robert Bosch GMBH
"rich", // rich I-REGISTRY Ltd., Niederlassung Deutschland
"richardli", // richardli Pacific Century Asset Management (HK) Limited
"ricoh", // ricoh Ricoh Company, Ltd.
// "rightathome", // rightathome Johnson Shareholdings, Inc. (retired 2020-07-31)
"ril", // ril Reliance Industries Limited
"rio", // rio Empresa Municipal de Informática SA - IPLANRIO
"rip", // rip United TLD Holdco Ltd.
// "rmit", // rmit Royal Melbourne Institute of Technology
"rocher", // rocher Ferrero Trading Lux S.A.
"rocks", // rocks United TLD Holdco, LTD.
"rodeo", // rodeo Top Level Domain Holdings Limited
"rogers", // rogers Rogers Communications Canada Inc.
"room", // room Amazon Registry Services, Inc.
"rsvp", // rsvp Charleston Road Registry Inc.
"rugby", // rugby World Rugby Strategic Developments Limited
"ruhr", // ruhr regiodot GmbH & Co. KG
"run", // run Snow Park, LLC
"rwe", // rwe RWE AG
"ryukyu", // ryukyu BusinessRalliart inc.
"saarland", // saarland dotSaarland GmbH
"safe", // safe Amazon Registry Services, Inc.
"safety", // safety Safety Registry Services, LLC.
"sakura", // sakura SAKURA Internet Inc.
"sale", // sale United TLD Holdco, Ltd
"salon", // salon Outer Orchard, LLC
"samsclub", // samsclub Wal-Mart Stores, Inc.
"samsung", // samsung SAMSUNG SDS CO., LTD
"sandvik", // sandvik Sandvik AB
"sandvikcoromant", // sandvikcoromant Sandvik AB
"sanofi", // sanofi Sanofi
"sap", // sap SAP AG
// "sapo", // sapo PT Comunicacoes S.A.
"sarl", // sarl Delta Orchard, LLC
"sas", // sas Research IP LLC
"save", // save Amazon Registry Services, Inc.
"saxo", // saxo Saxo Bank A/S
"sbi", // sbi STATE BANK OF INDIA
"sbs", // sbs SPECIAL BROADCASTING SERVICE CORPORATION
"sca", // sca SVENSKA CELLULOSA AKTIEBOLAGET SCA (publ)
"scb", // scb The Siam Commercial Bank Public Company Limited ("SCB")
"schaeffler", // schaeffler Schaeffler Technologies AG & Co. KG
"schmidt", // schmidt SALM S.A.S.
"scholarships", // scholarships Scholarships.com, LLC
"school", // school Little Galley, LLC
"schule", // schule Outer Moon, LLC
"schwarz", // schwarz Schwarz Domains und Services GmbH & Co. KG
"science", // science dot Science Limited
// "scjohnson", // scjohnson Johnson Shareholdings, Inc.
// "scor", // scor SCOR SE (not assigned as at Version 2020062100)
"scot", // scot Dot Scot Registry Limited
"search", // search Charleston Road Registry Inc.
"seat", // seat SEAT, S.A. (Sociedad Unipersonal)
"secure", // secure Amazon Registry Services, Inc.
"security", // security XYZ.COM LLC
"seek", // seek Seek Limited
"select", // select iSelect Ltd
"sener", // sener Sener Ingeniería y Sistemas, S.A.
"services", // services Fox Castle, LLC
// "ses", // ses SES
"seven", // seven Seven West Media Ltd
"sew", // sew SEW-EURODRIVE GmbH & Co KG
"sex", // sex ICM Registry SX LLC
"sexy", // sexy Uniregistry, Corp.
"sfr", // sfr Societe Francaise du Radiotelephone - SFR
"shangrila", // shangrila Shangri‐La International Hotel Management Limited
"sharp", // sharp Sharp Corporation
"shaw", // shaw Shaw Cablesystems G.P.
"shell", // shell Shell Information Technology International Inc
"shia", // shia Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
"shiksha", // shiksha Afilias Limited
"shoes", // shoes Binky Galley, LLC
"shop", // shop GMO Registry, Inc.
"shopping", // shopping Over Keep, LLC
"shouji", // shouji QIHOO 360 TECHNOLOGY CO. LTD.
"show", // show Snow Beach, LLC
"showtime", // showtime CBS Domains Inc.
// "shriram", // shriram Shriram Capital Ltd.
"silk", // silk Amazon Registry Services, Inc.
"sina", // sina Sina Corporation
"singles", // singles Fern Madison, LLC
"site", // site DotSite Inc.
"ski", // ski STARTING DOT LIMITED
"skin", // skin L'Oréal
"sky", // sky Sky International AG
"skype", // skype Microsoft Corporation
"sling", // sling Hughes Satellite Systems Corporation
"smart", // smart Smart Communications, Inc. (SMART)
"smile", // smile Amazon Registry Services, Inc.
"sncf", // sncf SNCF (Société Nationale des Chemins de fer Francais)
"soccer", // soccer Foggy Shadow, LLC
"social", // social United TLD Holdco Ltd.
"softbank", // softbank SoftBank Group Corp.
"software", // software United TLD Holdco, Ltd
"sohu", // sohu Sohu.com Limited
"solar", // solar Ruby Town, LLC
"solutions", // solutions Silver Cover, LLC
"song", // song Amazon Registry Services, Inc.
"sony", // sony Sony Corporation
"soy", // soy Charleston Road Registry Inc.
"spa", // spa Asia Spa and Wellness Promotion Council Limited
"space", // space DotSpace Inc.
// "spiegel", // spiegel SPIEGEL-Verlag Rudolf Augstein GmbH & Co. KG
"sport", // sport Global Association of International Sports Federations (GAISF)
"spot", // spot Amazon Registry Services, Inc.
// "spreadbetting", // spreadbetting DOTSPREADBETTING REGISTRY LTD
"srl", // srl InterNetX Corp.
// "srt", // srt FCA US LLC.
"stada", // stada STADA Arzneimittel AG
"staples", // staples Staples, Inc.
"star", // star Star India Private Limited
// "starhub", // starhub StarHub Limited
"statebank", // statebank STATE BANK OF INDIA
"statefarm", // statefarm State Farm Mutual Automobile Insurance Company
// "statoil", // statoil Statoil ASA
"stc", // stc Saudi Telecom Company
"stcgroup", // stcgroup Saudi Telecom Company
"stockholm", // stockholm Stockholms kommun
"storage", // storage Self Storage Company LLC
"store", // store DotStore Inc.
"stream", // stream dot Stream Limited
"studio", // studio United TLD Holdco Ltd.
"study", // study OPEN UNIVERSITIES AUSTRALIA PTY LTD
"style", // style Binky Moon, LLC
"sucks", // sucks Vox Populi Registry Ltd.
"supplies", // supplies Atomic Fields, LLC
"supply", // supply Half Falls, LLC
"support", // support Grand Orchard, LLC
"surf", // surf Top Level Domain Holdings Limited
"surgery", // surgery Tin Avenue, LLC
"suzuki", // suzuki SUZUKI MOTOR CORPORATION
"swatch", // swatch The Swatch Group Ltd
// "swiftcover", // swiftcover Swiftcover Insurance Services Limited
"swiss", // swiss Swiss Confederation
"sydney", // sydney State of New South Wales, Department of Premier and Cabinet
// "symantec", // symantec Symantec Corporation [Not assigned as of Jul 25]
"systems", // systems Dash Cypress, LLC
"tab", // tab Tabcorp Holdings Limited
"taipei", // taipei Taipei City Government
"talk", // talk Amazon Registry Services, Inc.
"taobao", // taobao Alibaba Group Holding Limited
"target", // target Target Domain Holdings, LLC
"tatamotors", // tatamotors Tata Motors Ltd
"tatar", // tatar LLC "Coordination Center of Regional Domain of Tatarstan Republic"
"tattoo", // tattoo Uniregistry, Corp.
"tax", // tax Storm Orchard, LLC
"taxi", // taxi Pine Falls, LLC
"tci", // tci Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
"tdk", // tdk TDK Corporation
"team", // team Atomic Lake, LLC
"tech", // tech Dot Tech LLC
"technology", // technology Auburn Falls, LLC
"tel", // tel Telnic Ltd.
// "telecity", // telecity TelecityGroup International Limited
// "telefonica", // telefonica Telefónica S.A.
"temasek", // temasek Temasek Holdings (Private) Limited
"tennis", // tennis Cotton Bloom, LLC
"teva", // teva Teva Pharmaceutical Industries Limited
"thd", // thd Homer TLC, Inc.
"theater", // theater Blue Tigers, LLC
"theatre", // theatre XYZ.COM LLC
"tiaa", // tiaa Teachers Insurance and Annuity Association of America
"tickets", // tickets Accent Media Limited
"tienda", // tienda Victor Manor, LLC
"tiffany", // tiffany Tiffany and Company
"tips", // tips Corn Willow, LLC
"tires", // tires Dog Edge, LLC
"tirol", // tirol punkt Tirol GmbH
"tjmaxx", // tjmaxx The TJX Companies, Inc.
"tjx", // tjx The TJX Companies, Inc.
"tkmaxx", // tkmaxx The TJX Companies, Inc.
"tmall", // tmall Alibaba Group Holding Limited
"today", // today Pearl Woods, LLC
"tokyo", // tokyo GMO Registry, Inc.
"tools", // tools Pioneer North, LLC
"top", // top Jiangsu Bangning Science & Technology Co.,Ltd.
"toray", // toray Toray Industries, Inc.
"toshiba", // toshiba TOSHIBA Corporation
"total", // total Total SA
"tours", // tours Sugar Station, LLC
"town", // town Koko Moon, LLC
"toyota", // toyota TOYOTA MOTOR CORPORATION
"toys", // toys Pioneer Orchard, LLC
"trade", // trade Elite Registry Limited
"trading", // trading DOTTRADING REGISTRY LTD
"training", // training Wild Willow, LLC
"travel", // travel Tralliance Registry Management Company, LLC.
"travelchannel", // travelchannel Lifestyle Domain Holdings, Inc.
"travelers", // travelers Travelers TLD, LLC
"travelersinsurance", // travelersinsurance Travelers TLD, LLC
"trust", // trust Artemis Internet Inc
"trv", // trv Travelers TLD, LLC
"tube", // tube Latin American Telecom LLC
"tui", // tui TUI AG
"tunes", // tunes Amazon Registry Services, Inc.
"tushu", // tushu Amazon Registry Services, Inc.
"tvs", // tvs T V SUNDRAM IYENGAR & SONS PRIVATE LIMITED
"ubank", // ubank National Australia Bank Limited
"ubs", // ubs UBS AG
// "uconnect", // uconnect FCA US LLC.
"unicom", // unicom China United Network Communications Corporation Limited
"university", // university Little Station, LLC
"uno", // uno Dot Latin LLC
"uol", // uol UBN INTERNET LTDA.
"ups", // ups UPS Market Driver, Inc.
"vacations", // vacations Atomic Tigers, LLC
"vana", // vana Lifestyle Domain Holdings, Inc.
"vanguard", // vanguard The Vanguard Group, Inc.
"vegas", // vegas Dot Vegas, Inc.
"ventures", // ventures Binky Lake, LLC
"verisign", // verisign VeriSign, Inc.
"versicherung", // versicherung dotversicherung-registry GmbH
"vet", // vet United TLD Holdco, Ltd
"viajes", // viajes Black Madison, LLC
"video", // video United TLD Holdco, Ltd
"vig", // vig VIENNA INSURANCE GROUP AG Wiener Versicherung Gruppe
"viking", // viking Viking River Cruises (Bermuda) Ltd.
"villas", // villas New Sky, LLC
"vin", // vin Holly Shadow, LLC
"vip", // vip Minds + Machines Group Limited
"virgin", // virgin Virgin Enterprises Limited
"visa", // visa Visa Worldwide Pte. Limited
"vision", // vision Koko Station, LLC
// "vista", // vista Vistaprint Limited
// "vistaprint", // vistaprint Vistaprint Limited
"viva", // viva Saudi Telecom Company
"vivo", // vivo Telefonica Brasil S.A.
"vlaanderen", // vlaanderen DNS.be vzw
"vodka", // vodka Top Level Domain Holdings Limited
"volkswagen", // volkswagen Volkswagen Group of America Inc.
"volvo", // volvo Volvo Holding Sverige Aktiebolag
"vote", // vote Monolith Registry LLC
"voting", // voting Valuetainment Corp.
"voto", // voto Monolith Registry LLC
"voyage", // voyage Ruby House, LLC
"vuelos", // vuelos Travel Reservations SRL
"wales", // wales Nominet UK
"walmart", // walmart Wal-Mart Stores, Inc.
"walter", // walter Sandvik AB
"wang", // wang Zodiac Registry Limited
"wanggou", // wanggou Amazon Registry Services, Inc.
// "warman", // warman Weir Group IP Limited
"watch", // watch Sand Shadow, LLC
"watches", // watches Richemont DNS Inc.
"weather", // weather The Weather Channel, LLC
"weatherchannel", // weatherchannel The Weather Channel, LLC
"webcam", // webcam dot Webcam Limited
"weber", // weber Saint-Gobain Weber SA
"website", // website DotWebsite Inc.
"wed", // wed Atgron, Inc.
"wedding", // wedding Top Level Domain Holdings Limited
"weibo", // weibo Sina Corporation
"weir", // weir Weir Group IP Limited
"whoswho", // whoswho Who's Who Registry
"wien", // wien punkt.wien GmbH
"wiki", // wiki Top Level Design, LLC
"williamhill", // williamhill William Hill Organization Limited
"win", // win First Registry Limited
"windows", // windows Microsoft Corporation
"wine", // wine June Station, LLC
"winners", // winners The TJX Companies, Inc.
"wme", // wme William Morris Endeavor Entertainment, LLC
"wolterskluwer", // wolterskluwer Wolters Kluwer N.V.
"woodside", // woodside Woodside Petroleum Limited
"work", // work Top Level Domain Holdings Limited
"works", // works Little Dynamite, LLC
"world", // world Bitter Fields, LLC
"wow", // wow Amazon Registry Services, Inc.
"wtc", // wtc World Trade Centers Association, Inc.
"wtf", // wtf Hidden Way, LLC
"xbox", // xbox Microsoft Corporation
"xerox", // xerox Xerox DNHC LLC
"xfinity", // xfinity Comcast IP Holdings I, LLC
"xihuan", // xihuan QIHOO 360 TECHNOLOGY CO. LTD.
"xin", // xin Elegant Leader Limited
"xn--11b4c3d", // कॉम VeriSign Sarl
"xn--1ck2e1b", // セール Amazon Registry Services, Inc.
"xn--1qqw23a", // 佛山 Guangzhou YU Wei Information Technology Co., Ltd.
"xn--30rr7y", // 慈善 Excellent First Limited
"xn--3bst00m", // 集团 Eagle Horizon Limited
"xn--3ds443g", // 在线 TLD REGISTRY LIMITED
// "xn--3oq18vl8pn36a", // 大众汽车 Volkswagen (China) Investment Co., Ltd.
"xn--3pxu8k", // 点看 VeriSign Sarl
"xn--42c2d9a", // คอม VeriSign Sarl
"xn--45q11c", // 八卦 Zodiac Scorpio Limited
"xn--4gbrim", // موقع Suhub Electronic Establishment
"xn--55qw42g", // 公益 China Organizational Name Administration Center
"xn--55qx5d", // 公司 Computer Network Information Center of Chinese Academy of Sciences (China Internet Network Information Center)
"xn--5su34j936bgsg", // 香格里拉 Shangri‐La International Hotel Management Limited
"xn--5tzm5g", // 网站 Global Website TLD Asia Limited
"xn--6frz82g", // 移动 Afilias Limited
"xn--6qq986b3xl", // 我爱你 Tycoon Treasure Limited
"xn--80adxhks", // москва Foundation for Assistance for Internet Technologies and Infrastructure Development (FAITID)
"xn--80aqecdr1a", // католик Pontificium Consilium de Comunicationibus Socialibus (PCCS) (Pontifical Council for Social Communication)
"xn--80asehdb", // онлайн CORE Association
"xn--80aswg", // сайт CORE Association
"xn--8y0a063a", // 联通 China United Network Communications Corporation Limited
"xn--90ae", // бг Imena.BG Plc (NAMES.BG Plc)
"xn--9dbq2a", // קום VeriSign Sarl
"xn--9et52u", // 时尚 RISE VICTORY LIMITED
"xn--9krt00a", // 微博 Sina Corporation
"xn--b4w605ferd", // 淡马锡 Temasek Holdings (Private) Limited
"xn--bck1b9a5dre4c", // ファッション Amazon Registry Services, Inc.
"xn--c1avg", // орг Public Interest Registry
"xn--c2br7g", // नेट VeriSign Sarl
"xn--cck2b3b", // ストア Amazon Registry Services, Inc.
"xn--cckwcxetd", // アマゾン Amazon Registry Services, Inc.
"xn--cg4bki", // 삼성 SAMSUNG SDS CO., LTD
"xn--czr694b", // 商标 HU YI GLOBAL INFORMATION RESOURCES(HOLDING) COMPANY.HONGKONG LIMITED
"xn--czrs0t", // 商店 Wild Island, LLC
"xn--czru2d", // 商城 Zodiac Aquarius Limited
"xn--d1acj3b", // дети The Foundation for Network Initiatives “The Smart Internet”
"xn--eckvdtc9d", // ポイント Amazon Registry Services, Inc.
"xn--efvy88h", // 新闻 Xinhua News Agency Guangdong Branch 新华通讯社广东分社
// "xn--estv75g", // 工行 Industrial and Commercial Bank of China Limited
"xn--fct429k", // 家電 Amazon Registry Services, Inc.
"xn--fhbei", // كوم VeriSign Sarl
"xn--fiq228c5hs", // 中文网 TLD REGISTRY LIMITED
"xn--fiq64b", // 中信 CITIC Group Corporation
"xn--fjq720a", // 娱乐 Will Bloom, LLC
"xn--flw351e", // 谷歌 Charleston Road Registry Inc.
"xn--fzys8d69uvgm", // 電訊盈科 PCCW Enterprises Limited
"xn--g2xx48c", // 购物 Minds + Machines Group Limited
"xn--gckr3f0f", // クラウド Amazon Registry Services, Inc.
"xn--gk3at1e", // 通販 Amazon Registry Services, Inc.
"xn--hxt814e", // 网店 Zodiac Libra Limited
"xn--i1b6b1a6a2e", // संगठन Public Interest Registry
"xn--imr513n", // 餐厅 HU YI GLOBAL INFORMATION RESOURCES (HOLDING) COMPANY. HONGKONG LIMITED
"xn--io0a7i", // 网络 Computer Network Information Center of Chinese Academy of Sciences (China Internet Network Information Center)
"xn--j1aef", // ком VeriSign Sarl
"xn--jlq480n2rg", // 亚马逊 Amazon Registry Services, Inc.
// "xn--jlq61u9w7b", // 诺基亚 Nokia Corporation
"xn--jvr189m", // 食品 Amazon Registry Services, Inc.
"xn--kcrx77d1x4a", // 飞利浦 Koninklijke Philips N.V.
// "xn--kpu716f", // 手表 Richemont DNS Inc. [Not assigned as of Jul 25]
"xn--kput3i", // 手机 Beijing RITT-Net Technology Development Co., Ltd
"xn--mgba3a3ejt", // ارامكو Aramco Services Company
"xn--mgba7c0bbn0a", // العليان Crescent Holding GmbH
"xn--mgbaakc7dvf", // اتصالات Emirates Telecommunications Corporation (trading as Etisalat)
"xn--mgbab2bd", // بازار CORE Association
// "xn--mgbb9fbpob", // موبايلي GreenTech Consultancy Company W.L.L.
"xn--mgbca7dzdo", // ابوظبي Abu Dhabi Systems and Information Centre
"xn--mgbi4ecexp", // كاثوليك Pontificium Consilium de Comunicationibus Socialibus (PCCS) (Pontifical Council for Social Communication)
"xn--mgbt3dhd", // همراه Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
"xn--mk1bu44c", // 닷컴 VeriSign Sarl
"xn--mxtq1m", // 政府 Net-Chinese Co., Ltd.
"xn--ngbc5azd", // شبكة International Domain Registry Pty. Ltd.
"xn--ngbe9e0a", // بيتك Kuwait Finance House
"xn--ngbrx", // عرب League of Arab States
"xn--nqv7f", // 机构 Public Interest Registry
"xn--nqv7fs00ema", // 组织机构 Public Interest Registry
"xn--nyqy26a", // 健康 Stable Tone Limited
"xn--otu796d", // 招聘 Dot Trademark TLD Holding Company Limited
"xn--p1acf", // рус Rusnames Limited
// "xn--pbt977c", // 珠宝 Richemont DNS Inc. [Not assigned as of Jul 25]
"xn--pssy2u", // 大拿 VeriSign Sarl
"xn--q9jyb4c", // みんな Charleston Road Registry Inc.
"xn--qcka1pmc", // グーグル Charleston Road Registry Inc.
"xn--rhqv96g", // 世界 Stable Tone Limited
"xn--rovu88b", // 書籍 Amazon EU S.à r.l.
"xn--ses554g", // 网址 KNET Co., Ltd
"xn--t60b56a", // 닷넷 VeriSign Sarl
"xn--tckwe", // コム VeriSign Sarl
"xn--tiq49xqyj", // 天主教 Pontificium Consilium de Comunicationibus Socialibus (PCCS) (Pontifical Council for Social Communication)
"xn--unup4y", // 游戏 Spring Fields, LLC
"xn--vermgensberater-ctb", // VERMöGENSBERATER Deutsche Vermögensberatung Aktiengesellschaft DVAG
"xn--vermgensberatung-pwb", // VERMöGENSBERATUNG Deutsche Vermögensberatung Aktiengesellschaft DVAG
"xn--vhquv", // 企业 Dash McCook, LLC
"xn--vuq861b", // 信息 Beijing Tele-info Network Technology Co., Ltd.
"xn--w4r85el8fhu5dnra", // 嘉里大酒店 Kerry Trading Co. Limited
"xn--w4rs40l", // 嘉里 Kerry Trading Co. Limited
"xn--xhq521b", // 广东 Guangzhou YU Wei Information Technology Co., Ltd.
"xn--zfr164b", // 政务 China Organizational Name Administration Center
// "xperia", // xperia Sony Mobile Communications AB
"xxx", // xxx ICM Registry LLC
"xyz", // xyz XYZ.COM LLC
"yachts", // yachts DERYachts, LLC
"yahoo", // yahoo Yahoo! Domain Services Inc.
"yamaxun", // yamaxun Amazon Registry Services, Inc.
"yandex", // yandex YANDEX, LLC
"yodobashi", // yodobashi YODOBASHI CAMERA CO.,LTD.
"yoga", // yoga Top Level Domain Holdings Limited
"yokohama", // yokohama GMO Registry, Inc.
"you", // you Amazon Registry Services, Inc.
"youtube", // youtube Charleston Road Registry Inc.
"yun", // yun QIHOO 360 TECHNOLOGY CO. LTD.
"zappos", // zappos Amazon Registry Services, Inc.
"zara", // zara Industria de Diseño Textil, S.A. (INDITEX, S.A.)
"zero", // zero Amazon Registry Services, Inc.
"zip", // zip Charleston Road Registry Inc.
// "zippo", // zippo Zadco Company
"zone", // zone Outer Falls, LLC
"zuerich", // zuerich Kanton Zürich (Canton of Zurich)
};
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static final String[] COUNTRY_CODE_TLDS = {
// Taken from Version 2023011200, Last Updated Thu Jan 12 07:07:01 2023 UTC
"ac", // Ascension Island
"ad", // Andorra
"ae", // United Arab Emirates
"af", // Afghanistan
"ag", // Antigua and Barbuda
"ai", // Anguilla
"al", // Albania
"am", // Armenia
// "an", // Netherlands Antilles (retired)
"ao", // Angola
"aq", // Antarctica
"ar", // Argentina
"as", // American Samoa
"at", // Austria
"au", // Australia (includes Ashmore and Cartier Islands and Coral Sea Islands)
"aw", // Aruba
"ax", // Åland
"az", // Azerbaijan
"ba", // Bosnia and Herzegovina
"bb", // Barbados
"bd", // Bangladesh
"be", // Belgium
"bf", // Burkina Faso
"bg", // Bulgaria
"bh", // Bahrain
"bi", // Burundi
"bj", // Benin
"bm", // Bermuda
"bn", // Brunei Darussalam
"bo", // Bolivia
"br", // Brazil
"bs", // Bahamas
"bt", // Bhutan
"bv", // Bouvet Island
"bw", // Botswana
"by", // Belarus
"bz", // Belize
"ca", // Canada
"cc", // Cocos (Keeling) Islands
"cd", // Democratic Republic of the Congo (formerly Zaire)
"cf", // Central African Republic
"cg", // Republic of the Congo
"ch", // Switzerland
"ci", // Côte d'Ivoire
"ck", // Cook Islands
"cl", // Chile
"cm", // Cameroon
"cn", // China, mainland
"co", // Colombia
"cr", // Costa Rica
"cu", // Cuba
"cv", // Cape Verde
"cw", // Curaçao
"cx", // Christmas Island
"cy", // Cyprus
"cz", // Czech Republic
"de", // Germany
"dj", // Djibouti
"dk", // Denmark
"dm", // Dominica
"do", // Dominican Republic
"dz", // Algeria
"ec", // Ecuador
"ee", // Estonia
"eg", // Egypt
"er", // Eritrea
"es", // Spain
"et", // Ethiopia
"eu", // European Union
"fi", // Finland
"fj", // Fiji
"fk", // Falkland Islands
"fm", // Federated States of Micronesia
"fo", // Faroe Islands
"fr", // France
"ga", // Gabon
"gb", // Great Britain (United Kingdom)
"gd", // Grenada
"ge", // Georgia
"gf", // French Guiana
"gg", // Guernsey
"gh", // Ghana
"gi", // Gibraltar
"gl", // Greenland
"gm", // The Gambia
"gn", // Guinea
"gp", // Guadeloupe
"gq", // Equatorial Guinea
"gr", // Greece
"gs", // South Georgia and the South Sandwich Islands
"gt", // Guatemala
"gu", // Guam
"gw", // Guinea-Bissau
"gy", // Guyana
"hk", // Hong Kong
"hm", // Heard Island and McDonald Islands
"hn", // Honduras
"hr", // Croatia (Hrvatska)
"ht", // Haiti
"hu", // Hungary
"id", // Indonesia
"ie", // Ireland (Éire)
"il", // Israel
"im", // Isle of Man
"in", // India
"io", // British Indian Ocean Territory
"iq", // Iraq
"ir", // Iran
"is", // Iceland
"it", // Italy
"je", // Jersey
"jm", // Jamaica
"jo", // Jordan
"jp", // Japan
"ke", // Kenya
"kg", // Kyrgyzstan
"kh", // Cambodia (Khmer)
"ki", // Kiribati
"km", // Comoros
"kn", // Saint Kitts and Nevis
"kp", // North Korea
"kr", // South Korea
"kw", // Kuwait
"ky", // Cayman Islands
"kz", // Kazakhstan
"la", // Laos (currently being marketed as the official domain for Los Angeles)
"lb", // Lebanon
"lc", // Saint Lucia
"li", // Liechtenstein
"lk", // Sri Lanka
"lr", // Liberia
"ls", // Lesotho
"lt", // Lithuania
"lu", // Luxembourg
"lv", // Latvia
"ly", // Libya
"ma", // Morocco
"mc", // Monaco
"md", // Moldova
"me", // Montenegro
"mg", // Madagascar
"mh", // Marshall Islands
"mk", // Republic of Macedonia
"ml", // Mali
"mm", // Myanmar
"mn", // Mongolia
"mo", // Macau
"mp", // Northern Mariana Islands
"mq", // Martinique
"mr", // Mauritania
"ms", // Montserrat
"mt", // Malta
"mu", // Mauritius
"mv", // Maldives
"mw", // Malawi
"mx", // Mexico
"my", // Malaysia
"mz", // Mozambique
"na", // Namibia
"nc", // New Caledonia
"ne", // Niger
"nf", // Norfolk Island
"ng", // Nigeria
"ni", // Nicaragua
"nl", // Netherlands
"no", // Norway
"np", // Nepal
"nr", // Nauru
"nu", // Niue
"nz", // New Zealand
"om", // Oman
"pa", // Panama
"pe", // Peru
"pf", // French Polynesia With Clipperton Island
"pg", // Papua New Guinea
"ph", // Philippines
"pk", // Pakistan
"pl", // Poland
"pm", // Saint-Pierre and Miquelon
"pn", // Pitcairn Islands
"pr", // Puerto Rico
"ps", // Palestinian territories (PA-controlled West Bank and Gaza Strip)
"pt", // Portugal
"pw", // Palau
"py", // Paraguay
"qa", // Qatar
"re", // Réunion
"ro", // Romania
"rs", // Serbia
"ru", // Russia
"rw", // Rwanda
"sa", // Saudi Arabia
"sb", // Solomon Islands
"sc", // Seychelles
"sd", // Sudan
"se", // Sweden
"sg", // Singapore
"sh", // Saint Helena
"si", // Slovenia
"sj", // Svalbard and Jan Mayen Islands Not in use (Norwegian dependencies; see .no)
"sk", // Slovakia
"sl", // Sierra Leone
"sm", // San Marino
"sn", // Senegal
"so", // Somalia
"sr", // Suriname
"ss", // South Sudan - National Communication Authority (NCA)
"st", // São Tomé and Príncipe
"su", // Soviet Union (deprecated)
"sv", // El Salvador
"sx", // Sint Maarten
"sy", // Syria
"sz", // Swaziland
"tc", // Turks and Caicos Islands
"td", // Chad
"tf", // French Southern and Antarctic Lands
"tg", // Togo
"th", // Thailand
"tj", // Tajikistan
"tk", // Tokelau
"tl", // East Timor (deprecated old code)
"tm", // Turkmenistan
"tn", // Tunisia
"to", // Tonga
// "tp", // East Timor (Retired)
"tr", // Turkey
"tt", // Trinidad and Tobago
"tv", // Tuvalu
"tw", // Taiwan, Republic of China
"tz", // Tanzania
"ua", // Ukraine
"ug", // Uganda
"uk", // United Kingdom
"us", // United States of America
"uy", // Uruguay
"uz", // Uzbekistan
"va", // Vatican City State
"vc", // Saint Vincent and the Grenadines
"ve", // Venezuela
"vg", // British Virgin Islands
"vi", // U.S. Virgin Islands
"vn", // Vietnam
"vu", // Vanuatu
"wf", // Wallis and Futuna
"ws", // Samoa (formerly Western Samoa)
"xn--2scrj9c", // ಭಾರತ National Internet eXchange of India
"xn--3e0b707e", // 한국 KISA (Korea Internet & Security Agency)
"xn--3hcrj9c", // ଭାରତ National Internet eXchange of India
"xn--45br5cyl", // ভাৰত National Internet eXchange of India
"xn--45brj9c", // ভারত National Internet Exchange of India
"xn--4dbrk0ce", // ישראל The Israel Internet Association (RA)
"xn--54b7fta0cc", // বাংলা Posts and Telecommunications Division
"xn--80ao21a", // қаз Association of IT Companies of Kazakhstan
"xn--90a3ac", // срб Serbian National Internet Domain Registry (RNIDS)
"xn--90ais", // бел (Belarus) Reliable Software Inc.
"xn--clchc0ea0b2g2a9gcd", // சிங்கப்பூர் Singapore Network Information Centre (SGNIC) Pte Ltd
"xn--d1alf", // мкд Macedonian Academic Research Network Skopje
"xn--e1a4c", // ею EURid vzw/asbl
"xn--fiqs8s", // 中国 China Internet Network Information Center
"xn--fiqz9s", // 中國 China Internet Network Information Center
"xn--fpcrj9c3d", // భారత్ National Internet Exchange of India
"xn--fzc2c9e2c", // ලංකා LK Domain Registry
"xn--gecrj9c", // ભારત National Internet Exchange of India
"xn--h2breg3eve", // भारतम् National Internet eXchange of India
"xn--h2brj9c", // भारत National Internet Exchange of India
"xn--h2brj9c8c", // भारोत National Internet eXchange of India
"xn--j1amh", // укр Ukrainian Network Information Centre (UANIC), Inc.
"xn--j6w193g", // 香港 Hong Kong Internet Registration Corporation Ltd.
"xn--kprw13d", // 台湾 Taiwan Network Information Center (TWNIC)
"xn--kpry57d", // 台灣 Taiwan Network Information Center (TWNIC)
"xn--l1acc", // мон Datacom Co.,Ltd
"xn--lgbbat1ad8j", // الجزائر CERIST
"xn--mgb9awbf", // عمان Telecommunications Regulatory Authority (TRA)
"xn--mgba3a4f16a", // ایران Institute for Research in Fundamental Sciences (IPM)
"xn--mgbaam7a8h", // امارات Telecommunications Regulatory Authority (TRA)
"xn--mgbah1a3hjkrd", // موريتانيا Université de Nouakchott Al Aasriya
"xn--mgbai9azgqp6j", // پاکستان National Telecommunication Corporation
"xn--mgbayh7gpa", // الاردن National Information Technology Center (NITC)
"xn--mgbbh1a", // بارت National Internet eXchange of India
"xn--mgbbh1a71e", // بھارت National Internet Exchange of India
"xn--mgbc0a9azcg", // المغرب Agence Nationale de Réglementation des Télécommunications (ANRT)
"xn--mgbcpq6gpa1a", // البحرين Telecommunications Regulatory Authority (TRA)
"xn--mgberp4a5d4ar", // السعودية Communications and Information Technology Commission
"xn--mgbgu82a", // ڀارت National Internet eXchange of India
"xn--mgbpl2fh", // سودان (Sudan) Sudan Internet Society
"xn--mgbtx2b", // عراق Communications and Media Commission (CMC)
"xn--mgbx4cd0ab", // مليسيا MYNIC Berhad
"xn--mix891f", // 澳門 Bureau of Telecommunications Regulation (DSRT)
"xn--node", // გე Information Technologies Development Center (ITDC)
"xn--o3cw4h", // ไทย Thai Network Information Center Foundation
"xn--ogbpf8fl", // سورية National Agency for Network Services (NANS)
"xn--p1ai", // рф Coordination Center for TLD RU
"xn--pgbs0dh", // تونس Agence Tunisienne d'Internet
"xn--q7ce6a", // ລາວ Lao National Internet Center (LANIC)
"xn--qxa6a", // ευ EURid vzw/asbl
"xn--qxam", // ελ ICS-FORTH GR
"xn--rvc1e0am3e", // ഭാരതം National Internet eXchange of India
"xn--s9brj9c", // ਭਾਰਤ National Internet Exchange of India
"xn--wgbh1c", // مصر National Telecommunication Regulatory Authority - NTRA
"xn--wgbl6a", // قطر Communications Regulatory Authority
"xn--xkc2al3hye2a", // இலங்கை LK Domain Registry
"xn--xkc2dl3a5ee0h", // இந்தியா National Internet Exchange of India
"xn--y9a3aq", // հայ (Armenia) Internet Society
"xn--yfro4i67o", // 新加坡 Singapore Network Information Centre (SGNIC) Pte Ltd
"xn--ygbi2ammx", // فلسطين Ministry of Telecom & Information Technology (MTIT)
"ye", // Yemen
"yt", // Mayotte
"za", // South Africa
"zm", // Zambia
"zw", // Zimbabwe
};
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
// TLDs that are only meaningful on a local network (they never resolve on the public Internet).
private static final String[] LOCAL_TLDS = {
"localdomain", // Also widely used as localhost.localdomain
"localhost", // RFC2606 defined; conventionally resolves to the loopback interface
};
// Additional arrays to supplement or override the built-in ones.
// The PLUS arrays contain extra entries to treat as valid; the MINUS arrays contain
// built-in entries to treat as invalid.
/*
 * This field is used to detect whether the getInstance has been called.
 * After this, the method updateTLDOverride is not allowed to be called.
 * This field does not need to be volatile since it is only accessed from
 * synchronized methods.
 */
private static boolean inUse;
/*
 * These arrays are mutable.
 * They can only be updated by the updateTLDOverride method, and readers must first get an instance
 * using the getInstance methods which are all (now) synchronized.
 * The only other access is via getTLDEntries which is now synchronized.
 */
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] countryCodeTLDsPlus = EMPTY_STRING_ARRAY;
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] genericTLDsPlus = EMPTY_STRING_ARRAY;
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] countryCodeTLDsMinus = EMPTY_STRING_ARRAY;
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] genericTLDsMinus = EMPTY_STRING_ARRAY;
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] localTLDsMinus = EMPTY_STRING_ARRAY;
// WARNING: this array MUST be sorted, otherwise it cannot be searched reliably using binary search
private static String[] localTLDsPlus = EMPTY_STRING_ARRAY;
/**
 * Enum used by {@link DomainValidator#updateTLDOverride(ArrayType, String[])}
 * to determine which override array to update / fetch.
 * @since 1.5.0
 * @since 1.5.1 made public and added read-only array references
 */
public enum ArrayType {
/** Update (or get a copy of) the GENERIC_TLDS_PLUS table containing additional generic TLDs */
GENERIC_PLUS,
/** Update (or get a copy of) the GENERIC_TLDS_MINUS table containing deleted generic TLDs */
GENERIC_MINUS,
/** Update (or get a copy of) the COUNTRY_CODE_TLDS_PLUS table containing additional country code TLDs */
COUNTRY_CODE_PLUS,
/** Update (or get a copy of) the COUNTRY_CODE_TLDS_MINUS table containing deleted country code TLDs */
COUNTRY_CODE_MINUS,
/** Gets a copy of the generic TLDS table */
GENERIC_RO,
/** Gets a copy of the country code table */
COUNTRY_CODE_RO,
/** Gets a copy of the infrastructure table */
INFRASTRUCTURE_RO,
/** Gets a copy of the local table */
LOCAL_RO,
/**
 * Update (or get a copy of) the LOCAL_TLDS_PLUS table containing additional local TLDs
 * @since 1.7
 */
LOCAL_PLUS,
/**
 * Update (or get a copy of) the LOCAL_TLDS_MINUS table containing deleted local TLDs
 * @since 1.7
 */
LOCAL_MINUS
;
}
/**
 * Used to specify overrides when creating a new class.
 * @since 1.7
 */
public static class Item {
/** The override table this item applies to, e.g. GENERIC_PLUS, LOCAL_PLUS. */
final ArrayType type;
/** The TLD entries; lower-cased and sorted when the override is applied, not here. */
final String[] values;
/**
 * Constructs a new instance.
 * @param type ArrayType, e.g. GENERIC_PLUS, LOCAL_PLUS
 * @param values array of TLDs. Will be lower-cased and sorted
 */
public Item(final ArrayType type, final String... values) {
this.type = type;
this.values = values; // no need to copy here; callers must not modify the array afterwards
}
}
/**
 * Updates one of the TLD override arrays.
 * <p>
 * May only be invoked at program startup, before any instance has been obtained
 * via one of the {@code getInstance} methods.
 * <p>
 * For example:
 * <p>
 * {@code DomainValidator.updateTLDOverride(ArrayType.GENERIC_PLUS, "apache")}
 * <p>
 * Passing an empty array clears the chosen override table.
 *
 * @param table the table to update, see {@link DomainValidator.ArrayType};
 * must be one of the following
 * <ul>
 * <li>COUNTRY_CODE_MINUS</li>
 * <li>COUNTRY_CODE_PLUS</li>
 * <li>GENERIC_MINUS</li>
 * <li>GENERIC_PLUS</li>
 * <li>LOCAL_MINUS</li>
 * <li>LOCAL_PLUS</li>
 * </ul>
 * @param tlds the array of TLDs, must not be null
 * @throws IllegalStateException if the method is called after getInstance
 * @throws IllegalArgumentException if one of the read-only tables is requested
 * @since 1.5.0
 */
public static synchronized void updateTLDOverride(final ArrayType table, final String... tlds) {
    if (inUse) {
        throw new IllegalStateException("Can only invoke this method before calling getInstance");
    }
    // Lookups are always performed against lower-case entries, so normalize before storing.
    final String[] normalized = Arrays.stream(tlds)
            .map(tld -> tld.toLowerCase(Locale.ENGLISH))
            .sorted()
            .toArray(String[]::new);
    switch (table) {
    case COUNTRY_CODE_MINUS:
        countryCodeTLDsMinus = normalized;
        break;
    case COUNTRY_CODE_PLUS:
        countryCodeTLDsPlus = normalized;
        break;
    case GENERIC_MINUS:
        genericTLDsMinus = normalized;
        break;
    case GENERIC_PLUS:
        genericTLDsPlus = normalized;
        break;
    case LOCAL_MINUS:
        localTLDsMinus = normalized;
        break;
    case LOCAL_PLUS:
        localTLDsPlus = normalized;
        break;
    case COUNTRY_CODE_RO:
    case GENERIC_RO:
    case INFRASTRUCTURE_RO:
    case LOCAL_RO:
        throw new IllegalArgumentException("Cannot update the table: " + table);
    default:
        throw new IllegalArgumentException(UNEXPECTED_ENUM_VALUE + table);
    }
}
/**
 * Gets a copy of one of the class-level internal arrays.
 *
 * @param table the array type (any of the enum values)
 * @return a fresh copy of the requested array
 * @throws IllegalArgumentException if the table type is unexpected (should not happen)
 * @since 1.5.1
 */
public static synchronized String [] getTLDEntries(final ArrayType table) {
    // Each case clones the backing array so callers cannot mutate internal state.
    switch (table) {
    case COUNTRY_CODE_MINUS:
        return Arrays.copyOf(countryCodeTLDsMinus, countryCodeTLDsMinus.length);
    case COUNTRY_CODE_PLUS:
        return Arrays.copyOf(countryCodeTLDsPlus, countryCodeTLDsPlus.length);
    case GENERIC_MINUS:
        return Arrays.copyOf(genericTLDsMinus, genericTLDsMinus.length);
    case GENERIC_PLUS:
        return Arrays.copyOf(genericTLDsPlus, genericTLDsPlus.length);
    case LOCAL_MINUS:
        return Arrays.copyOf(localTLDsMinus, localTLDsMinus.length);
    case LOCAL_PLUS:
        return Arrays.copyOf(localTLDsPlus, localTLDsPlus.length);
    case GENERIC_RO:
        return Arrays.copyOf(GENERIC_TLDS, GENERIC_TLDS.length);
    case COUNTRY_CODE_RO:
        return Arrays.copyOf(COUNTRY_CODE_TLDS, COUNTRY_CODE_TLDS.length);
    case INFRASTRUCTURE_RO:
        return Arrays.copyOf(INFRASTRUCTURE_TLDS, INFRASTRUCTURE_TLDS.length);
    case LOCAL_RO:
        return Arrays.copyOf(LOCAL_TLDS, LOCAL_TLDS.length);
    default:
        throw new IllegalArgumentException(UNEXPECTED_ENUM_VALUE + table);
    }
}
/**
 * Gets a copy of one of the instance-level override arrays.
 *
 * @param table the array type; must be one of the updatable (PLUS/MINUS) enum values
 * @return a fresh copy of the requested array
 * @throws IllegalArgumentException if the table type is unexpected, e.g. GENERIC_RO
 * @since 1.7
 */
public String [] getOverrides(final ArrayType table) {
    // Each case clones the backing array so callers cannot mutate internal state.
    switch (table) {
    case COUNTRY_CODE_MINUS:
        return Arrays.copyOf(myCountryCodeTLDsMinus, myCountryCodeTLDsMinus.length);
    case COUNTRY_CODE_PLUS:
        return Arrays.copyOf(myCountryCodeTLDsPlus, myCountryCodeTLDsPlus.length);
    case GENERIC_MINUS:
        return Arrays.copyOf(myGenericTLDsMinus, myGenericTLDsMinus.length);
    case GENERIC_PLUS:
        return Arrays.copyOf(myGenericTLDsPlus, myGenericTLDsPlus.length);
    case LOCAL_MINUS:
        return Arrays.copyOf(myLocalTLDsMinus, myLocalTLDsMinus.length);
    case LOCAL_PLUS:
        return Arrays.copyOf(myLocalTLDsPlus, myLocalTLDsPlus.length);
    default:
        throw new IllegalArgumentException(UNEXPECTED_ENUM_VALUE + table);
    }
}
/**
 * Converts potentially Unicode input to punycode.
 * If conversion fails, returns the original input.
 *
 * @param input the string to convert, not null
 * @return converted input, or original input if conversion fails
 */
// Needed by UrlValidator
static String unicodeToASCII(final String input) {
    if (isOnlyASCII(input)) { // skip possibly expensive processing
        return input;
    }
    try {
        final String ascii = IDN.toASCII(input);
        if (IDNBUGHOLDER.IDN_TOASCII_PRESERVES_TRAILING_DOTS) {
            return ascii; // this JVM does not strip trailing dots
        }
        final int length = input.length();
        if (length == 0) { // check there is a last character
            return input;
        }
        // RFC3490 3.1. 1)
        // Whenever dots are used as label separators, the following
        // characters MUST be recognized as dots: U+002E (full stop), U+3002
        // (ideographic full stop), U+FF0E (fullwidth full stop), U+FF61
        // (halfwidth ideographic full stop).
        final char lastChar = input.charAt(length - 1); // fetch original last char
        final boolean endedWithDot = lastChar == '\u002E' // "." full stop
                || lastChar == '\u3002' // ideographic full stop
                || lastChar == '\uFF0E' // fullwidth full stop
                || lastChar == '\uFF61'; // halfwidth ideographic full stop
        // IDN.toASCII dropped the trailing dot on this JVM, so restore it
        return endedWithDot ? ascii + "." : ascii;
    } catch (final IllegalArgumentException e) { // input is not valid
        return input;
    }
}
/*
 * Lazy-initialization holder: the probe below runs only when the flag is first read
 * (i.e. when unicodeToASCII needs it), not when the enclosing class loads.
 */
private static class IDNBUGHOLDER {
// Probe whether this JVM's IDN.toASCII keeps a trailing dot
// (some implementations strip it — see the restore logic in unicodeToASCII).
private static boolean keepsTrailingDot() {
final String input = "a."; // must be a valid name
return input.equals(IDN.toASCII(input));
}
// true if IDN.toASCII preserves trailing dots on this JVM
private static final boolean IDN_TOASCII_PRESERVES_TRAILING_DOTS = keepsTrailingDot();
}
/*
 * Tests whether the input consists solely of 7-bit ASCII characters.
 * A null input is treated as all-ASCII.
 */
private static boolean isOnlyASCII(final String input) {
    if (input == null) {
        return true;
    }
    return input.chars().allMatch(ch -> ch <= 0x7F); // CHECKSTYLE IGNORE MagicNumber
}
/**
 * Tests whether a sorted array contains the specified key.
 *
 * @param sortedArray the array to search; must be sorted in ascending order
 * @param key the key to find
 * @return {@code true} if the array contains the key
 */
private static boolean arrayContains(final String[] sortedArray, final String key) {
    final int index = Arrays.binarySearch(sortedArray, key);
    return index >= 0; // negative means "not found" (insertion point encoding)
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains <i>independent</i> validation routines.
* <h2>Table of Contents</h2>
* <ul>
* <li>1. <a href="#overview">Overview</a></li>
* <li>2. <a href="#date">Date and Time Validators</a>
* <ul>
* <li>2.1 <a href="#date.overview">Overview</a></li>
* <li>2.2 <a href="#date.validate">Validating a Date Value</a></li>
* <li>2.3 <a href="#date.format">Formatting</a></li>
* <li>2.4 <a href="#date.timezone">Time Zones</a></li>
* <li>2.5 <a href="#date.compare">Comparing Dates and Times</a></li>
* </ul>
* </li>
* <li>3. <a href="#numeric">Numeric Validators</a>
* <ul>
* <li>3.1 <a href="#numeric.overview">Overview</a></li>
* <li>3.2 <a href="#numeric.validate">Validating a Numeric Value</a></li>
* <li>3.3 <a href="#numeric.format">Formatting</a></li>
* <li>3.4 <a href="#numeric.compare">Comparing Numbers</a></li>
* <li>3.5 <a href="#numeric.currency">Currency Validation</a></li>
* <li>3.6 <a href="#numeric.percent">Percent Validation</a></li>
* </ul>
* </li>
* <li>4. <a href="#other">Other Validators</a>
* <ul>
* <li>4.1 <a href="#other.overview">Overview</a></li>
* <li>4.2 <a href="#other.regex">Regular Expression validation</a></li>
* <li>4.3 <a href="#other.checkdigit">Check Digit Validation/Calculation</a></li>
* <li>4.4 <a href="#other.code">General Code Validation</a></li>
* <li>4.5 <a href="#other.isbn">ISBN Validation</a></li>
* <li>4.6 <a href="#other.inet">IP Address Validation</a></li>
* <li>4.7 <a href="#other.email">Email Address Validation</a></li>
* <li>4.8 <a href="#other.url">URL Validation</a></li>
* <li>4.9 <a href="#other.domain">Domain Name Validation</a></li>
* </ul>
* </li>
* </ul>
* <a id="overview"></a>
* <h2>1. Overview</h2>
* <p>
* Commons Validator serves two purposes:
* </p>
* <ul>
* <li>To provide standard, independent validation routines/functions.</li>
* <li>To provide a <i>mini</i> framework for Validation.</li>
* </ul>
* <p>
* This package has been created, since version 1.3.0, in an attempt to clearly
* separate these two concerns and is the location for the standard, independent
* validation routines/functions in <em>Commons Validator</em>.
* </p>
* <p>
* The contents of this package have no dependencies on the framework aspect of
* Commons Validator and can be used on their own.
* </p>
* <a id="date"></a>
* <h2>2. Date and Time Validators</h2>
* <a id="date.overview"></a>
* <h2>2.1 Overview</h2>
* <p>
* The date and time validators either validate according to a specified <i>format</i>
* or use a standard <i>format</i> for a specified <code>Locale</code>.
* </p>
* <ul>
* <li><a href="DateValidator.html">Date Validator</a> - validates dates
* converting to a <code>java.util.Date</code> type.</li>
* <li><a href="CalendarValidator.html">Calendar Validator</a> - validates dates
* converting to a <code>java.util.Calendar</code> type.</li>
* <li><a href="TimeValidator.html">Time Validator</a> - validates times
* converting to a <code>java.util.Calendar</code> type.</li>
* </ul>
* <a id="date.validate"></a>
* <h2>2.2 Validating a Date Value</h2>
* <p>
* You can either use one of the <code>isValid()</code> methods to just determine
* if a date is valid, or use one of the <code>validate()</code> methods to
* validate a date and convert it to a <code>java.util.Date</code>...
* </p>
* <pre>
* // Get the Date validator
* DateValidator validator = DateValidator.getInstance();
* // Validate/Convert the date
* Date fooDate = validator.validate(fooString, "dd/MM/yyyy");
* if (fooDate == null) {
* // error...not a valid date
* return;
* }
* </pre>
* <p>The following methods are provided to validate a date/time (return a boolean result):
* </p>
* <ul>
* <li><code>isValid(<i>value</i>)</code></li>
* <li><code>isValid(<i>value</i>, <i>pattern</i>)</code></li>
* <li><code>isValid(<i>value</i>, Locale)</code></li>
* <li><code>isValid(<i>value</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <p>The following methods are provided to validate a date/time and convert it to either a
* <code>java.util.Date</code> or <code>java.util.Calendar</code>:
* </p>
* <ul>
* <li><code>validate(<i>value</i>)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>)</code></li>
* <li><code>validate(<i>value</i>, Locale)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <a id="date.format"></a>
* <h2>2.3 Formatting</h2>
* <p>
* Formatting and validating are two sides of the same coin. Typically
* <i>input</i> values which are converted from Strings according to a
* specified <i>format</i> also have to be rendered for <i>output</i> in
* the same format. These validators provide the mechanism for formatting from
* date/time objects to Strings. The following methods are provided to format
* date/time values as Strings:
* </p>
* <ul>
* <li><code>format(<i>date/calendar</i>)</code></li>
* <li><code>format(<i>date/calendar</i>, <i>pattern</i>)</code></li>
* <li><code>format(<i>date/calendar</i>, Locale)</code></li>
* <li><code>format(<i>date/calendar</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <a id="date.timezone"></a>
* <h2>2.4 Time Zones</h2>
* <p>
* If the date being parsed relates to a different time zone than the
* system default, you can specify the <code>TimeZone</code> to use when
* validating/converting:
* </p>
* <pre>
* // Get the GMT time zone
* TimeZone GMT = TimeZone.getTimeZone("GMT");
* // Validate/Convert the date using GMT
* Date fooDate = validator.validate(fooString, "dd/MM/yyyy", GMT);
* </pre>
* <p>The following Time Zone <i>flavors</i> of the Validation/Conversion methods
* are provided:</p>
* <ul>
* <li><code>validate(<i>value</i>, TimeZone)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>, TimeZone)</code></li>
* <li><code>validate(<i>value</i>, Locale, TimeZone)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>, Locale, TimeZone)</code></li>
* </ul>
* <a id="date.compare"></a>
* <h2>2.5 Comparing Dates and Times</h2>
* <p>
* As well as validating that a value is a valid date or time, these validators
* also provide <i>date comparison</i> functions. The <code>DateValidator</code>
* and <code>CalendarValidator</code> provide functions for comparing years,
* quarters, months, weeks and dates and the <code>TimeValidator</code> provides
* functions for comparing hours, minutes, seconds and milliseconds.
* For example, to check that a date is in the current month, you could use
* the <code>compareMonths()</code> method, which compares the year and month
* components of a date:
* </p>
* <pre>
* // Check if the date is in the current month
* int compare = validator.compareMonths(fooDate, new Date(), null);
* if (compare == 0) {
* // do current month processing
* return;
* }
* // Check if the date is in the previous quarter
* compare = validator.compareQuarters(fooDate, new Date(), null);
* if (compare < 0) {
* // do previous quarter processing
* return;
* }
* // Check if the date is in the next year
* compare = validator.compareYears(fooDate, new Date(), null);
* if (compare > 0) {
* // do next year processing
* return;
* }
* </pre>
* <a id="numeric"></a>
* <h2>3 Numeric Validators</h2>
* <a id="numeric.overview"></a>
* <h2>3.1 Overview</h2>
* <p>
* The numeric validators either validate according to a specified <i>format</i>
* or use a standard <i>format</i> for a specified <code>Locale</code> or use
* a <i>custom</i> format for a specified <code>Locale</code>.
* </p>
* <ul>
* <li><a href="ByteValidator.html">Byte Validator</a> - validates numbers
* converting to a <code>java.lang.Byte</code> type.</li>
* <li><a href="ShortValidator.html">Short Validator</a> - validates numbers
* converting to a <code>java.lang.Short</code> type.</li>
* <li><a href="IntegerValidator.html">Integer Validator</a> - validates numbers
* converting to a <code>java.lang.Integer</code> type.</li>
* <li><a href="LongValidator.html">Long Validator</a> - validates numbers
* converting to a <code>java.lang.Long</code> type.</li>
* <li><a href="FloatValidator.html">Float Validator</a> - validates numbers
* converting to a <code>java.lang.Float</code> type.</li>
* <li><a href="DoubleValidator.html">Double Validator</a> - validates numbers
* converting to a <code>java.lang.Double</code> type.</li>
* <li><a href="BigIntegerValidator.html">BigInteger Validator</a> - validates numbers
* converting to a <code>java.math.BigInteger</code> type.</li>
* <li><a href="BigDecimalValidator.html">BigDecimal Validator</a> - validates numbers
* converting to a <code>java.math.BigDecimal</code> type.</li>
* </ul>
* <a id="numeric.validate"></a>
* <h2>3.2 Validating a Numeric Value</h2>
* <p>
* You can either use one of the <code>isValid()</code> methods to just determine
* if a number is valid, or use one of the <code>validate()</code> methods to
* validate a number and convert it to an appropriate type.
* </p>
* <p>
* The following example validates an integer against a custom pattern
* for the <i>German</i> locale. Please note the format is specified using
* the standard symbols for <code>java.text.DecimalFormat</code> so although
* the decimal separator is indicated as a period (".") in the format, the
* validator will check using the German decimal separator - which is a comma (",").
* </p>
* <pre>
* // Get the Integer validator
* IntegerValidator validator = IntegerValidator.getInstance();
* // Validate/Convert the number
* Integer fooInteger = validator.validate(fooString, "#,##0.00", Locale.GERMAN);
* if (fooInteger == null) {
* // error...not a valid Integer
* return;
* }
* </pre>
* <p>The following methods are provided to validate a number (return a boolean result):</p>
* <ul>
* <li><code>isValid(<i>value</i>)</code></li>
* <li><code>isValid(<i>value</i>, <i>pattern</i>)</code></li>
* <li><code>isValid(<i>value</i>, Locale)</code></li>
* <li><code>isValid(<i>value</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <p>The following methods are provided to validate a number and convert it one of
* the <code>java.lang.Number</code> implementations:</p>
* <ul>
* <li><code>validate(<i>value</i>)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>)</code></li>
* <li><code>validate(<i>value</i>, Locale)</code></li>
* <li><code>validate(<i>value</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <a id="numeric.format"></a>
* <h2>3.3 Formatting</h2>
* <p>
* Formatting and validating are two sides of the same coin. Typically
* <i>input</i> values which are converted from Strings according to a
* specified <i>format</i> also have to be rendered for <i>output</i> in
* the same format. These validators provide the mechanism for formatting from
* numeric objects to Strings. The following methods are provided to format
* numeric values as Strings:
* </p>
* <ul>
* <li><code>format(<i>number</i>)</code></li>
* <li><code>format(<i>number</i>, <i>pattern</i>)</code></li>
* <li><code>format(<i>number</i>, Locale)</code></li>
* <li><code>format(<i>number</i>, <i>pattern</i>, Locale)</code></li>
* </ul>
* <a id="numeric.compare"></a>
* <h2>3.4 Comparing Numbers</h2>
* <p>
* As well as validating that a value is a valid number, these validators
* also provide functions for validating the <i>minimum</i>, <i>maximum</i>
* and <i>range</i> of a value.
* </p>
* <pre>
* // Check the number is between 25 and 75
 * if (validator.isInRange(fooInteger, 25, 75)) {
* // valid...in the specified range
* return;
* }
* </pre>
* <a id="numeric.currency"></a>
* <h2>3.5 Currency Validation</h2>
* <p>
* A default <a href="CurrencyValidator.html">Currency Validator</a>
* implementation is provided, although all the <i>numeric</i> validators
* support currency validation. The default implementation converts
* currency amounts to a <code>java.math.BigDecimal</code> and additionally
* it provides <i>lenient</i> currency symbol validation. That is, currency
* amounts are valid with <i>or</i> without the currency symbol.
* </p>
* <pre>
* BigDecimalValidator validator = CurrencyValidator.getInstance();
* BigDecimal fooAmount = validator.validate("$12,500.00", Locale.US);
* if (fooAmount == null) {
* // error...not a valid currency amount
* return;
* }
* // Check the amount is a minimum of $1,000
 * if (validator.minValue(fooAmount, 1000)) {
* // valid...in the specified range
* return;
* }
* </pre>
* <p>
* If, for example, you want to use the <a href="IntegerValidator.html">Integer
* Validator</a> to validate a currency, then you can simply create a
* new instance with the appropriate <i>format style</i>. Note that
* the other validators do not support the <i>lenient</i> currency symbol
* validation.
* </p>
* <pre>
* IntegerValidator validator =
* new IntegerValidator(true, IntegerValidator.CURRENCY_FORMAT);
* String pattern = "#,###" + '\u00A4' + '\u00A4'; // Use international symbol
* Integer fooAmount = validator.validate("10.100EUR", pattern, Locale.GERMAN);
* if (fooAmount == null) {
* // error...not a valid currency amount
* return;
* }
* </pre>
* <a id="numeric.percent"></a>
* <h2>3.6 Percent Validation</h2>
* <p>
* A default <a href="PercentValidator.html">Percent Validator</a>
* implementation is provided, although the <i>Float</i>,
* <i>Double</i> and <i>BigDecimal</i> validators also support
* percent validation. The default implementation converts
* percent amounts to a <code>java.math.BigDecimal</code> and additionally
* it provides <i>lenient</i> percent symbol validation. That is, percent
* amounts are valid with <i>or</i> without the percent symbol.
* </p>
* <pre>
* BigDecimalValidator validator = PercentValidator.getInstance();
* BigDecimal fooPercent = validator.validate("20%", Locale.US);
* if (fooPercent == null) {
* // error...not a valid percent
* return;
* }
* // Check the percent is between 10% and 90%
 * if (validator.isInRange(fooPercent, 0.1, 0.9)) {
* // valid...in the specified range
* return;
* }
* </pre>
* <p>
* If, for example, you want to use the <a href="FloatValidator.html">Float
* Validator</a> to validate a percent, then you can simply create a
* new instance with the appropriate <i>format style</i>. Note that
* the other validators do not support the <i>lenient</i> percent symbol
* validation.
* </p>
* <pre>
* FloatValidator validator =
* new FloatValidator(true, FloatValidator.PERCENT_FORMAT);
* Float fooPercent = validator.validate("20%", "###%");
* if (fooPercent == null) {
* // error...not a valid percent
* return;
* }
* </pre>
* <p>
* <strong>Note</strong>: in theory the other numeric validators besides
* <i>Float</i>, <i>Double</i> and <i>BigDecimal</i> (i.e. <i>Byte</i>,
* <i>Short</i>, <i>Integer</i>, <i>Long</i> and <i>BigInteger</i>)
* also support percent validation. However, since they don't allow fractions
* they will only work with percentages greater than 100%.
* </p>
* <a id="other"></a>
* <h2>4. Other Validators</h2>
* <a id="other.overview"></a>
* <h2>4.1 Overview</h2>
* <p>
* This section lists other available validators.
* </p>
* <ul>
* <li><a href="#other.regex">Regular Expressions</a> - validates
* using Java 1.4+ regular expression support</li>
* <li><a href="#other.checkdigit">Check Digit</a> - validates/calculates
* check digits (i.e. EAN/UPC, credit card, ISBN).</li>
* <li><a href="#other.code">Code Validation</a> - provides generic
* code validation - format, minimum/maximum length and check digit.</li>
* <li><a href="#other.isbn">ISBN Validation</a> - provides ISBN-10
* and ISBN-13 validation.</li>
* <li><a href="#other.inet">IP Address Validation</a> - provides IPv4 address
* validation.</li>
* <li><a href="#other.email">Email Address Validation</a> - provides email
* address validation according to RFC 822 standards.</li>
* <li><a href="#other.url">URL Validation</a> - provides URL validation on
* scheme, domain, and authority.</li>
* <li><a href="#other.domain">Domain Name Validation</a> - provides domain
* name and IANA TLD validation.</li>
* </ul>
* <a id="other.regex"></a>
* <h2>4.2 Regular Expression Validation</h2>
* <p>
* Regular expression validation can be done either by using the <i>static</i>
 * methods provided by <a href="RegexValidator.html">RegexValidator</a> or
* by creating a new instance, which caches and re-uses compiled Patterns.
* </p>
* <ul>
 * <li><b>Method Flavors</b> - three <i>flavors</i> of validation methods are provided:</li>
* <li>
* <ul>
* <li><code>isValid()</code> methods return true/false to indicate
* whether validation was successful.</li>
* <li><code>validate()</code> methods return a <code>String</code>
* value of the matched <i>groups</i> aggregated together or
* <code>null</code> if invalid.</li>
* <li><code>match()</code> methods return a <code>String</code> array
* of the matched <i>groups</i> or <code>null</code> if invalid.</li>
* </ul>
* </li>
* <li><b>Case Sensitivity</b> - matching can be done in either a <i>case
* sensitive</i> or <i>case in-sensitive</i> way.</li>
* <li><b>Multiple Expressions</b> - instances of the
* <a href="RegexValidator.html">RegexValidator</a>
* can be created to either match against a single regular expression
* or set (String array) of regular expressions.</li>
* </ul>
* <p>
* Below is an example of using one of the static methods to validate,
* matching in a <i>case insensitive</i> manner and returning a String
* of the matched groups (which doesn't include the hyphen).
* </p>
* <pre>
* // set up the parameters
* boolean caseSensitive = false;
* String regex = "^([A-Z]*)(?:\\-)([A-Z]*)$";
* // validate - result should be a String of value "abcdef"
* String result = RegexValidator.validate("abc-def", regex, caseSensitive);
* </pre>
* <p>The following static methods are provided for regular expression validation:
* </p>
* <ul>
* <li><code>isValid(<i>value</i>, <i>regex</i>)</code></li>
* <li><code>isValid(<i>value</i>, <i>regex</i>, <i>caseSensitive</i>)</code></li>
* <li><code>validate(<i>value</i>, <i>regex</i>)</code></li>
* <li><code>validate(<i>value</i>, <i>regex</i>, <i>caseSensitive</i>)</code></li>
* <li><code>match(<i>value</i>, <i>regex</i>)</code></li>
* <li><code>match(<i>value</i>, <i>regex</i>, <i>caseSensitive</i>)</code></li>
* </ul>
* <p>
* Below is an example of creating an instance of
* <a href="RegexValidator.html">RegexValidator</a> matching in a <i>case insensitive</i>
* manner against a set of regular expressions:
* </p>
* <pre>
* // set up the parameters
* boolean caseSensitive = false;
 * String regex1 = "^([A-Z]*)(?:\\-)([A-Z]*)*$";
 * String regex2 = "^([A-Z]*)$";
 * String[] regexs = new String[] {regex1, regex2};
* // Create the validator
* RegexValidator validator = new RegexValidator(regexs, caseSensitive);
* // Validate true/false
* boolean valid = validator.isValid("abc-def");
* // Validate and return a String
* String result = validator.validate("abc-def");
* // Validate and return a String[]
* String[] groups = validator.match("abc-def");
* </pre>
* <p>See the
* <a href="RegexValidator.html">RegexValidator</a> javadoc for a full list
* of the available constructors.
* </p>
* <a id="other.checkdigit"></a>
* <h2>4.3 Check Digit validation/calculation</h2>
* <p>
* <a href="checkdigit/CheckDigit.html">CheckDigit</a> defines a new
* type for the calculation and validation of check digits with the
* following methods:
* </p>
* <ul>
* <li><code>isValid(<i>code</i>)</code> - validates the check digit of a code,
* returning {@code true} or {@code false}.</li>
 * <li><code>calculate(<i>code</i>)</code> - calculates the check digit for a code
* returning the check digit character.</li>
* </ul>
* <p>
* The following implementations are provided:
* </p>
* <ul>
 * <li><a href="checkdigit/ABANumberCheckDigit.html">ABANumberCheckDigit</a>
* for <b>ABA Number</b> (or <b>Routing Transit Number</b> (RTN)) check digit calculation.</li>
* <li><a href="checkdigit/CUSIPCheckDigit.html">CUSIPCheckDigit</a>
* for <b>CUSIP</b> (North American Securities) check digit calculation.</li>
* <li><a href="checkdigit/EAN13CheckDigit.html">EAN13CheckDigit</a>
* for <b>EAN-13</b>, <b>UPC</b>, <b>ISBN-13</b> check digit calculation.</li>
* <li><a href="checkdigit/ISBNCheckDigit.html">ISBNCheckDigit</a>
* for <b>ISBN-10</b> and <b>ISBN-13</b> check digit calculation.</li>
* <li><a href="checkdigit/ISBN10CheckDigit.html">ISBN10CheckDigit</a>
* for <b>ISBN-10</b> check digit calculation.</li>
* <li><a href="checkdigit/ISINCheckDigit.html">ISINCheckDigit</a>
* for <b>ISIN</b> International Securities Identifying Number check digit calculation.</li>
* <li><a href="checkdigit/LuhnCheckDigit.html">LuhnCheckDigit</a>
* for <b>Luhn</b> check digit calculation - used by <b>credit cards</b>.</li>
* <li><a href="checkdigit/ModulusCheckDigit.html">ModulusCheckDigit</a>
* - <b>abstract</b> class for custom <b>modulus</b> check digit
* implementations.</li>
* <li><a href="checkdigit/SedolCheckDigit.html">SedolCheckDigit</a>
* for <b>SEDOL</b> (UK Securities) check digit calculation.</li>
* <li><a href="checkdigit/VerhoeffCheckDigit.html">VerhoeffCheckDigit</a>
* for <b>Verhoeff</b> (Dihedral) check digit calculation.</li>
* </ul>
* <p>
* The following examples show validating the check digit of a code:
* </p>
* <pre>
* // Luhn check digit validation
* boolean valid = LuhnCheckDigit.INSTANCE.isValid(code);
* // EAN / UPC / ISBN-13 check digit validation
* boolean valid = EAN13CheckDigit.INSTANCE.isValid(code);
* // ISBN-10 check digit validation
* boolean valid = ISBNCheckDigit.ISBN10.isValid(code);
* boolean valid = ISBN10CheckDigit.INSTANCE.isValid(code);
* // ISBN-13 check digit validation
* boolean valid = ISBNCheckDigit.ISBN13.isValid(code);
* // ISBN-10 or ISBN-13 check digit validation
* boolean valid = ISBNCheckDigit.ISBN.isValid(code);
* </pre>
* <p>
 * The following examples show calculating the check digit of a code:
* </p>
* <pre>
* // Luhn check digit validation
* char checkdigit = LuhnCheckDigit.INSTANCE.calculate(code);
* // EAN / UPC / ISBN-13 check digit validation
* char checkdigit = EAN13CheckDigit.INSTANCE.calculate(code);
* // ISBN-10 check digit validation
 * char checkdigit = ISBNCheckDigit.ISBN10.calculate(code);
* char checkdigit = ISBN10CheckDigit.INSTANCE.calculate(code);
* // ISBN-13 check digit validation
* char checkdigit = ISBNCheckDigit.ISBN13.calculate(code);
* // ISBN-10 or ISBN-13 check digit validation
* char checkdigit = ISBNCheckDigit.ISBN.calculate(code);
* </pre>
* <a id="other.code"></a>
* <h2>4.4 General Code validation</h2>
* <p>
* <a href="CodeValidator.html">CodeValidator</a> provides a generic
* implementation for validating codes. It performs the following
* validations on a code:
* </p>
* <ul>
* <li><b>Format</b> - the format of the code is validated using
* a <i>regular expression</i> (see <a href="RegexValidator.html">RegexValidator</a>).</li>
* <li><b>Length</b> - the minimum/maximum length of the code is
* checked - after being parsed by the regular expression - with which
* <i>format</i> characters can be removed with the use of
* <i>non-capturing</i> groups.</li>
* <li><b>Check Digit</b> - a <a href="checkdigit/CheckDigit.html">CheckDigit</a>
* routine checks that code's check digit is valid.</li>
* </ul>
* <p>
* For example to create a validator to validate EAN-13 codes (numeric,
* with a length of 13):
* </p>
* <pre>
* // Create an EAN-13 code validator
* CodeValidator validator = new CodeValidator("^[0-9]*$", 13, EAN13CheckDigit.INSTANCE);
* // Validate an EAN-13 code
* if (!validator.isValid(code)) {
* ... // invalid
* }
* </pre>
* <a id="other.isbn"></a>
* <h2>4.5 ISBN validation</h2>
* <p>
* <a href="ISBNValidator.html">ISBNValidator</a> provides ISBN-10
* and ISBN-13 validation and can <i>optionally</i> convert
* ISBN-10 codes to ISBN-13.
* </p>
* <ul>
* <li><b>ISBN-10</b> - validates using a
* <a href="CodeValidator.html">CodeValidator</a> with the
* <a href="checkdigit/ISBN10CheckDigit.html">ISBN10CheckDigit</a>
* routine.</li>
* <li>
* <ul>
* <li><code>isValidISBN10(<i>value</i>)</code> - returns a boolean</li>
* <li><code>validateISBN10(<i>value</i>)</code> - returns a reformatted ISBN-10 code</li>
* </ul>
* </li>
* <li><b>ISBN-13</b> - validates using a
* <a href="CodeValidator.html">CodeValidator</a> with the
* <a href="checkdigit/EAN13CheckDigit.html">EAN13CheckDigit</a>
* routine.</li>
* <li>
* <ul>
* <li><code>isValidISBN13(<i>value</i>)</code> - returns a boolean</li>
* <li><code>validateISBN13(<i>value</i>)</code> - returns a reformatted ISBN-13 code</li>
* </ul>
* </li>
* <li><b>ISBN-10</b> and <b>ISBN-13</b> - validates codes are either
* valid ISBN-10 or valid ISBN-13 - optionally can convert ISBN-10 codes to ISBN-13.</li>
* <li>
* <ul>
* <li><code>isValid(<i>value</i>)</code> - returns a boolean</li>
* <li><code>validate(<i>value</i>)</code> - returns a reformatted ISBN code
* (converts ISBN-10 to ISBN-13 if the <i>convert</i> option is {@code true}).</li>
* </ul>
* </li>
* </ul>
* <p>
* For example to validate
* </p>
* <pre>
* // Validate an ISBN-10 or ISBN-13 code
* if (!ISBNValidator.getInstance().isValid(code)) {
* ... // invalid
* }
* // Validate an ISBN-10 or ISBN-13 code (converting to ISBN-13)
* String code = ISBNValidator.getInstance().validate(code);
* // Validate an ISBN-10 or ISBN-13 code (not converting)
* String code = ISBNValidator.getInstance(false).validate(code);
* </pre>
* <a id="other.inet"></a>
* <h2>4.6 IP Address Validation</h2>
* <p>
* <a href="InetAddressValidator.html">InetAddressValidator</a> provides
* IPv4 address validation.
* </p>
* <p>
* For example:
* </p>
* <pre>
* // Get an InetAddressValidator
* InetAddressValidator validator = InetAddressValidator.getInstance();
* // Validate an IPv4 address
* if (!validator.isValid(candidateInetAddress)) {
* ... // invalid
* }
* </pre>
* <a id="other.email"></a>
* <h2>4.7 Email Address Validation</h2>
* <p>
* <a href="EmailValidator.html">EmailValidator</a> provides email address
* validation according to RFC 822 standards.
* </p>
* <p>
* For example:
* </p>
* <pre>
* // Get an EmailValidator
* EmailValidator validator = EmailValidator.getInstance();
* // Validate an email address
* boolean isAddressValid = validator.isValid("user@apache.org");
* // Validate a variable containing an email address
* if (!validator.isValid(addressFromUserForm)) {
* webController.sendRedirect(ERROR_REDIRECT, "Email address isn't valid");
* // etc.
* }
* </pre>
* <a id="other.url"></a>
* <h2>4.8 URL Validation</h2>
* <p>
* <a href="UrlValidator.html">UrlValidator</a> provides URL validation by
* checking the scheme, authority, path, query, and fragment in turn. Clients
* may specify valid schemes to be used in validating in addition to or instead of
* the default values (HTTP, HTTPS, FTP). The UrlValidator also supports options
* that change the parsing rules; for example, the ALLOW_2_SLASHES option instructs
* the Validator to allow consecutive slash characters in the path component, which
* is considered an error by default.
* For more information on the available options, see the UrlValidator documentation.
* </p>
* <p>
* For example:
* </p>
* <pre>
* // Get an UrlValidator
* UrlValidator defaultValidator = new UrlValidator(); // default schemes
* if (defaultValidator.isValid("http://www.apache.org")) {
* ... // valid
* }
* if (!defaultValidator.isValid("http//www.oops.com")) {
* ... // invalid
* }
* // Get an UrlValidator with custom schemes
* String[] customSchemes = { "sftp", "scp", "https" };
* UrlValidator customValidator = new UrlValidator(customSchemes);
* if (!customValidator.isValid("http://www.apache.org")) {
* ... // invalid due to insecure protocol
* }
* // Get an UrlValidator that allows double slashes in the path
* UrlValidator doubleSlashValidator = new UrlValidator(UrlValidator.ALLOW_2_SLASHES);
* if (doubleSlashValidator.isValid("http://www.apache.org//projects")) {
* ... // valid only in this Validator instance
* }
* </pre>
* <a id="other.domain"></a>
* <h2>4.9 Domain Name Validation</h2>
* <p>
* <a href="DomainValidator.html">DomainValidator</a> provides validation of Internet
* domain names as specified by RFC1034/RFC1123 and according to the IANA-recognized
* list of top-level domains (TLDs). Clients may validate an entire domain name, a
* TLD of any category, or a TLD within a specific category.
* </p>
* <p>
* For example:
* </p>
* <pre>
* // Get a DomainValidator
* DomainValidator validator = DomainValidator.getInstance();
* // Validate a domain name
* if (validator.isValid("www.apache.org")) {
* ... // valid
* }
* if (!validator.isValid("www.apache.wrong")) {
* ... // invalid
* }
* // Validate a TLD
* if (validator.isValidTld(".com")) {
* ... // valid
* }
* if (validator.isValidTld("org")) {
* ... // valid, the leading dot is optional
* }
* if (validator.isValidTld(".us")) {
* ... // valid, country code TLDs are also accepted
* }
* // Validate TLDs in categories
* if (validator.isValidGenericTld(".name")) {
* ... // valid
* }
* if (!validator.isValidGenericTld(".uk")) {
* ... // invalid, .uk is a country code TLD
* }
* if (!validator.isValidCountryCodeTld(".info")) {
* ... // invalid, .info is a generic TLD
* }
* </pre>
*/
package org.apache.commons.validator.routines; | 8,004 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/LongValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.text.Format;
import java.util.Locale;
/**
* <p><b>Long Validation</b> and Conversion routines (<code>java.lang.Long</code>).</p>
*
* <p>This validator provides a number of methods for
* validating/converting a <code>String</code> value to
* a <code>Long</code> using <code>java.text.NumberFormat</code>
* to parse either:</p>
* <ul>
* <li>using the default format for the default <code>Locale</code></li>
* <li>using a specified pattern with the default <code>Locale</code></li>
* <li>using the default format for a specified <code>Locale</code></li>
* <li>using a specified pattern with a specified <code>Locale</code></li>
* </ul>
*
* <p>Use one of the <code>isValid()</code> methods to just validate or
* one of the <code>validate()</code> methods to validate and receive a
* <i>converted</i> <code>Long</code> value.</p>
*
* <p>Once a value has been successfully converted the following
* methods can be used to perform minimum, maximum and range checks:</p>
* <ul>
* <li><code>minValue()</code> checks whether the value is greater
* than or equal to a specified minimum.</li>
* <li><code>maxValue()</code> checks whether the value is less
* than or equal to a specified maximum.</li>
* <li><code>isInRange()</code> checks whether the value is within
* a specified range of values.</li>
* </ul>
*
* <p>So that the same mechanism used for parsing an <i>input</i> value
* for validation can be used to format <i>output</i>, corresponding
* <code>format()</code> methods are also provided. That is you can
* format either:</p>
* <ul>
* <li>using a specified pattern</li>
* <li>using the format for a specified <code>Locale</code></li>
* <li>using the format for the <i>default</i> <code>Locale</code></li>
* </ul>
*
* @since 1.3.0
*/
public class LongValidator extends AbstractNumberValidator {
private static final long serialVersionUID = -5117231731027866098L;
private static final LongValidator VALIDATOR = new LongValidator();
/**
* Return a singleton instance of this validator.
* @return A singleton instance of the LongValidator.
*/
public static LongValidator getInstance() {
return VALIDATOR;
}
/**
* Constructs a <i>strict</i> instance.
*/
public LongValidator() {
this(true, STANDARD_FORMAT);
}
/**
* <p>Construct an instance with the specified strict setting
* and format type.</p>
*
* <p>The <code>formatType</code> specified what type of
* <code>NumberFormat</code> is created - valid types
* are:</p>
* <ul>
* <li>AbstractNumberValidator.STANDARD_FORMAT -to create
* <i>standard</i> number formats (the default).</li>
* <li>AbstractNumberValidator.CURRENCY_FORMAT -to create
* <i>currency</i> number formats.</li>
* <li>AbstractNumberValidator.PERCENT_FORMAT -to create
* <i>percent</i> number formats (the default).</li>
* </ul>
*
* @param strict {@code true} if strict
* <code>Format</code> parsing should be used.
* @param formatType The <code>NumberFormat</code> type to
* create for validation, default is STANDARD_FORMAT.
*/
public LongValidator(final boolean strict, final int formatType) {
super(strict, formatType, false);
}
/**
* <p>Validate/convert a <code>Long</code> using the default
* <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @return The parsed <code>Long</code> if valid or <code>null</code>
* if invalid.
*/
public Long validate(final String value) {
return (Long)parse(value, (String)null, (Locale)null);
}
/**
* <p>Validate/convert a <code>Long</code> using the
* specified <i>pattern</i>.
*
* @param value The value validation is being performed on.
* @param pattern The pattern used to validate the value against.
* @return The parsed <code>Long</code> if valid or <code>null</code> if invalid.
*/
public Long validate(final String value, final String pattern) {
return (Long)parse(value, pattern, (Locale)null);
}
/**
* <p>Validate/convert a <code>Long</code> using the
* specified <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @param locale The locale to use for the number format, system default if null.
* @return The parsed <code>Long</code> if valid or <code>null</code> if invalid.
*/
public Long validate(final String value, final Locale locale) {
return (Long)parse(value, (String)null, locale);
}
/**
* <p>Validate/convert a <code>Long</code> using the
* specified pattern and/ or <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @param pattern The pattern used to validate the value against, or the
* default for the <code>Locale</code> if <code>null</code>.
* @param locale The locale to use for the date format, system default if null.
* @return The parsed <code>Long</code> if valid or <code>null</code> if invalid.
*/
public Long validate(final String value, final String pattern, final Locale locale) {
return (Long)parse(value, pattern, locale);
}
/**
* Check if the value is within a specified range.
*
* @param value The <code>Number</code> value to check.
* @param min The minimum value of the range.
* @param max The maximum value of the range.
* @return {@code true} if the value is within the
* specified range.
*/
public boolean isInRange(final long value, final long min, final long max) {
return value >= min && value <= max;
}
/**
* Check if the value is within a specified range.
*
* @param value The <code>Number</code> value to check.
* @param min The minimum value of the range.
* @param max The maximum value of the range.
* @return {@code true} if the value is within the
* specified range.
*/
public boolean isInRange(final Long value, final long min, final long max) {
return isInRange(value.longValue(), min, max);
}
/**
* Check if the value is greater than or equal to a minimum.
*
* @param value The value validation is being performed on.
* @param min The minimum value.
* @return {@code true} if the value is greater than
* or equal to the minimum.
*/
public boolean minValue(final long value, final long min) {
return value >= min;
}
/**
* Check if the value is greater than or equal to a minimum.
*
* @param value The value validation is being performed on.
* @param min The minimum value.
* @return {@code true} if the value is greater than
* or equal to the minimum.
*/
public boolean minValue(final Long value, final long min) {
return minValue(value.longValue(), min);
}
/**
* Check if the value is less than or equal to a maximum.
*
* @param value The value validation is being performed on.
* @param max The maximum value.
* @return {@code true} if the value is less than
* or equal to the maximum.
*/
public boolean maxValue(final long value, final long max) {
return value <= max;
}
/**
* Check if the value is less than or equal to a maximum.
*
* @param value The value validation is being performed on.
* @param max The maximum value.
* @return {@code true} if the value is less than
* or equal to the maximum.
*/
public boolean maxValue(final Long value, final long max) {
return maxValue(value.longValue(), max);
}
/**
* Convert the parsed value to a <code>Long</code>.
*
* @param value The parsed <code>Number</code> object created.
* @param formatter The Format used to parse the value with.
* @return The parsed <code>Number</code> converted to a
* <code>Long</code>.
*/
@Override
protected Object processParsedValue(final Object value, final Format formatter) {
// Parsed value will be Long if it fits in a long and is not fractional
if (value instanceof Long) {
return value;
}
return null;
}
}
| 8,005 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/ShortValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.text.Format;
import java.util.Locale;
/**
* <p><b>Short Validation</b> and Conversion routines (<code>java.lang.Short</code>).</p>
*
* <p>This validator provides a number of methods for
* validating/converting a <code>String</code> value to
* a <code>Short</code> using <code>java.text.NumberFormat</code>
* to parse either:</p>
* <ul>
* <li>using the default format for the default <code>Locale</code></li>
* <li>using a specified pattern with the default <code>Locale</code></li>
* <li>using the default format for a specified <code>Locale</code></li>
* <li>using a specified pattern with a specified <code>Locale</code></li>
* </ul>
*
* <p>Use one of the <code>isValid()</code> methods to just validate or
* one of the <code>validate()</code> methods to validate and receive a
* <i>converted</i> <code>Short</code> value.</p>
*
* <p>Once a value has been successfully converted the following
* methods can be used to perform minimum, maximum and range checks:</p>
* <ul>
* <li><code>minValue()</code> checks whether the value is greater
* than or equal to a specified minimum.</li>
* <li><code>maxValue()</code> checks whether the value is less
* than or equal to a specified maximum.</li>
* <li><code>isInRange()</code> checks whether the value is within
* a specified range of values.</li>
* </ul>
*
* <p>So that the same mechanism used for parsing an <i>input</i> value
* for validation can be used to format <i>output</i>, corresponding
* <code>format()</code> methods are also provided. That is you can
* format either:</p>
* <ul>
* <li>using the default format for the default <code>Locale</code></li>
* <li>using a specified pattern with the default <code>Locale</code></li>
* <li>using the default format for a specified <code>Locale</code></li>
* <li>using a specified pattern with a specified <code>Locale</code></li>
* </ul>
*
* @since 1.3.0
*/
public class ShortValidator extends AbstractNumberValidator {
/** Serialization version identifier. */
private static final long serialVersionUID = -5227510699747787066L;
/** Singleton instance returned by {@link #getInstance()}. */
private static final ShortValidator VALIDATOR = new ShortValidator();
/**
 * Gets the singleton instance of this validator.
 *
 * @return A singleton instance of the ShortValidator.
 */
public static ShortValidator getInstance() {
    return VALIDATOR;
}
/**
 * Constructs a <i>strict</i> instance using the standard number format.
 */
public ShortValidator() {
    this(true, STANDARD_FORMAT);
}
/**
 * <p>Constructs an instance with the specified strict setting
 * and format type.</p>
 *
 * <p>The <code>formatType</code> argument selects the kind of
 * <code>NumberFormat</code> created - valid types are:</p>
 * <ul>
 *    <li>AbstractNumberValidator.STANDARD_FORMAT - <i>standard</i>
 *        number formats (the default).</li>
 *    <li>AbstractNumberValidator.CURRENCY_FORMAT - <i>currency</i>
 *        number formats.</li>
 *    <li>AbstractNumberValidator.PERCENT_FORMAT - <i>percent</i>
 *        number formats.</li>
 * </ul>
 *
 * @param strict {@code true} if strict
 *        <code>Format</code> parsing should be used.
 * @param formatType The <code>NumberFormat</code> type to
 *        create for validation, default is STANDARD_FORMAT.
 */
public ShortValidator(final boolean strict, final int formatType) {
    super(strict, formatType, false);
}
/**
* <p>Validate/convert a <code>Short</code> using the default
* <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @return The parsed <code>Short</code> if valid or <code>null</code>
* if invalid.
*/
public Short validate(final String value) {
return (Short)parse(value, (String)null, (Locale)null);
}
/**
* <p>Validate/convert a <code>Short</code> using the
* specified <i>pattern</i>.
*
* @param value The value validation is being performed on.
* @param pattern The pattern used to validate the value against.
* @return The parsed <code>Short</code> if valid or <code>null</code> if invalid.
*/
public Short validate(final String value, final String pattern) {
return (Short)parse(value, pattern, (Locale)null);
}
/**
* <p>Validate/convert a <code>Short</code> using the
* specified <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @param locale The locale to use for the number format, system default if null.
* @return The parsed <code>Short</code> if valid or <code>null</code> if invalid.
*/
public Short validate(final String value, final Locale locale) {
return (Short)parse(value, (String)null, locale);
}
/**
* <p>Validate/convert a <code>Short</code> using the
* specified pattern and/ or <code>Locale</code>.
*
* @param value The value validation is being performed on.
* @param pattern The pattern used to validate the value against, or the
* default for the <code>Locale</code> if <code>null</code>.
* @param locale The locale to use for the date format, system default if null.
* @return The parsed <code>Short</code> if valid or <code>null</code> if invalid.
*/
public Short validate(final String value, final String pattern, final Locale locale) {
return (Short)parse(value, pattern, locale);
}
/**
* Check if the value is within a specified range.
*
* @param value The <code>Number</code> value to check.
* @param min The minimum value of the range.
* @param max The maximum value of the range.
* @return {@code true} if the value is within the
* specified range.
*/
public boolean isInRange(final short value, final short min, final short max) {
return value >= min && value <= max;
}
/**
* Check if the value is within a specified range.
*
* @param value The <code>Number</code> value to check.
* @param min The minimum value of the range.
* @param max The maximum value of the range.
* @return {@code true} if the value is within the
* specified range.
*/
public boolean isInRange(final Short value, final short min, final short max) {
return isInRange(value.shortValue(), min, max);
}
/**
* Check if the value is greater than or equal to a minimum.
*
* @param value The value validation is being performed on.
* @param min The minimum value.
* @return {@code true} if the value is greater than
* or equal to the minimum.
*/
public boolean minValue(final short value, final short min) {
return value >= min;
}
/**
* Check if the value is greater than or equal to a minimum.
*
* @param value The value validation is being performed on.
* @param min The minimum value.
* @return {@code true} if the value is greater than
* or equal to the minimum.
*/
public boolean minValue(final Short value, final short min) {
return minValue(value.shortValue(), min);
}
/**
* Check if the value is less than or equal to a maximum.
*
* @param value The value validation is being performed on.
* @param max The maximum value.
* @return {@code true} if the value is less than
* or equal to the maximum.
*/
public boolean maxValue(final short value, final short max) {
return value <= max;
}
/**
* Check if the value is less than or equal to a maximum.
*
* @param value The value validation is being performed on.
* @param max The maximum value.
* @return {@code true} if the value is less than
* or equal to the maximum.
*/
public boolean maxValue(final Short value, final short max) {
return maxValue(value.shortValue(), max);
}
/**
* <p>Perform further validation and convert the <code>Number</code> to
* a <code>Short</code>.</p>
*
* @param value The parsed <code>Number</code> object created.
* @param formatter The Format used to parse the value with.
* @return The parsed <code>Number</code> converted to a
* <code>Short</code> if valid or <code>null</code> if invalid.
*/
@Override
protected Object processParsedValue(final Object value, final Format formatter) {
final long longValue = ((Number)value).longValue();
if (longValue < Short.MIN_VALUE ||
longValue > Short.MAX_VALUE) {
return null;
}
return Short.valueOf((short)longValue);
}
}
| 8,006 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ModulusTenCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import java.util.Arrays;
import org.apache.commons.validator.routines.CodeValidator;
/**
* General Modulus 10 Check Digit calculation/validation.
*
* <h2>How it Works</h2>
* <p>
* This implementation calculates/validates the check digit in the following
* way:
* <ul>
* <li>Converting each character to an integer value using
* <code>Character.getNumericValue(char)</code> - negative integer values from
* that method are invalid.</li>
* <li>Calculating a <i>weighted value</i> by multiplying the character's
* integer value by a <i>weighting factor</i>. The <i>weighting factor</i> is
* selected from the configured <code>postitionWeight</code> array based on its
* position. The <code>postitionWeight</code> values are used either
* left-to-right (when <code>useRightPos=false</code>) or right-to-left (when
* <code>useRightPos=true</code>).</li>
* <li>If <code>sumWeightedDigits=true</code>, the <i>weighted value</i> is
* re-calculated by summing its digits.</li>
* <li>The <i>weighted values</i> of each character are totalled.</li>
* <li>The total modulo 10 will be zero for a code with a valid Check Digit.</li>
* </ul>
* <h2>Limitations</h2>
* <p>
* This implementation has the following limitations:
* <ul>
* <li>It assumes the last character in the code is the Check Digit and
* validates that it is a numeric character.</li>
* <li>The only limitation on valid characters are those that
* <code>Character.getNumericValue(char)</code> returns a positive value. If,
* for example, the code should only contain numbers, this implementation does
* not check that.</li>
* <li>There are no checks on code length.</li>
* </ul>
* <p>
* <b>Note:</b> This implementation can be combined with the
* {@link CodeValidator} in order to ensure the length and characters are valid.
*
* <h2>Example Usage</h2>
* <p>
* This implementation was added after a number of Modulus 10 routines and these
* are shown re-implemented using this routine below:
*
* <p>
* <b>ABA Number</b> Check Digit Routine (equivalent of
* {@link ABANumberCheckDigit}). Weighting factors are <code>[1, 7, 3]</code>
* applied from right to left.
*
* <pre>
* CheckDigit routine = new ModulusTenCheckDigit(new int[] { 1, 7, 3 }, true);
* </pre>
*
* <p>
* <b>CUSIP</b> Check Digit Routine (equivalent of {@link CUSIPCheckDigit}).
* Weighting factors are <code>[1, 2]</code> applied from right to left and the
* digits of the <i>weighted value</i> are summed.
*
* <pre>
* CheckDigit routine = new ModulusTenCheckDigit(new int[] { 1, 2 }, true, true);
* </pre>
*
* <p>
* <b>EAN-13 / UPC</b> Check Digit Routine (equivalent of
* {@link EAN13CheckDigit}). Weighting factors are <code>[1, 3]</code> applied
* from right to left.
*
* <pre>
* CheckDigit routine = new ModulusTenCheckDigit(new int[] { 1, 3 }, true);
* </pre>
*
* <p>
* <b>Luhn</b> Check Digit Routine (equivalent of {@link LuhnCheckDigit}).
* Weighting factors are <code>[1, 2]</code> applied from right to left and the
* digits of the <i>weighted value</i> are summed.
*
* <pre>
* CheckDigit routine = new ModulusTenCheckDigit(new int[] { 1, 2 }, true, true);
* </pre>
*
* <p>
* <b>SEDOL</b> Check Digit Routine (equivalent of {@link SedolCheckDigit}).
* Weighting factors are <code>[1, 3, 1, 7, 3, 9, 1]</code> applied from left to
* right.
*
* <pre>
* CheckDigit routine = new ModulusTenCheckDigit(new int[] { 1, 3, 1, 7, 3, 9, 1 });
* </pre>
*
* @since 1.6
*/
public final class ModulusTenCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = -3752929983453368497L;

    // Weights applied by character position. NOTE: the misspelled name is kept
    // because it is exposed verbatim by toString().
    private final int[] postitionWeight;

    // true: apply postitionWeight from right to left; false: left to right.
    private final boolean useRightPos;

    // true: sum the digits of each weighted value (Luhn-style "double add double").
    private final boolean sumWeightedDigits;

    /**
     * Constructs a modulus 10 Check Digit routine with the specified weighting
     * from left to right.
     *
     * @param postitionWeight the weighted values to apply based on the
     *            character position
     */
    public ModulusTenCheckDigit(final int[] postitionWeight) {
        this(postitionWeight, false, false);
    }

    /**
     * Constructs a modulus 10 Check Digit routine with the specified weighting,
     * indicating whether it is applied from the left or right.
     *
     * @param postitionWeight the weighted values to apply based on the
     *            character position
     * @param useRightPos {@code true} if use positionWeights from right to
     *            left
     */
    public ModulusTenCheckDigit(final int[] postitionWeight, final boolean useRightPos) {
        this(postitionWeight, useRightPos, false);
    }

    /**
     * Constructs a modulus 10 Check Digit routine with the specified weighting,
     * indicating whether it is applied from the left or right and whether the
     * weighted digits should be summed.
     *
     * @param postitionWeight the weighted values to apply based on the
     *            character position
     * @param useRightPos {@code true} if use positionWeights from right to
     *            left
     * @param sumWeightedDigits {@code true} if sum the digits of the
     *            weighted value
     */
    public ModulusTenCheckDigit(final int[] postitionWeight, final boolean useRightPos, final boolean sumWeightedDigits) {
        super(10); // CHECKSTYLE IGNORE MagicNumber
        // Defensive copy so later mutation of the caller's array cannot affect us.
        this.postitionWeight = Arrays.copyOf(postitionWeight, postitionWeight.length);
        this.useRightPos = useRightPos;
        this.sumWeightedDigits = sumWeightedDigits;
    }

    /**
     * Validate a modulus check digit for a code.
     * <p>
     * Note: assumes last digit is the check digit
     *
     * @param code The code to validate
     * @return {@code true} if the check digit is valid, otherwise
     *         {@code false}
     */
    @Override
    public boolean isValid(final String code) {
        if (code == null || code.isEmpty()) {
            return false;
        }
        // The trailing check digit must be numeric even though other
        // positions may be alphanumeric.
        if (!Character.isDigit(code.charAt(code.length() - 1))) {
            return false;
        }
        return super.isValid(code);
    }

    /**
     * Convert a character at a specified position to an integer value.
     * <p>
     * <b>Note:</b> this implementation only handles values for which
     * Character.getNumericValue(char) returns a non-negative number.
     *
     * @param character The character to convert
     * @param leftPos The position of the character in the code, counting from
     *            left to right (for identifying the position in the string)
     * @param rightPos The position of the character in the code, counting from
     *            right to left (not used here)
     * @return The integer value of the character
     * @throws CheckDigitException if Character.getNumericValue(char) returns a
     *             negative number
     */
    @Override
    protected int toInt(final char character, final int leftPos, final int rightPos) throws CheckDigitException {
        final int num = Character.getNumericValue(character);
        if (num < 0) {
            throw new CheckDigitException("Invalid Character[" + leftPos + "] = '" + character + "'");
        }
        return num;
    }

    /**
     * Calculates the <i>weighted</i> value of a character in the code at a
     * specified position.
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from
     *            left to right
     * @param rightPos The position of the character in the code, counting from
     *            right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        final int pos = useRightPos ? rightPos : leftPos;
        // Weights cycle when the code is longer than the weight array.
        final int weight = postitionWeight[(pos - 1) % postitionWeight.length];
        int weightedValue = charValue * weight;
        if (sumWeightedDigits) {
            weightedValue = ModulusCheckDigit.sumDigits(weightedValue);
        }
        return weightedValue;
    }

    /**
     * Return a string representation of this implementation.
     *
     * @return a string representation
     */
    @Override
    public String toString() {
        return getClass().getSimpleName() + "[postitionWeight=" + Arrays.toString(postitionWeight) + ", useRightPos="
                + useRightPos + ", sumWeightedDigits=" + sumWeightedDigits + "]";
    }
}
| 8,007 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/CUSIPCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>CUSIP</b> (North American Securities) Check Digit calculation/validation.
*
* <p>
* CUSIP Numbers are 9 character alphanumeric codes used
* to identify North American Securities.
* </p>
*
* <p>
* Check digit calculation uses the <i>Modulus 10 Double Add Double</i> technique
* with every second digit being weighted by 2. Alphabetic characters are
* converted to numbers by their position in the alphabet starting with A being 10.
* Weighted numbers greater than ten are treated as two separate numbers.
* </p>
*
* <p>
* See <a href="http://en.wikipedia.org/wiki/CUSIP">Wikipedia - CUSIP</a>
* for more details.
* </p>
*
* @since 1.4
*/
public final class CUSIPCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = 666941918490152456L;

    /** Singleton CUSIP Check Digit instance */
    public static final CheckDigit CUSIP_CHECK_DIGIT = new CUSIPCheckDigit();

    /** weighting given to digits depending on their right position */
    private static final int[] POSITION_WEIGHT = {2, 1};

    /**
     * Constructs a CUSIP Identifier Check Digit routine.
     */
    public CUSIPCheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Convert a character at a specified position to an integer value.
     *
     * @param character The character to convert
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The integer value of the character
     * @throws CheckDigitException if the character is not alphanumeric
     */
    @Override
    protected int toInt(final char character, final int leftPos, final int rightPos)
            throws CheckDigitException {
        final int charValue = Character.getNumericValue(character);
        // the final character is only allowed to reach 9
        final int charMax = rightPos == 1 ? 9 : 35; // CHECKSTYLE IGNORE MagicNumber
        if (charValue < 0 || charValue > charMax) {
            // Report the offending character itself, not just its numeric value.
            throw new CheckDigitException("Invalid Character[" +
                    leftPos + "," + rightPos + "] = '" + character + "' (value " + charValue +
                    ") out of range 0 to " + charMax);
        }
        return charValue;
    }

    /**
     * <p>Calculates the <i>weighted</i> value of a character in the
     * code at a specified position.</p>
     *
     * <p>For CUSIP (from right to left) <b>odd</b> digits are weighted
     * with a factor of <b>one</b> and <b>even</b> digits with a factor
     * of <b>two</b>. Weighted values &gt; 9, have 9 subtracted</p>
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        final int weight = POSITION_WEIGHT[rightPos % 2];
        final int weightedValue = charValue * weight;
        // Summing the digits is equivalent to subtracting 9 from values > 9.
        return ModulusCheckDigit.sumDigits(weightedValue);
    }
}
| 8,008 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ABANumberCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>ABA Number</b> (or <b>Routing Transit Number</b> (RTN)) Check Digit
* calculation/validation.
*
* <p>
* ABA Numbers (or Routing Transit Numbers) are a nine digit numeric code used
* to identify American financial institutions for things such as checks or deposits
* (ABA stands for the American Bankers Association).
* </p>
*
* Check digit calculation is based on <i>modulus 10</i> with digits being weighted
* based on their position (from right to left) as follows:
*
* <ul>
 * <li>Digits 1, 4 and 7 are weighted 1</li>
 * <li>Digits 2, 5 and 8 are weighted 7</li>
 * <li>Digits 3, 6 and 9 are weighted 3</li>
* </ul>
*
* <p>
* For further information see
* <a href="http://en.wikipedia.org/wiki/Routing_transit_number">Wikipedia -
* Routing transit number</a>.
* </p>
*
* @since 1.4
*/
public final class ABANumberCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = -8255937433810380145L;

    /** Singleton Routing Transit Number Check Digit instance */
    public static final CheckDigit ABAN_CHECK_DIGIT = new ABANumberCheckDigit();

    /** weighting given to digits depending on their right position */
    private static final int[] POSITION_WEIGHT = {3, 1, 7};

    /**
     * Constructs a modulus 10 Check Digit routine for ABA Numbers.
     */
    public ABANumberCheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Calculates the <i>weighted</i> value of a character in the
     * code at a specified position.
     * <p>
     * ABA Routing numbers are weighted in the following manner:
     * <pre><code>
     *     left position: 1  2  3  4  5  6  7  8  9
     *            weight: 3  7  1  3  7  1  3  7  1
     * </code></pre>
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        // Weights repeat with period 3, selected by the right-hand position.
        final int weight = POSITION_WEIGHT[rightPos % 3]; // CHECKSTYLE IGNORE MagicNumber
        return charValue * weight;
    }
}
| 8,009 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/CheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import org.apache.commons.validator.routines.CodeValidator;
/**
* <b>Check Digit</b> calculation and validation.
* <p>
* The logic for validating check digits has previously been
* embedded within the logic for specific code validation, which
* includes other validations such as verifying the format
* or length of a code. {@link CheckDigit} provides for separating out
* the check digit calculation logic enabling it to be more easily
* tested and reused.
* </p>
* <p>
* Although Commons Validator is primarily concerned with validation,
* {@link CheckDigit} also defines behavior for calculating/generating check
* digits, since it makes sense that users will want to (re-)use the
* same logic for both. The {@link org.apache.commons.validator.routines.ISBNValidator}
* makes specific use of this feature by providing the facility to validate ISBN-10 codes
* and then convert them to the new ISBN-13 standard.
* </p>
* <p>
* CheckDigit is used by the new generic {@link CodeValidator} implementation.
* </p>
*
* <h2>Implementations</h2>
* See the
* <a href="package-summary.html">Package Summary</a> for a full
* list of implementations provided within Commons Validator.
*
* @see org.apache.commons.validator.routines.CodeValidator
* @since 1.4
*/
public interface CheckDigit {
    /**
     * Calculates the <i>Check Digit</i> for a code.
     * <p>
     * Implementations may return more than one character (for example,
     * IBAN check digits are two characters long).
     *
     * @param code The code to calculate the Check Digit for.
     * The string must not include the check digit
     * @return The calculated Check Digit
     * @throws CheckDigitException if an error occurs.
     */
    String calculate(String code) throws CheckDigitException;
    /**
     * Validates the check digit for the code.
     * <p>
     * Implementations are expected to return {@code false} (rather than
     * throw) for {@code null} or malformed input.
     *
     * @param code The code to validate, the string must include the check digit.
     * @return {@code true} if the check digit is valid, otherwise
     * {@code false}.
     */
    boolean isValid(String code);
}
| 8,010 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/IBANCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import java.io.Serializable;
/**
* <b>IBAN</b> (International Bank Account Number) Check Digit calculation/validation.
* <p>
* This routine is based on the ISO 7064 Mod 97,10 check digit calculation routine.
* <p>
* The two check digit characters in a IBAN number are the third and fourth characters
* in the code. For <i>check digit</i> calculation/validation the first four characters are moved
* to the end of the code.
* So <code>CCDDnnnnnnn</code> becomes <code>nnnnnnnCCDD</code> (where
* <code>CC</code> is the country code and <code>DD</code> is the check digit). For
 * check digit calculation the check digit value should be set to zero (i.e.
 * <code>CC00nnnnnnn</code> in this example).
* <p>
* Note: the class does not check the format of the IBAN number, only the check digits.
* <p>
* For further information see
* <a href="http://en.wikipedia.org/wiki/International_Bank_Account_Number">Wikipedia -
* IBAN number</a>.
*
* @since 1.4
*/
public final class IBANCheckDigit implements CheckDigit, Serializable {

    private static final int MIN_CODE_LEN = 5;

    private static final long serialVersionUID = -3600191725934382801L;

    private static final int MAX_ALPHANUMERIC_VALUE = 35; // Character.getNumericValue('Z')

    /** Singleton IBAN Number Check Digit instance */
    public static final CheckDigit IBAN_CHECK_DIGIT = new IBANCheckDigit();

    // Reduce the running total once it exceeds this, so it never overflows a long.
    private static final long MAX = 999999999;

    private static final long MODULUS = 97;

    /**
     * Constructs Check Digit routine for IBAN Numbers.
     */
    public IBANCheckDigit() {
    }

    /**
     * Validate the check digit of an IBAN code.
     *
     * @param code The code to validate
     * @return {@code true} if the check digit is valid, otherwise
     *         {@code false}
     */
    @Override
    public boolean isValid(final String code) {
        if (code == null || code.length() < MIN_CODE_LEN) {
            return false;
        }
        // Check digits of 00, 01 and 99 are reserved/invalid per ISO 7064 Mod 97,10.
        final String check = code.substring(2, 4); // CHECKSTYLE IGNORE MagicNumber
        if ("00".equals(check) || "01".equals(check) || "99".equals(check)) {
            return false;
        }
        try {
            // A valid IBAN leaves a remainder of exactly 1.
            return calculateModulus(code) == 1;
        } catch (final CheckDigitException ex) {
            return false;
        }
    }

    /**
     * Calculate the <i>Check Digit</i> for an IBAN code.
     * <p>
     * <b>Note:</b> The check digit is the third and fourth
     * characters and is set to the value "<code>00</code>".
     *
     * @param code The code to calculate the Check Digit for
     * @return The calculated Check Digit as 2 numeric decimal characters, e.g. "42"
     * @throws CheckDigitException if an error occurs calculating
     *             the check digit for the specified code
     */
    @Override
    public String calculate(final String code) throws CheckDigitException {
        if (code == null || code.length() < MIN_CODE_LEN) {
            throw new CheckDigitException("Invalid Code length=" + (code == null ? 0 : code.length()));
        }
        // Zero out the check digit positions before computing the modulus.
        final String zeroedCode = code.substring(0, 2) + "00" + code.substring(4); // CHECKSTYLE IGNORE MagicNumber
        final int modulusResult = calculateModulus(zeroedCode);
        final int charValue = 98 - modulusResult; // CHECKSTYLE IGNORE MagicNumber
        final String checkDigit = Integer.toString(charValue);
        // Zero-pad single-digit results so the check digit is always two characters.
        return charValue > 9 ? checkDigit : "0" + checkDigit; // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Calculate the modulus for a code.
     *
     * @param code The code to calculate the modulus for.
     * @return The modulus value
     * @throws CheckDigitException if an error occurs calculating the modulus
     *             for the specified code
     */
    private int calculateModulus(final String code) throws CheckDigitException {
        // Move the country code and check digits to the end of the code.
        final String reformattedCode = code.substring(4) + code.substring(0, 4); // CHECKSTYLE IGNORE MagicNumber
        long total = 0;
        for (int i = 0; i < reformattedCode.length(); i++) {
            final int charValue = Character.getNumericValue(reformattedCode.charAt(i));
            if (charValue < 0 || charValue > MAX_ALPHANUMERIC_VALUE) {
                throw new CheckDigitException("Invalid Character[" + i + "] = '" + charValue + "'");
            }
            // Letters occupy two decimal digits (10-35), plain digits only one.
            total = (charValue > 9 ? total * 100 : total * 10) + charValue; // CHECKSTYLE IGNORE MagicNumber
            if (total > MAX) {
                total %= MODULUS;
            }
        }
        return (int) (total % MODULUS);
    }
}
| 8,011 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/SedolCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>SEDOL</b> (UK Securities) Check Digit calculation/validation.
*
* <p>
* SEDOL Numbers are 7 character alphanumeric codes used
* to identify UK Securities (SEDOL stands for Stock Exchange Daily Official List).
* </p>
* <p>
* Check digit calculation is based on <i>modulus 10</i> with digits being weighted
* based on their position, from left to right, as follows:
* </p>
* <pre><code>
* position: 1 2 3 4 5 6 7
* weighting: 1 3 1 7 3 9 1
* </code></pre>
* <p>
* See <a href="http://en.wikipedia.org/wiki/SEDOL">Wikipedia - SEDOL</a>
* for more details.
* </p>
*
* @since 1.4
*/
public final class SedolCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = -8976881621148878443L;

    private static final int MAX_ALPHANUMERIC_VALUE = 35; // Character.getNumericValue('Z')

    /** Singleton SEDOL check digit instance */
    public static final CheckDigit SEDOL_CHECK_DIGIT = new SedolCheckDigit();

    /** weighting given to digits depending on their left position */
    private static final int[] POSITION_WEIGHT = {1, 3, 1, 7, 3, 9, 1};

    /**
     * Constructs a modulus 10 Check Digit routine for SEDOL codes.
     */
    public SedolCheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Calculate the modulus for an SEDOL code.
     *
     * @param code The code to calculate the modulus for.
     * @param includesCheckDigit Whether the code includes the Check Digit or not.
     * @return The modulus value
     * @throws CheckDigitException if an error occurs calculating the modulus
     *             for the specified code
     */
    @Override
    protected int calculateModulus(final String code, final boolean includesCheckDigit) throws CheckDigitException {
        // SEDOLs are at most 7 characters; longer codes cannot be valid.
        if (code.length() > POSITION_WEIGHT.length) {
            throw new CheckDigitException("Invalid Code Length = " + code.length());
        }
        return super.calculateModulus(code, includesCheckDigit);
    }

    /**
     * Calculates the <i>weighted</i> value of a character in the
     * code at a specified position.
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        // Weights are applied left to right (unlike most modulus 10 schemes).
        return charValue * POSITION_WEIGHT[leftPos - 1];
    }

    /**
     * Convert a character at a specified position to an integer value.
     *
     * @param character The character to convert
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The integer value of the character
     * @throws CheckDigitException if character is not alphanumeric
     */
    @Override
    protected int toInt(final char character, final int leftPos, final int rightPos)
            throws CheckDigitException {
        final int charValue = Character.getNumericValue(character);
        // the check digit is only allowed to reach 9
        final int charMax = rightPos == 1 ? 9 : MAX_ALPHANUMERIC_VALUE; // CHECKSTYLE IGNORE MagicNumber
        if (charValue < 0 || charValue > charMax) {
            throw new CheckDigitException("Invalid Character[" +
                    leftPos + "," + rightPos + "] = '" + charValue + "' out of range 0 to " + charMax);
        }
        return charValue;
    }
}
| 8,012 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ISSNCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* International Standard Serial Number (ISSN)
* is an eight-digit serial number used to
* uniquely identify a serial publication.
* <pre>
* The format is:
*
* ISSN dddd-dddC
* where:
* d = decimal digit (0-9)
* C = checksum (0-9 or X)
*
* The checksum is formed by adding the first 7 digits multiplied by
* the position in the entire number (counting from the right).
* For example, abcd-efg would be 8a + 7b + 6c + 5d + 4e +3f +2g.
* The check digit is modulus 11, where the value 10 is represented by 'X'
* For example:
* ISSN 0317-8471
* ISSN 1050-124X
* </pre>
* <p>
* <b>Note:</b> This class expects the input to be numeric only,
* with all formatting removed.
* For example:
* <pre>
* 03178471
* 1050124X
* </pre>
* @since 1.5.0
*/
public final class ISSNCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = 1L;

    /** Singleton ISSN Check Digit instance */
    public static final CheckDigit ISSN_CHECK_DIGIT = new ISSNCheckDigit();

    /**
     * Creates the instance using a checkdigit modulus of 11.
     */
    public ISSNCheckDigit() {
        super(11); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Weights each character by its distance from the left edge: the first of
     * the eight positions is multiplied by 8, the next by 7, and so on.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) throws CheckDigitException {
        final int weight = 9 - leftPos; // CHECKSTYLE IGNORE MagicNumber
        return weight * charValue;
    }

    /**
     * Renders the computed value 10 as the check character "X"; every other
     * value is rendered by the superclass.
     */
    @Override
    protected String toCheckDigit(final int charValue) throws CheckDigitException {
        return charValue == 10 ? "X" : super.toCheckDigit(charValue); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Converts a character to its numeric value, treating 'X' in the final
     * (check digit) position as 10.
     */
    @Override
    protected int toInt(final char character, final int leftPos, final int rightPos)
            throws CheckDigitException {
        if (character == 'X' && rightPos == 1) {
            return 10; // CHECKSTYLE IGNORE MagicNumber
        }
        return super.toInt(character, leftPos, rightPos);
    }
}
| 8,013 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ISINCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>ISIN</b> (International Securities Identifying Number) Check Digit calculation/validation.
*
* <p>
* ISIN Numbers are 12 character alphanumeric codes used
* to identify Securities.
* </p>
*
* <p>
* Check digit calculation uses the <i>Modulus 10 Double Add Double</i> technique
* with every second digit being weighted by 2. Alphabetic characters are
* converted to numbers by their position in the alphabet starting with A being 10.
* Weighted numbers greater than ten are treated as two separate numbers.
* </p>
*
* <p>
* See <a href="http://en.wikipedia.org/wiki/ISIN">Wikipedia - ISIN</a>
* for more details.
* </p>
*
* @since 1.4
*/
public final class ISINCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = -1239211208101323599L;

    /** Maximum numeric value a code character may map to: Character.getNumericValue('Z') == 35. */
    private static final int MAX_ALPHANUMERIC_VALUE = 35; // Character.getNumericValue('Z')

    /** Singleton ISIN Check Digit instance */
    public static final CheckDigit ISIN_CHECK_DIGIT = new ISINCheckDigit();

    /** weighting given to digits depending on their right position */
    private static final int[] POSITION_WEIGHT = {2, 1};

    /**
     * Constructs an ISIN Identifier Check Digit routine.
     */
    public ISINCheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Calculate the modulus for an ISIN code.
     * <p>
     * Each alphanumeric character is first expanded to its one- or two-digit
     * numeric value (A=10 ... Z=35) so that the single-digit modulus
     * calculation of the superclass can be reused unchanged.
     *
     * @param code The code to calculate the modulus for.
     * @param includesCheckDigit Whether the code includes the Check Digit or not.
     * @return The modulus value
     * @throws CheckDigitException if an error occurs calculating the modulus
     * for the specified code
     */
    @Override
    protected int calculateModulus(final String code, final boolean includesCheckDigit) throws CheckDigitException {
        final StringBuilder transformed = new StringBuilder(code.length() * 2); // CHECKSTYLE IGNORE MagicNumber
        if (includesCheckDigit) {
            final char checkDigit = code.charAt(code.length() - 1); // fetch the last character
            if (!Character.isDigit(checkDigit)) {
                throw new CheckDigitException("Invalid checkdigit[" + checkDigit + "] in " + code);
            }
        }
        for (int i = 0; i < code.length(); i++) {
            final int charValue = Character.getNumericValue(code.charAt(i));
            if (charValue < 0 || charValue > MAX_ALPHANUMERIC_VALUE) {
                // Report the offending character itself: Character.getNumericValue()
                // returns a negative value for any invalid character, so the previous
                // message (which printed charValue) always showed '-1' or '-2' and
                // hid which character was actually rejected.
                throw new CheckDigitException("Invalid Character[" +
                        (i + 1) + "] = '" + code.charAt(i) + "'");
            }
            // this converts alphanumerics to two digits
            // so there is no need to overload toInt()
            transformed.append(charValue);
        }
        return super.calculateModulus(transformed.toString(), includesCheckDigit);
    }

    /**
     * <p>Calculates the <i>weighted</i> value of a character in the
     * code at a specified position.</p>
     *
     * <p>For ISIN (from right to left) <b>odd</b> digits are weighted
     * with a factor of <b>one</b> and <b>even</b> digits with a factor
     * of <b>two</b>. Weighted values are reduced to their digital root.</p>
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        final int weight = POSITION_WEIGHT[rightPos % 2]; // CHECKSTYLE IGNORE MagicNumber
        final int weightedValue = charValue * weight;
        return ModulusCheckDigit.sumDigits(weightedValue);
    }
}
| 8,014 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/CheckDigitException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
 * Signals a problem encountered while calculating or validating a check digit.
 *
 * @since 1.4
 */
public class CheckDigitException extends Exception {

    private static final long serialVersionUID = -3519894732624685477L;

    /**
     * Constructs an Exception with no message.
     */
    public CheckDigitException() {
    }

    /**
     * Constructs an Exception with a message.
     *
     * @param msg The error message.
     */
    public CheckDigitException(final String msg) {
        super(msg);
    }

    /**
     * Constructs an Exception with a message and the underlying cause.
     *
     * @param msg The error message.
     * @param cause The underlying cause of the error
     */
    public CheckDigitException(final String msg, final Throwable cause) {
        super(msg, cause);
    }
}
| 8,015 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/VerhoeffCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import java.io.Serializable;
/**
* <b>Verhoeff</b> (Dihedral) Check Digit calculation/validation.
* <p>
* Check digit calculation for numeric codes using a
* <a href="http://en.wikipedia.org/wiki/Dihedral_group">Dihedral Group</a>
* of order 10.
* <p>
* See <a href="http://en.wikipedia.org/wiki/Verhoeff_algorithm">Wikipedia
* - Verhoeff algorithm</a> for more details.
*
* @since 1.4
*/
public final class VerhoeffCheckDigit implements CheckDigit, Serializable {

    private static final long serialVersionUID = 4138993995483695178L;

    /** Singleton Verhoeff Check Digit instance */
    public static final CheckDigit VERHOEFF_CHECK_DIGIT = new VerhoeffCheckDigit();

    /** D - multiplication table */
    private static final int[][] D_TABLE = {
        {0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
        {1, 2, 3, 4, 0, 6, 7, 8, 9, 5},
        {2, 3, 4, 0, 1, 7, 8, 9, 5, 6},
        {3, 4, 0, 1, 2, 8, 9, 5, 6, 7},
        {4, 0, 1, 2, 3, 9, 5, 6, 7, 8},
        {5, 9, 8, 7, 6, 0, 4, 3, 2, 1},
        {6, 5, 9, 8, 7, 1, 0, 4, 3, 2},
        {7, 6, 5, 9, 8, 2, 1, 0, 4, 3},
        {8, 7, 6, 5, 9, 3, 2, 1, 0, 4},
        {9, 8, 7, 6, 5, 4, 3, 2, 1, 0}};

    /** P - permutation table */
    private static final int[][] P_TABLE = {
        {0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
        {1, 5, 7, 6, 2, 8, 3, 0, 9, 4},
        {5, 8, 0, 3, 7, 9, 6, 1, 4, 2},
        {8, 9, 1, 6, 0, 4, 3, 5, 2, 7},
        {9, 4, 5, 3, 1, 2, 6, 8, 7, 0},
        {4, 2, 8, 6, 5, 7, 3, 9, 0, 1},
        {2, 7, 9, 3, 8, 0, 6, 4, 1, 5},
        {7, 0, 4, 6, 9, 1, 3, 2, 5, 8}};

    /** inv: inverse table */
    private static final int[] INV_TABLE = {0, 4, 3, 2, 1, 5, 6, 7, 8, 9};

    /**
     * Validate the Verhoeff <i>Check Digit</i> for a code.
     * A code is valid when its dihedral checksum (including the trailing
     * check digit) reduces to zero.
     *
     * @param code The code to validate
     * @return {@code true} if the check digit is valid,
     * otherwise {@code false}
     */
    @Override
    public boolean isValid(final String code) {
        if (code == null || code.isEmpty()) {
            return false;
        }
        try {
            final int checksum = calculateChecksum(code, true);
            return checksum == 0;
        } catch (final CheckDigitException e) {
            return false;
        }
    }

    /**
     * Calculate a Verhoeff <i>Check Digit</i> for a code which does not yet
     * include one, by inverting its dihedral checksum.
     *
     * @param code The code to calculate the Check Digit for
     * @return The calculated Check Digit
     * @throws CheckDigitException if an error occurs calculating
     * the check digit for the specified code
     */
    @Override
    public String calculate(final String code) throws CheckDigitException {
        if (code == null || code.isEmpty()) {
            throw new CheckDigitException("Code is missing");
        }
        return Integer.toString(INV_TABLE[calculateChecksum(code, false)]);
    }

    /**
     * Calculate the dihedral checksum, processing the digits from right to left.
     *
     * @param code The code to calculate the checksum for.
     * @param includesCheckDigit Whether the code includes the Check Digit or not.
     * @return The checksum value
     * @throws CheckDigitException if the code contains an invalid character (i.e. not numeric)
     */
    private int calculateChecksum(final String code, final boolean includesCheckDigit) throws CheckDigitException {
        int interim = 0;
        for (int i = 0; i < code.length(); i++) {
            final int charIndex = code.length() - (i + 1);
            final int digit = Character.getNumericValue(code.charAt(charIndex));
            if (digit < 0 || digit > 9) { // CHECKSTYLE IGNORE MagicNumber
                throw new CheckDigitException("Invalid Character[" +
                        i + "] = '" + (int) code.charAt(charIndex) + "'");
            }
            // When calculating (rather than validating) the check digit occupies
            // position 0, so every digit shifts up one position.
            final int position = includesCheckDigit ? i : i + 1;
            interim = D_TABLE[interim][P_TABLE[position % 8][digit]]; // CHECKSTYLE IGNORE MagicNumber
        }
        return interim;
    }
}
| 8,016 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/LuhnCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>Luhn</b> Check Digit calculation/validation.
*
* Luhn check digits are used, for example, by:
* <ul>
* <li><a href="http://en.wikipedia.org/wiki/Credit_card">Credit Card Numbers</a></li>
* <li><a href="http://en.wikipedia.org/wiki/IMEI">IMEI Numbers</a> - International
* Mobile Equipment Identity Numbers</li>
* </ul>
* Check digit calculation is based on <i>modulus 10</i> with digits in
* an <i>odd</i> position (from right to left) being weighted 1 and <i>even</i>
* position digits being weighted 2 (weighted values greater than 9 have 9 subtracted).
*
* <p>
* See <a href="http://en.wikipedia.org/wiki/Luhn_algorithm">Wikipedia</a>
* for more details.
* </p>
*
* @since 1.4
*/
public final class LuhnCheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = -2976900113942875999L;

    /** Singleton Luhn Check Digit instance */
    public static final CheckDigit LUHN_CHECK_DIGIT = new LuhnCheckDigit();

    /** weighting given to digits depending on their right position */
    private static final int[] POSITION_WEIGHT = {2, 1};

    /**
     * Constructs a modulus 10 Luhn Check Digit routine.
     */
    public LuhnCheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Weights a digit according to the Luhn scheme: counting from the right,
     * odd positions are multiplied by one and even positions by two, with
     * nine subtracted from any product greater than nine.
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        final int weighted = charValue * POSITION_WEIGHT[rightPos % 2]; // CHECKSTYLE IGNORE MagicNumber
        if (weighted > 9) { // CHECKSTYLE IGNORE MagicNumber
            return weighted - 9; // CHECKSTYLE IGNORE MagicNumber
        }
        return weighted;
    }
}
| 8,017 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ISBN10CheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 11 <b>ISBN-10</b> Check Digit calculation/validation.
* <p>
* ISBN-10 Numbers are a numeric code except for the last (check) digit
* which can have a value of "X".
* <p>
* Check digit calculation is based on <i>modulus 11</i> with digits being weighted
* based by their position, from right to left with the first digit being weighted
* 1, the second 2 and so on. If the check digit is calculated as "10" it is converted
* to "X".
* <p>
* <b>N.B.</b> From 1st January 2007 the book industry will start to use a new 13 digit
* ISBN number (rather than this 10 digit ISBN number) which uses the EAN-13 / UPC
* (see {@link EAN13CheckDigit}) standard.
* <p>
* For further information see:
* <ul>
* <li><a href="http://en.wikipedia.org/wiki/ISBN">Wikipedia - International
* Standard Book Number (ISBN)</a>.</li>
* <li><a href="http://www.isbn.org/standards/home/isbn/transition.asp">ISBN-13
* Transition details</a>.</li>
* </ul>
*
* @since 1.4
*/
public final class ISBN10CheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = 8000855044504864964L;

    /** Singleton ISBN-10 Check Digit instance */
    public static final CheckDigit ISBN10_CHECK_DIGIT = new ISBN10CheckDigit();

    /**
     * Constructs a modulus 11 Check Digit routine for ISBN-10.
     */
    public ISBN10CheckDigit() {
        super(11); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Weights a digit by its position counting from the right, as required by
     * the ISBN-10 modulus 11 scheme.
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        return rightPos * charValue;
    }

    /**
     * Converts a character to its integer value, accepting 'X' (treated as 10)
     * in the check digit position only.
     *
     * @param character The character to convert.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The integer value of the character.
     * @throws CheckDigitException if an error occurs.
     */
    @Override
    protected int toInt(final char character, final int leftPos, final int rightPos)
            throws CheckDigitException {
        if (character == 'X' && rightPos == 1) {
            return 10; // CHECKSTYLE IGNORE MagicNumber
        }
        return super.toInt(character, leftPos, rightPos);
    }

    /**
     * Converts an integer value to a check digit character, rendering the
     * value 10 (in the check digit position) as 'X'.
     *
     * @param charValue The integer value of the character.
     * @return The converted character.
     * @throws CheckDigitException if an error occurs.
     */
    @Override
    protected String toCheckDigit(final int charValue)
            throws CheckDigitException {
        return charValue == 10 ? "X" : super.toCheckDigit(charValue); // CHECKSTYLE IGNORE MagicNumber
    }
}
| 8,018 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ModulusCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import java.io.Serializable;
/**
* Abstract <b>Modulus</b> Check digit calculation/validation.
* <p>
* Provides a <i>base</i> class for building <i>modulus</i> Check Digit routines.
* </p>
* <p>
* This implementation only handles <i>single-digit numeric</i> codes, such as <b>EAN-13</b>. For <i>alphanumeric</i> codes such as <b>EAN-128</b> you will need
* to implement/override the <code>toInt()</code> and <code>toChar()</code> methods.
* </p>
*
* @since 1.4
*/
public abstract class ModulusCheckDigit implements CheckDigit, Serializable {

    private static final long serialVersionUID = 2948962251251528941L;

    // N.B. The modulus can be > 10 provided that the implementing class overrides toCheckDigit and toInt
    // (for example as in ISBN10CheckDigit)
    private final int modulus;

    /**
     * Constructs a {@link CheckDigit} routine for a specified modulus.
     *
     * @param modulus The modulus value to use for the check digit calculation
     */
    public ModulusCheckDigit(final int modulus) {
        this.modulus = modulus;
    }

    /**
     * Return the modulus value this check digit routine is based on.
     *
     * @return The modulus value this check digit routine is based on
     */
    public int getModulus() {
        return modulus;
    }

    /**
     * Validate a modulus check digit for a code.
     *
     * @param code The code to validate
     * @return {@code true} if the check digit is valid, otherwise
     * {@code false}
     */
    @Override
    public boolean isValid(final String code) {
        if (code == null || code.isEmpty()) {
            return false;
        }
        try {
            final int modulusResult = calculateModulus(code, true);
            return modulusResult == 0;
        } catch (final CheckDigitException ex) {
            return false;
        }
    }

    /**
     * Calculate a modulus <i>Check Digit</i> for a code which does not yet have one.
     *
     * @param code The code for which to calculate the Check Digit;
     * the check digit should not be included
     * @return The calculated Check Digit
     * @throws CheckDigitException if an error occurs calculating the check digit
     */
    @Override
    public String calculate(final String code) throws CheckDigitException {
        if (code == null || code.isEmpty()) {
            throw new CheckDigitException("Code is missing");
        }
        final int modulusResult = calculateModulus(code, false);
        final int charValue = (modulus - modulusResult) % modulus;
        return toCheckDigit(charValue);
    }

    /**
     * Calculate the modulus for a code.
     *
     * @param code The code to calculate the modulus for.
     * @param includesCheckDigit Whether the code includes the Check Digit or not.
     * @return The modulus value
     * @throws CheckDigitException if an error occurs calculating the modulus
     * for the specified code
     */
    protected int calculateModulus(final String code, final boolean includesCheckDigit) throws CheckDigitException {
        int total = 0;
        // When the check digit is absent, positions are numbered as if it were
        // appended, so the right-hand position count starts one higher.
        final int lth = code.length() + (includesCheckDigit ? 0 : 1);
        for (int i = 0; i < code.length(); i++) {
            final int leftPos = i + 1;
            final int rightPos = lth - i;
            final int charValue = toInt(code.charAt(i), leftPos, rightPos);
            total += weightedValue(charValue, leftPos, rightPos);
        }
        if (total == 0) {
            throw new CheckDigitException("Invalid code, sum is zero");
        }
        return total % modulus;
    }

    /**
     * Calculates the <i>weighted</i> value of a character in the
     * code at a specified position.
     * <p>
     * Some modulus routines weight the value of a character
     * depending on its position in the code (e.g. ISBN-10), while
     * others use different weighting factors for odd/even positions
     * (e.g. EAN or Luhn). Implement the appropriate mechanism
     * required by overriding this method.
     *
     * @param charValue The numeric value of the character
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character
     * @throws CheckDigitException if an error occurs calculating
     * the weighted value
     */
    protected abstract int weightedValue(int charValue, int leftPos, int rightPos)
            throws CheckDigitException;

    /**
     * Convert a character at a specified position to an integer value.
     * <p>
     * <b>Note:</b> this implementation only handles numeric values.
     * For non-numeric characters, override this method to provide
     * character--&gt;integer conversion.
     *
     * @param character The character to convert
     * @param leftPos The position of the character in the code, counting from left to right (for identifying the position in the string)
     * @param rightPos The position of the character in the code, counting from right to left (not used here)
     * @return The integer value of the character
     * @throws CheckDigitException if character is non-numeric
     */
    protected int toInt(final char character, final int leftPos, final int rightPos)
            throws CheckDigitException {
        if (Character.isDigit(character)) {
            return Character.getNumericValue(character);
        }
        throw new CheckDigitException("Invalid Character[" +
                leftPos + "] = '" + character + "'");
    }

    /**
     * Convert an integer value to a check digit.
     * <p>
     * <b>Note:</b> this implementation only handles single-digit numeric values.
     * For non-numeric characters, override this method to provide
     * integer--&gt;character conversion.
     *
     * @param charValue The integer value of the character
     * @return The converted character
     * @throws CheckDigitException if integer character value
     * doesn't represent a numeric character
     */
    protected String toCheckDigit(final int charValue)
            throws CheckDigitException {
        if (charValue >= 0 && charValue <= 9) { // CHECKSTYLE IGNORE MagicNumber
            return Integer.toString(charValue);
        }
        // NOTE: a stray unary '+' previously appeared here ("=" + + charValue);
        // it had no effect on the message text but obscured the expression.
        throw new CheckDigitException("Invalid Check Digit Value =" + charValue);
    }

    /**
     * Add together the individual digits in a number.
     *
     * @param number The number whose digits are to be added
     * @return The sum of the digits
     */
    public static int sumDigits(final int number) {
        int total = 0;
        int todo = number;
        while (todo > 0) {
            total += todo % 10; // CHECKSTYLE IGNORE MagicNumber
            todo = todo / 10; // CHECKSTYLE IGNORE MagicNumber
        }
        return total;
    }
}
| 8,019 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/EAN13CheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
/**
* Modulus 10 <b>EAN-13</b> / <b>UPC</b> / <b>ISBN-13</b> Check Digit
* calculation/validation.
* <p>
* Check digit calculation is based on <i>modulus 10</i> with digits in
* an <i>odd</i> position (from right to left) being weighted 1 and <i>even</i>
* position digits being weighted 3.
* <p>
* For further information see:
* <ul>
* <li>EAN-13 - see
* <a href="http://en.wikipedia.org/wiki/European_Article_Number">Wikipedia -
* European Article Number</a>.</li>
* <li>UPC - see
* <a href="http://en.wikipedia.org/wiki/Universal_Product_Code">Wikipedia -
* Universal Product Code</a>.</li>
* <li>ISBN-13 - see
* <a href="http://en.wikipedia.org/wiki/ISBN">Wikipedia - International
* Standard Book Number (ISBN)</a>.</li>
* </ul>
*
* @since 1.4
*/
public final class EAN13CheckDigit extends ModulusCheckDigit {

    private static final long serialVersionUID = 1726347093230424107L;

    /** Singleton EAN-13 Check Digit instance */
    public static final CheckDigit EAN13_CHECK_DIGIT = new EAN13CheckDigit();

    /** weighting given to digits depending on their right position */
    private static final int[] POSITION_WEIGHT = {3, 1};

    /**
     * Constructs a modulus 10 Check Digit routine for EAN/UPC.
     */
    public EAN13CheckDigit() {
        super(10); // CHECKSTYLE IGNORE MagicNumber
    }

    /**
     * Weights a digit by its parity position counting from the right:
     * odd positions are multiplied by one and even positions by three.
     *
     * @param charValue The numeric value of the character.
     * @param leftPos The position of the character in the code, counting from left to right
     * @param rightPos The position of the character in the code, counting from right to left
     * @return The weighted value of the character.
     */
    @Override
    protected int weightedValue(final int charValue, final int leftPos, final int rightPos) {
        return POSITION_WEIGHT[rightPos % 2] * charValue;
    }
}
| 8,020 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains <i>Check Digit</i> validation/calculation routines.
* <p>
* Note that these <b>do not validate</b> the input for length or syntax. Such validation is performed by the org.apache.commons.validator.routines.XYZValidator
* classes.
* </p>
*/
package org.apache.commons.validator.routines.checkdigit; | 8,021 |
0 | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines | Create_ds/commons-validator/src/main/java/org/apache/commons/validator/routines/checkdigit/ISBNCheckDigit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines.checkdigit;
import java.io.Serializable;
/**
 * Combined <b>ISBN-10</b> / <b>ISBN-13</b> Check Digit calculation/validation.
 * <p>
 * This implementation validates/calculates ISBN check digits
 * based on the length of the code passed to it - delegating
 * either to the {@link ISBNCheckDigit#ISBN10_CHECK_DIGIT} or the
 * {@link ISBNCheckDigit#ISBN13_CHECK_DIGIT} routines to perform the actual
 * validation/calculation.
 * <p>
 * <b>N.B.</b> From 1st January 2007 the book industry will start to use a new 13 digit
 * ISBN number (rather than this 10 digit ISBN number) which uses the EAN-13 / UPC
 * standard.
 *
 * @since 1.4
 */
public final class ISBNCheckDigit implements CheckDigit, Serializable {

    private static final long serialVersionUID = 1391849166205184558L;

    /** Singleton ISBN-10 Check Digit instance */
    public static final CheckDigit ISBN10_CHECK_DIGIT = ISBN10CheckDigit.ISBN10_CHECK_DIGIT;

    /** Singleton ISBN-13 Check Digit instance */
    public static final CheckDigit ISBN13_CHECK_DIGIT = EAN13CheckDigit.EAN13_CHECK_DIGIT;

    /** Singleton combined ISBN-10 / ISBN-13 Check Digit instance */
    public static final CheckDigit ISBN_CHECK_DIGIT = new ISBNCheckDigit();

    /**
     * Calculate an ISBN-10 or ISBN-13 check digit, depending
     * on the length of the code.
     * <p>
     * A 9 character code is treated as an ISBN-10 and a 12 character
     * code as an ISBN-13; any other length is rejected.
     *
     * @param code The ISBN code to validate (should have a length of
     * 9 or 12)
     * @return The ISBN-10 check digit if the length is 9 or an ISBN-13
     * check digit if the length is 12.
     * @throws CheckDigitException if the code is missing, or an invalid
     * length (i.e. not 9 or 12) or if there is an error calculating the
     * check digit.
     */
    @Override
    public String calculate(final String code) throws CheckDigitException {
        if (code == null || code.isEmpty()) {
            throw new CheckDigitException("ISBN Code is missing");
        }
        // Dispatch on the (check-digit-less) code length.
        switch (code.length()) {
        case 9:  // CHECKSTYLE IGNORE MagicNumber
            return ISBN10_CHECK_DIGIT.calculate(code);
        case 12: // CHECKSTYLE IGNORE MagicNumber
            return ISBN13_CHECK_DIGIT.calculate(code);
        default:
            throw new CheckDigitException("Invalid ISBN Length = " + code.length());
        }
    }

    /**
     * <p>Validate an ISBN-10 or ISBN-13 check digit, depending
     * on the length of the code.</p>
     * <p>
     * A 10 character code is treated as an ISBN-10 and a 13 character
     * code as an ISBN-13; any other length is invalid.
     *
     * @param code The ISBN code to validate (should have a length of
     * 10 or 13)
     * @return {@code true} if the code has a length of 10 and is
     * a valid ISBN-10 check digit or the code has a length of 13 and is
     * a valid ISBN-13 check digit - otherwise {@code false}.
     */
    @Override
    public boolean isValid(final String code) {
        if (code == null) {
            return false;
        }
        // Dispatch on the full code length (including the check digit).
        switch (code.length()) {
        case 10: // CHECKSTYLE IGNORE MagicNumber
            return ISBN10_CHECK_DIGIT.isValid(code);
        case 13: // CHECKSTYLE IGNORE MagicNumber
            return ISBN13_CHECK_DIGIT.isValid(code);
        default:
            return false;
        }
    }
}
| 8,022 |
0 | Create_ds/fineract-cn-default-setup/accounting/src/test/java/org/apache/fineract | Create_ds/fineract-cn-default-setup/accounting/src/test/java/org/apache/fineract/cn/ImportTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.apache.fineract.cn.accounting.api.v1.client.LedgerManager;
import org.apache.fineract.cn.accounting.api.v1.domain.Account;
import org.apache.fineract.cn.accounting.api.v1.domain.Ledger;
import org.apache.fineract.cn.accounting.importer.AccountImporter;
import org.apache.fineract.cn.accounting.importer.LedgerImporter;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Verifies that the standard chart of accounts CSV resources are correctly
 * formatted by running them through the ledger and account importers against
 * a mocked {@link LedgerManager}.
 *
 * @author Myrle Krantz
 */
@RunWith(SpringRunner.class)
public class ImportTest {
  /** Ledgers "created" through the mock, keyed by identifier, so findLedger can return them. */
  private final Map<String, Ledger> createdLedgers = new HashMap<>();

  @MockBean
  LedgerManager ledgerManagerMock;

  @MockBean
  Logger loggerMock;

  /**
   * Imports the bundled ledgers.csv and accounts.csv; the mock answers
   * installed in {@link #prepare()} fail the test on any invalid entity.
   */
  @Test
  public void testStandardChartOfAccountsIsCorrectlyFormatted() throws IOException {
    final LedgerImporter ledgerImporter = new LedgerImporter(ledgerManagerMock, loggerMock);
    final URL ledgersUrl = ClassLoader.getSystemResource("standardChartOfAccounts/ledgers.csv");
    Assert.assertNotNull(ledgersUrl);
    ledgerImporter.importCSV(ledgersUrl);
    final AccountImporter accountImporter = new AccountImporter(ledgerManagerMock, loggerMock);
    final URL accountsUrl = ClassLoader.getSystemResource("standardChartOfAccounts/accounts.csv");
    Assert.assertNotNull(accountsUrl);
    accountImporter.importCSV(accountsUrl);
  }

  /** Installs validating answers on the mocked ledger manager. */
  @Before
  public void prepare() {
    // createLedger(ledger) carries the ledger at argument 0;
    // addSubLedger(parentIdentifier, ledger) carries it at argument 1.
    Mockito.doAnswer(new CollectCreatedLedgers(0)).when(ledgerManagerMock).createLedger(Matchers.any());
    Mockito.doAnswer(new CollectCreatedLedgers(1)).when(ledgerManagerMock).addSubLedger(Matchers.any(), Matchers.any());
    Mockito.doAnswer(new ReturnLedgers()).when(ledgerManagerMock).findLedger(Matchers.any());
    Mockito.doAnswer(new ValidateArgument<>(0, Account.class)).when(ledgerManagerMock).createAccount(Matchers.any());
  }

  /** Bean-validates the Ledger argument at the given index and records it in {@link #createdLedgers}. */
  class CollectCreatedLedgers implements Answer<Void> {
    private final int indexOfLedger;

    CollectCreatedLedgers(final int indexOfLedger) {
      this.indexOfLedger = indexOfLedger;
    }

    @Override
    public Void answer(final InvocationOnMock invocation) {
      final Ledger ledger = invocation.getArgumentAt(indexOfLedger, Ledger.class);
      final ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
      final Validator validator = factory.getValidator();
      final Set<ConstraintViolation<Ledger>> errors = validator.validate(ledger);
      if (!errors.isEmpty()) {
        Assert.fail("Creation was requested for an invalid ledger.");
      }
      createdLedgers.put(ledger.getIdentifier(), ledger);
      return null;
    }
  }

  /** Answers findLedger by returning a previously recorded ledger (or null). */
  class ReturnLedgers implements Answer<Ledger> {
    @Override
    public Ledger answer(final InvocationOnMock invocation) {
      final String ledgerIdentifier = invocation.getArgumentAt(0, String.class);
      return createdLedgers.get(ledgerIdentifier);
    }
  }

  /** Bean-validates the argument at the given index and fails the test on any violation. */
  class ValidateArgument<T> implements Answer<Void> {
    private final int indexOfArgument;
    private final Class<T> thingyClass;

    ValidateArgument(final int indexOfArgument, final Class<T> thingyClass) {
      this.indexOfArgument = indexOfArgument;
      this.thingyClass = thingyClass;
    }

    @Override
    public Void answer(final InvocationOnMock invocation) {
      final T thingy = invocation.getArgumentAt(indexOfArgument, thingyClass);
      final ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
      final Validator validator = factory.getValidator();
      final Set<ConstraintViolation<T>> errors = validator.validate(thingy);
      if (!errors.isEmpty()) {
        Assert.fail("Creation was requested for an invalid thingy.");
      }
      return null;
    }
  }
}
| 8,023 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/TestArchiveReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import junit.framework.Assert;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.util.GenerateTestFile;
import org.apache.hadoop.chukwa.util.TempFileUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.pig.*;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.*;
/**
 * Note that this test will NOT work if run from eclipse.
 *
 * Pig needs a jarfile, and therefore the test makes fairly strong
 * assumptions about its environment. It'll work correctly
 * if you do ant test.
 *
 */
public class TestArchiveReader extends PigTest {

  /** Runs Pig in local mode rather than against a MiniCluster. */
  protected ExecType getExecType() {
    return ExecType.LOCAL;
  }

  /**
   * Writes a small Chukwa sink file, loads it via the ChukwaArchive loader,
   * checks the loader's schema, and dumps the chunk sequence numbers.
   */
  public void testLocal() {
    // Unique scratch directory per run, under test.build.data (or /tmp).
    File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
    if (!tempDir.exists()) {
      tempDir.mkdirs();
    }
    String directory = tempDir.getAbsolutePath() + "/TestArchiveChukwaStorage_"
        + System.currentTimeMillis() + "/";
    System.out.println(directory);
    FileSystem fs = null;
    Configuration conf = null;
    try {
      conf = new Configuration();
      fs = FileSystem.getLocal(conf);
      // Generate a sink file with 10 chunks for the loader to read back.
      Path seqFile = new Path(directory, "test.seq");
      TempFileUtil.writeASinkFile(conf, fs, seqFile, 10);
      // Locate and register the chukwa-core jar so Pig can resolve the loader.
      File buildDir = new File(System.getProperty("chukwa.root.build.dir", "../../build/"));
      // File buildDir = new File(System.getProperty("chukwa.root.build.dir",
      // "/Users/asrabkin/workspace/chukwa_trunk/build"));
      String[] files = buildDir.list();
      for (String f : files) {
        if (f.startsWith("chukwa-core") && f.endsWith(".jar")) {
          log.info("Found" + buildDir.getAbsolutePath() + "/" + f);
          pigServer.registerJar(buildDir.getAbsolutePath() + "/" + f);
          break;
        }
      }
      // Register the chukwa-pig jar built by ant (see class javadoc).
      String pigJarDir = System.getProperty("chukwa-pig.build.dir", "../../build/");
      // pigJarDir = "/Users/asrabkin/workspace/chukwa_trunk/contrib/chukwa-pig";
      pigServer.registerJar(pigJarDir + "/chukwa-pig.jar");
      pigServer.registerQuery("A = load '" + seqFile.toString()
          + "' using org.apache.hadoop.chukwa.ChukwaArchive()"
          // +" as (ts: long,fields);");
          + ";");
      // pigServer.registerQuery("B = FOREACH A GENERATE ts,'myCluster',fields,fields#'csource','myRecord',fields#'csource','myApplication', fields#'A';");
      // pigServer.registerQuery("define seqWriter org.apache.hadoop.chukwa.pig.ChukwaStorer('c_timestamp', 'c_cluster' ,'fields','c_pk','c_recordtype','c_source','c_application','myFieldA');");
      // pigServer.registerQuery("STORE B into '" + directory
      // + "/chukwa-pig.evt' using seqWriter;");
      // The loader must report the canonical archive schema.
      Schema schema_A = pigServer.dumpSchema("A");
      assertTrue(schema_A.equals(ChukwaArchive.chukwaArchiveSchema));
      // pigServer.explain("A", System.out);
      // pigServer.registerQuery("B = DUMP A");
      pigServer.registerQuery("B = FOREACH A GENERATE seqNo;");
      Iterator<Tuple> chunks = pigServer.openIterator("B");
      // Only warns (does not fail) when no seqNos come back.
      if(!chunks.hasNext())
        System.out.println("WARN: I expected to get some seqNos");
      while(chunks.hasNext()) {
        System.out.println(chunks.next());
      }
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail();
    } finally {
      // Best-effort cleanup of the scratch directory and the Pig server.
      if (fs != null) {
        try {
          fs.delete(new Path(directory), true);
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
      pigServer.shutdown();
    }
  }

  /**
   * Reads a ChukwaRecord sequence file and renders every key/record pair
   * (fields sorted by name) into a single string for comparison in tests.
   *
   * @param fs   file system holding the file
   * @param conf Hadoop configuration used to open the reader
   * @param file path of the sequence file to dump
   * @return concatenated textual rendering of all records
   * @throws Throwable rethrown after failing the test if the file cannot be read
   */
  protected String dumpArchive(FileSystem fs, Configuration conf, String file)
      throws Throwable {
    SequenceFile.Reader reader = null;
    log.info("File: [" + file + "]" + fs.exists(new Path(file)));
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {
        sb.append("===== KEY =====");
        sb.append("DataType: " + key.getReduceType());
        sb.append("Key: " + key.getKey());
        sb.append("===== Value =====");
        // Sort so the rendering is deterministic regardless of field order.
        String[] fields = record.getFields();
        Arrays.sort(fields );
        sb.append("Timestamp : " + record.getTime());
        for (String field : fields) {
          sb.append("[" + field + "] :" + record.getValue(field));
        }
      }
      return sb.toString();
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail("Exception while reading SeqFile" + e.getMessage());
      throw e;
    }
    finally {
      if (reader != null) {
        reader.close();
      }
    }
  }
}
| 8,024 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/PigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import static org.apache.pig.ExecType.LOCAL;
import static org.apache.pig.ExecType.MAPREDUCE;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.test.MiniCluster;
import org.junit.After;
import org.junit.Before;
/**
 * Base class for Pig-based tests: spins up a {@link PigServer} before each
 * test (local or MiniCluster, depending on {@link #getExecType()}) and shuts
 * it down afterwards.
 */
public abstract class PigTest extends TestCase {
  protected final Log log = LogFactory.getLog(getClass());
  protected ExecType execType = LOCAL;
  protected MiniCluster cluster;
  protected PigServer pigServer;

  /** Subclasses choose LOCAL or MAPREDUCE execution. */
  protected abstract ExecType getExecType();

  @Before
  @Override
  protected void setUp() throws Exception {
    if (getExecType() != MAPREDUCE) {
      // Plain local Pig server; no cluster needed.
      log.info("Local cluster");
      pigServer = new PigServer(LOCAL);
    } else {
      // Back the server with a MiniCluster for distributed execution.
      log.info("MapReduce cluster");
      cluster = MiniCluster.buildCluster();
      pigServer = new PigServer(MAPREDUCE, cluster.getProperties());
    }
  }

  @After
  @Override
  protected void tearDown() throws Exception {
    pigServer.shutdown();
  }
}
| 8,025 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/TestRecordMerger.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.TupleFactory;
import junit.framework.Assert;
import junit.framework.TestCase;
/**
 * Unit test for {@link RecordMerger}: merging a bag of two map-tuples must
 * yield the union of all key/value pairs.
 */
public class TestRecordMerger extends TestCase {

  public void testRecordMerger() {
    final RecordMerger func = new RecordMerger();
    try {
      final TupleFactory tf = TupleFactory.getInstance();

      // First record: keys A, B, C. (Values are Pig DataByteArrays, so the
      // maps are typed accordingly rather than as the raw <String, String>.)
      final Map<String, DataByteArray> in = new HashMap<String, DataByteArray>();
      in.put("A", new DataByteArray("100"));
      in.put("B", new DataByteArray("200"));
      in.put("C", new DataByteArray("300"));

      // Second record: keys D, E.
      final Map<String, DataByteArray> in2 = new HashMap<String, DataByteArray>();
      in2.put("D", new DataByteArray("400"));
      in2.put("E", new DataByteArray("500"));

      final DataBag bg = DefaultBagFactory.getInstance().newDefaultBag();
      bg.add(tf.newTuple(in));
      bg.add(tf.newTuple(in2));

      final Map<?, ?> output = func.exec(tf.newTuple(bg));

      // All five keys must survive the merge with their original values.
      Assert.assertTrue(output.containsKey("A"));
      Assert.assertTrue(output.containsKey("B"));
      Assert.assertTrue(output.containsKey("C"));
      Assert.assertTrue(output.containsKey("D"));
      Assert.assertTrue(output.containsKey("E"));
      Assert.assertTrue(output.get("A").toString().equals("100"));
      Assert.assertTrue(output.get("B").toString().equals("200"));
      Assert.assertTrue(output.get("C").toString().equals("300"));
      Assert.assertTrue(output.get("D").toString().equals("400"));
      Assert.assertTrue(output.get("E").toString().equals("500"));
    } catch (IOException e) {
      Assert.fail();
    }
  }
}
| 8,026 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/TestLocalChukwaStorage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import junit.framework.Assert;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.util.GenerateTestFile;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.pig.ExecType;
public class TestLocalChukwaStorage extends PigTest {

  /** Runs Pig in local mode rather than against a MiniCluster. */
  protected ExecType getExecType() {
    return ExecType.LOCAL;
  }

  /**
   * End-to-end round trip: generates a Chukwa event file, runs a Pig script
   * that loads, projects and stores it via ChukwaLoader/ChukwaStorer, then
   * compares the stored sequence file's textual dump to a known-good string.
   */
  public void testLocal() {
    // Unique scratch directory per run, under test.build.data (or /tmp).
    File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
    if (!tempDir.exists()) {
      tempDir.mkdirs();
    }
    String directory = tempDir.getAbsolutePath() + "/TestLocalChukwaStorage_"
        + System.currentTimeMillis() + "/";
    System.out.println(directory);
    FileSystem fs = null;
    Configuration conf = null;
    try {
      conf = new Configuration();
      fs = FileSystem.getLocal(conf);
      // GenerateTestFile writes the fixture chukwaTestFile.evt into the directory.
      GenerateTestFile.fs =fs;
      GenerateTestFile.conf = conf;
      GenerateTestFile.createFile(directory);
      // Locate and register the chukwa-core jar so Pig can resolve the UDFs.
      File buildDir = new File(System.getProperty("chukwa.root.build.dir", "../../build/"));
      String[] files = buildDir.list();
      for (String f : files) {
        if (f.startsWith("chukwa-core") && f.endsWith(".jar")) {
          log.info("Found" + buildDir.getAbsolutePath() + "/" + f);
          pigServer.registerJar(buildDir.getAbsolutePath() + "/" + f);
          break;
        }
      }
      pigServer.registerJar(System.getProperty("chukwa-pig.build.dir", "../../build/") + "/chukwa-pig.jar");
      // Load the fixture, project the fields expected by ChukwaStorer, store.
      pigServer.registerQuery("A = load '"
          + directory
          + "/chukwaTestFile.evt' using org.apache.hadoop.chukwa.pig.ChukwaLoader() as (ts: long,fields);");
      pigServer.registerQuery("B = FOREACH A GENERATE ts,'myCluster',fields,fields#'csource','myRecord',fields#'csource','myApplication', fields#'A';");
      pigServer.registerQuery("define seqWriter org.apache.hadoop.chukwa.pig.ChukwaStorer('c_timestamp', 'c_cluster' ,'fields','c_pk','c_recordtype','c_source','c_application','myFieldA');");
      pigServer.registerQuery("STORE B into '" + directory
          + "/chukwa-pig.evt' using seqWriter;");
      try {
        // Dump the stored part file and compare against the golden rendering.
        String res = dumpArachive(fs,conf,directory+ "chukwa-pig.evt/part-m-00000");
        String expected = "===== KEY =====DataType: myRecordKey: 1242000000/M0/1242205800===== Value =====Timestamp : 1242205800[A] :7[B] :3[C] :9[capp] :myApplication[csource] :M0[ctags] : cluster=\"myCluster\" [myFieldA] :7===== KEY =====DataType: myRecordKey: 1242000000/M0/1242205800===== Value =====Timestamp : 1242205800[D] :1[capp] :myApplication[csource] :M0[ctags] : cluster=\"myCluster\" ===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205800===== Value =====Timestamp : 1242205800[A] :17[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" [myFieldA] :17===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205800===== Value =====Timestamp : 1242205800[B] :37[C] :51[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" ===== KEY =====DataType: myRecordKey: 1242000000/M0/1242205860===== Value =====Timestamp : 1242205860[A] :8[C] :3[D] :12[capp] :myApplication[csource] :M0[ctags] : cluster=\"myCluster\" [myFieldA] :8===== KEY =====DataType: myRecordKey: 1242000000/M0/1242205860===== Value =====Timestamp : 1242205860[A] :8[B] :6[capp] :myApplication[csource] :M0[ctags] : cluster=\"myCluster\" [myFieldA] :8===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205860===== Value =====Timestamp : 1242205860[A] :13.2[B] :23[C] :8.5[D] :6[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" [myFieldA] :13.2===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205860===== Value =====Timestamp : 1242205860[A] :13.2[B] :23[C] :8.5[D] :6[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" [myFieldA] :13.2===== KEY =====DataType: myRecordKey: 1242000000/M0/1242205920===== Value =====Timestamp : 1242205920[A] :8[B] :6[C] :8[D] :6[E] :48.5[capp] :myApplication[csource] :M0[ctags] : cluster=\"myCluster\" [myFieldA] :8===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205920===== Value =====Timestamp : 1242205920[A] :8.3[B] :5.2[C] :37.7[D] :61.9[E] :40.3[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" [myFieldA] :8.3===== KEY =====DataType: myRecordKey: 1242000000/M1/1242205980===== Value =====Timestamp : 1242205980[A] :18.3[B] :1.2[C] :7.7[capp] :myApplication[csource] :M1[ctags] : cluster=\"myCluster\" [myFieldA] :18.3===== KEY =====DataType: myRecordKey: 1242000000/M2/1242205980===== Value =====Timestamp : 1242205980[A] :8.9[B] :8.3[C] :7.2[D] :6.1[capp] :myApplication[csource] :M2[ctags] : cluster=\"myCluster\" [myFieldA] :8.9===== KEY =====DataType: myRecordKey: 1242000000/M3/1242205920===== Value =====Timestamp : 1242205920[A] :12.5[B] :26.82[C] :89.51[capp] :myApplication[csource] :M3[ctags] : cluster=\"myCluster\" [myFieldA] :12.5===== KEY =====DataType: myRecordKey: 1242000000/M4/1242205920===== Value =====Timestamp : 1242205920[A] :13.91[B] :21.02[C] :18.05[capp] :myApplication[csource] :M4[ctags] : cluster=\"myCluster\" [myFieldA] :13.91";
        log.info("res[" + res + "]");
        Assert.assertTrue("expected result differ from current result",res.equals(expected));
        log.info(res);
      } catch (Throwable e) {
        e.printStackTrace();
        Assert.fail();
      }
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail();
    } finally {
      // Best-effort cleanup of the scratch directory and the Pig server.
      if (fs != null) {
        try {
          fs.delete(new Path(directory), true);
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
      pigServer.shutdown();
    }
  }

  /**
   * Reads a ChukwaRecord sequence file and renders every key/record pair
   * (fields sorted by name) into a single string for comparison in tests.
   * NOTE(review): method name has a typo ("Arachive"); kept for compatibility.
   *
   * @param fs   file system holding the file
   * @param conf Hadoop configuration used to open the reader
   * @param file path of the sequence file to dump
   * @return concatenated textual rendering of all records
   * @throws Throwable rethrown after failing the test if the file cannot be read
   */
  protected String dumpArachive(FileSystem fs, Configuration conf, String file)
      throws Throwable {
    SequenceFile.Reader reader = null;
    log.info("File: [" + file + "]" + fs.exists(new Path(file)));
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {
        sb.append("===== KEY =====");
        sb.append("DataType: " + key.getReduceType());
        sb.append("Key: " + key.getKey());
        sb.append("===== Value =====");
        // Sort so the rendering is deterministic regardless of field order.
        String[] fields = record.getFields();
        Arrays.sort(fields );
        sb.append("Timestamp : " + record.getTime());
        for (String field : fields) {
          sb.append("[" + field + "] :" + record.getValue(field));
        }
      }
      return sb.toString();
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail("Exception while reading SeqFile" + e.getMessage());
      throw e;
    }
    finally {
      if (reader != null) {
        reader.close();
      }
    }
  }
}
| 8,027 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/TestTimePartition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import org.apache.pig.data.DefaultTupleFactory;
import org.apache.pig.data.Tuple;
import junit.framework.Assert;
import junit.framework.TestCase;
/**
 * Unit tests for the TimePartition UDF: a timestamp must be floored to the
 * start of the partition interval the UDF was configured with.
 */
public class TestTimePartition extends TestCase {

  /** Timestamp used by every test (2009-05-26T22:32:49.372Z). */
  private static final long TIMESTAMP = 1243377169372L;

  /**
   * Runs TimePartition with the given interval (ms) over {@link #TIMESTAMP}
   * and asserts the result equals the expected partition boundary.
   */
  private void assertPartition(final long intervalMs, final long expectedTimePartition) {
    final TimePartition func = new TimePartition("" + intervalMs);
    final Tuple input = DefaultTupleFactory.getInstance().newTuple(TIMESTAMP);
    try {
      final Long timePartition = func.exec(input);
      Assert.assertTrue(timePartition.longValue() == expectedTimePartition);
    } catch (IOException e) {
      Assert.fail();
    }
  }

  public void test_5sec_TimePartition() {
    assertPartition(5 * 1000L, 1243377165000L);
  }

  public void test_5Min_TimePartition() {
    assertPartition(5 * 60 * 1000L, 1243377000000L);
  }

  public void test_60Min_TimePartition() {
    assertPartition(60 * 60 * 1000L, 1243375200000L);
  }

  public void test_1Day_TimePartition() {
    assertPartition(24 * 60 * 60 * 1000L, 1243296000000L);
  }

  /** A 90-day interval must not overflow or throw; only absence of errors is checked. */
  public void test_largeTimePartition() {
    try {
      final TimePartition func = new TimePartition("7776000000");
      final Tuple input = DefaultTupleFactory.getInstance().newTuple(TIMESTAMP);
      func.exec(input);
    } catch (Throwable e) {
      Assert.fail();
    }
  }
}
| 8,028 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/TestParseDouble.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DefaultTupleFactory;
import org.apache.pig.data.Tuple;
/**
 * Unit tests for the PARSEDOUBLE UDF: numeric strings (and DataByteArrays)
 * parse to their double value; non-numeric input yields null.
 */
public class TestParseDouble extends TestCase {

  /**
   * Runs PARSEDOUBLE on the given tuple and asserts the parsed value
   * equals the expected double.
   */
  private void assertParsesTo(final Tuple input, final double expected) {
    final PARSEDOUBLE func = new PARSEDOUBLE();
    try {
      final Double output = func.exec(input);
      Assert.assertTrue(output.doubleValue() == expected);
    } catch (IOException e) {
      Assert.fail();
    }
  }

  public void testPARSEDOUBLE() {
    // Integer-formatted string.
    assertParsesTo(DefaultTupleFactory.getInstance().newTuple("10"), Double.parseDouble("10"));
  }

  public void testPARSEDOUBLE2() {
    // Decimal-formatted string.
    assertParsesTo(DefaultTupleFactory.getInstance().newTuple("10.86"), Double.parseDouble("10.86"));
  }

  public void testPARSEDOUBLE3() {
    // Non-numeric input must yield null rather than throw.
    final PARSEDOUBLE func = new PARSEDOUBLE();
    try {
      final Tuple input = DefaultTupleFactory.getInstance().newTuple("10aaa");
      Assert.assertNull(func.exec(input));
    } catch (IOException e) {
      Assert.fail();
    }
  }

  public void testPARSEDOUBLE4() {
    // Pig's raw byte-array type must parse the same as a String.
    assertParsesTo(DefaultTupleFactory.getInstance().newTuple(new DataByteArray("10.86")), Double.parseDouble("10.86"));
  }
}
| 8,029 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/util/GenerateTestFile.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.util;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
/**
 * Generates the {@code chukwaTestFile.evt} sequence file used by the
 * chukwa-pig tests. The file contains ChukwaRecordKey/ChukwaRecord pairs for
 * several machines across four one-minute timestamps, including one
 * deliberately duplicated record and two "late arrival" records.
 */
public class GenerateTestFile {
  /* Pig Test:
  A = load './chukwaTestFile.evt' using org.apache.hadoop.chukwa.pig.ChukwaLoader() as (ts: long,fields);
  Dump A;
  (1242205800L,[A#7,B#3,csource#M0,C#9])
  (1242205800L,[D#1,csource#M0])
  (1242205800L,[A#17,csource#M1])
  (1242205800L,[B#37,C#51,csource#M1])
  (1242205860L,[D#12,A#8,csource#M0,C#3])
  (1242205860L,[A#8,B#6,csource#M0])
  (1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])
  (1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])
  (1242205920L,[D#6,E#48.5,A#8,B#6,C#8,csource#M0])
  (1242205920L,[D#61.9,E#40.3,A#8.3,B#5.2,C#37.7,csource#M1])
  (1242205980L,[A#18.3,B#1.2,csource#M1,C#7.7])
  (1242205980L,[D#6.1,A#8.9,B#8.3,C#7.2,csource#M2])
  (1242205920L,[A#12.5,B#26.82,csource#M3,C#89.51])
  (1242205920L,[A#13.91,B#21.02,csource#M4,C#18.05])
  B = group A by (ts,fields#'csource');
  Dump B;
  ((1242205800L,M0),{(1242205800L,[A#7,B#3,csource#M0,C#9]),(1242205800L,[D#1,csource#M0])})
  ((1242205800L,M1),{(1242205800L,[A#17,csource#M1]),(1242205800L,[B#37,C#51,csource#M1])})
  ((1242205860L,M0),{(1242205860L,[D#12,A#8,csource#M0,C#3]),(1242205860L,[A#8,B#6,csource#M0])})
  ((1242205860L,M1),{(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1]),(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])})
  ((1242205920L,M0),{(1242205920L,[D#6,E#48.5,A#8,B#6,C#8,csource#M0])})
  ((1242205920L,M1),{(1242205920L,[D#61.9,E#40.3,A#8.3,B#5.2,C#37.7,csource#M1])})
  ((1242205920L,M3),{(1242205920L,[A#12.5,B#26.82,csource#M3,C#89.51])})
  ((1242205920L,M4),{(1242205920L,[A#13.91,B#21.02,csource#M4,C#18.05])})
  ((1242205980L,M1),{(1242205980L,[A#18.3,B#1.2,csource#M1,C#7.7])})
  ((1242205980L,M2),{(1242205980L,[D#6.1,A#8.9,B#8.3,C#7.2,csource#M2])})
  C = FOREACH B GENERATE group.$0,group.$1,org.apache.hadoop.chukwa.RecordMerger(A.fields);
  Dump C;
  (1242205800L,M0,[D#1,A#7,B#3,csource#M0,C#9])
  (1242205800L,M1,[A#17,B#37,C#51,csource#M1])
  (1242205860L,M0,[D#12,A#8,B#6,csource#M0,C#3])
  (1242205860L,M1,[D#6,A#13.2,B#23,csource#M1,C#8.5])
  (1242205920L,M0,[D#6,E#48.5,A#8,B#6,csource#M0,C#8])
  (1242205920L,M1,[D#61.9,E#40.3,A#8.3,B#5.2,csource#M1,C#37.7])
  (1242205920L,M3,[A#12.5,B#26.82,C#89.51,csource#M3])
  (1242205920L,M4,[A#13.91,B#21.02,C#18.05,csource#M4])
  (1242205980L,M1,[A#18.3,B#1.2,C#7.7,csource#M1])
  (1242205980L,M2,[D#6.1,A#8.9,B#8.3,csource#M2,C#7.2])
  */

  public static Configuration conf = null;
  public static FileSystem fs = null;

  public static void main(String[] args) throws Exception {
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    createFile(null);
  }

  /**
   * Builds one ChukwaRecord ("csource" plus the given key/value pairs, in
   * order), keys it as timePartition/machine/time, and appends it to writer.
   *
   * @param writer        open sequence-file writer
   * @param key           reusable key (reduce type already set by caller)
   * @param timePartition hour-partition prefix of the key
   * @param machine       value of the "csource" field and key component
   * @param time          record timestamp (seconds), also a key component
   * @param kvPairs       alternating field name / field value strings
   */
  private static void append(SequenceFile.Writer writer, ChukwaRecordKey key,
      String timePartition, String machine, long time, String... kvPairs)
      throws Exception {
    key.setKey(timePartition + "/" + machine + "/" + time);
    ChukwaRecord record = new ChukwaRecord();
    record.setTime(time);
    record.add("csource", machine);
    for (int i = 0; i + 1 < kvPairs.length; i += 2) {
      record.add(kvPairs[i], kvPairs[i + 1]);
    }
    writer.append(key, record);
  }

  /**
   * Writes the test sequence file into {@code path} (or the working directory
   * when {@code path} is null). Does nothing if the file already exists.
   */
  public static void createFile(String path) throws Exception {
    Path outputFile;
    if (path != null) {
      outputFile = new Path(path + "/chukwaTestFile.evt");
    } else {
      outputFile = new Path("chukwaTestFile.evt");
    }
    outputFile = outputFile.makeQualified(fs);
    if (fs.exists(outputFile)) {
      System.out.println("File already there, exit -1," + outputFile);
      return;
    }
    System.out.println("outputFile:" + outputFile);
    SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(fs, conf,
        outputFile, ChukwaRecordKey.class, ChukwaRecord.class,
        CompressionType.NONE);
    try {
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setReduceType("TestSeqFile");
      String timePartition = "1242205200"; // Wed, 13 May 2009 09:00:00 GMT

      long t0 = 1242205800L; // Wed, 13 May 2009 09:10:00 GMT
      long t1 = 1242205860L; // Wed, 13 May 2009 09:11:00 GMT
      long t2 = 1242205920L; // Wed, 13 May 2009 09:12:00 GMT
      long t3 = 1242205980L; // Wed, 13 May 2009 09:13:00 GMT

      append(seqFileWriter, key, timePartition, "M0", t0, "A", "7", "B", "3", "C", "9");
      append(seqFileWriter, key, timePartition, "M0", t0, "D", "1");
      append(seqFileWriter, key, timePartition, "M1", t0, "A", "17");
      append(seqFileWriter, key, timePartition, "M1", t0, "B", "37", "C", "51");
      append(seqFileWriter, key, timePartition, "M0", t1, "A", "8", "C", "3", "D", "12");
      append(seqFileWriter, key, timePartition, "M0", t1, "A", "8", "B", "6");
      // deliberate duplicate: the same M1/t1 record is written twice
      append(seqFileWriter, key, timePartition, "M1", t1, "A", "13.2", "B", "23", "C", "8.5", "D", "6");
      append(seqFileWriter, key, timePartition, "M1", t1, "A", "13.2", "B", "23", "C", "8.5", "D", "6");
      append(seqFileWriter, key, timePartition, "M0", t2, "A", "8", "B", "6", "C", "8", "D", "6", "E", "48.5");
      append(seqFileWriter, key, timePartition, "M1", t2, "A", "8.3", "B", "5.2", "C", "37.7", "D", "61.9", "E", "40.3");
      append(seqFileWriter, key, timePartition, "M1", t3, "A", "18.3", "B", "1.2", "C", "7.7");
      append(seqFileWriter, key, timePartition, "M2", t3, "A", "8.9", "B", "8.3", "C", "7.2", "D", "6.1");
      // late arrivals for t2 from machines M3 and M4
      append(seqFileWriter, key, timePartition, "M3", t2, "A", "12.5", "B", "26.82", "C", "89.51");
      append(seqFileWriter, key, timePartition, "M4", t2, "A", "13.91", "B", "21.02", "C", "18.05");
    } finally {
      // close even if an append fails, so the file handle is not leaked
      seqFileWriter.close();
    }
  }
}
| 8,030 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/ChukwaArchive.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.Expression;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultTupleFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import static org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
/**
 * Pig loader for Chukwa archive sequence files
 * ({@code ChukwaArchiveKey} / {@code ChunkImpl} pairs). Each chunk is exposed
 * as a flat tuple: (seqNo:long, type, name, source, tags, data:bytearray).
 */
public class ChukwaArchive extends LoadFunc implements LoadMetadata {

  SequenceFileRecordReader<ChukwaArchiveKey, ChunkImpl> reader;
  private TupleFactory tf = DefaultTupleFactory.getInstance();

  static Schema chukwaArchiveSchema;
  static ResourceSchema chukwaArchiveResourceSchema;
  static int schemaFieldCount;
  static {
    // Fixed output schema, mirroring the fields set in getNext() by position.
    chukwaArchiveSchema = new Schema();
    chukwaArchiveSchema.add(new FieldSchema("seqNo", DataType.LONG));
    chukwaArchiveSchema.add(new FieldSchema("type", DataType.CHARARRAY));
    chukwaArchiveSchema.add(new FieldSchema("name", DataType.CHARARRAY));
    chukwaArchiveSchema.add(new FieldSchema("source", DataType.CHARARRAY));
    chukwaArchiveSchema.add(new FieldSchema("tags", DataType.CHARARRAY));
    chukwaArchiveSchema.add(new FieldSchema("data", DataType.BYTEARRAY));
    schemaFieldCount = chukwaArchiveSchema.size();
    chukwaArchiveResourceSchema = new ResourceSchema(chukwaArchiveSchema);
    //do we want to expose the record offsets?
  }

  /**
   * Returns the next chunk as a 6-field tuple, or null at end of input.
   *
   * @throws IOException on read failure or interruption
   */
  @Override
  public Tuple getNext() throws IOException {
    ChunkImpl val;
    try {
      if (!reader.nextKeyValue()) {
        return null;
      }
      // BUG FIX: read the chunk the reader just advanced to. The previous
      // implementation used ChunkImpl.getBlankChunk() here, so every tuple
      // was built from an empty chunk and the real data was discarded.
      val = reader.getCurrentValue();
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
    Tuple t = tf.newTuple(schemaFieldCount);
    t.set(0, Long.valueOf(val.seqID));
    t.set(1, val.getDataType());
    t.set(2, val.getStreamName());
    t.set(3, val.getSource());
    t.set(4, val.getTags());
    byte[] data = val.getData();
    t.set(5, (data == null) ? new DataByteArray() : new DataByteArray(data));
    return t;
  }

  @Override
  public ResourceSchema getSchema(String s, Job job) throws IOException {
    return chukwaArchiveResourceSchema;
  }

  /** No statistics are published for archive files. */
  @Override
  public ResourceStatistics getStatistics(String s, Job job) throws IOException {
    return null;
  }

  /** Partition filtering is not supported. */
  @Override
  public String[] getPartitionKeys(String s, Job job) throws IOException {
    return null;
  }

  @Override
  public void setPartitionFilter(Expression expression) throws IOException {
  }

  @SuppressWarnings("unchecked")
  @Override
  public InputFormat getInputFormat() throws IOException {
    return new SequenceFileInputFormat<ChukwaArchiveKey, ChunkImpl>();
  }

  @SuppressWarnings("unchecked")
  @Override
  public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
    this.reader = (SequenceFileRecordReader) reader;
  }

  @Override
  public void setLocation(String location, Job job) throws IOException {
    FileInputFormat.setInputPaths(job, location);
  }
}
| 8,031 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/TimePartition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
/**
 * Pig UDF that buckets a timestamp into fixed-size partitions: the returned
 * value is the input timestamp rounded down to a multiple of the period
 * supplied at construction time.
 */
public class TimePartition extends EvalFunc<Long> {

  // Partition width, in the same units as the incoming timestamps.
  protected long period = 0;

  public TimePartition(String strPeriod) {
    period = Long.parseLong(strPeriod);
  }

  /**
   * Returns the partition start for the timestamp in field 0, or null when
   * the tuple is missing/empty or the field cannot be parsed as a long.
   */
  @Override
  public Long exec(Tuple input) throws IOException {
    if (input == null || input.size() < 1) {
      return null;
    }
    try {
      long ts = Long.parseLong(input.get(0).toString());
      return ts - (ts % period);
    } catch (Exception e) {
      // Any failure (bad number, null field, zero period) yields null.
      e.printStackTrace();
      return null;
    }
  }

  /** Output is a single LONG, named after the input field when available. */
  @Override
  public Schema outputSchema(Schema input) {
    try {
      return new Schema(
          new Schema.FieldSchema(input.getField(0).alias, DataType.LONG));
    } catch (FrontendException e) {
      return new Schema(new Schema.FieldSchema(
          getSchemaName("timePartition", input), DataType.LONG));
    }
  }
}
| 8,032 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/PARSEDOUBLE.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
/**
 * string.PARSEDOUBLE implements a binding to the Java method
 * {@link java.lang.Double#parseDouble(String)}
 *
 * <dl>
 * <dt><b>Parameters:</b></dt>
 * <dd><code>strtoconvert</code> - <code>chararray</code>
 *
 * <dt><b>Return Value:</b></dt>
 * <dd><code>double</code> parsed value, or null when the field is absent or
 * not a valid number</dd>
 *
 * <dt><b>Return Schema:</b></dt>
 * <dd>parselong: double</dd>
 *
 * <dt><b>Example:</b></dt>
 * <dd><code>
 * register string.jar;<br/>
 * A = load 'mydata' using PigStorage() as ( stringnumber: chararray );<br/>
 * B = foreach A generate stringnumber, org.apache.hadoop.chukwa.PARSEDOUBLE(stringnumber));
 * </code></dd>
 * </dl>
 *
 */
public class PARSEDOUBLE extends EvalFunc<Double> {

  /**
   * Parses field 0 of the tuple as a double. Null/empty input or any parse
   * failure yields null instead of failing the job.
   */
  public Double exec(Tuple input) throws IOException {
    if (input == null || input.size() < 1) {
      return null;
    }
    try {
      return Double.valueOf(Double.parseDouble(input.get(0).toString()));
    } catch (Exception e) {
      // Swallow intentionally: malformed values map to null.
      return null;
    }
  }

  /** Output is a single DOUBLE, named after the input field when available. */
  @Override
  public Schema outputSchema(Schema input) {
    try {
      return new Schema(
          new Schema.FieldSchema(input.getField(0).alias, DataType.DOUBLE));
    } catch (FrontendException e) {
      return new Schema(new Schema.FieldSchema(
          getSchemaName("parseDouble", input), DataType.DOUBLE));
    }
  }
}
| 8,033 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/RecordMerger.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
/**
 * Pig UDF that flattens a bag of map-typed fields into a single map.
 * Later entries overwrite earlier ones when keys collide.
 *
 * return time,Map
 */
public class RecordMerger extends EvalFunc<Map<String, Object>> {

  /**
   * Merges every map in the bag at field 0 into one map keyed by the
   * string form of the original keys.
   *
   * @param input tuple whose first field is a DataBag of single-map tuples
   * @return merged field map (may be empty, never null)
   */
  @SuppressWarnings("unchecked")
  @Override
  public Map<String, Object> exec(Tuple input) throws IOException {
    Map<String, Object> newPigMapFields = new HashMap<String, Object>();
    DataBag bg = (DataBag) input.get(0);
    Iterator<Tuple> bagIterator = bg.iterator();
    while (bagIterator.hasNext()) {
      Map<Object, Object> map = (Map<Object, Object>) bagIterator.next().get(0);
      // entrySet avoids the keySet()+get() double lookup of the original
      for (Map.Entry<Object, Object> entry : map.entrySet()) {
        newPigMapFields.put(entry.getKey().toString(), entry.getValue());
      }
    }
    return newPigMapFields;
  }

  /** Output is a single MAP, named after the input field when available. */
  @Override
  public Schema outputSchema(Schema input) {
    Schema schema = null;
    try {
      schema = new Schema(new Schema.FieldSchema(input.getField(0).alias, DataType.MAP));
    } catch (FrontendException e) {
      e.printStackTrace();
    }
    return schema;
  }
}
| 8,034 |
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/pig/ChukwaStorer.java | package org.apache.hadoop.chukwa.pig;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Calendar;
import java.util.Map;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.pig.StoreFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.Tuple;
/**
 * Pig storer that writes tuples out as ChukwaRecordKey/ChukwaRecord pairs in
 * a compressed sequence file. The constructor's field-name list maps tuple
 * positions to record fields; the reserved names c_timestamp, c_pk, c_source,
 * c_recordtype, c_application and c_cluster are pulled out of the record and
 * used to build the key and standard Chukwa metadata fields instead.
 */
public class ChukwaStorer extends StoreFunc {
  // Writer supplied by Pig in prepareToWrite().
  RecordWriter<ChukwaRecordKey, ChukwaRecord> writer;
  // Shared calendar used to truncate timestamps; guarded by synchronized below.
  Calendar calendar = Calendar.getInstance();
  // Tuple positions of the reserved fields; -1 means "not configured".
  int timestampFieldIndex = -1;
  int pkFieldIndex = -1;
  int sourceFieldIndex = -1;
  int clusterNameFieldIndex = -1;
  int recordTypeFieldIndex = -1;
  int applicationFieldIndex = -1;
  // Positional field names for non-reserved tuple fields.
  String[] fields = null;
  public ChukwaStorer() {
  }
  /**
   * @param scfields one name per tuple position; reserved c_* names select
   *                 the special handling described on the class.
   */
  public ChukwaStorer(String... scfields ) {
    this.fields = scfields;
    for (int i=0;i< scfields.length;i++) {
      if (scfields[i].equalsIgnoreCase("c_timestamp")) {
        timestampFieldIndex = i;
      } else if (scfields[i].equalsIgnoreCase("c_pk")) {
        pkFieldIndex = i;
      } else if (scfields[i].equalsIgnoreCase("c_source")) {
        sourceFieldIndex = i;
      } else if (scfields[i].equalsIgnoreCase("c_recordtype")) {
        recordTypeFieldIndex =i;
      } else if (scfields[i].equalsIgnoreCase("c_application")) {
        applicationFieldIndex =i;
      } else if (scfields[i].equalsIgnoreCase("c_cluster")) {
        clusterNameFieldIndex =i;
      }
    }
  }
  /**
   * Converts one tuple into a ChukwaRecord and writes it.
   * Key layout: timePartition/pk/timestamp, where timePartition is the
   * timestamp truncated to the hour (minutes/seconds/millis zeroed).
   *
   * @throws IOException wrapping any Pig ExecException or interruption
   */
  @Override
  public void putNext(Tuple f) throws IOException {
    long timePartition = 0l;
    long timestamp = 0L;
    // Defaults used when the corresponding reserved field is absent or null.
    String source = "N/A";
    String application = "N/A";
    String recordType = "N/A";
    String clusterName = "N/A";
    String pk = "";
    try {
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      // Provisional time; overwritten below if a c_timestamp field exists.
      record.setTime(System.currentTimeMillis());
      int inputSize = f.size();
      for(int i=0;i<inputSize;i++) {
        Object field = f.get(i);
        if (field == null) {
          continue;
        }
        // Dispatch on tuple position: reserved indexes feed key/metadata,
        // everything else becomes a record field.
        if (i == this.pkFieldIndex) {
          pk = field.toString();
          continue;
        } else if ( i == this.sourceFieldIndex) {
          source = field.toString();
          continue;
        }else if ( i== this.recordTypeFieldIndex) {
          recordType = field.toString();
          continue;
        }else if ( i== this.applicationFieldIndex) {
          application = field.toString();
          continue;
        } else if ( i== this.clusterNameFieldIndex) {
          clusterName = field.toString();
          continue;
        }else if (i == this.timestampFieldIndex) {
          timestamp = Long.parseLong(field.toString());
          record.setTime(timestamp);
          // Truncate to the hour to get the key's time partition; the
          // calendar is shared so access is synchronized.
          synchronized (calendar)
          {
            calendar.setTimeInMillis(timestamp);
            calendar.set(Calendar.MINUTE, 0);
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MILLISECOND, 0);
            timePartition = calendar.getTimeInMillis();
          }
          record.setTime(Long.parseLong(field.toString()));
          continue;
        } else if (field instanceof Map) {
          // Map-typed fields are exploded into individual record fields.
          Map<Object, Object> m = (Map<Object, Object>)field;
          for(Object o: m.keySet()) {
            record.add(o.toString(),m.get(o).toString());
          }
          continue;
        } else {
          // Plain field: use the configured name, or a positional fallback.
          if (i <fields.length ) {
            record.add(fields[i],field.toString());
          } else {
            record.add("field-"+i,field.toString());
          }
          continue;
        }
      }
      // Standard Chukwa metadata fields derived from the reserved values.
      record.add(Record.tagsField, " cluster=\"" + clusterName.trim() + "\" ");
      record.add(Record.sourceField, source);
      record.add(Record.applicationField, application);
      key.setKey("" + timePartition + "/" + pk + "/" + timestamp);
      key.setReduceType(recordType);
      writer.write(key, record);
    } catch (ExecException e) {
      IOException ioe = new IOException();
      ioe.initCause(e);
      throw ioe;
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
  }
  @Override
  public OutputFormat getOutputFormat() throws IOException {
    return new SequenceFileOutputFormat<ChukwaRecordKey, ChukwaRecord>();
  }
  @SuppressWarnings("unchecked")
  @Override
  public void prepareToWrite(RecordWriter writer) throws IOException {
    this.writer = writer;
  }
  /** Configures the job for compressed sequence-file output at {@code location}. */
  @Override
  public void setStoreLocation(String location, Job job) throws IOException {
    FileOutputFormat.setOutputPath(job, new Path(location));
    FileOutputFormat.setCompressOutput(job, true);
    FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    job.setOutputKeyClass(ChukwaRecordKey.class);
    job.setOutputValueClass(ChukwaRecord.class);
  }
}
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/pig/ChukwaLoader.java | package org.apache.hadoop.chukwa.pig;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.HashMap;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.record.Buffer;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.Expression;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DefaultTupleFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.data.DataType;
/**
 * Pig loader for Chukwa demux output (ChukwaRecordKey/ChukwaRecord sequence
 * files). Each record is exposed as a two-field tuple:
 * (timestamp: long, map of field name to bytearray value).
 */
public class ChukwaLoader extends LoadFunc implements LoadMetadata {
  SequenceFileRecordReader<ChukwaRecordKey, ChukwaRecord> reader;
  private TupleFactory tf = DefaultTupleFactory.getInstance();
  public ChukwaLoader() {
  }
  /**
   * Returns the next record as (time, fieldMap), or null at end of input.
   *
   * @throws IOException on read failure, interruption, or tuple build error
   */
  @Override
  public Tuple getNext() throws IOException {
    ChukwaRecord record = null;
    try {
      if (!reader.nextKeyValue()) {
        return null;
      }
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
    record = reader.getCurrentValue();
    Tuple ret = tf.newTuple(2);
    try
    {
      ret.set(0, new Long(record.getTime()));
      HashMap<Object, Object> pigMapFields = new HashMap<Object, Object>();
      TreeMap<String, Buffer> mapFields = record.getMapFields();
      if (mapFields != null)
      {
        for (String key : mapFields.keySet())
        {
          // Values are fetched through record.getValue(key) (string form)
          // rather than the raw Buffer, then wrapped as bytearrays for Pig.
          pigMapFields.put(key, new DataByteArray(record.getValue(key).getBytes()));
        }
      }
      ret.set(1, pigMapFields);
    } catch (ExecException e)
    {
      e.printStackTrace();
      throw new IOException(e);
    }
    return ret;
  }
  /** Fixed schema: (timestamp: long, map). */
  @Override
  public ResourceSchema getSchema(String s, Job job) throws IOException {
    Schema newSchema = new Schema();
    newSchema.add(new Schema.FieldSchema("timestamp", DataType.LONG));
    newSchema.add(new Schema.FieldSchema("map", DataType.MAP));
    return new ResourceSchema(newSchema);
  }
  /** No statistics are published. */
  @Override
  public ResourceStatistics getStatistics(String s, Job job) throws IOException {
    return null;
  }
  /** Partition filtering is not supported. */
  @Override
  public String[] getPartitionKeys(String s, Job job) throws IOException {
    return null;
  }
  @Override
  public void setPartitionFilter(Expression expression) throws IOException {
  }
  @SuppressWarnings("unchecked")
  @Override
  public InputFormat getInputFormat() throws IOException {
    return new SequenceFileInputFormat<ChukwaRecordKey, ChukwaRecord>();
  }
  @SuppressWarnings("unchecked")
  @Override
  public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
    this.reader = (SequenceFileRecordReader)reader;
  }
  @Override
  public void setLocation(String location, Job job) throws IOException {
    FileInputFormat.setInputPaths(job, location);
  }
}
0 | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/contrib/chukwa-pig/src/java/org/apache/hadoop/chukwa/tools/PigMover.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.tools;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
/**
 *
 * Utility class to move pig output closer to the Demux output <BR>
 * pigDir should looks like:<BR>
 * <UL>
 * <LI> workingDay + ".D" </LI>
 * <LI> workingDay + "_" + workingHour + ".H" </LI>
 * <LI> workingDay + "_" + workingHour + "_" + [0-5] + [0,5] + ".R" </LI>
 * </UL>
 *
 */
public class PigMover {

  private static Logger log = Logger.getLogger(PigMover.class);

  /** Prints the command-line synopsis. */
  public static void usage() {
    // BUG FIX: the previous message listed only 4 arguments while main()
    // requires exactly 5 (cluster, recordType, pigDir, dataDir, output dir).
    System.out
        .println("PigMover <cluster> <recordType> <pigDir> <dataDir> <finalOutPutDir>");
  }

  /**
   * Renames every file under dataDir (args[3]) into a timestamped
   * postprocess directory (args[4]) under cluster/recordType, then deletes
   * the pig working directory (args[2]) if all renames succeeded.
   *
   * @param args cluster, recordType, pigDir, dataDir, finalOutputDir
   * @throws IOException on filesystem errors
   */
  public static void main(String[] args) throws IOException {
    if (args.length != 5) {
      log.warn("Wrong number of arguments");
      usage();
      return;
    }
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    log.info("fs URI:" + fs.getUri());
    String cluster = args[0];
    String recordType = args[1];
    log.info("Cluster:" + cluster);
    log.info("recordType:" + recordType);
    Path rootpigDir = new Path(args[2]);
    log.info("rootpigDir:" + rootpigDir);
    Path dataDir = new Path(args[3]);
    log.info("dataDir:" + dataDir);
    if (!fs.exists(dataDir)) {
      throw new RuntimeException("input directory does not exist.");
    }
    String fileName = dataDir.getName();
    log.info("fileName:" + fileName);
    String rootPigPostProcessDir = args[4];
    log.info("chukwaPostProcessDir: [" + rootPigPostProcessDir + "]");
    String finalPigOutputDir = rootPigPostProcessDir + "/pigOutputDir_"
        + System.currentTimeMillis() + "/" + cluster + "/" + recordType;
    log.info("finalPigOutputDir:" + finalPigOutputDir);
    Path postProcessDir = new Path(finalPigOutputDir);
    fs.mkdirs(postProcessDir);
    // Move each pig output file; only delete the source tree if every
    // rename succeeded, so a partial failure leaves data recoverable.
    boolean movingDone = true;
    FileStatus[] files = fs.listStatus(dataDir);
    for (int i = 0; i < files.length; i++) {
      log.info("fileIn:" + files[i].getPath());
      Path p = new Path(finalPigOutputDir + "/" + recordType + "_" + i + "_" + fileName + ".evt");
      log.info("fileOut:" + p);
      if (fs.rename(files[i].getPath(), p) == false) {
        log.warn("Cannot rename " + files[i].getPath() + " to " + p);
        movingDone = false;
      }
    }
    if (movingDone) {
      log.info("Deleting:" + rootpigDir);
      fs.delete(rootpigDir, true);
    }
  }
}
| 8,037 |
0 | Create_ds/chukwa/core/contrib/xtrace/test/src/java/edu/berkeley | Create_ds/chukwa/core/contrib/xtrace/test/src/java/edu/berkeley/chukwa_xtrace/TestXtrAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.chukwa_xtrace;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import edu.berkeley.xtrace.XTraceContext;
import edu.berkeley.xtrace.reporting.*;
public class TestXtrAdaptor extends TestCase {
  /**
   * End-to-end check of XtrAdaptor: starts a Chukwa agent wrapping a
   * TcpReportSource, emits two X-Trace events through the X-Trace client
   * library, and asserts both arrive as chunks carrying the expected
   * Agent/Tag/Label fields in their report text.
   */
  public void testXtrAdaptor() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException{
    Configuration conf = new Configuration();
    File baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    // control.port "0" — presumably lets the agent pick a free port
    // (standard socket convention); confirm against ChukwaAgent.
    conf.set("chukwaAgent.control.port", "0");
    ChukwaAgent agent = new ChukwaAgent(conf);
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    // Route the in-process X-Trace client at the adaptor's TCP report source.
    System.setProperty("xtrace.reporter", "edu.berkeley.xtrace.reporting.TcpReporter");
    System.setProperty("xtrace.tcpdest", "localhost:7831");
    assertEquals(0, agent.adaptorCount());
    agent.processAddCommand("add edu.berkeley.chukwa_xtrace.XtrAdaptor XTrace TcpReportSource 0");
    assertEquals(1, agent.adaptorCount());
    // First event: trace start; second event: the logged label.
    XTraceContext.startTrace("test", "testtrace", "atag");
    XTraceContext.logEvent("test", "label");
    // Chunk 1 should be the start-trace report with the tag.
    Chunk c = chunks.waitForAChunk();
    String report = new String(c.getData());
    assertTrue(report.contains("Agent: test"));
    assertTrue(report.contains("Tag: atag"));
    System.out.println(report);
    System.out.println("-- next chunk --- ");
    // Chunk 2 should be the logEvent report with the label.
    c = chunks.waitForAChunk();
    report = new String(c.getData());
    assertTrue(report.contains("Agent: test"));
    assertTrue(report.contains("Label: label"));
    System.out.println(report);
    System.out.println("OK");
    agent.shutdown();
  }
}
| 8,038 |
0 | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley/chukwa_xtrace/XtrExtract.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.chukwa_xtrace;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.ChukwaArchiveKey;
import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.AbstractProcessor;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
import edu.berkeley.xtrace.reporting.Report;
import edu.berkeley.xtrace.*;
/**
* MapReduce job to process xtrace reports coming out of chukwa demux.
*
* Map phase unwraps the chukwa records, reduce phase does trace reconstruction.
*
* We use task ID as the reduce sort key.
*
*/
public class XtrExtract extends Configured implements Tool {

  /**
   * Hadoop requires a concrete ArrayWritable subclass (fixing the element
   * type) when an ArrayWritable is used as a reduce output value.
   */
  public static class TextArrayWritable extends ArrayWritable {
    public TextArrayWritable() { super(Text.class); }
  }

  /** Report field under which forward (child) pointers are stored. */
  public static final String OUTLINK_FIELD = "__xtr_outlinks";
  static Logger log = Logger.getLogger(XtrExtract.class);

  /**
   * With more than 50,000 reports in a single trace, switch to on-disk sort
   * instead of in-memory topological sort.
   */
  static final int MAX_IN_MEMORY_REPORTS = 50 * 1000;

  /**
   * Unwraps chukwa chunks/records and emits (trace task id, report text),
   * so all reports belonging to one trace meet in a single reduce call.
   */
  public static class MapClass extends Mapper<Object, Object, BytesWritable, Text> {

    public MapClass() {
      System.out.println("starting xtrace map");
    }

    @Override
    protected void map(Object k, Object v,
        Mapper<Object, Object, BytesWritable, Text>.Context context)
        throws IOException, InterruptedException {
      Counter unparseableReport = context.getCounter("app", "unparseable chunks");
      Text t;
      BytesWritable bw;
      if (k instanceof ChukwaArchiveKey && v instanceof ChunkImpl) {
        ChunkImpl value = (ChunkImpl) v;
        Report xtrReport = Report.createFromString(new String(value.getData()));
        try { // we do this to handle the case where not all input is x-trace
          bw = new BytesWritable(xtrReport.getMetadata().getTaskId().get());
        } catch (Exception e) {
          unparseableReport.increment(1);
          return;
        }
        // FIXME: can probably optimize the above lines by doing a search
        // in the raw bytes
        t = new Text(value.getData());
      } else if (k instanceof ChukwaRecordKey && v instanceof ChukwaRecord) {
        ChukwaRecord value = (ChukwaRecord) v;
        Report xtrReport = Report.createFromString(value.getValue(Record.bodyField));
        bw = new BytesWritable(xtrReport.getMetadata().getTaskId().get());
        // FIXME: can probably optimize the above lines by doing a search
        // in the raw bytes
        t = new Text(value.getValue(Record.bodyField));
      } else {
        log.error("unexpected key/value types: " + k.getClass().getCanonicalName()
            + " and " + v.getClass().getCanonicalName());
        return;
      }
      context.write(bw, t);
    }
  }

  /**
   * Rebuilds one causal graph per trace id and writes its reports in
   * topological order.
   */
  public static class Reduce extends
      Reducer<BytesWritable, Text, BytesWritable, ArrayWritable> {

    public Reduce() {}

    /**
     * Note that loading everything into hashtables means we implicitly
     * suppress duplicate-but-identical reports.
     */
    protected void reduce(BytesWritable taskID, Iterable<Text> values,
        Reducer<BytesWritable, Text, BytesWritable, ArrayWritable>.Context context)
        throws IOException, InterruptedException {
      // NOTE(review): getBytes() may return a buffer longer than getLength();
      // assumes IoUtil.bytesToString tolerates the padding -- confirm.
      String taskIDString = IoUtil.bytesToString(taskID.getBytes());
      // in both cases, key is the OpId string
      HashMap<String, Report> reports = new LinkedHashMap<String, Report>();
      Counter reportCounter = context.getCounter("app", "distinct reports");
      Counter edgeCounter = context.getCounter("app", "edges");
      Counter badEdgeCounter = context.getCounter("app", "reference to missing report");
      Counter dupCounter = context.getCounter("app", "duplicate report");
      int dups = 0, numReports = 0;
      for (Text rep_text : values) {
        Report r = Report.createFromString(rep_text.toString());
        numReports++;
        if (numReports < MAX_IN_MEMORY_REPORTS) {
          if (reports.containsKey(r.getMetadata().getOpIdString()))
            dups++;
          reports.put(r.getMetadata().getOpIdString(), r);
        } else if (numReports == MAX_IN_MEMORY_REPORTS) {
          // Bail out: the planned external (on-disk) sort is not implemented,
          // so oversized traces are dropped here, as in the original.
          return;
        }
      }
      reportCounter.increment(reports.size());
      dupCounter.increment(dups);
      CausalGraph g = new CausalGraph(reports);
      PtrReverse reverser = new PtrReverse();
      List<Report> sortedReports = g.topoSort(reverser);
      int sortedLen = sortedReports.size();
      if (sortedLen != reports.size()) {
        if (sortedLen > 0)
          log.warn(taskIDString + ": I only sorted " + sortedLen
              + " items, but expected " + reports.size() + ", is your list cyclic?");
        else
          log.warn(taskIDString + ": every event in graph has a predecessor; perhaps "
              + "the start event isn't in the input set?");
      }
      log.debug(taskIDString + ": " + reverser.edgeCount + " total edges");
      edgeCounter.increment(reverser.edgeCount);
      badEdgeCounter.increment(reverser.badCount);
      Text[] finalOutput = new Text[sortedReports.size()];
      int i = 0;
      for (Report r : sortedReports)
        finalOutput[i++] = new Text(r.toString());
      TextArrayWritable out = new TextArrayWritable();
      out.set(finalOutput);
      context.write(taskID, out);
    } // end reduce
  } // end reduce class

  /**
   * Reverses each report's backward "Edge" pointers into forward
   * OUTLINK_FIELD entries on the parents, counting edges as it goes.
   */
  public static class PtrReverse {
    int badCount = 0;  // cumulative edges referencing a missing report
    int edgeCount = 0; // cumulative edges seen

    /**
     * @return the number of resolvable parents of r (its in-link count)
     */
    public int setupForwardPointers(Map<String, Report> reports, Report r,
        String myOpID) {
      int parentCount = 0;
      int badHere = 0;
      List<String> inLinks = r.get("Edge");
      if (inLinks == null) // e.g. an event carrying no back-pointers
        return 0;
      for (String inLink : inLinks) {
        // sanitize data from old, nonconformant C++ implementation
        if (inLink.contains(","))
          inLink = inLink.substring(0, inLink.indexOf(','));
        Report parent = reports.get(inLink);
        if (parent != null) {
          parent.put(OUTLINK_FIELD, myOpID);
          parentCount++;
        } else { // no match
          if (!inLink.equals("0000000000000000")) {
            log.info("no sign of parent: " + inLink);
            badHere++;
          }
          // else quietly suppress: the all-zero id marks "no parent"
        }
      }
      badCount += badHere;
      // Bug fix: the original added the *cumulative* badCount on every call,
      // inflating edgeCount; only this report's own edges are added now.
      edgeCount += badHere + parentCount;
      return parentCount;
    }
  }

  /**
   * Configures and submits the reconstruction job. Deliberately
   * fire-and-forget: the job is submitted without waiting for completion.
   */
  @Override
  public int run(String[] arg) throws Exception {
    Job extractor = new Job(getConf());
    extractor.setMapperClass(MapClass.class);
    extractor.setReducerClass(Reduce.class);
    extractor.setJobName("x-trace reconstructor");
    extractor.setJarByClass(this.getClass());
    extractor.setMapOutputKeyClass(BytesWritable.class);
    extractor.setMapOutputValueClass(Text.class);
    extractor.setOutputKeyClass(BytesWritable.class);
    extractor.setOutputValueClass(TextArrayWritable.class);
    extractor.setInputFormatClass(SequenceFileInputFormat.class);
    extractor.setOutputFormatClass(SequenceFileOutputFormat.class);
    FileInputFormat.setInputPaths(extractor, new Path(arg[0]));
    FileOutputFormat.setOutputPath(extractor, new Path(arg[1]));
    System.out.println("looks OK. Submitting.");
    extractor.submit();
    // extractor.waitForCompletion(false);
    return 0;
  }

  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new XtrExtract(), args);
    // Bug fix: the result was previously discarded (bare "return;"), so
    // failures looked like success to callers; propagate it as exit status.
    System.exit(res);
  }
}
| 8,039 |
0 | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley/chukwa_xtrace/XtrAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.chukwa_xtrace;
import org.apache.hadoop.chukwa.*;
import org.apache.hadoop.chukwa.datacollection.adaptor.AbstractAdaptor;
import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorException;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
import org.apache.log4j.Logger;
import edu.berkeley.xtrace.server.*;
import edu.berkeley.xtrace.XTraceException;
import java.util.concurrent.*;
/**
* Adaptor that wraps an xtrace report source, so that xtrace messages
* can get picked up by the chukwa agent.
* Takes one mandatory param, the class name of the report source,
* implicitly inside package edu.berkeley.xtrace.server
*
*/
public class XtrAdaptor extends AbstractAdaptor implements Runnable {

  ReportSource rs;                  // the wrapped x-trace report source
  String rsName;                    // its class name, for status reporting
  Thread pump = new Thread(this);   // drains q into the chukwa queue
  Thread reportSourceThread;
  BlockingQueue<String> q = new ArrayBlockingQueue<String>(1000);
  volatile boolean stopping = false;
  long offset = 0;                  // running byte offset across all reports
  static Logger log = Logger.getLogger(XtrAdaptor.class);
  static final String XTR_RS_PACKAGE = "edu.berkeley.xtrace.server.";

  /**
   * Get an xtrace report source, of name classname.
   * @param name class name, relative to package edu.berkeley.xtrace.server
   * @return a report source. Defaults to UdpReportSource on error.
   */
  static ReportSource getXtrReportSource(String name) {
    try {
      Object obj = Class.forName(XTR_RS_PACKAGE + name).newInstance();
      if (ReportSource.class.isInstance(obj)) {
        return (ReportSource) obj;
      } else
        return new UdpReportSource();
    } catch (Exception e) {
      log.warn(e);
      return new UdpReportSource();
    }
  }

  /*
   * This is effectively the main thread associated with the adaptor;
   * however, each ReportSource separately might have a thread.
   */
  public void run() {
    try {
      log.info("starting Pump Thread");
      while (!stopping) {
        String report = q.take();
        log.info("got a report");
        byte[] data = report.getBytes();
        offset += data.length;
        ChunkImpl i = new ChunkImpl(type, "xtrace", offset, data, this);
        dest.add(i);
      }
    } catch (InterruptedException e) {
      // interrupt is the expected shutdown signal; fall through and exit
    }
    log.info("XtrAdaptor stopping");
  }

  @Override
  public void start(long offset) throws AdaptorException {
    this.offset = offset;
    try {
      rs.initialize();
      rs.setReportQueue(q);
      reportSourceThread = new Thread(rs);
      reportSourceThread.start();
      pump.start();
      log.info("starting Report Source");
    } catch (XTraceException e) {
      throw new AdaptorException(e);
    }
  }

  @Override
  public String getCurrentStatus() {
    return type + " " + rsName;
  }

  @Override
  public String parseArgs(String params) {
    rs = getXtrReportSource(params);
    rsName = params;
    return params; // no optional params
  }

  @Override
  public void hardStop() throws AdaptorException {
    shutdown(AdaptorShutdownPolicy.HARD_STOP);
  }

  @Override
  public long shutdown() throws AdaptorException {
    return shutdown(AdaptorShutdownPolicy.GRACEFULLY);
  }

  @Override
  public long shutdown(AdaptorShutdownPolicy shutdownPolicy)
      throws AdaptorException {
    switch (shutdownPolicy) {
    case HARD_STOP:
    case GRACEFULLY:
    case WAIT_TILL_FINISHED:
      rs.shutdown();
      stopping = true;
      // Bug fix: the pump thread is usually parked in q.take() and never
      // observed the stopping flag; interrupt it so the adaptor actually
      // terminates instead of leaking a blocked thread. run() already
      // treats InterruptedException as the shutdown signal.
      pump.interrupt();
    }
    return offset;
  }
}
| 8,040 |
0 | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley | Create_ds/chukwa/core/contrib/xtrace/src/java/edu/berkeley/chukwa_xtrace/CausalGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.chukwa_xtrace;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.File;
import java.util.*;
import org.apache.hadoop.io.Text;
import edu.berkeley.chukwa_xtrace.XtrExtract.PtrReverse;
import edu.berkeley.xtrace.reporting.Report;
/**
* Encapsulates a causal graph; nodes are xtrace reports.
*
*/
public class CausalGraph implements Iterable<Report> {

  /**
   * Returns the distance from report src to dest.
   * Should be positive if dest happened after src.
   * @author asrabkin
   */
  public static class RDistMetric {
    // NOTE(review): the base metric only reads src's timestamp; dest is
    // ignored here but consulted by subclasses.
    long dist(Report src, Report dest) {
      return getTS(src);
    }
  }

  /** Metric that treats edges already on a given path as zero-length. */
  public static class IgnoreReportsMetric extends RDistMetric {
    List<Report> pathToIgnore;

    /**
     * Path should be reverse-ordered, same as longestPath returns
     * @param pathToIgnore
     */
    public IgnoreReportsMetric(List<Report> pathToIgnore) {
      this.pathToIgnore = pathToIgnore;
    }

    @Override
    long dist(Report src, Report dest) {
      for (int i = 0; i < pathToIgnore.size() - 1; ++i) {
        if ((pathToIgnore.get(i + 1) == src) && (pathToIgnore.get(i) == dest))
          return 0;
      }
      return getTS(src);
    }
  }

  // op-id string -> report; LinkedHashMap keeps insertion order.
  private Map<String, Report> reports;
  private Report start;
  private Report end;

  public CausalGraph(Map<String, Report> reports) {
    this.reports = reports;
  }

  public CausalGraph() {
    reports = new LinkedHashMap<String, Report>();
  }

  public Report getStart() {
    return start;
  }

  public void setStart(Report start) {
    this.start = start;
  }

  public Report getEnd() {
    return end;
  }

  public void setEnd(Report end) {
    this.end = end;
  }

  /** Adds a report, keyed by its op id. */
  public void add(Report r) {
    String opID = r.getMetadata().getOpIdString();
    reports.put(opID, r);
  }

  /////// Graph-analytic functions

  /** Returns p plus everything reachable from p over backward "Edge" links (BFS). */
  public Set<Report> predecessors(Report p) {
    HashSet<Report> predecessors = new HashSet<Report>();
    Queue<Report> bfsQ = new LinkedList<Report>();
    bfsQ.add(p);
    while (!bfsQ.isEmpty()) {
      Report r = bfsQ.remove();
      assert r != null;
      predecessors.add(r);
      List<String> backEdges = r.get("Edge");
      if (backEdges != null)
        for (String pred : backEdges) {
          Report pre = reports.get(pred);
          if (pre != null)
            bfsQ.add(pre);
        }
    }
    return predecessors;
  }

  /**
   * Kahn-style topological sort over the causal graph; also populates
   * forward pointers on each report via the supplied reverser.
   * @return the reports in topological order
   */
  public List<Report> topoSort(PtrReverse reverser) {
    HashMap<String, Integer> counts = new HashMap<String, Integer>();
    Queue<Report> zeroInlinkReports = new LinkedList<Report>();
    // FIXME: could usefully compare reports.size() with numReports;
    // that would measure duplicate reports
    // increment link counts for children
    for (Report r : reports.values()) {
      String myOpID = r.getMetadata().getOpIdString();
      int parentCount = reverser.setupForwardPointers(reports, r, myOpID);
      // if there weren't any parents, we can dequeue
      if (parentCount == 0)
        zeroInlinkReports.add(r);
      else
        counts.put(myOpID, parentCount);
    }
    // at this point, we have a map from metadata to report, and also
    // from report op ID to inlink count.
    // next step is to do a topological sort.
    ArrayList<Report> finalOutput = new ArrayList<Report>();
    while (!zeroInlinkReports.isEmpty()) {
      Report r = zeroInlinkReports.remove();
      // Bug fix: the dequeued report was never appended to finalOutput,
      // so topoSort always returned an empty list.
      finalOutput.add(r);
      List<String> outLinks = r.get(XtrExtract.OUTLINK_FIELD);
      if (outLinks != null) {
        for (String outLink : outLinks) {
          Integer oldCount = counts.get(outLink);
          if (oldCount == null) {
            oldCount = 0; // FIXME: can this happen?
            // Means we have a forward-edge to a node which we haven't
            // ever set up a link count for
          }
          if (oldCount == 1) {
            zeroInlinkReports.add(reports.get(outLink));
          }
          counts.put(outLink, oldCount - 1);
        }
      }
    }
    return finalOutput;
  }

  /////// Performance-analytic functions

  /** Timestamp of r in milliseconds, or Long.MIN_VALUE when absent. */
  private static final long getTS(Report r) {
    List<String> staTL = r.get("Timestamp");
    if (staTL != null && staTL.size() > 0) {
      double t = Double.parseDouble(staTL.get(0));
      return Math.round(1000 * t);
    }
    return Long.MIN_VALUE;
  }

  /**
   * Returns the longest path ending at endID
   *
   * Path is in reversed order, starting with endID and going forwards.
   *
   * @param endID
   * @return
   */
  public List<Report> longestPath(String endID) {
    return longestPath(new RDistMetric(), endID);
  }

  public List<Report> longestPath(RDistMetric metric, String endID) {
    // Walk backwards from the end report; at each step follow the
    // predecessor with the latest finish time under the given metric.
    ArrayList<Report> backpath = new ArrayList<Report>();
    Report cur = reports.get(endID);
    do {
      backpath.add(cur);
      Report limitingPred = null;
      long latestPrereq = Long.MIN_VALUE;
      for (String predID : cur.get("Edge")) {
        Report pred = reports.get(predID);
        if (pred == null) // edge to a report we never saw; skip it
          continue;
        long finishTime = metric.dist(pred, cur);
        if (finishTime > latestPrereq) {
          latestPrereq = finishTime;
          limitingPred = pred;
        }
      }
      // Bug fix: the original reassigned cur INSIDE the edge loop, so
      // later edges were measured against the wrong report; step only
      // after all edges of the current report have been scanned.
      cur = limitingPred;
    } while (cur != null && cur.get("Edge") != null);
    return backpath;
  }

  /**
   * Sums the time spent between consecutive path entries on the same host.
   * Expect path to be sorted backwards.
   * @param path
   * @return total same-host time in milliseconds
   */
  public static long onHostTimes(List<Report> path) {
    long time = 0;
    for (int i = 0; i < path.size() - 1; ++i) {
      Report src = path.get(i + 1);
      Report dest = path.get(i);
      List<String> srcHost = src.get("Host"), destHost = dest.get("Host");
      if (srcHost != null && srcHost.size() > 0 && destHost != null && destHost.size() > 0) {
        if (srcHost.get(0).equals(destHost.get(0))) {
          long src_ts = getTS(src);
          long dest_ts = getTS(dest);
          time += (dest_ts - src_ts);
          // Debug trace retained from the original implementation.
          System.out.println("adding segment of length " + (dest_ts - src_ts));
        }
      }
    }
    return time;
  }

  //// Glue to make CausalGraph look like a pseudocollection

  public Iterator<Report> iterator() {
    return reports.values().iterator();
  }

  public int size() {
    return reports.size();
  }

  public Collection<Report> getReports() {
    return reports.values();
  }

  ////// IO utils

  /** Loads all reports from a text file of blank-line-separated reports. */
  public void slurpTextFile(File f) throws IOException {
    BufferedReader br = new BufferedReader(new FileReader(f));
    try {
      Report rep;
      while ((rep = getNextReportFromReader(br)) != null) {
        add(rep);
      }
    } finally {
      br.close(); // bug fix: the reader leaked when parsing threw
    }
  }

  /** Reads one blank-line-delimited report; returns null at end of stream. */
  private Report getNextReportFromReader(BufferedReader br) throws IOException {
    StringBuilder sb = new StringBuilder();
    String s;
    while ((s = br.readLine()) != null) {
      if (s.length() > 1) {
        sb.append(s);
        sb.append("\n");
      } else // stop on blank line, if it isn't the first one we see
        if (sb.length() > 1)
          break;
    }
    if (sb.length() < 1)
      return null;
    return Report.createFromString(sb.toString());
  }
}
| 8,041 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/TestChunkBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import junit.framework.TestCase;
public class TestChunkBuilder extends TestCase {

  /**
   * Builds a chunk from three 3-byte records and checks that the record
   * offsets and the sequence id match the concatenated payload.
   */
  public void testChunkBuilder() {
    ChunkBuilder builder = new ChunkBuilder();
    String[] records = {"foo", "bar", "baz"};
    for (String rec : records) {
      builder.addRecord(rec.getBytes());
    }
    Chunk built = builder.getChunk();
    int[] offsets = built.getRecordOffsets();
    assertEquals(3, offsets.length);
    // 3 records x 3 bytes = 9 bytes total
    assertEquals(9, built.getSeqID());
    assertEquals(2, offsets[0]);
    assertEquals(5, offsets[1]);
    assertEquals(8, offsets[2]);
  }
}
| 8,042 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
public class ChunkImplTest extends TestCase {

  /**
   * Serializes a chunk and verifies that the leading int of the wire
   * format equals the current protocol version.
   */
  public void testVersion() {
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord("foo".getBytes());
    cb.addRecord("bar".getBytes());
    cb.addRecord("baz".getBytes());
    Chunk c = cb.getChunk();
    DataOutputBuffer ob = new DataOutputBuffer(c.getSerializedSizeEstimate());
    try {
      c.write(ob);
      DataInputBuffer ib = new DataInputBuffer();
      ib.reset(ob.getData(), c.getSerializedSizeEstimate());
      // The serialized form starts with the protocol version int.
      int version = ib.readInt();
      ib.close();
      assertEquals(version, ChunkImpl.PROTOCOL_VERSION);
    } catch (IOException e) {
      e.printStackTrace();
      // Bug fix: message typo, was "Should nor raise any exception".
      fail("Should not raise any exception");
    }
  }

  /** Tags are absent until added; addTag parses the key="value" form. */
  public void testTag() {
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord("foo".getBytes());
    cb.addRecord("bar".getBytes());
    cb.addRecord("baz".getBytes());
    Chunk c = cb.getChunk();
    assertNull(c.getTag("foo"));
    c.addTag("foo=\"bar\"");
    assertEquals("bar", c.getTag("foo"));
  }
}
| 8,043 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/DatabaseSetup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import java.io.*;
import java.sql.*;
import java.util.*;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.database.TableCreator;
public class DatabaseSetup {
  // Retention windows, in days, used when pre-creating partitioned tables.
  public long[] timeWindow = {7, 30, 91, 365, 3650};
  public String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
  public String cluster = "demo";
  // Reference timestamp; all partitions are created relative to "now".
  public long current = Calendar.getInstance().getTimeInMillis();

  /**
   * Creates the demo-cluster schema by executing every statement from
   * database_create_tables.sql, then pre-creates the time-partitioned
   * tables for each retention window in {@link #timeWindow}.
   * @throws Exception if any DDL statement fails
   */
  public void setUpDatabase() throws Exception {
    System.setProperty("CLUSTER","demo");
    DatabaseWriter db = new DatabaseWriter(cluster);
    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
        + File.separator + "database_create_tables.sql");
    String buffer = readFile(aFile);
    // Bug fix: the original local was named "tables" and shadowed the
    // field of the same name above.
    String[] createStatements = buffer.split(";");
    for (String statement : createStatements) {
      // Skip fragments too short to be real SQL (e.g. trailing whitespace).
      if (statement.length() > 5) {
        db.execute(statement);
      }
    }
    db.close();
    for (int i = 0; i < timeWindow.length; i++) {
      TableCreator tc = new TableCreator();
      long start = current;
      // days * 1440 minutes * 60 seconds * 1000 ms
      long end = current + (timeWindow[i] * 1440 * 60 * 1000);
      tc.createTables(start, end);
    }
  }

  /** Drops every table in the database; best-effort, errors are ignored. */
  public void tearDownDatabase() {
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("show tables");
      // Collect names first: dropping while iterating the result set
      // would invalidate it.
      ArrayList<String> list = new ArrayList<String>();
      while (rs.next()) {
        String table = rs.getString(1);
        list.add(table);
      }
      for (String table : list) {
        db.execute("drop table " + table);
      }
    } catch (Throwable ex) {
      // deliberately swallowed: teardown is best-effort cleanup
    } finally {
      if (db != null) {
        db.close();
      }
    }
  }

  /**
   * Reads a whole text file, normalizing line breaks to the platform
   * separator. Returns whatever was read before any IOException.
   */
  public String readFile(File aFile) {
    StringBuffer contents = new StringBuffer();
    try {
      BufferedReader input = new BufferedReader(new FileReader(aFile));
      try {
        String line = null; // not declared within while loop
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }
}
| 8,044 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.TestCase;
import java.util.Calendar;
import org.apache.hadoop.chukwa.database.Macro;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.database.Aggregator;
import org.apache.hadoop.chukwa.database.TableCreator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * End-to-end test for {@link DataExpiration}: builds the Chukwa demo schema,
 * creates partitioned tables for several retention windows, then drops the
 * partitions that fall outside a shifted time range and verifies a dropped
 * partition is no longer queryable.
 */
public class TestDatabaseDataExpiration extends TestCase {

  /** Retention windows, in days, for which partitioned tables are created. */
  long[] timeWindow = {7, 30, 91, 365, 3650};
  /** Base table names defined by database_create_tables.sql. */
  String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
  String cluster = "demo";
  /** Reference time used for partition naming, captured once per test run. */
  long current = Calendar.getInstance().getTimeInMillis();

  /**
   * Creates the base schema from CHUKWA_CONF_DIR/database_create_tables.sql,
   * then one set of partitioned tables per retention window.
   */
  public void setUp() throws Exception {
    System.setProperty("CLUSTER","demo");
    DatabaseWriter db = new DatabaseWriter(cluster);
    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
        + File.separator + "database_create_tables.sql");
    String buffer = readFile(aFile);
    // renamed from "tables" to avoid shadowing the field of the same name
    String[] createStatements = buffer.split(";");
    for(String statement : createStatements) {
      if(statement.length()>5) { // skip empty / whitespace-only fragments
        db.execute(statement);
      }
    }
    db.close();
    for(int i=0;i<timeWindow.length;i++) {
      TableCreator tc = new TableCreator();
      long start = current;
      // timeWindow[i] is a long, so the arithmetic stays in 64 bits
      long end = current + (timeWindow[i]*1440*60*1000);
      tc.createTables(start, end);
    }
  }

  /** Drops every table created by setUp; errors are ignored (best effort). */
  public void tearDown() {
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("show tables");
      ArrayList<String> list = new ArrayList<String>();
      while(rs.next()) {
        list.add(rs.getString(1));
      }
      for(String table : list) {
        db.execute("drop table "+table);
      }
    } catch(Throwable ex) {
      // best-effort cleanup; a missing table is not a test failure
    } finally {
      if(db!=null) {
        db.close();
      }
    }
  }

  /**
   * Fails the test if the current partition of the given table cannot be
   * selected from and fully iterated.
   *
   * @param table base table name; expanded to a partition name via Macro
   */
  public void verifyTable(String table) {
    ChukwaConfiguration cc = new ChukwaConfiguration();
    String query = "select * from ["+table+"];";
    Macro mp = new Macro(current,query);
    query = mp.toString();
    try {
      DatabaseWriter db = new DatabaseWriter(cluster);
      ResultSet rs = db.query(query);
      while(rs.next()) {
        // touch the first column of every row to force a full fetch
        rs.getString(1);
      }
      db.close();
    } catch(SQLException ex) {
      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
    }
  }

  /**
   * Reads a text file into a string, one platform line separator after each
   * line. On I/O error the partial (possibly empty) content is returned.
   *
   * @param aFile file to read with the platform default charset
   * @return file contents, or whatever was read before an I/O error
   */
  public String readFile(File aFile) {
    StringBuilder contents = new StringBuilder();
    String separator = System.getProperty("line.separator");
    try {
      BufferedReader input = new BufferedReader(new FileReader(aFile));
      try {
        String line;
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(separator);
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /**
   * Drops partitions in windows starting one year past {@link #current} and
   * then queries the system_metrics partition; a SQLException there is the
   * expected outcome once the partition is gone.
   */
  public void testDataExpiration() {
    for(int i=0;i<timeWindow.length;i++) {
      // 365L forces long arithmetic; the original int expression
      // 365*1440*60*1000 overflowed Integer.MAX_VALUE and shifted the
      // window to the wrong instant
      long start = current + (365L*1440*60*1000);
      long end = start + (timeWindow[i]*1440*60*1000);
      try {
        DataExpiration de = new DataExpiration();
        de.dropTables(start, end);
      } catch(Throwable ex) {
        fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
      }
      assertTrue("DataExpiration executed.", true);
      DatabaseWriter db = null;
      try {
        db = new DatabaseWriter(cluster);
        String query = "select * from [system_metrics];";
        Macro mp = new Macro(current,query);
        query = mp.toString();
        ResultSet rs = db.query(query);
      } catch(SQLException ex) {
        // querying a dropped partition is expected to fail
        assertTrue("Table is not suppose to exist.",true);
      } finally {
        // close on both paths; the original leaked the connection on success
        if(db!=null) {
          db.close();
        }
      }
    }
  }
}
| 8,045 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.TestCase;
import java.util.Calendar;
import org.apache.hadoop.chukwa.database.Macro;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.database.Aggregator;
import org.apache.hadoop.chukwa.database.TableCreator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * Verifies {@link TableCreator}: after creating the base schema and one set
 * of partitioned tables per retention window, every expected table must
 * exist and be empty.
 */
public class TestDatabaseTableCreator extends TestCase {

  /** Retention windows, in days, for which partitioned tables are created. */
  long[] timeWindow = {7, 30, 91, 365, 3650};
  /** Base table names expected after schema creation. */
  String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
  String cluster = "demo";
  /** Reference time used for partition naming, captured once per test run. */
  long current = Calendar.getInstance().getTimeInMillis();

  /**
   * Creates the base schema from CHUKWA_CONF_DIR/database_create_tables.sql,
   * then one set of partitioned tables per retention window.
   */
  public void setUp() {
    System.setProperty("CLUSTER","demo");
    DatabaseWriter db = new DatabaseWriter(cluster);
    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
        + File.separator + "database_create_tables.sql");
    String buffer = readFile(aFile);
    // renamed from "tables" to avoid shadowing the field of the same name
    String[] createStatements = buffer.split(";");
    for(String statement : createStatements) {
      if(statement.length()>5) { // skip empty / whitespace-only fragments
        try {
          db.execute(statement);
        } catch (Exception e) {
          fail("Fail to retrieve meta data for database table:"+statement);
        }
      }
    }
    db.close();
    for(int i=0;i<timeWindow.length;i++) {
      TableCreator tc = new TableCreator();
      long start = current;
      // timeWindow[i] is a long, so the arithmetic stays in 64 bits
      long end = current + (timeWindow[i]*1440*60*1000);
      try {
        tc.createTables(start, end);
      } catch (Exception e) {
        e.printStackTrace();
        fail("Fail to create database tables.");
      }
    }
  }

  /** Drops every table created by setUp; errors are ignored (best effort). */
  public void tearDown() {
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("show tables");
      ArrayList<String> list = new ArrayList<String>();
      while(rs.next()) {
        list.add(rs.getString(1));
      }
      for(String table : list) {
        db.execute("drop table "+table);
      }
    } catch(Throwable ex) {
      // best-effort cleanup; a missing table is not a test failure
    } finally {
      if(db!=null) {
        db.close();
      }
    }
  }

  /**
   * Fails the test if the current partition of the given table cannot be
   * selected from and fully iterated.
   *
   * @param table base table name; expanded to a partition name via Macro
   */
  public void verifyTable(String table) {
    ChukwaConfiguration cc = new ChukwaConfiguration();
    String query = "select * from ["+table+"];";
    Macro mp = new Macro(current,query);
    query = mp.toString();
    try {
      DatabaseWriter db = new DatabaseWriter(cluster);
      ResultSet rs = db.query(query);
      while(rs.next()) {
        // touch the first column of every row to force a full fetch
        rs.getString(1);
      }
      db.close();
    } catch(SQLException ex) {
      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
    }
  }

  /**
   * Reads a text file into a string, one platform line separator after each
   * line. On I/O error the partial (possibly empty) content is returned.
   *
   * @param aFile file to read with the platform default charset
   * @return file contents, or whatever was read before an I/O error
   */
  public String readFile(File aFile) {
    StringBuilder contents = new StringBuilder();
    String separator = System.getProperty("line.separator");
    try {
      BufferedReader input = new BufferedReader(new FileReader(aFile));
      try {
        String line;
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(separator);
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /** Every freshly created partition must exist and contain zero rows. */
  public void testTableCreator() {
    for(int i=0;i<timeWindow.length;i++) {
      try {
        DatabaseWriter db = new DatabaseWriter(cluster);
        for(String table : tables) {
          String query = "select * from ["+table+"];";
          Macro mp = new Macro(current,query);
          query = mp.toString();
          ResultSet rs = db.query(query);
          // cursor position after last() equals the row count
          rs.last();
          int count = rs.getRow();
          assertTrue("Table should exist and return empty result.", count==0);
        }
        db.close();
      } catch(SQLException ex) {
        fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
      }
    }
  }

  /** Sanity check: every base table is queryable. */
  public void testTables() {
    for(String table : tables) {
      verifyTable(table);
      assertTrue("Table verified: " + table, true);
    }
  }
}
| 8,046 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.TestCase;
import java.util.Calendar;
import org.apache.hadoop.chukwa.database.Macro;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.database.TableCreator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
/**
 * Verifies DatabaseWriter prepared statements: a row inserted into
 * system_metrics must be retrievable through a parameterized query binding
 * the same timestamp, host and metric value.
 */
public class TestDatabasePrepareStatement extends TestCase {

  /** Retention windows, in days, for which partitioned tables are created. */
  long[] timeWindow = {7, 30, 91, 365, 3650};
  String cluster = "demo";
  /** Reference time used for partition naming, captured once per test run. */
  long current = Calendar.getInstance().getTimeInMillis();

  /**
   * Creates the base schema from CHUKWA_CONF_DIR/database_create_tables.sql,
   * then one set of partitioned tables per retention window.
   */
  public void setUp() {
    System.setProperty("CLUSTER","demo");
    DatabaseWriter db = new DatabaseWriter(cluster);
    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
        + File.separator + "database_create_tables.sql");
    String buffer = readFile(aFile);
    // local renamed from "tables" for clarity (C-style array decl removed)
    String[] createStatements = buffer.split(";");
    for(String statement : createStatements) {
      if(statement.length()>5) { // skip empty / whitespace-only fragments
        try {
          db.execute(statement);
        } catch (Exception e) {
          fail("Fail to retrieve meta data from table:"+statement);
        }
      }
    }
    db.close();
    for(int i=0;i<timeWindow.length;i++) {
      TableCreator tc = new TableCreator();
      long start = current;
      // timeWindow[i] is a long, so the arithmetic stays in 64 bits
      long end = current + (timeWindow[i]*1440*60*1000);
      try {
        tc.createTables(start, end);
      } catch (Exception e) {
        fail("Fail to create database tables.");
      }
    }
  }

  /** Drops every table created by setUp; errors are ignored (best effort). */
  public void tearDown() {
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("show tables");
      ArrayList<String> list = new ArrayList<String>();
      while(rs.next()) {
        list.add(rs.getString(1));
      }
      for(String table : list) {
        db.execute("drop table "+table);
      }
    } catch(Throwable ex) {
      // best-effort cleanup; a missing table is not a test failure
    } finally {
      if(db!=null) {
        db.close();
      }
    }
  }

  /**
   * Reads a text file into a string, one platform line separator after each
   * line. On I/O error the partial (possibly empty) content is returned.
   *
   * @param aFile file to read with the platform default charset
   * @return file contents, or whatever was read before an I/O error
   */
  public String readFile(File aFile) {
    StringBuilder contents = new StringBuilder();
    String separator = System.getProperty("line.separator");
    try {
      BufferedReader input = new BufferedReader(new FileReader(aFile));
      try {
        String line;
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(separator);
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /**
   * Inserts one system_metrics row, reads it back through a prepared
   * statement, and checks that the bound host and cpu_user_pcnt values
   * round-trip intact.
   */
  public void testPrepareStatement() {
    DatabaseWriter db = new DatabaseWriter(cluster);
    Date today = new Date();
    // local renamed from "current" to stop shadowing the field of that name
    long now = today.getTime();
    Timestamp timestamp = new Timestamp(now);
    String hostname="chukwa.example.org";
    String query = "insert into [system_metrics] set timestamp='"+timestamp.toString()+"', host='"+hostname+"', cpu_user_pcnt=100;";
    Macro mp = new Macro(now, now, query);
    query = mp.toString();
    try {
      db.execute(query);
      query = "select timestamp,host,cpu_user_pcnt from [system_metrics] where timestamp=? and host=? and cpu_user_pcnt=?;";
      mp = new Macro(now, now, query);
      query = mp.toString();
      ArrayList<Object> parms = new ArrayList<Object>();
      parms.add(now);
      parms.add(hostname);
      parms.add(100);
      ResultSet rs = db.query(query, parms);
      while(rs.next()) {
        // assertEquals replaces the original interned == reference
        // comparisons and reports the actual value on failure
        assertEquals(hostname, rs.getString(2));
        assertEquals(100, rs.getInt(3));
      }
      db.close();
    } catch(SQLException ex) {
      fail("Fail to run SQL statement:"+ExceptionUtil.getStackTrace(ex));
    }
  }
}
| 8,047 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestMacro.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.TestCase;
import java.util.TreeMap;
import java.util.ArrayList;
import java.util.Date;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
/**
 * Unit tests for Macro time-token expansion. These are pure string and time
 * computations; no database connection is required.
 */
public class TestMacro extends TestCase {

  /**
   * [past_5_minutes], [past_hour] and [start] must expand to timestamps that
   * are no later than the reference time handed to the Macro.
   */
  public void testPastXIntervals() {
    Macro m = new Macro(1234567890000L, "select '[past_5_minutes]';");
    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    long time = 0;
    Date result = null;
    // parse the timestamp that follows "select '" (hence offset 8)
    result = format.parse(m.toString(), new ParsePosition(8));
    time = result.getTime()+300000L; // add back the 5-minute lookback
    assertTrue(time<=1234567890000L);
    m = new Macro(1234567890000L, "select '[past_hour]';");
    result = format.parse(m.toString(), new ParsePosition(8));
    time = result.getTime()+3600000L; // add back the 1-hour lookback
    assertTrue(time<=1234567890000L);
    m = new Macro(1234567890000L, "select '[start]';");
    result = format.parse(m.toString(), new ParsePosition(8));
    time = result.getTime();
    // [start] expands to exactly the reference time
    assertEquals(1234567890000L, time);
  }

  /**
   * Each granularity token must expand to the partition covering the
   * reference time 1234567890000L. assertEquals replaces the original
   * interned == comparisons so a failing expansion reports the actual
   * string instead of a bare boolean.
   */
  public void testPartitions() {
    Macro m = new Macro(1234567890000L, "select from [system_metrics_week];");
    assertEquals("select from system_metrics_2041_week;", m.toString());
    m = new Macro(1234567890000L, "select from [system_metrics_month];");
    assertEquals("select from system_metrics_476_month;", m.toString());
    m = new Macro(1234567890000L, "select from [system_metrics_quarter];");
    assertEquals("select from system_metrics_156_quarter;", m.toString());
    m = new Macro(1234567890000L, "select from [system_metrics_year];");
    assertEquals("select from system_metrics_39_year;", m.toString());
    m = new Macro(1234567890000L, "select from [system_metrics_decade];");
    assertEquals("select from system_metrics_3_decade;", m.toString());
  }
}
| 8,048 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.TestCase;
import java.util.Calendar;
import org.apache.hadoop.chukwa.database.Macro;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.database.Aggregator;
import org.apache.hadoop.chukwa.database.TableCreator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * Runs every query in CHUKWA_CONF_DIR/aggregator.sql against a freshly
 * created demo schema and fails if any aggregation query throws.
 */
public class TestDatabaseAggregator extends TestCase {
  // shared helper that creates and destroys the demo database schema
  public DatabaseSetup dbSetup = new DatabaseSetup();
  public void setUp() {
    try{
      dbSetup.setUpDatabase();
    } catch (Exception e) {
      fail(ExceptionUtil.getStackTrace(e));
    }
  }
  public void tearDown() {
    dbSetup.tearDownDatabase();
  }
  /**
   * Fails the test if the current partition of the given table cannot be
   * selected from and fully iterated.
   *
   * @param table base table name; expanded to a partition name via Macro
   */
  public void verifyTable(String table) {
    // NOTE(review): cc is never read afterwards; presumably the constructor
    // loads Chukwa configuration as a side effect — confirm before removing
    ChukwaConfiguration cc = new ChukwaConfiguration();
    String query = "select * from ["+table+"];";
    Macro mp = new Macro(dbSetup.current,query);
    query = mp.toString();
    try {
      DatabaseWriter db = new DatabaseWriter(dbSetup.cluster);
      ResultSet rs = db.query(query);
      while(rs.next()) {
        // touch the first column of each row to force a full fetch;
        // the value itself is not checked
        int i = 1;
        String value = rs.getString(i);
      }
      db.close();
    } catch(SQLException ex) {
      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
    }
  }
  /**
   * Feeds each line of aggregator.sql to Aggregator.process(). Lines
   * containing '#' anywhere are treated as comments and skipped; any
   * exception from a query fails the test.
   */
  public void testAggregator() {
    Aggregator dba = new Aggregator();
    DatabaseWriter db = new DatabaseWriter(dbSetup.cluster);
    dba.setWriter(db);
    String queries = Aggregator.getContents(new File(System
        .getenv("CHUKWA_CONF_DIR")
        + File.separator + "aggregator.sql"));
    String[] query = queries.split("\n");
    for (int i = 0; i < query.length; i++) {
      if(query[i].indexOf("#")==-1) {
        try {
          dba.process(query[i]);
          assertTrue("Completed query: "+query[i],true);
        } catch(Throwable ex) {
          fail("Exception: "+ExceptionUtil.getStackTrace(ex));
        }
      }
    }
    db.close();
  }
}
| 8,049 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/database/TestDatabaseWebJson.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.database;
import junit.framework.*;
import java.util.*;
import java.text.*;
import java.io.*;
import java.net.URL;
import java.sql.*;
import org.apache.commons.httpclient.*;
import org.apache.commons.httpclient.methods.*;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.mortbay.jetty.Server;
import org.mortbay.xml.XmlConfiguration;
import org.apache.hadoop.chukwa.util.*;
/*
 * Compares the JSON emitted by the HICC get_db_data.jsp endpoint with the
 * rows stored in the database for a fixed set of Chukwa tables. An embedded
 * Jetty server hosts the JSP for the duration of each test.
 */
public class TestDatabaseWebJson extends TestCase {
  // table name -> list of primary-key column names; raw HashMap kept to
  // match the original API (values are ArrayList<String>)
  protected HashMap testTables;
  protected String data_url="http://localhost:8080/hicc/jsp/get_db_data.jsp";
  private Server server = null;
  private Log log = LogFactory.getLog(TestDatabaseWebJson.class);
  /*
   * Registers the tables under test with their primary keys, then starts an
   * embedded Jetty server configured from /WEB-INF/jetty.xml so the JSP
   * endpoint is reachable on localhost.
   */
  protected void setUp() {
    testTables = new HashMap();
    ArrayList<String> keys = new ArrayList<String>();
    keys.add("timestamp");
    keys.add("mount");
    testTables.put("cluster_disk", keys);
    keys = new ArrayList<String>();
    keys.add("timestamp");
    testTables.put("cluster_system_metrics", keys);
    keys = new ArrayList<String>();
    keys.add("timestamp");
    keys.add("host");
    keys.add("mount");
    testTables.put("disk", keys);
    keys = new ArrayList<String>();
    keys.add("job_id");
    testTables.put("mr_job", keys);
    keys = new ArrayList<String>();
    keys.add("task_id");
    testTables.put("mr_task", keys);
    keys = new ArrayList<String>();
    keys.add("timestamp");
    testTables.put("system_metrics", keys);
    URL serverConf = TestDatabaseWebJson.class
        .getResource("/WEB-INF/jetty.xml");
    server = new Server();
    XmlConfiguration configuration;
    try {
      configuration = new XmlConfiguration(serverConf);
      configuration.configure(server);
      server.start();
      server.setStopAtShutdown(true);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }
  // Stops the Jetty server; the sleep gives it time to release its port
  // before the next test's setUp starts a fresh instance.
  protected void tearDown() {
    try {
      server.stop();
      Thread.sleep(2000);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }
  /*
   * similar to PHP join function to join a string array to one single string.
   * for example, if the token is "," and the strings array is ('a','b','c')
   * the final result will be "a,b,c"
   *
   * Precondition: strings must be non-empty (the final get() would throw
   * otherwise; all callers here pass at least one element).
   *
   * @param token The separator which join the strings together
   * @param strings list of strings which we want to merge together
   * @return final string which merge together.
   */
  protected static String join( String token, ArrayList<String> strings )
  {
    StringBuffer sb = new StringBuffer();
    for( int x = 0; x < ( strings.size() - 1 ); x++ ) {
      sb.append( strings.get(x) );
      sb.append( token );
    }
    sb.append( strings.get( strings.size() - 1 ) );
    return( sb.toString() );
  }
  /*
   * format the query string for the database testing. Select the
   * primary key value from the json object.
   *
   * @param tableName The name of the database table to build the query on.
   * @param jo JSON object which contain the primary key of the row which we
   * want to test.
   * @return the actual database query string for the row selection.
   */
  protected String getDatabaseQuery(String tableName, JSONObject jo) {
    ArrayList<String> keys = (ArrayList<String>)testTables.get(tableName);
    ArrayList<String> criterias = new ArrayList<String>();
    Iterator i = keys.iterator();
    while (i.hasNext()) {
      String key=(String)i.next();
      try {
        String value=(String)jo.get(key);
        // JSON carries timestamps as epoch millis; the DB stores them as
        // formatted SQL timestamps, so convert before comparing
        if (key.compareToIgnoreCase("timestamp")==0) {
          value=DatabaseWriter.formatTimeStamp(Long.parseLong(value));
        }
        String c=key+"=\""+value+"\"";
        criterias.add(c);
      } catch (Exception e) {
        System.out.println("Cannot get value for key: "+key);
      }
    }
    String criteria=join(" and ", criterias);
    String query="select * from ["+tableName+"] where "+criteria;
    return query;
  }
  /*
   * the function will do the actual table verification. If will first
   * get the result from the website JSON object. Then it will compare the
   * JSON object with the values in the database, field by field, for every
   * row returned within the last 30 minutes.
   *
   * @param table name of the table to be verified.
   */
  protected void verifyTableData(String table) {
    Calendar startCalendar = new GregorianCalendar();
    // startCalendar.add(Calendar.HOUR_OF_DAY,-1);
    startCalendar.add(Calendar.MINUTE, -30);
    long startTime=startCalendar.getTime().getTime();
    Calendar endCalendar = new GregorianCalendar();
    // endCalendar.add(Calendar.HOUR_OF_DAY,1);
    long endTime=endCalendar.getTime().getTime();
    String url=data_url+"?table="+table+"&start="+startTime+"&end="+endTime;
    System.out.println(url);
    HttpClient client = new HttpClient();
    GetMethod method = new GetMethod(url);
    try {
      /*
       * 1. get the json result for the specified table
       */
      int statusCode = client.executeMethod(method);
      if (statusCode != HttpStatus.SC_OK) {
        System.out.println("Http Error: "+method.getStatusLine());
      }
      BufferedReader reader = new BufferedReader(new InputStreamReader( method.getResponseBodyAsStream(),
                                                                       method.getResponseCharSet()));
      String json_str="";
      String str;
      while ((str = reader.readLine()) != null) {
        json_str+=str;
      }
      /*
       * 2. convert the json string to individual field and compare it
       * with the database
       */
      String cluster = "demo";
      DatabaseWriter db = new DatabaseWriter(cluster);
      JSONArray json_array=(JSONArray)JSONValue.parse(json_str);
      for (int i=0; i < json_array.size(); i++) {
        JSONObject row_obj=(JSONObject) json_array.get(i);
        // get the database row
        String queryString=getDatabaseQuery(table, row_obj);
        Macro m=new Macro(startTime, endTime, queryString);
        ResultSet rs = db.query(m.toString());
        // move to the first record
        rs.next();
        ResultSetMetaData md=rs.getMetaData();
        Iterator names=row_obj.keySet().iterator();
        while (names.hasNext()) {
          String name=(String)names.next();
          String jsonValue=(String)row_obj.get(name);
          String dbValue=rs.getString(name);
          int dbCol=rs.findColumn(name);
          int dbType=md.getColumnType(dbCol);
          if (dbType==93) {
            // 93 == java.sql.Types.TIMESTAMP: compare as epoch millis,
            // not as the driver's string rendering
            dbValue=Long.toString(rs.getTimestamp(name).getTime());
          }
          // System.out.println("compare "+name+":"+dbType+":"+dbValue+":"+jsonValue);
          assertEquals(dbValue, jsonValue);
        }
      }
      db.close();
    } catch (SQLException e) {
      System.out.println("Exception: "+e.toString()+":"+e.getMessage());
      System.out.println("Exception: "+e.toString()+":"+e.getSQLState());
      System.out.println("Exception: "+e.toString()+":"+e.getErrorCode());
      fail("SQL Error:"+ExceptionUtil.getStackTrace(e));
    } catch (Exception eOther) {
      System.out.println("Other Exception: "+eOther.toString());
      eOther.printStackTrace();
      fail("Error:"+ExceptionUtil.getStackTrace(eOther));
    } finally {
    }
  }
  /*
   * Perform the actual testing. It will get the result from the web URL first.
   * Then it will get the result from the database and compare it.
   */
  public void testJsonResult() {
    Iterator i=testTables.keySet().iterator();
    while (i.hasNext()) {
      String tableName=(String)i.next();
      verifyTableData(tableName);
    }
  }
}
| 8,050 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/TestOffsetStatsManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection;
import junit.framework.TestCase;
/**
 * Verifies that the stats manager calculates rates properly. This test takes
 * at least 13 seconds to run because it records real timestamped data points
 * one second apart.
 */
public class TestOffsetStatsManager extends TestCase {

  // same package as OffsetStatsManager, so the fully-qualified name the
  // original used was redundant
  OffsetStatsManager<DummyKey> statsManager = null;
  // assigned in setUp(); the original also initialized it here redundantly
  DummyKey dummyKey = null;

  protected void setUp() throws Exception {
    statsManager = new OffsetStatsManager<DummyKey>();
    dummyKey = new DummyKey();
  }

  /** Records ~1000 bytes/sec for 5 seconds and expects ~1 kB/s average. */
  public void testCalcAverageRate() throws InterruptedException {
    // add roughly 1000 bytes per second for about 5 seconds
    for (int i = 0; i < 5; i++) {
      statsManager.addOffsetDataPoint(dummyKey, 1000 * i, System.currentTimeMillis());
      Thread.sleep(1000);
    }
    // calculate 5 second average
    double rate = statsManager.calcAverageRate(dummyKey, 5);
    assertTrue("Invalid average, expected about 1 kbyte/sec, found " + rate,
            Math.abs(rate / 1000) <= 1);
  }

  /** Data points stamped 3 seconds in the past must be treated as stale. */
  public void testCalcAverageRateStaleData() throws InterruptedException {
    // add offsets for about 5 seconds, but timestamp them 3 seconds ago to
    // make them stale
    for (int i = 0; i < 5; i++) {
      statsManager.addOffsetDataPoint(dummyKey, 1000 * i, System.currentTimeMillis() - 3000L);
      Thread.sleep(1000);
    }
    // calculate 5 second average
    double rate = statsManager.calcAverageRate(dummyKey, 5);
    assertEquals("Should have gotten a stale data response", -1.0, rate);
  }

  /** Fewer seconds of data than the requested window must yield -1.0. */
  public void testCalcAverageRateNotEnoughData() throws InterruptedException {
    // add offsets for about 3 seconds
    for (int i = 0; i < 3; i++) {
      statsManager.addOffsetDataPoint(dummyKey, 1000 * i, System.currentTimeMillis());
      Thread.sleep(1000);
    }
    // calculate 5 second average
    double rate = statsManager.calcAverageRate(dummyKey, 5);
    assertEquals("Should have gotten a stale data response", -1.0, rate);
  }

  /** Opaque identity-based key; one fresh instance per test. */
  private static class DummyKey {}
}
| 8,051 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.sender;
import junit.framework.TestCase;
import java.io.*;
import java.util.*;
import org.apache.hadoop.chukwa.datacollection.sender.RetryListOfCollectors;
import org.apache.hadoop.conf.Configuration;
/**
 * Tests for RetryListOfCollectors: one-pass round-robin iteration over a
 * fixed host list, and parsing of a collectors file whose entries may omit
 * the scheme and/or port.
 */
public class TestRetryListOfCollectors extends TestCase {

  /** Every configured collector must be returned exactly once per pass. */
  public void testRetryList() {
    List<String> hosts = new ArrayList<String>();
    hosts.add("host1");
    hosts.add("host2");
    hosts.add("host3");
    hosts.add("host4");
    Configuration conf = new Configuration();
    RetryListOfCollectors rloc = new RetryListOfCollectors(hosts, conf);
    rloc.shuffleList();
    assertEquals(hosts.size(), rloc.total());
    for (int i = 0; i < hosts.size(); ++i) {
      assertTrue(rloc.hasNext());
      String s = rloc.next();
      assertTrue(s != null);
      System.out.println(s);
    }
    // the iterator must be exhausted after exactly one full pass
    if (rloc.hasNext()) {
      String s = rloc.next();
      System.out.println("saw unexpected collector " + s);
      fail();
    }
  }

  /**
   * Collector file entries may omit the scheme, the port, or both; each line
   * must be normalized to a full http URL, defaulting the port from the
   * chukwaCollector.http.port setting (5052 here).
   */
  public void testCollectorsFile() {
    try {
      File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
          "collectors_test");
      // the original left the fixture file behind after the test run
      tmpOutput.deleteOnExit();
      PrintWriter out = new PrintWriter(new FileOutputStream(tmpOutput));
      HashSet<String> validHosts = new HashSet<String>();
      validHosts.add("http://host1:5052/");
      validHosts.add("http://host2:5050/");
      validHosts.add("http://host3:5052/");
      validHosts.add("http://host4:5050/");
      validHosts.add("http://host5:5052/");
      validHosts.add("http://host6:5052/");
      out.println("host1");
      out.println("host2:5050");
      out.println("http://host3");
      out.println("http://host4:5050");
      out.println("http://host5:5052/");
      out.println("host6:5052");
      out.close();
      Configuration conf = new Configuration();
      conf.setInt("chukwaCollector.http.port", 5052);
      RetryListOfCollectors rloc = new RetryListOfCollectors(tmpOutput, conf);
      for (int i = 0; i < validHosts.size(); ++i) {
        assertTrue(rloc.hasNext());
        String s = rloc.next();
        assertTrue(s != null);
        System.out.println("host: " + s);
        assertTrue(validHosts.contains(s));
      }
      if (rloc.hasNext()) {
        String s = rloc.next();
        System.out.println("saw unexpected collector " + s);
        fail();
      }
    } catch(IOException e) {
      e.printStackTrace();
      // include the cause in the failure message instead of a bare fail()
      fail("unexpected IOException: " + e);
    }
  }
}
| 8,052 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/sender/TestAcksOnFailure.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.sender;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.conf.*;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.*;
import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.*;
import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
import org.apache.hadoop.chukwa.datacollection.sender.ChukwaHttpSender.CommitListEntry;
/**
 * Verifies that ChukwaHttpSender copes with an unreachable collector:
 * after its retries are exhausted, send() must report zero committed
 * entries (an exception from send() is also tolerated as an outcome).
 */
public class TestAcksOnFailure extends TestCase {

  public void testNoCollector() {
    // Configure a sender with a small, fast retry policy.
    Configuration senderConf = new Configuration();
    senderConf.setInt("chukwaAgent.sender.retries", 3);
    senderConf.setInt("chukwaAgent.sender.retryInterval", 1000);
    ChukwaHttpSender sender = new ChukwaHttpSender(senderConf);

    // Point the sender at a host name that can never resolve.
    ArrayList<String> collectorUrls = new ArrayList<String>();
    collectorUrls.add("http://somehost.invalid/chukwa");
    sender.setCollectors(new RetryListOfCollectors(collectorUrls, senderConf));

    // Build a single one-chunk payload to push through the sender.
    byte[] payload = "sometestdata".getBytes();
    Adaptor adaptor = new FileTailingAdaptor();
    ChunkImpl chunk = new ChunkImpl("testtype", "sname", payload.length, payload, adaptor);
    ArrayList<Chunk> outgoing = new ArrayList<Chunk>();
    outgoing.add(chunk);

    try {
      List<CommitListEntry> acked = sender.send(outgoing);
      // Nothing can have been committed: there is no live collector.
      assertTrue(acked.size() == 0);
    } catch (Exception ignored) {
      // An exception after exhausting retries is also an acceptable outcome.
    }
  }
}
| 8,053 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestAdaptorTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.collector;
import java.io.File;
import org.apache.hadoop.chukwa.datacollection.adaptor.TestDirTailingAdaptor;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorResetThread;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.CommitCheckServlet;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
import org.apache.hadoop.chukwa.datacollection.sender.AsyncAckSender;
import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
import org.apache.hadoop.chukwa.util.ConstRateAdaptor;
import org.apache.hadoop.conf.Configuration;
import org.mortbay.jetty.Server;
import junit.framework.TestCase;
/**
 * End-to-end check of the adaptor-reset path.  With an aggressively short
 * ack timeout (1 second), a constant-rate adaptor streaming through a live
 * collector should time out at least once, which forces resends and hence
 * duplicate byte ranges in the data sink.
 */
public class TestAdaptorTimeout extends TestCase {
  // Port for the test collector; distinct from other tests in this package
  // so suites can run concurrently.
  static final int PORTNO = 9997;
  // How long to let the adaptor stream before checking for resets.
  static final int TEST_DURATION_SECS = 30;
  static int SEND_RATE = 10* 1000; //bytes/sec

  public void testAdaptorTimeout() throws Exception {
    Configuration conf = new Configuration();
    String outputDirectory = TestDelayedAcks.buildConf(conf);
    // Very short ack timeout so at least one adaptor reset fires during the run.
    conf.setInt(AdaptorResetThread.TIMEOUT_OPT, 1000);
    ServletCollector collector = new ServletCollector(conf);
    Server collectorServ = TestDelayedAcks.startCollectorOnPort(conf, PORTNO, collector);
    Thread.sleep(1000);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    HttpConnector conn = new HttpConnector(agent, "http://localhost:"+PORTNO+"/");
    conn.start();
    // Start a constant-rate adaptor; the agent echoes back the adaptor name.
    String resp = agent.processAddCommand("add constSend = " + ConstRateAdaptor.class.getCanonicalName() +
        " testData "+ SEND_RATE + " 0");
    assertTrue("adaptor_constSend".equals(resp));
    Thread.sleep(TEST_DURATION_SECS * 1000);
    // The async sender counts adaptor resets caused by missed acks; with a
    // 1s timeout against real network round trips there must be at least one.
    AsyncAckSender sender = (AsyncAckSender)conn.getSender();
    int resets = sender.adaptorReset.getResetCount();
    System.out.println(resets + " resets");
    assertTrue(resets > 0);
    agent.shutdown();
    collectorServ.stop();
    conn.shutdown();
    Thread.sleep(5000); //for collector to shut down
    // Resets cause retransmission, so the sink should hold duplicate byte
    // ranges but no gaps (checkDirs asserts zero missing bytes internally).
    long dups = TestFailedCollectorAck.checkDirs(conf, conf.get(SeqFileWriter.OUTPUT_DIR_OPT));
    assertTrue(dups > 0);
    TestDirTailingAdaptor.nukeDirContents(new File(outputDirectory));
  }
}
| 8,054 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.collector;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import org.apache.hadoop.chukwa.ChukwaArchiveKey;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.datacollection.adaptor.TestDirTailingAdaptor;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.CommitCheckServlet;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
import org.apache.hadoop.chukwa.datacollection.sender.RetryListOfCollectors;
import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
import org.apache.hadoop.chukwa.extraction.archive.SinkArchiver;
import org.apache.hadoop.chukwa.util.ConstRateAdaptor;
import org.apache.hadoop.chukwa.util.ConstRateValidator.ByteRange;
import org.apache.hadoop.chukwa.util.ConstRateValidator.ValidatorSM;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.mortbay.jetty.Server;
import junit.framework.TestCase;
/**
 * Tests that data survives the failure of one collector.  An agent streams
 * through collector 1, which is then stopped mid-run; the agent must fail
 * over to collector 2 without losing any bytes.  {@link #checkDirs} then
 * validates the combined contents of both sink directories.
 */
public class TestFailedCollectorAck extends TestCase {

  static final int PORTNO = 9993;

  public void testFailureRecovery() {
    try {
      Configuration conf = new Configuration();
      String outputDirectory = TestDelayedAcks.buildConf(conf);
      SeqFileWriter.setEnableRotationOnClose(false);
      // Two sink directories, one per collector, so each collector's output
      // can be scanned independently.
      File sinkA = new File(outputDirectory, "chukwa_sink_A");
      sinkA.mkdir();
      File sinkB = new File(outputDirectory, "chukwa_sink_B");
      sinkB.mkdir();
      conf.set(CommitCheckServlet.SCANPATHS_OPT, sinkA.getCanonicalPath()
          + "," + sinkB.getCanonicalPath());
      conf.set(SeqFileWriter.OUTPUT_DIR_OPT, sinkA.getCanonicalPath() );
      // Collector 1 gets its own copy of the conf because OUTPUT_DIR_OPT is
      // mutated again below before building collector 2.
      ServletCollector collector1 = new ServletCollector(new Configuration(conf));
      conf.set(SeqFileWriter.OUTPUT_DIR_OPT,sinkB.getCanonicalPath() );
      ServletCollector collector2 = new ServletCollector(conf);
      Server collector1_s = TestDelayedAcks.startCollectorOnPort(conf, PORTNO+1, collector1);
      Server collector2_s = TestDelayedAcks.startCollectorOnPort(conf, PORTNO+2, collector2);
      Thread.sleep(2000); //for collectors to start
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      HttpConnector conn = new HttpConnector(agent);
      RetryListOfCollectors clist = new RetryListOfCollectors(conf);
      clist.add("http://localhost:"+(PORTNO+1)+"/");
      clist.add("http://localhost:"+(PORTNO+2)+"/");
      conn.setCollectors(clist);
      conn.start();
      //FIXME: somehow need to clue in commit checker which paths to check.
      // Somehow need
      String resp = agent.processAddCommand("add adaptor_constSend = " + ConstRateAdaptor.class.getCanonicalName() +
          " testData "+ TestDelayedAcks.SEND_RATE + " 12345 0");
      assertTrue("adaptor_constSend".equals(resp));
      Thread.sleep(10 * 1000);
      // Kill the first collector mid-stream; the agent should fail over.
      collector1_s.stop();
      Thread.sleep(10 * 1000);
      SeqFileWriter.setEnableRotationOnClose(true);
      // The adaptor's status string ends with the committed byte count.
      String[] stat = agent.getAdaptorList().get("adaptor_constSend").split(" ");
      long bytesCommitted = Long.valueOf(stat[stat.length -1]);
      assertTrue(bytesCommitted > 0);
      agent.shutdown();
      conn.shutdown();
      Thread.sleep(2000); //for collectors to shut down
      collector2_s.stop();
      Thread.sleep(2000); //for collectors to shut down
      checkDirs(conf, conf.get(CommitCheckServlet.SCANPATHS_OPT));
      TestDirTailingAdaptor.nukeDirContents(new File(outputDirectory));
      (new File(outputDirectory)).delete();
    } catch(Exception e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /**
   * Scans the given comma-separated list of sink directories, replays every
   * chunk in every *.done sequence file through the byte-range validator,
   * and asserts that no bytes are missing.
   *
   * @param conf  configuration used to obtain the local filesystem
   * @param paths comma-separated list of sink directory paths
   * @return the number of duplicated bytes seen (duplicates are legal;
   *         gaps are not)
   * @throws IOException if a sink file cannot be read
   */
  public static long checkDirs(Configuration conf, String paths) throws IOException {
    ArrayList<Path> toScan = new ArrayList<Path>();
    ArrayList<ByteRange> bytes = new ArrayList<ByteRange>();
    FileSystem localfs = FileSystem.getLocal(conf);
    String[] paths_s = paths.split(",");
    for(String s: paths_s)
      if(s.length() > 1)
        toScan.add(new Path(s));
    for(Path p: toScan) {
      FileStatus[] dataSinkFiles = localfs.listStatus(p, SinkArchiver.DATA_SINK_FILTER);
      for(FileStatus fstatus: dataSinkFiles) {
        // Only fully-rotated files are interesting; in-progress files are skipped.
        if(!fstatus.getPath().getName().endsWith(".done"))
          continue;
        SequenceFile.Reader reader = new SequenceFile.Reader(localfs, fstatus.getPath(), conf);
        try {
          ChukwaArchiveKey key = new ChukwaArchiveKey();
          ChunkImpl chunk = ChunkImpl.getBlankChunk();
          while (reader.next(key, chunk)) {
            bytes.add(new ByteRange(chunk));
          }
        } finally {
          // Close even if next() throws, so the file handle is not leaked.
          reader.close();
        }
      }
    }
    Collections.sort(bytes);
    // Replay the sorted ranges through the validator state machine; it
    // accumulates missing and duplicated byte counts.
    ValidatorSM sm = new ValidatorSM();
    for(ByteRange b: bytes) {
      String s = sm.advanceSM(b);
      if(s != null)
        System.out.println(s);
    }
    assertEquals(0, sm.missingBytes);
    return sm.dupBytes;
  }
}
| 8,055 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.collector;
import java.io.File;
import java.util.*;
import java.util.regex.*;
import org.apache.hadoop.chukwa.*;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
import org.apache.hadoop.chukwa.datacollection.adaptor.TestDirTailingAdaptor;
import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorResetThread;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.CommitCheckServlet;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
import org.apache.hadoop.chukwa.datacollection.sender.*;
import org.apache.hadoop.chukwa.datacollection.sender.ChukwaHttpSender.CommitListEntry;
import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
import org.apache.hadoop.chukwa.extraction.archive.SinkArchiver;
import org.apache.hadoop.chukwa.util.ConstRateAdaptor;
import org.apache.hadoop.chukwa.util.ConstRateValidator.ByteRange;
import org.apache.hadoop.chukwa.util.ConstRateValidator.ValidatorSM;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import junit.framework.TestCase;
import static org.apache.hadoop.chukwa.datacollection.sender.AsyncAckSender.DelayedCommit;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Tests for the asynchronous ("delayed") acknowledgement path between agent
 * and collector: adaptor reset on ack timeout, the COMMIT_PENDING status
 * emitted by SeqFileWriter, and a full end-to-end run where every byte sent
 * must eventually be committed.
 */
public class TestDelayedAcks extends TestCase {
  static final int PORTNO = 9993;
  static int END2END_TEST_SECS = 30;
  static int SEND_RATE = 180* 1000; //bytes/sec
  // Poll periods for the client-side ack poller and the server-side scanner.
  static int CLIENT_SCANPERIOD = 1000;
  static int SERVER_SCANPERIOD = 1000;
  static int ROTATEPERIOD = 2000;
  // Deliberately tiny ack timeout so testAdaptorTimeout triggers a reset.
  int ACK_TIMEOUT = 200;

  //start an adaptor -- chunks should appear in the connector
  //wait for timeout. More chunks should appear.
  public void testAdaptorTimeout() throws Exception {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 500);
    conf.setInt(AdaptorResetThread.TIMEOUT_OPT, ACK_TIMEOUT);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    assertEquals(0, agent.adaptorCount());
    // 50-line temp file for the file-tailing adaptor to stream.
    File testFile = makeTestFile("testDA", 50, new File(System.getProperty("test.build.data", "/tmp")));
    long len = testFile.length();
    System.out.println("wrote data to " + testFile);
    AdaptorResetThread restart = new AdaptorResetThread(conf, agent);
    //start timeout thread
    agent.processAddCommand("add fta = "+ FileTailingAdaptor.class.getCanonicalName()
        + " testdata " + testFile.getCanonicalPath() + " 0" );
    assertEquals(1, agent.adaptorCount());
    Chunk c1 = chunks.waitForAChunk();
    assertNotNull(c1);
    // Register the chunk as pending-but-unacked, then let the ack timeout
    // expire so the reset thread re-winds the adaptor.
    List<CommitListEntry> pendingAcks = new ArrayList<CommitListEntry>();
    pendingAcks.add(new DelayedCommit(c1.getInitiator(), c1.getSeqID(),
        c1.getData().length, "foo", c1.getSeqID(), agent.getAdaptorName(c1.getInitiator())));
    restart.reportPending(pendingAcks);
    assertEquals(len, c1.getData().length);
    Thread.sleep(ACK_TIMEOUT*2);
    int resetCount = restart.resetTimedOutAdaptors(ACK_TIMEOUT);
    // After the reset the same data should be re-emitted as a new chunk.
    Chunk c2 = chunks.waitForAChunk(1000);
    assertNotNull(c2);
    assertEquals(len, c2.getData().length);
    assertTrue(resetCount > 0);
    agent.shutdown();
    testFile.delete();
  }

  /*
   * Checks the CommitCheckServlet works correctly with a one-chunk file.
   */
  public void testDelayedAck() throws Exception {
    Configuration conf = new Configuration();
    SeqFileWriter writer = new SeqFileWriter();
    conf.set("writer.hdfs.filesystem", "file:///");
    File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
    if (!tempDir.exists()) {
      tempDir.mkdirs();
    }
    String outputDirectory = tempDir.getPath() + "/test_DA" + System.currentTimeMillis();
    String seqWriterOutputDir = outputDirectory +"/seqWriter/seqOutputDir";
    conf.set(SeqFileWriter.OUTPUT_DIR_OPT, seqWriterOutputDir );
    writer.init(conf);
    // Write exactly one single-byte chunk, then close to force rotation.
    ArrayList<Chunk> oneChunk = new ArrayList<Chunk>();
    oneChunk.add(new ChunkImpl("dt", "name", 1, new byte[] {'b'}, null));
    ChukwaWriter.CommitStatus cs = writer.add(oneChunk);
    writer.close();
    // Find the rotated (.done) output file the writer produced.
    File seqWriterFile = null;
    File directory = new File(seqWriterOutputDir);
    String[] files = directory.list();
    for(String file: files) {
      if ( file.endsWith(".done") ){
        seqWriterFile = new File(directory, file);
        break;
      }
    }
    long lenWritten = seqWriterFile.length();
    System.out.println("wrote " + lenWritten+ " bytes");
    // In async-ack mode the writer reports COMMIT_PENDING, whose single
    // entry ends with the byte offset through which data was written.
    assertTrue(cs instanceof ChukwaWriter.COMMIT_PENDING);
    ChukwaWriter.COMMIT_PENDING pending = (ChukwaWriter.COMMIT_PENDING) cs;
    assertTrue(pending.pendingEntries.size() == 1);
    String res = pending.pendingEntries.get(0);
    System.out.println("result was " + res);
    Pattern expectedPat= Pattern.compile(".* ([0-9]+)\n");
    Matcher match = expectedPat.matcher(res);
    assertTrue(match.matches());
    long bytesPart = Long.parseLong(match.group(1));
    assertEquals(bytesPart, lenWritten);
  }

  /**
   * Starts a Jetty server on the given port hosting both the collector
   * servlet (at /) and the commit-check servlet.
   *
   * @return the running server; callers are responsible for stopping it
   */
  public static Server startCollectorOnPort(Configuration conf, int port,
      ServletCollector collector) throws Exception {
    Server server = new Server(port);
    Context root = new Context(server, "/", Context.SESSIONS);
    root.addServlet(new ServletHolder(collector), "/*");
    root.addServlet(new ServletHolder(new CommitCheckServlet(conf)), "/"+CommitCheckServlet.DEFAULT_PATH);
    server.start();
    server.setStopAtShutdown(false);
    return server;
  }

  /**
   * Populates conf with the settings shared by the async-ack tests (local
   * filesystem sink, fast rotation/scan periods, async acks enabled) and
   * creates a fresh per-run output directory.
   *
   * @return the path of the output directory that was configured
   */
  public static String buildConf(Configuration conf) {
    File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
    if (!tempDir.exists()) {
      tempDir.mkdirs();
    }
    String outputDirectory = tempDir.getPath() + "/test_DA" + System.currentTimeMillis() ;
    conf.setInt("chukwaCollector.rotateInterval", ROTATEPERIOD);
    conf.set("writer.hdfs.filesystem", "file:///");
    String seqWriterOutputDir = outputDirectory +"/chukwa_sink";
    conf.set(SeqFileWriter.OUTPUT_DIR_OPT, seqWriterOutputDir );
    conf.setInt(AsyncAckSender.POLLPERIOD_OPT, CLIENT_SCANPERIOD);
    conf.setInt(CommitCheckServlet.SCANPERIOD_OPT, SERVER_SCANPERIOD);
    conf.setBoolean(HttpConnector.ASYNC_ACKS_OPT, true);
    conf.setInt(HttpConnector.MIN_POST_INTERVAL_OPT, 100);
    conf.setInt(HttpConnector.MAX_SIZE_PER_POST_OPT, 10 * 1000*1000);
    conf.setInt(SeqFileWriter.STAT_PERIOD_OPT, 60*60*24);
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    //turn off stats reporting thread, so we can use Writer.dataSize
    conf.set(AsyncAckSender.POLLHOSTS_OPT, "afilethatdoesntexist");
    //so that it won't try to read conf/collectors
    conf.setInt("chukwaAgent.control.port", 0);
    return outputDirectory;
  }

  public void testEndToEnd() {
    try {
      Configuration conf = new Configuration();
      String outputDirectory = buildConf(conf);
      ServletCollector collector = new ServletCollector(conf);
      Server collectorServ = startCollectorOnPort(conf, PORTNO, collector);
      Thread.sleep(1000);
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      HttpConnector conn = new HttpConnector(agent, "http://localhost:"+PORTNO+"/");
      conn.start();
      String resp = agent.processAddCommand("add constSend = " + ConstRateAdaptor.class.getCanonicalName() +
          " testData "+ SEND_RATE + " 0");
      assertTrue("adaptor_constSend".equals(resp));
      Thread.sleep(END2END_TEST_SECS * 1000);
      //do the shutdown directly, here, so that acks are still processed.
      assertNotNull(agent.getAdaptor("adaptor_constSend"));
      long bytesOutput = agent.getAdaptor("adaptor_constSend").shutdown(AdaptorShutdownPolicy.GRACEFULLY);
      // Wait long enough for outstanding acks to be scanned and delivered.
      Thread.sleep(CLIENT_SCANPERIOD + SERVER_SCANPERIOD + ROTATEPERIOD + 3000);
      // Adaptor status ends with the committed byte count.
      String[] stat = agent.getAdaptorList().get("adaptor_constSend").split(" ");
      long bytesCommitted = Long.valueOf(stat[stat.length -1]);
      long bytesPerSec = bytesOutput / (1000 * END2END_TEST_SECS);
      System.out.println("data rate was " + bytesPerSec + " kb /second");
      //all data should be committed
      System.out.println(bytesCommitted + " bytes committed");
      System.out.println(bytesOutput + " bytes output");
      System.out.println("difference is " + (bytesOutput - bytesCommitted));
      ChukwaWriter w = collector.getWriter();
      long bytesWritten = ((SeqFileWriter)w).getBytesWritten();
      System.out.println("collector wrote " + bytesWritten);
      assertEquals(bytesCommitted, bytesOutput);
      assertEquals(bytesWritten, bytesCommitted);
      //We need a little imprecision here, since the send rate is a bit bursty,
      //and since some acks got lost after the adaptor was stopped.
      assertTrue(bytesPerSec > 9 * SEND_RATE/ 1000 / 10);
      // No adaptor should have timed out during a clean end-to-end run.
      AsyncAckSender sender = (AsyncAckSender)conn.getSender();
      assertEquals(0, sender.adaptorReset.getResetCount());
      agent.shutdown();
      collectorServ.stop();
      conn.shutdown();
      Thread.sleep(5000); //for collector to shut down
      TestDirTailingAdaptor.nukeDirContents(new File(outputDirectory));
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }
}
| 8,056 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/CaptureWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.collector;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
import org.apache.hadoop.conf.Configuration;
/**
* Dumps received chunks into a public static array.
* This class is intended for unit tests, only.
*/
/**
 * A ChukwaWriter that records every chunk it receives in a public static
 * list, so tests can inspect exactly what was written.
 * This class is intended for unit tests, only.
 */
public class CaptureWriter implements ChukwaWriter {

  /** All chunks ever passed to add(), in arrival order. */
  public static ArrayList<Chunk> outputs = new ArrayList<Chunk>();

  @Override
  public CommitStatus add(List<Chunk> chunks) throws WriterException {
    // Synchronize so concurrent writers/readers of the static list are safe.
    synchronized (outputs) {
      outputs.addAll(chunks);
    }
    return COMMIT_OK;
  }

  @Override
  public void close() throws WriterException {
    // Nothing to release; captured chunks live in the static list.
  }

  @Override
  public void init(Configuration c) throws WriterException {
    // No configuration needed for capture.
  }
}
| 8,057 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestBackpressure.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.collector;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
import org.apache.hadoop.chukwa.datacollection.sender.RetryListOfCollectors;
import org.apache.hadoop.chukwa.datacollection.writer.NullWriter;
import org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter;
import org.apache.hadoop.chukwa.util.ConstRateAdaptor;
import org.apache.hadoop.conf.Configuration;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
/**
 * Verifies that a slow collector-side writer throttles the agent: the
 * adaptor offers data far faster than the NullWriter's configured write
 * rate, and the observed throughput must settle below that write rate
 * (but not collapse to near zero).
 */
public class TestBackpressure extends TestCase {
  int PORTNO = 9991;
  /**
   * NOTE THAT WRITE-RATE * POST SIZE MUST BE GREATER THAN TEST DURATION
   *
   * Default max post size is 2 MB; need to process that several times during test.
   */
  int TEST_DURATION_SECS = 40;
  int WRITE_RATE_KB = 200; //kb/sec
  // Offered load: deliberately much higher than the writer can absorb.
  int SEND_RATE = 2500* 1000; //bytes/sec
  // Lower bound (as a percentage of the write rate) the throttled
  // throughput must still achieve.
  int MIN_ACCEPTABLE_PERCENT = 60;

  public void testBackpressure() throws Exception {
    Configuration conf = new Configuration();
    // NullWriter discards data but enforces a maximum write rate.
    conf.set("chukwaCollector.writerClass", NullWriter.class
        .getCanonicalName());
    conf.set(NullWriter.RATE_OPT_NAME, ""+WRITE_RATE_KB);//kb/sec
    conf.setInt(HttpConnector.MIN_POST_INTERVAL_OPT, 100);
    conf.setInt("constAdaptor.sleepVariance", 1);
    conf.setInt("constAdaptor.minSleep", 50);
    conf.setInt("chukwaAgent.control.port", 0);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    RetryListOfCollectors clist = new RetryListOfCollectors(conf);
    clist.add("http://localhost:"+PORTNO+"/chukwa");
    HttpConnector conn = new HttpConnector(agent);
    conn.setCollectors(clist);
    conn.start();
    Server server = new Server(PORTNO);
    Context root = new Context(server, "/", Context.SESSIONS);
    root.addServlet(new ServletHolder(new ServletCollector(conf)), "/*");
    server.start();
    server.setStopAtShutdown(false);
    Thread.sleep(1000);
    agent.processAddCommand("add adaptor_constSend = " + ConstRateAdaptor.class.getCanonicalName() +
        " testData "+ SEND_RATE + " 0");
    assertNotNull(agent.getAdaptor("adaptor_constSend"));
    Thread.sleep(TEST_DURATION_SECS * 1000);
    // Adaptor status ends with the committed byte count; convert to kb/sec.
    String[] stat = agent.getAdaptorList().get("adaptor_constSend").split(" ");
    long kbytesPerSec = Long.valueOf(stat[stat.length -1]) / TEST_DURATION_SECS / 1000;
    System.out.println("data rate was " + kbytesPerSec + " kb /second");
    assertTrue(kbytesPerSec < WRITE_RATE_KB); //write rate should throttle sends
    assertTrue(kbytesPerSec > 0.8 * MIN_ACCEPTABLE_PERCENT* WRITE_RATE_KB / 100);//an assumption, but should hold true
    agent.shutdown();
  }
}
| 8,058 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestCmd.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent;
import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
import org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import java.net.*;
import java.io.*;
/**
 * Tests for the agent's textual control protocol: the ADD command in its
 * several argument forms, and the stopAll command issued over the agent's
 * control socket.
 */
public class TestCmd extends TestCase {

  /** ADD with a type, a multi-token parameter string, and an offset. */
  public void testAddCmdWithParam() {
    ChukwaAgent agent;
    try {
      Configuration conf = new Configuration();
      conf.set("chukwaAgent.control.port", "0");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      String l = agent
          .processAddCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType 0 my param1 param2 /var/log/messages 114027");
      assertNotNull(l);
      Adaptor adaptor = agent.getAdaptor(l);
      ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
      // Compare with equals(), not reference identity on intern()ed strings.
      assertEquals("error in type", "chukwaTestAdaptorType",
          chukwaTestAdaptor.getType());
      assertEquals("error in param", "0 my param1 param2 /var/log/messages",
          chukwaTestAdaptor.getParams());
      assertEquals("error in startOffset", 114027L,
          chukwaTestAdaptor.getStartOffset());
      agent.stopAdaptor(l, false);
      agent.shutdown();
      Thread.sleep(2000);
    } catch (InterruptedException e) {
      // Restore the interrupt flag; the test simply ends early.
      Thread.currentThread().interrupt();
    } catch (AlreadyRunningException e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /** ADD with no parameter string at all: params should default to "". */
  public void testAddCmdWithoutParam1() {
    ChukwaAgent agent;
    try {
      Configuration conf = new Configuration();
      conf.set("chukwaAgent.control.port", "0");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      String name = agent
          .processAddCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType 114027");
      assertNotNull(name);
      Adaptor adaptor = agent.getAdaptor(name);
      ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
      assertEquals("error in type", "chukwaTestAdaptorType",
          chukwaTestAdaptor.getType());
      assertEquals("error in param", "", chukwaTestAdaptor.getParams());
      assertEquals("error in startOffset", 114027L,
          chukwaTestAdaptor.getStartOffset());
      agent.stopAdaptor(name, false);
      agent.shutdown();
      Thread.sleep(2000);
    } catch (InterruptedException e) {
      // Restore the interrupt flag; the test simply ends early.
      Thread.currentThread().interrupt();
    } catch (AlreadyRunningException e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /** ADD with a single-token parameter ("0") before the offset. */
  public void testAddCmdWithoutParam2() {
    ChukwaAgent agent;
    try {
      Configuration conf = new Configuration();
      conf.set("chukwaAgent.control.port", "0");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      String n = agent
          .processAddCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor"
              + " chukwaTestAdaptorType 0 114027");
      assertNotNull(n);
      Adaptor adaptor = agent.getAdaptor(n);
      ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
      assertEquals("error in type", "chukwaTestAdaptorType",
          chukwaTestAdaptor.getType());
      assertEquals("error in param", "0", chukwaTestAdaptor.getParams());
      assertEquals("error in startOffset", 114027L,
          chukwaTestAdaptor.getStartOffset());
      agent.stopAdaptor(n, false);
      agent.shutdown();
      Thread.sleep(2000);
    } catch (InterruptedException e) {
      // Restore the interrupt flag; the test simply ends early.
      Thread.currentThread().interrupt();
    } catch (AlreadyRunningException e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /** stopAll over the control socket must remove every adaptor. */
  public void testStopAll() throws Exception {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    agent.processAddCommand(
        "ADD adaptor1 = org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor"
            + " chukwaTestAdaptorType 0");
    agent.processAddCommand(
        "ADD adaptor2 = org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor"
            + " chukwaTestAdaptorType 0");
    assertEquals(2, agent.adaptorCount());
    Socket s = new Socket("localhost", agent.getControllerPort());
    try {
      PrintWriter bw = new PrintWriter(new OutputStreamWriter(s.getOutputStream()));
      bw.println("stopAll");
      bw.flush();
      // Block on the first byte of the response so we know the command ran.
      InputStreamReader in = new InputStreamReader(s.getInputStream());
      in.read();
    } finally {
      // Closing the socket also closes its streams; don't leak the descriptor.
      s.close();
    }
    assertEquals(0, agent.adaptorCount());
    agent.shutdown();
  }
}
| 8,059 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestAgentConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent;
import java.io.*;
import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
/**
 * Tests how the agent reconciles its two sources of adaptor definitions:
 * the {@code chukwaAgent.initial_adaptors} file and the checkpoint files
 * written to {@code chukwaAgent.checkpoint.dir}.
 */
public class TestAgentConfig extends TestCase {

  /**
   * Starts an agent with an initial-adaptors file tailing "foo", shuts it
   * down (writing a checkpoint), rewrites the initial-adaptors file to tail
   * "bar", and restarts. After restart both the checkpointed adaptor (foo)
   * and the new initial adaptor (bar) must be running.
   */
  public void testInitAdaptors_vs_Checkpoint() {
    try {
      // create two target files, foo and bar
      File foo = File.createTempFile("foo", "test");
      foo.deleteOnExit();
      PrintStream ps = new PrintStream(new FileOutputStream(foo));
      ps.println("foo");
      ps.close();
      File bar = File.createTempFile("bar", "test");
      bar.deleteOnExit();
      ps = new PrintStream(new FileOutputStream(bar));
      ps.println("bar");
      ps.close();
      // initially, read foo
      File initialAdaptors = File.createTempFile("initial", "adaptors");
      initialAdaptors.deleteOnExit();
      ps = new PrintStream(new FileOutputStream(initialAdaptors));
      ps.println("add adaptor_testAdaptor= org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
          + foo.getAbsolutePath() + " 0 ");
      ps.close();
      Configuration conf = new Configuration();
      conf.set("chukwaAgent.control.port", "0");
      conf.set("chukwaAgent.initial_adaptors", initialAdaptors
          .getAbsolutePath());
      // createTempFile gives us a unique name; replace the file with a dir.
      File checkpointDir = File.createTempFile("chukwatest", "checkpoint");
      checkpointDir.delete();
      checkpointDir.mkdir();
      checkpointDir.deleteOnExit();
      conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getAbsolutePath());
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      // check that we processed initial adaptors
      assertEquals(1, agent.adaptorCount());
      assertNotNull(agent.getAdaptor("adaptor_testAdaptor"));
      assertTrue(agent.getAdaptor("adaptor_testAdaptor").getCurrentStatus().contains("foo"));
      System.out
          .println("---------------------done with first run, now stopping");
      agent.shutdown();
      Thread.sleep(2000);
      assertEquals(0, agent.adaptorCount());
      // at this point, there should be a checkpoint file with a tailer reading
      // foo.
      // we're going to rewrite initial adaptors to read bar; after reboot
      // we should be looking at both foo and bar.
      ps = new PrintStream(new FileOutputStream(initialAdaptors, false));// overwrite
      ps.println("add bar= org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
          + bar.getAbsolutePath() + " 0 ");
      ps.close();
      System.out.println("---------------------restarting");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      conn = new ConsoleOutConnector(agent, true);
      conn.start();
      // one adaptor restored from the checkpoint plus one from the
      // rewritten initial-adaptors file
      assertEquals(2, agent.adaptorCount());
      assertNotNull(agent.getAdaptor("adaptor_testAdaptor"));
      assertTrue(agent.getAdaptor("adaptor_testAdaptor").getCurrentStatus().contains("foo"));
      agent.shutdown();
      Thread.sleep(2000);
      System.out.println("---------------------done");
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /**
   * Walks the agent through the checkpoint-directory lifecycle with no
   * pre-existing checkpoints: missing dir, empty dir, zero-length checkpoint
   * file, and finally a valid checkpoint written by a previous run. The
   * checkpoint sequence number must increment on each boot/shutdown cycle.
   */
  public void testNoCheckpoints() {
    try {
      String tmpdir = System.getProperty("test.build.data", "/tmp");
      File NONCE_DIR = new File(tmpdir, "/test_chukwa_checkpoints");
      if (NONCE_DIR.exists()) {
        for (File f : NONCE_DIR.listFiles())
          f.delete();
        NONCE_DIR.delete();
      }
      // assertFalse(NONCE_DIR.exists());
      Configuration conf = new Configuration();
      conf.set("chukwaAgent.checkpoint.dir", NONCE_DIR.getAbsolutePath());
      conf.setBoolean("chukwaAgent.checkpoint.enabled", true);
      conf.setInt("chukwaAgent.control.port", 0);
      System.out.println("\n\n===checkpoints enabled, dir does not exist:");
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      assertEquals(0, agent.getAdaptorList().size());
      agent.shutdown();
      Thread.sleep(2000);
      // shutdown must have created the checkpoint directory
      assertTrue(NONCE_DIR.exists());
      for (File f : NONCE_DIR.listFiles())
        f.delete();
      System.out
          .println("\n\n===checkpoints enabled, dir exists but is empty:");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      assertEquals(0, agent.getAdaptorList().size());
      agent.shutdown();
      Thread.sleep(2000);
      for (File f : NONCE_DIR.listFiles())
        f.delete();
      System.out
          .println("\n\n===checkpoints enabled, dir exists with zero-length file:");
      (new File(NONCE_DIR, "chukwa_checkpoint_0")).createNewFile();
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      assertEquals(0, agent.getAdaptorList().size());
      agent.processAddCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor testdata 0");
      agent.shutdown();
      Thread.sleep(2000);
      assertTrue(new File(NONCE_DIR, "chukwa_checkpoint_1").exists());
      System.out
          .println("\n\n===checkpoints enabled, dir exists with valid checkpoint");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      assertEquals(1, agent.getAdaptorList().size());
      agent.shutdown();
      Thread.sleep(2000);
      // checkpoint # increments by one on boot and reload
      assertTrue(new File(NONCE_DIR, "chukwa_checkpoint_2").exists());
    } catch (Exception e) {
      // FIX: previously only fail(e.toString()) — the stack trace was lost,
      // making failures hard to diagnose; now consistent with the catch in
      // testInitAdaptors_vs_Checkpoint.
      e.printStackTrace();
      fail(e.toString());
    }
  }
}
| 8,060 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.ChunkQueue;
import org.apache.hadoop.chukwa.datacollection.DataFactory;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.chukwa.datacollection.agent.ChukwaConstants.*;
import junit.framework.TestCase;
/**
 * Tests the agent's HTTPS REST server: a throwaway self-signed keystore is
 * generated with keytool, the agent is started with SSL enabled, and a
 * RestAdaptor is pointed at the agent's own https endpoint to confirm that
 * chunks can be collected over TLS.
 */
public class TestChukwaSsl extends TestCase{
  String keyStoreFile = "chukwa.store";

  /**
   * Generates a temporary JKS keystore with a self-signed RSA key before each
   * test. On keytool failure, dumps its stdout/stderr for diagnosis.
   */
  @Override
  protected void setUp() throws IOException, InterruptedException{
    String[] cmd = new String[]{System.getenv("JAVA_HOME")+"/bin/keytool", "-genkeypair", "-keyalg", "RSA",
        "-alias", "monitoring", "-validity", "36500", "-keystore", keyStoreFile, "-keysize", "1024",
        "-keypass", "chukwa", "-storepass", "chukwa", "-dname", "cn=*,ou=chukwa,o=apache,c=US", "-storetype", "jks"
    };
    Process p = Runtime.getRuntime().exec(cmd);
    p.waitFor();
    if(p.exitValue() != 0){
      // FIX: both readers were previously leaked (the second assignment
      // overwrote the first without closing it); try-with-resources closes
      // each one.
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(p.getInputStream()))) {
        String line;
        while((line = reader.readLine()) != null){
          System.out.println("Output:"+line);
        }
      }
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(p.getErrorStream()))) {
        String line;
        while((line = reader.readLine()) != null){
          System.out.println("Error:"+line);
        }
      }
    }
    System.out.println("keytool exit value:" + p.exitValue());
  }

  /** Removes the generated keystore after each test. */
  @Override
  protected void tearDown(){
    new File(keyStoreFile).delete();
  }

  /**
   * Starts the agent (and thus the Chukwa REST server) with SSL configured
   * from the generated keystore, then verifies chunk collection over HTTPS.
   */
  public void testRestServer() throws Exception{
    //keystore generated using the following command
    //keytool -genkeypair -keyalg RSA -alias monitoring -validity 36500 -keystore src/test/resources/chukwa.store -keysize 1024 -keypass chukwa -storepass chukwa -dname "cn=*, ou=chukwa, o=apache, c=US" -storetype jks
    Configuration conf = new Configuration();
    conf.set(SSL_ENABLE, "true");
    String keystore = new File(ClassLoader.getSystemResource("chukwa.store").getFile()).getAbsolutePath();
    System.out.println("keystore = "+keystore);
    // the same keystore/password serves as both keystore and truststore
    String commonPassword = "chukwa";
    conf.set(KEYSTORE_STORE, keystore);
    conf.set(KEYSTORE_PASSWORD, commonPassword);
    conf.set(KEYSTORE_KEY_PASSWORD, commonPassword);
    conf.set(TRUSTSTORE_STORE, keystore);
    conf.set(TRUST_PASSWORD, commonPassword);
    /*
    //optional properties
    String storeType = "pkcs12";
    String sslProtocol = "TLS";
    conf.set(KEYSTORE_TYPE, storeType);
    conf.set(TRUSTSTORE_TYPE, storeType);
    conf.set(SSL_PROTOCOL, sslProtocol);
    */
    //start agent, which starts chukwa rest server
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    System.out.println("Started ChukwaRestServer");
    testSecureRestAdaptor(agent);
    agent.shutdown();
    System.out.println("Stopped ChukwaRestServer");
  }

  /**
   * Adds a RestAdaptor polling the agent's own adaptor list over https and
   * asserts that at least one chunk is delivered within ~10 seconds.
   */
  private void testSecureRestAdaptor(ChukwaAgent agent) {
    //add rest adaptor to collect the agent adaptor info through https
    agent.processAddCommand("add RestAdaptor DebugProcessor https://localhost:9090/rest/v2/adaptor 5 0");
    assertEquals(1, agent.adaptorCount());
    final ChunkQueue eventQueue = DataFactory.getInstance().getEventQueue();
    final List<Chunk> chunks = new ArrayList<Chunk>();
    Thread collector = new Thread(){
      @Override
      public void run(){
        try {
          eventQueue.collect(chunks, 1);
        } catch (InterruptedException e) {
          // interrupt is the intended stop signal for this thread
        }
      }
    };
    //wait 10s and interrupt the collector
    collector.start();
    try {
      collector.join(10000);
    } catch (InterruptedException e) {
      // FIX: restore interrupt status instead of silently swallowing it
      Thread.currentThread().interrupt();
    }
    collector.interrupt();
    //make sure we collected at least 1 chunk
    assertTrue(chunks.size() > 0);
    for(Chunk chunk: chunks){
      String data = new String(chunk.getData());
      System.out.println("Collected chunk - " + data);
    }
  }
}
| 8,061 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
import junit.framework.TestCase;
//Note this test takes a minimum of
// 20 * 2 + 6* 20 = 160 seconds.
/**
 * Stress-style lifecycle tests for ChukwaAgent: repeatedly adds and removes
 * ConstRateAdaptors and checks the adaptor count returns to baseline.
 */
// Note this test takes a minimum of
// 20 * 2 + 6* 20 = 160 seconds.
public class TestAgent extends TestCase {

  /**
   * Adds and removes one adaptor per iteration via the external controller
   * (socket) interface, 19 times.
   */
  public void testStopAndStart() {
    try {
      Configuration conf = new Configuration();
      conf.setInt("chukwaAgent.control.port", 0); // 0 => pick a free port
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      int portno = agent.getControllerPort();
      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
      for (int i = 1; i < 20; ++i) {
        String adaptorId = cli.add(
            "org.apache.hadoop.chukwa.util.ConstRateAdaptor", "raw" + i, "2000"
            + i, 0);
        assertNotNull(adaptorId);
        Thread.sleep(2000);
        cli.removeAll();
      }
      agent.shutdown();
      conn.shutdown();
      Thread.sleep(2000);
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  /**
   * Adds six adaptors per trial through the in-process command interface,
   * stops them all, and checks the adaptor count returns to its baseline,
   * for 20 trials.
   */
  public void testMultiStopAndStart() {
    try {
      Configuration conf = new Configuration();
      conf.setInt("chukwaAgent.control.port", 0);
      ChukwaAgent agent = ChukwaAgent.getAgent(conf);
      agent.start();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
      int count = agent.adaptorCount(); // baseline before any trial
      for (int trial = 0; trial < 20; ++trial) {
        ArrayList<String> runningAdaptors = new ArrayList<String>();
        for (int i = 1; i < 7; ++i) {
          String l = agent
              .processAddCommand("add org.apache.hadoop.chukwa.util.ConstRateAdaptor raw"
                  + i + " 2000" + i + " 0");
          // FIX: was assertTrue(l != null) — assertNotNull gives a clearer
          // failure message.
          assertNotNull(l);
          runningAdaptors.add(l);
        }
        Thread.sleep(1000);
        for (String l : runningAdaptors)
          agent.stopAdaptor(l, false);
        Thread.sleep(5000);
        // FIX: was assertTrue(a == b) — assertEquals reports both values.
        assertEquals(count, agent.adaptorCount());
      }
      agent.shutdown();
      // FIX: the connector was never shut down here, unlike in
      // testStopAndStart; shut it down for consistent cleanup.
      conn.shutdown();
      Thread.sleep(2000);
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.toString());
    }
  }

  // Placeholder — log-rotation behavior is not yet covered.
  public void testLogRotate() {
  }
}
| 8,062 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChunkQueue.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.datacollection.ChunkQueue;
import org.apache.hadoop.chukwa.datacollection.DataFactory;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
/**
 * Exercises the two ChunkQueue implementations the agent can be configured
 * with: MemLimitQueue (producers must block when the byte limit is hit) and
 * NonBlockingMemLimitQueue (producers never block; excess chunks are
 * dropped/rotated). Each test installs the implementation via the
 * chukwaAgent.chunk.queue property, then drives it with one producer
 * (Putter) and one consumer (Getter) thread.
 */
public class TestChunkQueue extends TestCase {
  // Fixed payload shared by every chunk; all byte limits below derive from it.
  private final byte[] data = "this is a chunk".getBytes();
  // putList: chunks offered by the producer; getList: chunks drained by the consumer.
  List<Chunk> putList, getList;
  // Number of chunks the producer offers.
  int NUM_CHUNKS = 10;
  // Number of chunks that fit under QUEUE_LIMIT.
  int QUEUE_SIZE = 6;
  // Byte capacity handed to the queue: exactly QUEUE_SIZE chunks' worth.
  int QUEUE_LIMIT = data.length * QUEUE_SIZE;
  ChukwaAgent agent = null;
  Configuration conf = null;
  // Configuration keys controlling queue capacity and implementation class.
  final String CHUNK_QUEUE_LIMIT = "chukwaAgent.chunk.queue.limit";
  final String CHUNK_QUEUE = "chukwaAgent.chunk.queue";
  DataFactory df = DataFactory.getInstance();

  /**
   * Starts a shared agent (only once across tests), applies the byte limit,
   * and builds NUM_CHUNKS identical chunks to enqueue.
   */
  @Override
  protected void setUp() throws AlreadyRunningException {
    if(agent == null){
      agent = ChukwaAgent.getAgent();
      agent.start();
    }
    conf = agent.getConfiguration();
    conf.set(CHUNK_QUEUE_LIMIT, Integer.toString(QUEUE_LIMIT));
    putList = new ArrayList<Chunk>(10);
    for (int i = 1; i <= NUM_CHUNKS; i++) {
      // seqID is set to i; the consumer side does not depend on it here.
      Chunk c = new ChunkImpl("DataType", "StreamName", (long) i, data, null);
      putList.add(c);
    }
  }

  /** Stops the shared agent after each test. */
  @Override
  protected void tearDown() {
    if(agent != null){
      agent.shutdown();
    }
  }

  /** MemLimitQueue must block producers once the byte limit is reached. */
  public void testMemLimitQueue() {
    conf.set(CHUNK_QUEUE, "org.apache.hadoop.chukwa.datacollection.agent.MemLimitQueue");
    ChunkQueue mlq = df.createEventQueue();
    testBlockingNature(mlq);
  }

  /** NonBlockingMemLimitQueue must never block producers at the limit. */
  public void testNonBlockingMemLimitQueue() {
    conf.set(CHUNK_QUEUE, "org.apache.hadoop.chukwa.datacollection.agent.NonBlockingMemLimitQueue");
    ChunkQueue nbmlq = df.createEventQueue();
    testNonBlockingNature(nbmlq);
  }

  /**
   * Putter thread gets a list of chunks and adds all of them
   * to the ChunkQueue. Exits early if interrupted (e.g. while blocked on a
   * full queue).
   */
  private class Putter extends Thread {
    List<Chunk> chunks;
    ChunkQueue q;

    Putter(List<Chunk> chunks, ChunkQueue q) {
      this.chunks = chunks;
      this.q = q;
    }

    public void run() {
      try {
        for (Chunk c : chunks) {
          q.add(c);
        }
      } catch (InterruptedException e) {
        // interrupt is the intended way to unstick a blocked producer
      }
    }
  }

  /**
   * Getter thread collects all the chunks from the
   * ChunkQueue indefinitely, until interrupted by the test.
   */
  private class Getter extends Thread {
    List<Chunk> chunks;
    ChunkQueue q;

    Getter(List<Chunk> chunks, ChunkQueue q) {
      this.chunks = chunks;
      this.q = q;
    }

    public void run() {
      try {
        while (true) {
          q.collect(chunks, Integer.MAX_VALUE);
        }
      } catch (InterruptedException e) {
        // interrupt is the intended way to stop the consumer
      }
    }
  }

  /** Joins {@code t} for at most {@code timeout} ms, ignoring interruption. */
  private void joinThread(Thread t, int timeout) {
    try {
      t.join(timeout);
    } catch (InterruptedException e) {
    }
  }

  /**
   * This test makes sure that the putter thread blocks when queue is full.
   * The producer offers NUM_CHUNKS chunks but only QUEUE_SIZE fit, so after
   * 3s it must still be alive (blocked); the consumer then drains everything.
   *
   * @param q the blocking queue implementation under test
   */
  private void testBlockingNature(ChunkQueue q) {
    Putter putter = new Putter(putList, q);
    putter.start();
    joinThread(putter, 3000);
    if (!putter.isAlive()) {
      fail("Blocking queue semantics not implemented");
    }
    assertTrue("Could not verify queue size after put", q.size() == QUEUE_SIZE);
    getList = new ArrayList<Chunk>();
    Getter getter = new Getter(getList, q);
    getter.start();
    joinThread(getter, 3000);
    assertTrue("Could not verify queue size after get", q.size() == 0);
    // make sure we got all chunks
    assertTrue("Could not verify all chunks got drained after get",
        getList.size() == NUM_CHUNKS);
    putter.interrupt();
    getter.interrupt();
  }

  /**
   * This test makes sure that the putter thread does not block when queue is
   * full. This test does not check if the queue implementation uses a circular
   * buffer to retain the most recent chunks or discards new incoming chunks;
   * either way only QUEUE_SIZE chunks survive.
   *
   * @param q the non-blocking queue implementation under test
   */
  private void testNonBlockingNature(ChunkQueue q) {
    Putter putter = new Putter(putList, q);
    putter.start();
    joinThread(putter, 3000);
    if (putter.isAlive()) {
      fail("Non Blocking queue semantics not implemented");
    }
    assertTrue("Could not verify queue size after put", q.size() == QUEUE_SIZE);
    getList = new ArrayList<Chunk>();
    Getter getter = new Getter(getList, q);
    getter.start();
    joinThread(getter, 3000);
    assertTrue("Could not verify all chunks got drained after get",
        q.size() == 0);
    // make sure we got only the chunks
    assertTrue("Could not verify chunks after get",
        getList.size() == QUEUE_SIZE);
    putter.interrupt();
    getter.interrupt();
  }
}
| 8,063 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.agent.rest;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.mortbay.jetty.servlet.ServletHolder;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHandler;
import org.mortbay.jetty.Server;
import javax.servlet.ServletException;
import javax.servlet.Servlet;
import javax.ws.rs.core.MediaType;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import com.sun.jersey.spi.container.servlet.ServletContainer;
/**
* Tests the basic functionality of the AdaptorController.
*/
/**
 * Tests the basic functionality of the AdaptorController REST resource by
 * driving a jersey servlet (hosted in an embedded jetty) with Spring mock
 * HTTP request/response objects: GET (json/xml), DELETE, POST, and content
 * negotiation failure.
 */
public class TestAdaptorController extends TestCase {
  protected Log log = LogFactory.getLog(getClass());
  Server jettyServer;
  ChukwaAgent agent;
  Servlet servlet;
  MockHttpServletRequest request;
  MockHttpServletResponse response;
  StringBuilder sb;
  String adaptor; // id of the adaptor registered in setUp

  /**
   * Deletes stale checkpoint files, starts an agent with one test adaptor,
   * and wires the jersey REST servlet into an embedded jetty at /foo/bar.
   */
  protected void setUp() throws Exception {
    String path = System.getenv("CHUKWA_LOG_DIR");
    String[] checkpointNames = new File(path).list(new FilenameFilter() {
      public boolean accept(File dir, String name) {
        String checkPointBaseName = "chukwa_agent_checkpoint";
        return name.startsWith(checkPointBaseName);
      }
    });
    for(String cpn : checkpointNames) {
      File checkpoint = new File(path+"/"+cpn);
      if(!checkpoint.delete()) {
        Assert.fail("Fail to clean up existing check point file: "+ cpn);
      }
    }
    agent = ChukwaAgent.getAgent();
    agent.start();
    ServletHolder servletHolder = new ServletHolder(ServletContainer.class);
    servletHolder.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
        "com.sun.jersey.api.core.PackagesResourceConfig");
    servletHolder.setInitParameter("com.sun.jersey.config.property.packages",
        "org.apache.hadoop.chukwa.datacollection.agent.rest");
    servletHolder.setServletHandler(new ServletHandler());
    jettyServer = new Server();
    Context root = new Context(jettyServer, "/foo/bar", Context.SESSIONS);
    root.setAttribute("ChukwaAgent", agent);
    root.addServlet(servletHolder, "/*");
    jettyServer.start();
    jettyServer.setStopAtShutdown(true);
    servlet = servletHolder.getServlet();
    request = new MockHttpServletRequest();
    request.setContextPath("/foo/bar");
    response = new MockHttpServletResponse();
    adaptor = agent.processAddCommandE("add org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor SomeDataType 0");
    sb = new StringBuilder();
  }

  /** Stops the agent and the embedded jetty server. */
  protected void tearDown() throws Exception {
    // NOTE(review): the return value is discarded — presumably a leftover;
    // confirm whether this was meant to stop the adaptor.
    agent.getAdaptor(adaptor);
    agent.shutdown();
    jettyServer.stop();
  }

  /** GET with Accept: application/json must succeed and leave one adaptor. */
  public void testGetJSON() throws IOException, ServletException {
    request.setServletPath("/adaptor");
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    request.addHeader("Accept", "application/json");
    request.setMethod("GET");
    servlet.service(request, response);
    //assert agent
    assertEquals("Incorrect total number of adaptors", 1, agent.adaptorCount());
  }

  /** Asserts that {@code match} occurs at least {@code occurances} times in {@code text}. */
  private void assertOccurs(String message, int occurances, String text, String match) {
    int index = -1;
    for(int i = 0; i < occurances; i++) {
      index = text.indexOf(match, index + 1);
      assertTrue(message + ": " + text, index != -1);
    }
  }

  /** GET with Accept: application/xml must return XML describing one adaptor. */
  public void testGetXml() throws IOException, ServletException {
    request.setServletPath("/adaptor");
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    request.addHeader("Accept", "application/xml");
    request.setMethod("GET");
    servlet.service(request, response);
    // assert response
    assertXmlResponse(response, 1);
    //assert agent
    assertEquals("Incorrect total number of adaptors", 1, agent.adaptorCount());
  }

  /** An unsupported Accept header must yield 406 Not Acceptable. */
  public void testGetInvalidViewType() throws IOException, ServletException {
    request.setServletPath("/adaptor");
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    request.addHeader("Accept", "unsupportedViewType");
    request.setMethod("GET");
    servlet.service(request, response);
    // assert response
    assertEquals("Unexpected response status", 406, response.getStatus());
  }

  /** DELETE /adaptor/{id} must return 200 and remove the adaptor. */
  public void testDeleteAdaptor() throws IOException, ServletException {
    String adaptorId = agent.getAdaptorList().keySet().iterator().next();
    request.setServletPath("/adaptor/" + adaptorId);
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    //assert agent
    assertEquals("Incorrect total number of adaptors", 1, agent.adaptorCount());
    request.setMethod("DELETE");
    servlet.service(request, response);
    // assert response
    assertEquals("Unexpected response status", 200, response.getStatus());
    //assert agent
    assertEquals("Incorrect total number of adaptors", 0, agent.adaptorCount());
  }

  /**
   * POST of a JSON adaptor description must register a second adaptor whose
   * id appears in the JSON response, and a follow-up GET must list both.
   */
  public void testAddAdaptor() throws IOException, ServletException {
    request.setServletPath("/adaptor");
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    request.addHeader("Content-Type", MediaType.APPLICATION_JSON);
    request.addHeader("Accept", MediaType.APPLICATION_JSON);
    request.setMethod("POST");
    request.setContent("{ \"dataType\" : \"SomeDataType\", \"adaptorClass\" : \"org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor\", \"adaptorParams\" : \"1000\", \"offset\" : 5555 }".getBytes());
    //assert agent
    assertEquals("Incorrect total number of adaptors", 1, agent.adaptorCount());
    String initialAdaptorId = agent.getAdaptorList().keySet().iterator().next();
    servlet.service(request, response);
    // assert response
    String responseContent = assertJSONResponse(response, 1);
    String newAdaptorId = null;
    for (String id : agent.getAdaptorList().keySet()) {
      // FIX: was `id != initialAdaptorId`, a reference-identity comparison
      // on Strings that only worked by accident; compare by value.
      if (!id.equals(initialAdaptorId)) {
        newAdaptorId = id;
        break;
      }
    }
    //assert agent
    assertEquals("Incorrect total number of adaptors", 2, agent.adaptorCount());
    assertOccurs("Response did not contain adaptorId", 1, responseContent, newAdaptorId);
    //assert agent
    assertEquals("Incorrect total number of adaptors", 2, agent.adaptorCount());
    // fire a doGet to assert that the servlet shows 2 adaptors
    request = new MockHttpServletRequest();
    response = new MockHttpServletResponse();
    request.setServletPath("/adaptor");
    request.setRequestURI(request.getContextPath() + request.getServletPath());
    request.addHeader("Accept", MediaType.APPLICATION_XML);
    request.addHeader("Content-Type", MediaType.APPLICATION_XML);
    request.setMethod("GET");
    servlet.service(request, response);
    // assert response
    assertXmlResponse(response, 2);
  }

  /**
   * Asserts a 200 JSON response describing the test adaptor and returns the
   * raw response body.
   */
  private String assertJSONResponse(MockHttpServletResponse response,
                                    int adaptorCount)
      throws UnsupportedEncodingException {
    String responseContent = response.getContentAsString();
    assertEquals("Unexpected response status", 200, response.getStatus());
    JSONObject json = (JSONObject) JSONValue.parse(responseContent);
    String adaptorClass = (String) json.get("adaptorClass");
    String dataType = (String) json.get("dataType");
    assertEquals("Response text doesn't include adaptor class",
        "org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor", adaptorClass);
    assertEquals("Response text doesn't include data type",
        "SomeDataType", dataType);
    return responseContent;
  }

  /**
   * Asserts a 200 XML response mentioning the test adaptor {@code adaptorCount}
   * times and returns the raw response body.
   */
  private String assertXmlResponse(MockHttpServletResponse response,
                                   int adaptorCount)
      throws UnsupportedEncodingException {
    String responseContent = response.getContentAsString();
    // assert response
    assertEquals("Unexpected response status", 200, response.getStatus());
    //Content it correct when executed via an HTTP client, but it doesn't seem
    //to get set by the servlet
    assertOccurs("Response XML doesn't include correct adaptor_count", adaptorCount,
        responseContent, "adaptorCount>");
    assertOccurs("Response XML doesn't include adaptorClass", adaptorCount, responseContent,
        "<adaptorClass>org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor</adaptorClass>");
    assertOccurs("Response XML doesn't include dataType", adaptorCount, responseContent,
        "<dataType>SomeDataType</dataType>");
    return responseContent;
  }
}
| 8,064 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestUDPAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.apache.hadoop.chukwa.*;
import java.net.*;
/**
 * Tests UDPAdaptor end-to-end: a datagram is sent to the adaptor's bound
 * port and the test (acting as the ChunkReceiver) verifies the resulting
 * chunk's type, sequence id, and payload.
 */
public class TestUDPAdaptor extends TestCase implements ChunkReceiver {
  // Set by add() on the adaptor's delivery thread; read by the test thread.
  volatile boolean receivedOK = false;
  String STR = "a short string";

  public void testUDP() throws Exception {
    UDPAdaptor u = new UDPAdaptor();
    u.parseArgs("Test", "0", AdaptorManager.NULL); // port 0 => ephemeral
    u.start("id", "Test", 0, this);
    // FIX: the sending socket was previously never closed; try-with-resources
    // releases it as soon as the datagram is sent.
    try (DatagramSocket send = new DatagramSocket()) {
      byte[] buf = STR.getBytes();
      DatagramPacket p = new DatagramPacket(buf, buf.length);
      p.setSocketAddress(new InetSocketAddress("127.0.0.1", u.portno));
      send.send(p);
    }
    synchronized(this) {
      // FIX: guard the wait — if add() already notified before we got here,
      // the original unconditionally burned the full 1s timeout.
      if (!receivedOK) {
        wait(1000);
      }
    }
    assertTrue(receivedOK);
  }

  /**
   * ChunkReceiver callback invoked by the adaptor; validates the chunk and
   * wakes the waiting test thread.
   */
  public void add(Chunk c) {
    assertTrue(c.getDataType().equals("Test"));
    // seqID advances by the number of bytes delivered
    assertEquals(c.getSeqID(), c.getData().length);
    assertTrue(STR.equals(new String(c.getData())));
    receivedOK = true;
    synchronized(this) {
      notify();
    }
  }
}
| 8,065 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestOozieAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.json.simple.JSONObject;
import org.junit.Test;
import junit.framework.TestCase;
/**
 * Tests OozieAdaptor by pushing one JSON metrics message through
 * addChunkToReceiver and verifying, as the ChunkReceiver, that the chunk
 * carries the same bytes back.
 */
public class TestOozieAdaptor extends TestCase implements ChunkReceiver {
  volatile boolean receivedOK = false;
  // Serialized form of the message sent; compared against the chunk payload in add().
  public String str = null;

  /** Sends one JSON message through the adaptor and waits for delivery. */
  @Test
  public void testMessageReceivedOk() throws Exception {
    OozieAdaptor adaptor = new OozieAdaptor();
    adaptor.parseArgs("TestOozieAdaptor", "0", AdaptorManager.NULL);
    adaptor.start("id", "TestOozieAdaptor", 0, this);
    byte[] payload = composeMessage().toString().getBytes();
    int lengthReturned = adaptor.addChunkToReceiver(payload);
    assertEquals(84, lengthReturned); // 84 is the length of json string
    synchronized (this) {
      wait(1000);
    }
    assertTrue(receivedOK);
  }

  /** Builds the sample JVM-memory metrics message and records its string form. */
  @SuppressWarnings("unchecked")
  private JSONObject composeMessage() {
    JSONObject message = new JSONObject();
    message.put("oozie.jvm.used.memory", 10);
    message.put("oozie.jvm.free.memory", 90);
    message.put("oozie.jvm.total.memory", 100);
    str = message.toString();
    return message;
  }

  /** ChunkReceiver callback: validates the delivered chunk and wakes the test. */
  @Override
  public void add(Chunk C) throws InterruptedException {
    assertTrue(C.getDataType().equals("TestOozieAdaptor"));
    assertEquals(C.getSeqID(), C.getData().length);
    String delivered = new String(C.getData());
    assertTrue(str.equals(delivered));
    receivedOK = true;
    synchronized (this) {
      notify();
    }
  }
}
| 8,066 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestAddAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.conf.Configuration;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
/**
 * Verifies that adaptors can be added and removed both through the agent's
 * command interface and through the agent's REST API on its HTTP port.
 */
public class TestAddAdaptor extends TestCase {
  ChukwaAgent agent = null;
  File baseDir; // scratch space for checkpoint files

  /**
   * Adds and stops a JMXAdaptor twice: first via processAddCommand, then via
   * a POST/DELETE to the REST endpoint, checking the adaptor count after
   * each step.
   */
  public void testJmxAdd() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    Configuration conf = new Configuration();
    baseDir = new File(System.getProperty("test.build.data", "/tmp"))
        .getCanonicalFile();
    File checkpointDir = new File(baseDir, "addAdaptorTestCheckpoints");
    createEmptyDir(checkpointDir);
    conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getCanonicalPath());
    conf.set("chukwaAgent.checkpoint.name", "checkpoint_");
    conf.setInt("chukwaAgent.control.port", 0); // 0 = any free control port
    conf.setInt("chukwaAgent.http.port", 9090); // REST port used below
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    agent = ChukwaAgent.getAgent(conf);
    agent.start();
    assertEquals(0, agent.adaptorCount());
    System.out.println("adding jmx adaptor");
    String id = agent
        .processAddCommand("add JMXAdaptor DebugProcessor localhost 0 60 hadoop:* 0");
    assertEquals(1, agent.adaptorCount());
    System.out.println("shutting down jmx adaptor");
    agent.stopAdaptor(id, true);
    assertEquals(0, agent.adaptorCount());
    // Second round: the same adaptor, but added through the REST API as JSON.
    String rest_url = "http://localhost:9090/rest/v2/adaptor";
    System.out.println("adding jmx adaptor using rest url - " + rest_url);
    String dataType = "DebugProcessor", adaptorClass = "JMXAdaptor", adaptorParams = "localhost 0 60 hadoop:*", offset = "0";
    String adaptor_json = "{\"dataType\":\"" + dataType
        + "\", \"adaptorClass\":\"" + adaptorClass
        + "\", \"adaptorParams\" : \"" + adaptorParams + "\", \"offset\" : \""
        + offset + "\" }";
    System.out.println(adaptor_json);
    Client client = Client.create();
    WebResource resource = client.resource(rest_url);
    ClientResponse response = resource.type("application/json").post(
        ClientResponse.class, adaptor_json);
    if (response.getStatus() != 200 && response.getStatus() != 201) {
      fail("Add adaptor through REST failed : HTTP error code : "
          + response.getStatus());
    }
    assertEquals(1, agent.adaptorCount());
    String result = response.getEntity(String.class);
    try {
      // response body is JSON containing the new adaptor's id under "id"
      JSONObject json = (JSONObject) JSONValue.parse(result);
      id = (String) json.get("id");
    } catch (Exception e) {
      fail("Failed to parse response from add. Complete response is:\n"
          + result);
    }
    System.out.println("shutting down jmx adaptor with id:" + id
        + " through rest");
    resource = client.resource(rest_url + "/" + id);
    response = resource.delete(ClientResponse.class);
    if (response.getStatus() != 200 && response.getStatus() != 201) {
      fail("Delete adaptor through REST failed : HTTP error code : "
          + response.getStatus());
    }
    assertEquals(0, agent.adaptorCount());
    agent.shutdown();
    Thread.sleep(1500); // give the agent time to finish shutting down
    nukeDirContents(checkpointDir);
    checkpointDir.delete();
  }

  protected void tearDown(){
    // safety net in case the test failed before its own agent.shutdown()
    if(agent != null){
      agent.shutdown();
    }
  }

  /** Recursively deletes dir's contents; returns true iff dir existed. */
  public static boolean nukeDirContents(File dir) {
    if(dir.exists()) {
      if(dir.isDirectory()) {
        for(File f: dir.listFiles()) {
          nukeDirContents(f);
          f.delete();
        }
      } else
        dir.delete();
      return true;
    }
    return false;
  }

  /** Ensures dir exists and is empty, creating it if necessary. */
  public static void createEmptyDir(File dir) {
    if(!nukeDirContents(dir))
      dir.mkdir();
    assertTrue(dir.isDirectory() && dir.listFiles().length == 0);
  }
}
| 8,067 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
/**
 * Minimal adaptor used by agent unit tests: it records the arguments it was
 * configured and started with, prints them for inspection, and emits no data.
 */
public class ChukwaTestAdaptor extends AbstractAdaptor {

  /** Raw argument string handed to {@link #parseArgs(String)}. */
  private String params = null;

  /** Offset supplied to {@link #start(long)}. */
  private long startOffset = 0l;

  @Override
  public String getCurrentStatus() {
    // Status is "<type> <params> <startOffset>"; null fields render as "null".
    StringBuilder status = new StringBuilder();
    status.append(type).append(' ').append(params).append(' ').append(startOffset);
    return status.toString();
  }

  @Override
  public String parseArgs(String s) {
    this.params = s;
    return s;
  }

  @Override
  public void start(long offset) throws AdaptorException {
    this.startOffset = offset;
    // Echo the configuration so test logs show exactly what the agent passed.
    System.out.println("adaptorId [" + adaptorID + "]");
    System.out.println("type [" + type + "]");
    System.out.println("params [" + params + "]");
    System.out.println("startOffset [" + startOffset + "]");
  }

  public String getParams() {
    return params;
  }

  public void setParams(String params) {
    this.params = params;
  }

  public long getStartOffset() {
    return startOffset;
  }

  @Override
  public long shutdown(AdaptorShutdownPolicy shutdownPolicy)
      throws AdaptorException {
    // Stateless test adaptor: nothing to flush, committed offset is always 0.
    return 0;
  }
}
| 8,068 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import static org.apache.hadoop.chukwa.util.TempFileUtil.makeTestFile;
import java.io.File;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
/**
 * Exercises the buffering adaptor wrappers (MemBuffered and
 * WriteaheadBuffered): a chunk that has not been acknowledged must be
 * re-delivered after the wrapped adaptor is stopped and restarted.
 */
public class TestBufferingWrappers extends TestCase {
  Configuration conf = new Configuration();
  static File baseDir; // scratch directory for agent checkpoints
  ChunkCatcherConnector chunks; // captures chunks emitted by the agent

  public TestBufferingWrappers() throws IOException {
    baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    conf.setInt("chukwaAgent.control.port", 0); // 0 = any free port
    conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    // short timeouts keep the restart loop below fast
    conf.setInt("chukwaAgent.adaptor.fileadaptor.timeoutperiod", 100);
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  public void testMBResendAfterStop() throws Exception{
    resendAfterStop("MemBuffered");
  }

  public void testWriteaheadResendAfterStop() throws Exception{
    resendAfterStop("WriteaheadBuffered");
  }

  /**
   * Starts a wrapped UDPAdaptor and sends it one datagram. Each pass of the
   * loop receives the resulting chunk, stops the adaptor with the RESTARTING
   * policy, and re-adds it; since the chunk is never acknowledged, every
   * restart must re-deliver it from the wrapper's buffer. Finally a second
   * datagram is sent to check that the sequence id accumulates across both.
   */
  public void resendAfterStop(String adaptor) throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    String ADAPTORID = "adaptor_test" + System.currentTimeMillis();
    String STR = "test data";
    int PORTNO = 9878;
    DatagramSocket send = new DatagramSocket();
    byte[] buf = STR.getBytes();
    DatagramPacket p = new DatagramPacket(buf, buf.length);
    p.setSocketAddress(new InetSocketAddress("127.0.0.1",PORTNO));
    assertEquals(0, agent.adaptorCount());
    String name =agent.processAddCommand("add "+ ADAPTORID + " = "+adaptor+" UDPAdaptor raw "+PORTNO+ " 0");
    assertEquals(name, ADAPTORID);
    Thread.sleep(500); // give the UDP socket time to bind before sending
    send.send(p);
    for(int i=0; i< 5; ++i) {
      Chunk c = chunks.waitForAChunk(5000);
      System.out.println("received " + i);
      assertNotNull(c);
      String dat = new String(c.getData());
      assertTrue(dat.equals(STR));
      assertTrue(c.getDataType().equals("raw"));
      assertEquals(c.getSeqID(), STR.length());
      // RESTARTING keeps the unacked chunk buffered across the restart
      agent.stopAdaptor(name, AdaptorShutdownPolicy.RESTARTING);
      Thread.sleep(500); //for socket to deregister
      name =agent.processAddCommand("add "+ADAPTORID + " = "+adaptor+" UDPAdaptor raw "+PORTNO + " 0");
      assertEquals(name, ADAPTORID);
    }
    Chunk c = chunks.waitForAChunk(5000);
    Thread.sleep(500);
    // second datagram: the seq id should now cover both payloads
    buf = "different data".getBytes();
    p = new DatagramPacket(buf, buf.length);
    p.setSocketAddress(new InetSocketAddress("127.0.0.1",PORTNO));
    send.send(p);
    c = chunks.waitForAChunk(5000);
    assertNotNull(c);
    assertEquals(buf.length + STR.length(), c.getSeqID());
    agent.stopAdaptor(name, true);
    assertEquals(0, agent.adaptorCount());
    Thread.sleep(500);//before re-binding
    agent.shutdown();
  }
}
| 8,069 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import junit.framework.TestCase;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.*;
import org.apache.hadoop.chukwa.datacollection.test.*;
/**
 * Tests for ExecAdaptor, which periodically runs an external command and
 * emits its output as chunks.
 */
public class TestExecAdaptor extends TestCase {
  Connector chunks; // used by testForLeaks; testWithPs uses its own local
  private ChukwaAgent agent;

  @Override
  protected void setUp() throws ChukwaAgent.AlreadyRunningException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0"); // 0 = any free port
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    agent = ChukwaAgent.getAgent(conf);
    agent.start();
  }

  @Override
  protected void tearDown() {
    this.agent.shutdown();
  }

  /**
   * Runs "ps aux" every 500ms through an ExecAdaptor and checks that at
   * least one chunk of output arrives.
   */
  public void testWithPs() throws InterruptedException {
    // NOTE(review): this local deliberately shadows the 'chunks' field —
    // this test wants its own chunk-catching connector.
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    String psAgentID = agent.processAddCommand(
        "add exec= org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor ps 500 ps aux 0");
    Chunk c = chunks.waitForAChunk();
    System.out.println(new String(c.getData()));
    assertNotNull(psAgentID);
  }

  /*
   * Buzz in a loop, starting a short-lived process every 100 ms, to surface
   * file-descriptor or process leaks in ExecAdaptor.
   * Length of loop controlled by sleep statement near bottom of function.
   */
  public void testForLeaks() throws ChukwaAgent.AlreadyRunningException, InterruptedException {
    chunks = new ConsoleOutConnector(agent, false);
    chunks.start();
    assertEquals(0, agent.adaptorCount());
    String lsID = agent.processAddCommand(
        "add exec= org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor Listing 100 /bin/sleep 1 0");
    Thread.sleep( 5*1000); //RAISE THIS to test longer
    System.out.println("stopped ok");
  }
}
| 8,070 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.json.simple.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
/**
 * Tests RestAdaptor by standing up a local Jetty server that serves a fixed
 * JSON metrics document and verifying that the adaptor fetches it and
 * delivers it unchanged as a chunk.
 */
public class TestRestAdaptor extends TestCase implements ChunkReceiver {
  private Server jettyServer = null;
  private JSONObject metricsMap = new JSONObject();
  // adaptor args: the URL to poll, followed by the poll period in seconds
  private static String args = "http://localhost:9090/metrics/instrumentation/data 2";
  private static long offset = 0;
  // Set by add() only after the received chunk passed every check. Assertion
  // failures inside add() happen on the adaptor's thread and cannot fail the
  // test directly, so the test thread asserts on this flag instead.
  private volatile boolean receivedOK = false;

  @SuppressWarnings("unchecked")
  @Before
  public void setUp() throws Exception {
    metricsMap.put("FreeSpace", "10GB");
    metricsMap.put("UsedSpace", "90GB");
    metricsMap.put("maps_killed", "20");
    jettyServer = new Server(9090);
    Context root = new Context(jettyServer, "/metrics/instrumentation/data",
        Context.SESSIONS);
    root.addServlet(new ServletHolder(new RestServlet()), "/*");
    System.out.println(" Rest Server starting..");
    jettyServer.start();
    jettyServer.setStopAtShutdown(true);
  }

  /**
   * Starts the adaptor against the local server and waits for add() to
   * confirm receipt. Previously this only slept for 2s and never checked
   * that any chunk arrived, so a silent adaptor would still pass.
   */
  @Test
  public void testMessageReceivedOk() throws Exception {
    RestAdaptor restAdaptor = new RestAdaptor();
    restAdaptor.parseArgs(args);
    restAdaptor.start("id", "TestRestAdaptor", 0, this);
    synchronized (this) {
      if (!receivedOK) {
        wait(5000); // released early by notify() in add(), or times out
      }
    }
    assertTrue("no chunk was received from the RestAdaptor", receivedOK);
  }

  /**
   * Receiver callback, invoked on the adaptor's thread: checks the data
   * type, cumulative sequence id, and payload, then signals the test thread.
   */
  @Override
  public void add(Chunk event) throws InterruptedException {
    offset += event.getData().length;
    assertTrue(event.getDataType().equals("TestRestAdaptor"));
    assertEquals(event.getSeqID(), offset);
    assertTrue(metricsMap.toString().equals(new String(event.getData())));
    receivedOK = true;
    synchronized (this) {
      notify();
    }
  }

  @After
  public void tearDown() throws Exception {
    if (jettyServer != null) {
      jettyServer.stop();
    }
  }

  /** Serves the canned metrics JSON for any GET under the context path. */
  private class RestServlet extends HttpServlet {
    private static final long serialVersionUID = -8007387020169769539L;

    protected void doGet(HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException {
      response.getWriter().write(metricsMap.toString());
    }
  }
}
| 8,071 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.conf.*;
import org.apache.log4j.Level;
import junit.framework.TestCase;
/**
 * Tests for DirTailingAdaptor: it watches a directory and spawns a
 * file-tailing adaptor for each file matching an optional glob filter,
 * including files created while the agent was down (via checkpoints).
 */
public class TestDirTailingAdaptor extends TestCase {
  ChukwaAgent agent;
  File baseDir; // scratch directory for test dirs and checkpoints
  static final int SCAN_INTERVAL = 1000; // directory rescan period, in ms

  /**
   * This test is exactly the same as testDirTailer except that it applies
   * filtering and creates a file that should not be read, in order to test
   * the filter.
   * @throws IOException
   * @throws ChukwaAgent.AlreadyRunningException
   * @throws InterruptedException
   */
  public void testDirTailerFiltering() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    DirTailingAdaptor.log.setLevel(Level.DEBUG);
    Configuration conf = new Configuration();
    baseDir = new File(System.getProperty("test.build.data", "/tmp")).getCanonicalFile();
    File checkpointDir = new File(baseDir, "dirtailerTestCheckpoints");
    createEmptyDir(checkpointDir);
    conf.setInt("adaptor.dirscan.intervalMs", SCAN_INTERVAL);
    conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getCanonicalPath());
    conf.set("chukwaAgent.checkpoint.name", "checkpoint_");
    conf.setInt("chukwaAgent.control.port", 0); // 0 = any free port
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    agent = ChukwaAgent.getAgent(conf);
    agent.start();
    File emptyDir = new File(baseDir, "emptyDir2");
    createEmptyDir(emptyDir);
    assertEquals(0, agent.adaptorCount());
    //check filtering with empty directory
    agent.processAddCommand("add emptydir2= DirTailingAdaptor raw " + emptyDir + " *file filetailer.CharFileTailingAdaptorUTF8 0");
    assertEquals(1, agent.adaptorCount());
    File dirWithFile = new File(baseDir, "dir3");
    dirWithFile.delete();
    assertFalse("temp directory not empty",dirWithFile.exists());
    //this file should be found by the filter
    dirWithFile.mkdir();
    File inDir = File.createTempFile("atemp", "file", dirWithFile);
    inDir.deleteOnExit();
    //This file should not be found by the filter
    File noreadFile = File.createTempFile("atemp", "noread", dirWithFile);
    noreadFile.deleteOnExit();
    //apply filter *file
    agent.processAddCommand("add dir3= DirTailingAdaptor raw " + dirWithFile + " *file filetailer.CharFileTailingAdaptorUTF8 0");
    Thread.sleep(3000);
    // two DirTailers plus one FileTailer for the single file matching *file
    assertEquals(3, agent.adaptorCount());
    agent.shutdown();

    // Restart with checkpointing enabled to verify that new files created
    // after the last checkpoint are picked up, but stale ones are not.
    conf.setBoolean("chukwaAgent.checkpoint.enabled", true);
    Thread.sleep(1500); //wait a little bit to make sure new file ts is > last checkpoint time.
    File anOldFile = File.createTempFile("oldXYZ","file", dirWithFile);
    File aNewFile = File.createTempFile("new", "file", dirWithFile);
    anOldFile.deleteOnExit();
    aNewFile.deleteOnExit();
    anOldFile.setLastModified(10);//just after epoch
    agent = ChukwaAgent.getAgent(conf); //restart agent.
    agent.start();
    Thread.sleep(3 * SCAN_INTERVAL); //wait a bit for the new file to be detected.
    assertTrue(aNewFile.exists());
    //make sure we started tailing the new, not the old, file.
    for(Map.Entry<String, String> adaptors : agent.getAdaptorList().entrySet()) {
      System.out.println(adaptors.getKey() +": " + adaptors.getValue());
      assertFalse(adaptors.getValue().contains("oldXYZ"));
    }
    Thread.sleep(3 * SCAN_INTERVAL); //wait a bit for the new file to be detected.
    //should be four adaptors: the DirTailer on emptyDir, the DirTailer on the full dir,
    //and FileTailers for File inDir and file newfile and not the noread file.
    assertEquals(4, agent.adaptorCount());
    agent.shutdown();
    Thread.sleep(1500); //wait a little bit to make sure new file ts is > last checkpoint time.
    nukeDirContents(checkpointDir);//nuke dir
    checkpointDir.delete();
    emptyDir.delete();
    nukeDirContents(dirWithFile);
    dirWithFile.delete();
  }

  /**
   * Same scenario as testDirTailerFiltering, but without a glob filter on
   * the first (empty) directory.
   */
  public void testDirTailer() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    DirTailingAdaptor.log.setLevel(Level.DEBUG);
    Configuration conf = new Configuration();
    baseDir = new File(System.getProperty("test.build.data", "/tmp")).getCanonicalFile();
    File checkpointDir = new File(baseDir, "dirtailerTestCheckpoints");
    createEmptyDir(checkpointDir);
    conf.setInt("adaptor.dirscan.intervalMs", SCAN_INTERVAL);
    conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getCanonicalPath());
    conf.set("chukwaAgent.checkpoint.name", "checkpoint_");
    conf.setInt("chukwaAgent.control.port", 0);
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    // NOTE(review): this retries forever if the agent can never start;
    // consider bounding the number of retries.
    boolean retry = true;
    while(retry) {
      try {
        retry = false;
        agent = ChukwaAgent.getAgent(conf);
        agent.start();
      } catch(Exception e) {
        retry = true;
      }
    }
    File emptyDir = new File(baseDir, "emptyDir");
    createEmptyDir(emptyDir);
    assertEquals(0, agent.adaptorCount());
    agent.processAddCommand("add emptydir= DirTailingAdaptor raw " + emptyDir + " filetailer.CharFileTailingAdaptorUTF8 0");
    assertEquals(1, agent.adaptorCount());
    File dirWithFile = new File(baseDir, "dir2");
    dirWithFile.delete();
    assertFalse("temp directory not empty",dirWithFile.exists());
    dirWithFile.mkdir();
    File inDir = File.createTempFile("atemp", "file", dirWithFile);
    inDir.deleteOnExit();
    agent.processAddCommand("add dir2= DirTailingAdaptor raw " + dirWithFile + " *file filetailer.CharFileTailingAdaptorUTF8 0");
    Thread.sleep(3000);
    assertEquals(3, agent.adaptorCount());
    System.out.println("DirTailingAdaptor looks OK before restart");
    agent.shutdown();

    conf.setBoolean("chukwaAgent.checkpoint.enabled", true);
    Thread.sleep(1500); //wait a little bit to make sure new file ts is > last checkpoint time.
    File anOldFile = File.createTempFile("oldXYZ","file", dirWithFile);
    File aNewFile = File.createTempFile("new", "file", dirWithFile);
    anOldFile.deleteOnExit();
    aNewFile.deleteOnExit();
    anOldFile.setLastModified(10);//just after epoch
    agent = ChukwaAgent.getAgent(conf); //restart agent.
    agent.start();
    Thread.sleep(3 * SCAN_INTERVAL); //wait a bit for the new file to be detected.
    assertTrue(aNewFile.exists());
    //make sure we started tailing the new, not the old, file.
    for(Map.Entry<String, String> adaptors : agent.getAdaptorList().entrySet()) {
      System.out.println(adaptors.getKey() +": " + adaptors.getValue());
      assertFalse(adaptors.getValue().contains("oldXYZ"));
    }
    //should be four adaptors: the DirTailer on emptyDir, the DirTailer on the full dir,
    //and FileTailers for File inDir and file newfile
    Thread.sleep(3 * SCAN_INTERVAL); //wait a bit for the new file to be detected.
    assertEquals(4, agent.adaptorCount());
    agent.shutdown();
    nukeDirContents(checkpointDir);//nuke dir
    checkpointDir.delete();
    emptyDir.delete();
    nukeDirContents(dirWithFile);
    dirWithFile.delete();
  }

  /** Recursively deletes dir's contents; returns true iff dir existed. */
  public static boolean nukeDirContents(File dir) {
    if(dir.exists()) {
      if(dir.isDirectory()) {
        for(File f: dir.listFiles()) {
          nukeDirContents(f);
          f.delete();
        }
      } else
        dir.delete();
      return true;
    }
    return false;
  }

  /** Ensures dir exists and is empty, creating it if necessary. */
  public static void createEmptyDir(File dir) {
    if(!nukeDirContents(dir))
      dir.mkdir();
    assertTrue(dir.isDirectory() && dir.listFiles().length == 0);
  }
}
| 8,072 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import java.io.DataInputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
import org.apache.hadoop.chukwa.datacollection.connector.PipelineConnector;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.conf.Configuration;
import org.json.simple.parser.JSONParser;
import junit.framework.TestCase;
/**
 * Tests the HeartbeatAdaptor by pointing the agent's HttpWriter pipeline at
 * a local TCP server and verifying that at least one length-prefixed,
 * parseable JSON heartbeat is delivered.
 */
public class TestHeartbeatAdaptor extends TestCase {
  private volatile boolean shutdown = false;
  private final int port = 4321;

  public void testPingAdaptor() throws IOException, InterruptedException, AlreadyRunningException{
    ChukwaAgent agent = ChukwaAgent.getAgent();
    agent.start();
    Configuration conf = agent.getConfiguration();
    conf.set("chukwa.http.writer.host", "localhost");
    conf.set("chukwa.http.writer.port", String.valueOf(port));
    conf.set("chukwa.pipeline", "org.apache.hadoop.chukwa.datacollection.writer.HttpWriter");
    agent.connector = new PipelineConnector();
    agent.connector.start();
    System.out.println("Started connector");
    String adaptor = agent.processAddCommand("add HeartbeatAdaptor DefaultProcessor (ChukwaStatusChecker, HttpStatusChecker Invalid.component http://localhost:4322, HttpStatusChecker Chukwa.rest.server http://localhost:9090/rest/v2) 3 0");
    //assertTrue(agent.adaptorCount() == 1);
    if(agent.connector != null){
      agent.connector.shutdown();
    }
    LocalServer server = new LocalServer();
    server.start();
    try {
      // give the adaptor up to 10s to deliver heartbeats
      server.join(10000);
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
    if(server.getFailMessage() != null){
      fail(server.getFailMessage());
    }
    assertTrue(server.messageCount > 0);
    // interrupt() cannot unblock accept(); closing the socket can.
    server.stopServer();
    agent.stopAdaptor(adaptor, false);
    agent.shutdown();
  }

  /**
   * Minimal TCP server that reads length-prefixed messages and checks each
   * payload parses as JSON.
   */
  class LocalServer extends Thread {
    ServerSocket sock;
    String failMessage = null;
    int messageCount = 0;

    LocalServer() throws IOException{
      sock = new ServerSocket();
      sock.setReuseAddress(true);
      sock.bind(new InetSocketAddress(port));
      System.out.println("Started local server");
    }

    //calling fail() from this thread will not cause testcase to fail. So propagate error to main thread.
    String getFailMessage(){
      return failMessage;
    }

    int getMessageCount(){
      return messageCount;
    }

    /** Stops the accept loop; closing the socket unblocks accept(). */
    void stopServer(){
      shutdown = true;
      try {
        sock.close();
      } catch (IOException ignored) {
        // already shutting down; nothing useful to do
      }
    }

    @Override
    public void run(){
      while(!shutdown){
        try {
          Socket socket = sock.accept();
          DataInputStream dis = new DataInputStream(socket.getInputStream());
          int size;
          try{
            while((size = dis.readInt()) > 0){
              if(size > 1024){
                // fail() here would be lost on this thread; record and bail
                failMessage = "unexpectedly large heartbeat message: " + size;
                return;
              }
              messageCount++;
              byte[] buffer = new byte[size];
              // read() could return a short read and corrupt the JSON;
              // readFully blocks until all 'size' bytes have arrived
              dis.readFully(buffer);
              String data = new String(buffer);
              System.out.println("Received:"+data);
              JSONParser json = new JSONParser();
              //make sure we have a parseable json
              json.parse(data);
            }
          } catch(java.io.EOFException e){
            System.out.println("reached end of stream, so closing this socket");
          } finally {
            socket.close();
          }
        } catch (Exception e) {
          // a SocketException after stopServer() closed the socket is expected
          if(!shutdown){
            failMessage = ExceptionUtil.getStackTrace(e);
          }
        }
      }
    }
  }
}
| 8,073 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.apache.hadoop.chukwa.*;
import java.net.*;
/**
 * Sends a raw syslog datagram to a SyslogAdaptor and verifies that the
 * resulting chunk carries the mapped facility name and the original payload.
 */
public class TestSyslogAdaptor extends TestCase implements ChunkReceiver {

  /** Flipped by add() once the expected chunk has been validated. */
  volatile boolean receivedOK = false;

  /** Raw syslog message; <142> encodes facility LOCAL1. */
  String STR = "<142>Syslog formatted message.";

  /**
   * Test Sending syslog message through port 9095.
   * @throws Exception
   */
  public void testSyslog() throws Exception {
    SyslogAdaptor adaptor = new SyslogAdaptor();
    adaptor.parseArgs("Test", "9095", AdaptorManager.NULL);
    adaptor.start("id", "Test", 0, this);
    byte[] payload = STR.getBytes();
    DatagramPacket packet = new DatagramPacket(payload, payload.length);
    // target whichever port the adaptor actually bound
    packet.setSocketAddress(new InetSocketAddress("127.0.0.1", adaptor.portno));
    DatagramSocket sender = new DatagramSocket();
    sender.send(packet);
    sender.close();
    synchronized (this) {
      wait(1000); // released early by notify() in add(), or times out
    }
    assertTrue(receivedOK);
  }

  /**
   * Test Facility name overwrite from LOCAL1 to HADOOP.
   */
  public void add(Chunk c) {
    System.out.print(c.getDataType());
    assertTrue("HADOOP".equals(c.getDataType()));
    assertEquals(c.getData().length, c.getSeqID());
    assertTrue(new String(c.getData()).equals(STR));
    receivedOK = true;
    synchronized (this) {
      notify();
    }
  }
}
| 8,074 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import junit.framework.TestCase;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Tests the FileAdaptor: a whole-file adaptor that should deliver the
 * complete contents of a file as one chunk, once.
 */
public class TestFileAdaptor extends TestCase {

  /** Agent configuration shared by all tests in this class. */
  Configuration conf = new Configuration();
  static File baseDir;
  File testFile;
  ChunkCatcherConnector chunks;

  /**
   * Points the agent at the local temp directory, creates a ten-line test
   * file and starts a chunk-catching connector.
   *
   * @throws IOException if the test file cannot be created
   */
  public TestFileAdaptor() throws IOException {
    baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    conf.setInt("chukwaAgent.control.port", 0); // 0 = pick any free port
    conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    conf.setInt("chukwaAgent.adaptor.fileadaptor.timeoutperiod", 100);
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    testFile = makeTestFile("test", 10, baseDir);
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  /**
   * Adds a FileAdaptor once and verifies the entire ten-line file arrives
   * as a single chunk with the expected contents and data type.
   */
  public void testOnce() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    assertEquals(0, agent.adaptorCount());
    String name = agent.processAddCommand("add test = FileAdaptor raw "
        + testFile.getCanonicalPath() + " 0");
    assertEquals(1, agent.adaptorCount());
    // FIX: JUnit's assertEquals takes (expected, actual); the original had
    // the arguments reversed, which produces misleading failure messages.
    assertEquals("adaptor_test", name);
    Chunk c = chunks.waitForAChunk(5000);
    assertNotNull(c);
    String dat = new String(c.getData());
    assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
    assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
    assertTrue(c.getDataType().equals("raw"));
    agent.shutdown();
  }

  /**
   * Repeatedly adds and removes a FileAdaptor, to look for resource leaks
   * (raise {@code tests} and watch with lsof when hunting leaks).
   */
  public void testRepeatedly() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    int tests = 10; // SHOULD SET HIGHER AND WATCH WITH lsof to find leaks
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    for (int i = 0; i < tests; ++i) {
      if (i % 100 == 0)
        System.out.println("buzzed " + i + " times");
      assertEquals(0, agent.adaptorCount());
      String name = agent.processAddCommand("add test = FileAdaptor raw "
          + testFile.getCanonicalPath() + " 0");
      assertEquals(1, agent.adaptorCount());
      assertEquals("adaptor_test", name);
      Chunk c = chunks.waitForAChunk(5000);
      assertNotNull(c);
      String dat = new String(c.getData());
      assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
      assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
      assertTrue(c.getDataType().equals("raw"));
      // Poll until the adaptor has fully stopped before the next iteration.
      while (agent.adaptorCount() > 0) {
        agent.stopAdaptor("adaptor_test", false);
        Thread.sleep(1000);
      }
    }
    agent.shutdown();
  }
}
| 8,075 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestSocketAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import junit.framework.TestCase;
/**
 * Tests SocketAdaptor.Dispatcher's bind-with-retry behavior: while the port
 * is occupied it must keep retrying for at least the requested delay, and
 * once the port frees up the bind must succeed.
 */
public class TestSocketAdaptor extends TestCase {
  public void testBindRetry() {
    int port = 9181;
    int delay = 10000; // total retry budget in milliseconds
    ServerSocket sock1 = null;
    ServerSocket sock2 = null;
    SocketAdaptor adaptor = new SocketAdaptor();
    SocketAdaptor.Dispatcher disp = adaptor.new Dispatcher(port);
    // test failure case: occupy the port first with sock1
    try {
      sock1 = new ServerSocket();
      sock1.setReuseAddress(true);
      sock1.bind(new InetSocketAddress(port));
      System.out.println("Bound to " + port);
      assertTrue(sock1.isBound());
    } catch (IOException e) {
      fail("IOException binding to " + port);
    }
    // now try binding to the same port through SocketAdaptor
    // making sure we retry until the specified time of 10s
    long startTime = System.currentTimeMillis();
    try {
      sock2 = new ServerSocket();
      sock2.setReuseAddress(true);
      disp.bindWithExponentialBackoff(sock2, port, delay);
      // we should not reach this statement
      assertTrue(!sock2.isBound());
    } catch (IOException ioe) {
      // Expected path: verify we kept retrying for at least `delay` ms.
      long retryInterval = System.currentTimeMillis() - startTime;
      System.out.println("Retried number of milliseconds :" + retryInterval);
      if (retryInterval < delay) {
        fail("SocketAdaptor did not retry bind for milliseconds:" + delay);
      }
    } finally {
      // Free the port so the success case below can bind.
      try {
        if (sock1 != null)
          sock1.close();
      } catch (IOException ignore) {
      }
    }
    // test successful case: port is now free, bind should succeed quickly.
    // NOTE(review): if `new ServerSocket()` above had thrown, sock2 would
    // still be null here and this call would NPE — confirm that is acceptable
    // for this test's purposes.
    startTime = System.currentTimeMillis();
    try {
      disp.bindWithExponentialBackoff(sock2, port, delay);
    } catch (IOException ioe) {
      fail("IOException when trying to bind for the second time");
    }
    assertTrue(sock2.isBound());
    System.out.println("Binding successful in milliseconds:"
        + (System.currentTimeMillis() - startTime));
    if (sock2 != null) {
      try {
        sock2.close();
      } catch (IOException ignore) {
      }
    }
  }
}
| 8,076 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/JMX/QueueSample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.JMX;
import java.util.Date;
/**
 * Immutable snapshot of a queue's state, exposed through the test MXBean
 * as a JMX composite type.
 */
public class QueueSample {
  private final Date date;
  private final int size;
  private final String head;

  /**
   * Creates a snapshot of a queue's state.
   *
   * @param date time the sample was taken (defensively copied; may be null)
   * @param size number of elements in the queue at sampling time
   * @param head element at the head of the queue, or null if empty
   */
  public QueueSample(Date date, int size, String head) {
    // FIX: java.util.Date is mutable — copy it so later mutation by the
    // caller cannot silently change this sample.
    this.date = (date == null) ? null : new Date(date.getTime());
    this.size = size;
    this.head = head;
  }

  /** @return the sampling time (a fresh copy; callers cannot mutate our state) */
  public Date getDate() {
    return (date == null) ? null : new Date(date.getTime());
  }

  /** @return the queue size at sampling time */
  public int getSize() {
    return size;
  }

  /** @return the head element at sampling time, or null for an empty queue */
  public String getHead() {
    return head;
  }
}
| 8,077 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/JMX/TestJMXAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.JMX;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.ChunkQueue;
import org.apache.hadoop.chukwa.datacollection.DataFactory;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.conf.Configuration;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
/**
 * End-to-end test for JMXAdaptor: registers a test MXBean exposing one
 * attribute of each open type, starts an agent with a JMXAdaptor pointed at
 * it, and verifies the JSON the adaptor emits for every attribute.
 */
public class TestJMXAdaptor extends TestCase{
  MBeanServer mbs;
  ChukwaAgent agent;
  File baseDir, checkpointDir;

  /**
   * Creates a fresh checkpoint directory and starts an agent with
   * checkpointing disabled on fixed control/http ports.
   */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    mbs = JMXAgent.getMBeanServerInstance();
    baseDir = new File(System.getProperty("test.build.data", "/tmp")).getCanonicalFile();
    checkpointDir = new File(baseDir, "addAdaptorTestCheckpoints");
    createEmptyDir(checkpointDir);
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getCanonicalPath());
    conf.set("chukwaAgent.checkpoint.name", "checkpoint_");
    conf.setInt("chukwaAgent.control.port", 9093);
    conf.setInt("chukwaAgent.http.port", 9090);
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    agent = ChukwaAgent.getAgent(conf);
    agent.start();
  }

  /**
   * Registers the test MXBean, populates its attributes, adds a JMXAdaptor,
   * and verifies (on a worker thread with a 20s timeout) that the collected
   * JSON matches the populated values.
   */
  public void testJMXAdaptor() {
    MXBeanImpl mxbean = null;
    try{
      mxbean = new MXBeanImpl();
      ObjectName name = new ObjectName("chukwa:type=test");
      mbs.registerMBean(mxbean, name);
    } catch(Exception e){
      e.printStackTrace();
      fail("Failed to instantiate and register test mbean");
    }
    // Double-brace initialization: anonymous HashMap subclass with an
    // instance initializer (hence the serialVersionUID).
    Map<Integer, String> m = new HashMap<Integer, String>() {
      private static final long serialVersionUID = 1L;
      {
        put(1, "a");
        put(2, "b");
        put(3, "c");
      }
    };
    Queue<String> queue = new ArrayBlockingQueue<String>(10);
    queue.add("Message1");
    queue.add("Message2");
    queue.add("Message3");
    String[] sarray = new String[] {"Screw", "you", "guys", "I'm", "going", "home"};
    mxbean.setQueue(queue);
    mxbean.setInt(20);
    mxbean.setMap(m);
    mxbean.setString("TestString");
    mxbean.setStringArray(sarray);
    assertEquals(0, agent.adaptorCount());
    System.out.println("adding jmx adaptor");
    // Poll every 10s against the local JMX server on port 10100, matching
    // all MBeans in the "chukwa" domain.
    String id = agent.processAddCommand("add JMXAdaptor DebugProcessor localhost 10100 10 chukwa:* 0");
    assertEquals(1, agent.adaptorCount());
    //A thread that can block on ChunkQueue and can be interrupted
    class Collector implements Runnable {
      // Failure description recorded by run(); null means success. JUnit's
      // fail() cannot be called from a worker thread, so the main thread
      // reads this after join().
      String fail = null;
      public String getFailMessage(){
        return fail;
      }
      public void run(){
        try {
          ChunkQueue eventQueue = DataFactory.getInstance().getEventQueue();
          List<Chunk> evts = new ArrayList<Chunk>();
          eventQueue.collect(evts, 1); // blocks until at least one chunk
          // Expected - {"CompositeType":"3","String":"TestString","StringArray":6,"Map":"3","Int":20}
          for (Chunk e : evts) {
            String data = new String(e.getData());
            JSONObject obj = (JSONObject) JSONValue.parse(data);
            assertEquals(obj.get("CompositeType"), "3");
            assertEquals(obj.get("String"), "TestString");
            assertEquals(obj.get("StringArray"), "6");
            assertEquals(obj.get("Map"), "3");
            assertEquals(obj.get("Int").toString(), "20");
            System.out.println("Verified all data collected by JMXAdaptor");
          }
        } catch (InterruptedException e1) {
          e1.printStackTrace();
          fail = "JMXAdaptor failed to collect all data; it was interrupted";
        } catch (AssertionFailedError e2) {
          e2.printStackTrace();
          fail = "Assert failed while verifying JMX data- "+e2.getMessage();
        } catch (Exception e3) {
          e3.printStackTrace();
          fail = "Exception in collector thread. Check the test output for stack trace";
        }
      }
    }
    try {
      Collector worker = new Collector();
      Thread t = new Thread(worker);
      t.start();
      t.join(20000); // give the adaptor up to 20s to deliver a chunk
      if(t.isAlive()){
        t.interrupt(); // unblock the collect() call before failing
        fail("JMXAdaptor failed to collect data after 20s. Check agent log and surefire report");
      }
      String failMessage = worker.getFailMessage();
      if(failMessage != null){
        fail(failMessage);
      }
    } catch(Exception e){
      e.printStackTrace();
      fail("Exception in TestJMXAdaptor");
    }
    System.out.println("shutting down jmx adaptor");
    agent.stopAdaptor(id, true);
    assertEquals(0, agent.adaptorCount());
  }

  /**
   * Recursively deletes the contents of dir (and dir itself if it is a
   * plain file).
   *
   * @return true if dir existed
   */
  //returns true if dir exists
  public static boolean nukeDirContents(File dir) {
    if(dir.exists()) {
      if(dir.isDirectory()) {
        for(File f: dir.listFiles()) {
          nukeDirContents(f);
          f.delete();
        }
      } else
        dir.delete();
      return true;
    }
    return false;
  }

  /** Ensures dir exists and is empty, creating it if necessary. */
  public static void createEmptyDir(File dir) {
    if(!nukeDirContents(dir))
      dir.mkdir();
    assertTrue(dir.isDirectory() && dir.listFiles().length == 0);
  }

  /** Removes the checkpoint directory created in setUp. */
  @Override
  protected void tearDown() throws Exception {
    nukeDirContents(checkpointDir);
    checkpointDir.delete();
    super.tearDown();
  }
}
| 8,078 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/JMX/JMXAgent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.JMX;
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
public class JMXAgent {
private MBeanServer mbs = null;
static JMXAgent agent = null;
private JMXAgent(){
mbs = ManagementFactory.getPlatformMBeanServer();
}
public static MBeanServer getMBeanServerInstance(){
if(agent == null){
agent = new JMXAgent();
}
return agent.mbs;
}
} | 8,079 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/JMX/MXBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.JMX;
import java.util.Map;
/**
 * Test MXBean exposing one attribute for each JMX open-type flavor the
 * JMXAdaptor must handle: simple types, an array type, a tabular (map)
 * type and a composite type.
 */
public interface MXBean {
  //Opentype SimpleType
  //CompositeType
  //ArrayType
  //TabularType
  /** Simple open type (int). */
  public int getInt();
  /** Simple open type (String). */
  public String getString();
  /** Array open type. */
  public String[] getStringArray();
  /** Tabular open type (Integer-to-String rows). */
  public Map<Integer,String> getMap();
  /** Composite open type built from QueueSample's getters. */
  public QueueSample getCompositeType();
}
| 8,080 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/JMX/MXBeanImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.JMX;
import java.util.Date;
import java.util.Map;
import java.util.Queue;
/**
 * Trivial MXBean implementation used by TestJMXAdaptor. Attribute values
 * are injected through the setters before the bean is registered, then
 * read back by the adaptor through the MXBean getters.
 */
public class MXBeanImpl implements MXBean {
  private Queue<String> q;
  private int i;
  private String s;
  private String[] sarray;
  private Map<Integer, String> m;

  /** Starts with every attribute unset; the int attribute defaults to -1. */
  public MXBeanImpl() {
    this.q = null;
    this.i = -1;
    this.s = null;
    this.sarray = null;
    this.m = null;
  }

  public void setQueue(Queue<String> queue) {
    this.q = queue;
  }

  public void setInt(int i) {
    this.i = i;
  }

  public void setString(String s) {
    this.s = s;
  }

  public void setStringArray(String[] sarray) {
    this.sarray = sarray;
  }

  public void setMap(Map<Integer, String> m) {
    this.m = m;
  }

  /** @return the simple int attribute (-1 until set) */
  public int getInt() {
    return this.i;
  }

  /** @return the simple String attribute (null until set) */
  public String getString() {
    return this.s;
  }

  /** @return the array attribute (null until set) */
  public String[] getStringArray() {
    return this.sarray;
  }

  /** @return the tabular (map) attribute (null until set) */
  public Map<Integer, String> getMap() {
    return this.m;
  }

  /**
   * Samples the queue under its own lock so size and head are consistent.
   *
   * @return a composite-type snapshot of the queue
   */
  public QueueSample getCompositeType() {
    synchronized (this.q) {
      return new QueueSample(new Date(), this.q.size(), this.q.peek());
    }
  }
}
| 8,081 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestStartAtOffset.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.adaptor.*;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
/**
 * Tests CharFileTailingAdaptorUTF8 starting at an offset into a file:
 * verifies sequence IDs and per-record offsets.
 */
public class TestStartAtOffset extends TestCase {

  ChunkCatcherConnector chunks;

  public TestStartAtOffset() {
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  /**
   * Tails an 80-line test file from offset 0 and checks that every line is
   * delivered in order with correct record offsets.
   *
   * NOTE(review): this method is not named test*, so JUnit 3 does not run
   * it automatically; it is left unrenamed to avoid changing which tests
   * the suite executes.
   */
  public void startAtOffset() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    File testFile = makeTestFile();
    int startOffset = 0; // start at the beginning of the file
    // BUG FIX: the original concatenation omitted the '.' between
    // "adaptor" and "filetailer", producing the nonexistent class name
    // "...adaptorfiletailer.CharFileTailingAdaptorUTF8".
    String adaptorId = agent
        .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor." +
            "filetailer.CharFileTailingAdaptorUTF8 "
            + "lines " + startOffset + " " + testFile + " " + startOffset);
    assertTrue(adaptorId != null);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    // Skip any startup chunks of other data types.
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);
    System.out.println("RecordOffsets length:" + c.getRecordOffsets().length);
    assertTrue(c.getRecordOffsets().length == 80); // 80 lines in the file.
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }

  /**
   * Tails the test file starting 29 bytes in (just past the first line) and
   * checks that exactly the remaining 79 lines are delivered in order.
   */
  public void testStartAfterOffset() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    File testFile = makeTestFile();
    int startOffset = 0;
    String adaptorId = agent
        .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 "
            + "lines "
            + startOffset
            + " "
            + testFile
            + " "
            + (startOffset + 29)); // 29 = length of the first line, so it is skipped
    assertTrue(adaptorId != null);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);
    assertTrue(c.getRecordOffsets().length == 79);// 80 lines in test file,
    // minus the one we skipped
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals((rec + 1) + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }

  /** @return a fresh 80-line test file in the shared temp location */
  private File makeTestFile() throws IOException {
    return org.apache.hadoop.chukwa.util.TempFileUtil.makeTestFile();
  }
}
| 8,082 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRCheckAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
import org.apache.hadoop.chukwa.datacollection.adaptor.TestDirTailingAdaptor;
import org.apache.log4j.Level;
import org.mortbay.log.Log;
/**
 * Tests RCheckFTAdaptor's handling of log rotation: data written across
 * renames of the tailed file must be delivered completely and in order.
 * Timing (sleeps between writes/renames) is essential to these scenarios.
 */
public class TestRCheckAdaptor extends TestCase implements ChunkReceiver {
  ChunkCatcherConnector chunks;
  ChukwaAgent agent;

  public TestRCheckAdaptor() {
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  /**
   * Starts an agent and removes any adaptors left over from other tests.
   */
  @Override
  protected void setUp() throws InterruptedException {
    Configuration conf = new ChukwaConfiguration();
    // NOTE(review): the configuration key here is empty — this looks like a
    // dropped property name (presumably the connector class key); confirm
    // against ChukwaConfiguration's expected keys.
    conf.set("", "org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector");
    try {
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      Thread.sleep(2000);
      // Clear out any adaptors a previous test may have left registered.
      Map<String, String> adaptorList = agent.getAdaptorList();
      for(String id : adaptorList.keySet()) {
        agent.stopAdaptor(id, false);
      }
    } catch (AlreadyRunningException e) {
      fail("Agent is already running.");
    }
  }

  @Override
  protected void tearDown() throws InterruptedException {
    agent.shutdown();
    Thread.sleep(2000); // let the agent release its ports before the next test
  }

  /**
   * Base rotation scenarios: a pre-existing rotated file plus the live file,
   * appends, and two renames — each write must surface as its own chunk, in
   * order.
   */
  public void testBaseCases() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    File baseDir = new File(System.getProperty("test.build.data", "/tmp") + "/rcheck");
    TestDirTailingAdaptor.createEmptyDir(baseDir);
    // "rotateTest.1" simulates an already-rotated file written before the
    // adaptor starts; its contents must be delivered first.
    File tmpOutput = new File(baseDir, "rotateTest.1");
    PrintWriter pw = new PrintWriter(new FileOutputStream(tmpOutput));
    pw.println("First");
    pw.close();
    Thread.sleep(1000);//to make sure mod dates are distinguishing.
    tmpOutput = new File(baseDir, "rotateTest");
    pw = new PrintWriter(new FileOutputStream(tmpOutput));
    pw.println("Second");
    pw.close();
    String adaptorID = agent.processAddCommand("add lr = filetailer.RCheckFTAdaptor test " + tmpOutput.getAbsolutePath() + " 0");
    assertNotNull(adaptorID);
    // Rotated file's line arrives before the live file's line.
    Chunk c = chunks.waitForAChunk();
    assertNotNull(c);
    assertTrue(c.getData().length == 6); // "First\n"
    assertTrue("First\n".equals(new String(c.getData())));
    c = chunks.waitForAChunk();
    assertNotNull(c);
    assertTrue(c.getData().length == 7); // "Second\n"
    assertTrue("Second\n".equals(new String(c.getData())));
    // Append to the live file; the new line must be picked up.
    pw = new PrintWriter(new FileOutputStream(tmpOutput, true));
    pw.println("Third");
    pw.close();
    c = chunks.waitForAChunk();
    assertNotNull(c);
    assertTrue(c.getData().length == 6);
    assertTrue("Third\n".equals(new String(c.getData())));
    // Rotate (rename away) and immediately recreate the live file.
    Thread.sleep(1500);
    tmpOutput.renameTo(new File(baseDir, "rotateTest.2"));
    pw = new PrintWriter(new FileOutputStream(tmpOutput, true));
    pw.println("Fourth");
    pw.close();
    c = chunks.waitForAChunk();
    assertNotNull(c);
    System.out.println("got " + new String(c.getData()));
    assertTrue("Fourth\n".equals(new String(c.getData())));
    // Rotate again, this time with a short gap before recreating the file.
    Thread.sleep(1500);
    tmpOutput.renameTo(new File(baseDir, "rotateTest.3"));
    Thread.sleep(400);
    pw = new PrintWriter(new FileOutputStream(tmpOutput, true));
    pw.println("Fifth");
    pw.close();
    c = chunks.waitForAChunk();
    assertNotNull(c);
    System.out.println("got " + new String(c.getData()));
    assertTrue("Fifth\n".equals(new String(c.getData())));
  }

  /**
   * Stress scenario: 200 lines written while the file is rotated every 20
   * lines. The add() callback below verifies the lines arrive in order.
   */
  public void testContinuously() throws Exception {
    File baseDir = new File(System.getProperty("test.build.data", "/tmp") + "/rcheck");
    TestDirTailingAdaptor.createEmptyDir(baseDir);
    File tmpOutput = new File(baseDir, "continuousTest");
    PrintWriter pw = new PrintWriter(new FileOutputStream(tmpOutput, true));
    //LWFTAdaptor.tailer.SAMPLE_PERIOD_MS = 2000;
    // RCheckFTAdaptor.log.setLevel(Level.DEBUG);
    RCheckFTAdaptor rca = new RCheckFTAdaptor();
    rca.parseArgs("Test", tmpOutput.getAbsolutePath(), AdaptorManager.NULL);
    // This test class is the ChunkReceiver: chunks flow into add() below.
    rca.start("id", "Test", 0, this);
    Thread.sleep(1000);
    for(int i= 0; i < 200; ++i) {
      Thread.sleep(120);
      pw.println("This is line:" + i);
      if( i % 5 == 0)
        pw.flush();
      if(i % 20 == 0) {
        System.err.println("rotating");
        pw.close();
        tmpOutput.renameTo( new File(baseDir, "continuousTest."+(i/10)));
        pw = new PrintWriter(new FileOutputStream(tmpOutput, true));
      }
    }
    Thread.sleep(1000);
    rca.shutdown(AdaptorShutdownPolicy.HARD_STOP);
  }

  // Next line number testContinuously expects; volatile because add() runs
  // on the adaptor's thread while the test thread started it.
  volatile int nextExpectedLine = 0;

  /**
   * ChunkReceiver callback for testContinuously: parses the line number out
   * of each "This is line:N" record and fails if any line is out of order.
   */
  @Override
  public void add(Chunk event) throws InterruptedException {
    String[] lines = new String(event.getData()).split("\n");
    System.err.println("got chunk; " + lines.length + " lines " + event.getData().length + " bytes");
    for(String line: lines) {
      String n = line.substring(line.indexOf(':')+1);
      int i = Integer.parseInt(n);
      if(i != nextExpectedLine) {
        System.err.println("lines out of order: saw " + i + " expected " + nextExpectedLine);
        fail();
      }
      nextExpectedLine = i+1;
    }
  }
}
| 8,083 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptorPreserveLines.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import static org.apache.hadoop.chukwa.util.TempFileUtil.makeTestFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests FileTailingAdaptorPreserveLines: chunks must end on line boundaries
 * (unlike FileTailingAdaptor) while carrying a single record offset per
 * chunk (unlike CharFileTailingAdaptorUTF8).
 */
public class TestFileTailingAdaptorPreserveLines {

  private static File testFile;
  // private static String adaptorToTest = "FileTailingAdaptor";
  // private static String adaptorToTest = "CharFileTailingAdaptorUTF8";
  private static String adaptorToTest = "FileTailingAdaptorPreserveLines";
  private static ChukwaConfiguration conf;
  private ChukwaAgent agent;
  private String adaptorId;
  private ChunkCatcherConnector chunks;

  /**
   * Creates the shared ten-line test file and caps the adaptor's read size
   * at 130 bytes so each chunk spans only a few lines.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    File baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    testFile = makeTestFile("TestFileTailingAdaptorPreserveLines", 10,
        baseDir);
    conf = new ChukwaConfiguration();
    // Small read size forces the file to be split across several chunks.
    conf.setInt("chukwaAgent.fileTailingAdaptor.maxReadSize", 130);
  }

  /**
   * Starts a fresh agent, chunk catcher and adaptor before each test.
   *
   * @throws Exception
   */
  @Before
  public void setUp() throws Exception {
    agent = ChukwaAgent.getAgent(conf);
    agent.start();
    chunks = new ChunkCatcherConnector();
    chunks.start();
    adaptorId = agent.processAddCommand("add adaptor_test =" + "filetailer."
        + adaptorToTest + " TestFileTailingAdaptorPreserveLines "
        + testFile.getCanonicalPath() + " 0");
  }

  /** Stops the adaptor, agent and chunk catcher after each test. */
  @After
  public void tearDown() throws Exception {
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    chunks.clear();
    chunks.shutdown();
  }

  /** Deletes the shared test file once the whole class is done. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    if (testFile != null) {
      testFile.delete();
    }
  }

  /**
   * Check that chunk does not break lines (which is the problem of
   * FileTailingAdaptor adaptor)
   *
   * @throws UnsupportedEncodingException
   */
  @Test
  public void testDontBreakLines() throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");
    String[] lines = data.split("\\r?\\n");
    // Check that length of the last line is the same as the
    // one of the first line. Otherwise, it means the last
    // line has been cut
    assertEquals(lines[0].length(), lines[lines.length - 1].length());
  }

  /**
   * Check that second chunk contains the data that just follow the first
   * chunk's data
   *
   * @throws UnsupportedEncodingException
   */
  @Test
  public void testSecondChunkDataFollowsFirstChunkData()
      throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");
    String[] lines1 = data.split("\\r?\\n");
    c = chunks.waitForAChunk(5000);
    data = new String(c.getData(), "UTF-8");
    String[] lines2 = data.split("\\r?\\n");
    // Each test line starts with its line-number character, so comparing
    // first characters checks contiguity across the chunk boundary.
    int numLastLineChunk1 = (int) (lines1[lines1.length - 1].charAt(0));
    int numLastLineChunk2 = (int) (lines2[0].charAt(0));
    // Check that lines numbers are successive between
    // last line of first chunk and first line of second chunk
    assertEquals(numLastLineChunk1, numLastLineChunk2 - 1);
  }

  /**
   * Check that chunk only has one set record offset although it has more than 2
   * lines (which is the contrary of CharFileTailingAdaptorUTF8)
   *
   * @throws UnsupportedEncodingException
   */
  @Test
  public void testOnlyOneSetRecordOffset()
      throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");
    String[] lines = data.split("\\r?\\n");
    // Check that we have more than two lines
    assertTrue(lines.length > 2);
    int[] offsets_i = c.getRecordOffsets();
    // Check that we only have one offset
    assertEquals(1, offsets_i.length);
  }
}
| 8,084 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.*;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import java.util.Map;
import java.util.Iterator;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.adaptor.*;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Smoke tests for the raw file-tailing adaptors: each variant must deliver a
 * freshly written file as exactly one "raw" chunk.
 */
public class TestRawAdaptor extends TestCase {
  ChunkCatcherConnector chunks;

  public TestRawAdaptor() {
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  public void testRawAdaptor() throws Exception {
    System.out.println("testing raw fta");
    runTest("FileTailingAdaptor");
  }

  public void testLWRawAdaptor() throws Exception {
    System.out.println("testing lightweight fta");
    runTest("LWFTAdaptor");
  }

  public void testRotAdaptor() throws Exception {
    // FIX: this previously printed "testing lightweight fta", making the test
    // an exact, indistinguishable duplicate of testLWRawAdaptor in the logs.
    // NOTE(review): the body still runs LWFTAdaptor; the name suggests it was
    // meant to exercise the rotation-aware adaptor (e.g. RCheckFTAdaptor) --
    // confirm the intended adaptor before changing it.
    System.out.println("testing rotation fta");
    runTest("LWFTAdaptor");
  }

  /**
   * Creates an 80-line temp file, attaches the named filetailer adaptor, and
   * verifies that exactly one raw chunk covering the whole file arrives.
   *
   * @param name simple class name of the adaptor, resolved under
   *        org.apache.hadoop.chukwa.datacollection.adaptor.filetailer
   * @throws IOException if the test file cannot be created
   * @throws InterruptedException if interrupted while waiting for chunks
   * @throws ChukwaAgent.AlreadyRunningException if an agent is already running
   */
  public void runTest(String name) throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    // Remove any adaptor left over from previous run
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0"); // 0 = pick any free port
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();

    File testFile = makeTestFile("chukwaRawTest", 80,
        new File(System.getProperty("test.build.data", "/tmp")));
    String adaptorId = agent
        .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor."
            + "filetailer." + name
            + " raw " + testFile + " 0");
    assertNotNull(adaptorId);

    // A raw adaptor delivers the entire file as one chunk with one record.
    Chunk c = chunks.waitForAChunk(1000);
    assertNotNull(c);
    assertEquals(testFile.length(), c.getData().length);
    assertTrue(c.getDataType().equals("raw"));
    assertTrue(c.getRecordOffsets().length == 1);
    assertTrue(c.getSeqID() == testFile.length());

    // No further data was written, so no second chunk should show up.
    c = chunks.waitForAChunk(1000);
    assertNull(c);
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
}
| 8,085 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestCharFileTailingAdaptorUTF8.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Tests that CharFileTailingAdaptorUTF8 frames a tailed file into one record
 * per line and reports its sequence ID in bytes consumed.
 */
public class TestCharFileTailingAdaptorUTF8 extends TestCase {
  ChunkCatcherConnector chunks;
  File baseDir;

  public TestCharFileTailingAdaptorUTF8() {
    baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  public void testCrSepAdaptor() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    Configuration agentConf = new Configuration();
    agentConf.set("chukwaAgent.control.port", "0");
    ChukwaAgent agent = ChukwaAgent.getAgent(agentConf);
    agent.start();

    final int lineCount = 80;
    File tailedFile = makeTestFile("chukwaTest", lineCount, baseDir);
    String adaptorId = agent
        .processAddCommand("add adaptor_test = org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + tailedFile + " 0");
    assertTrue(adaptorId.equals("adaptor_test"));

    System.out.println("getting a chunk...");
    Chunk chunk = chunks.waitForAChunk();

    // Sequence ID counts bytes consumed; one record offset per line.
    assertTrue(chunk.getSeqID() == tailedFile.length());
    assertTrue(chunk.getRecordOffsets().length == lineCount);

    // Walk the records: each runs from recordStart through its inclusive
    // offset and must read "<n> abcdefghijklmnopqrstuvwxyz\n".
    int[] offsets = chunk.getRecordOffsets();
    int recordStart = 0;
    for (int rec = 0; rec < offsets.length; ++rec) {
      int recordLen = offsets[rec] - recordStart + 1;
      String record = new String(chunk.getData(), recordStart, recordLen);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recordStart = offsets[rec] + 1;
    }
    assertTrue(chunk.getDataType().equals("lines"));

    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    Thread.sleep(2000);
  }
}
| 8,086 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileExpirationPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.conf.Configuration;
/**
 * Tests that file-tailing adaptors expire (deregister themselves) after
 * {@link FileTailingAdaptor#GRACEFUL_PERIOD} when their file never existed or
 * has been deleted.
 */
public class TestFileExpirationPolicy extends TestCase {

  /**
   * An adaptor pointed at a path that never exists must shut itself down once
   * the grace period elapses.
   */
  public void testExpiration() {
    ChukwaAgent agent = null;
    try {
      Configuration conf = new ChukwaConfiguration();
      conf.set("chukwaAgent.control.port", "0"); // 0 = pick any free port
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      FileTailingAdaptor.GRACEFUL_PERIOD = 30 * 1000;
      // The timestamp suffix guarantees the path cannot exist.
      String adaptorId = agent
          .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped MyType 0 /myWrongPath"
              + System.currentTimeMillis() + " 0");
      assertTrue(adaptorId != null);
      assertNotNull(agent.getAdaptor(adaptorId));
      // Wait past the grace period; the adaptor should have expired by then.
      Thread.sleep(FileTailingAdaptor.GRACEFUL_PERIOD + 10000);
      assertNull(agent.getAdaptor(adaptorId));
    } catch (Exception e) {
      // FIX: include the exception -- the old message dropped all detail,
      // making failures undiagnosable.
      Assert.fail("Exception in TestFileExpirationPolicy: " + e);
    } finally {
      if (agent != null) {
        agent.shutdown();
        try {
          Thread.sleep(2000);
        } catch (Exception ignored) {
          // best-effort settling delay; interruption here is harmless
        }
      }
    }
  }

  // NOTE: "Benn" typo kept -- JUnit 3 discovers tests by method name and the
  // name is effectively public interface.
  public void testExpirationOnFileThatHasBennDeleted() {
    ChukwaAgent agent = null;
    File testFile = null;
    try {
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
        tempDir.mkdirs();
      }
      String logFile = tempDir.getPath() + "/chukwatestExpiration.txt";
      testFile = makeTestFile(logFile, 8000);
      Configuration conf = new ChukwaConfiguration();
      conf.set("chukwaAgent.control.port", "0");
      agent = ChukwaAgent.getAgent(conf);
      agent.start();
      // Remove any adaptor left over from previous run
      ChukwaAgentController cli = new ChukwaAgentController("localhost", agent.getControllerPort());
      cli.removeAll();
      // sleep for some time to make sure we don't get chunk from existing
      // streams
      Thread.sleep(5000);
      assertTrue(testFile.canRead());
      FileTailingAdaptor.GRACEFUL_PERIOD = 30 * 1000;
      String adaptorId = agent
          .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped MyType 0 "
              + logFile + " 0");
      assertTrue(adaptorId != null);
      assertNotNull(agent.getAdaptor(adaptorId));
      // Let the adaptor tail for a while, then delete the file out from
      // under it; it must expire after the grace period.
      Thread.sleep(10000);
      testFile.delete();
      Thread.sleep(FileTailingAdaptor.GRACEFUL_PERIOD + 10000);
      assertNull(agent.getAdaptor(adaptorId));
      agent.shutdown();
      Thread.sleep(2000);
    } catch (Exception e) {
      // FIX: include the exception for diagnosability (see testExpiration).
      Assert.fail("Exception in TestFileExpirationPolicy: " + e);
    } finally {
      if (agent != null) {
        agent.shutdown();
        try {
          Thread.sleep(2000);
        } catch (Exception ignored) {
          // best-effort settling delay; interruption here is harmless
        }
      }
    }
  }

  /**
   * Writes {@code size} numbered lines of "N abcdefghijklmnopqrstuvwxyz" to
   * the given path.
   *
   * @param name path of the file to (over)write
   * @param size number of lines to write
   * @return the created file
   * @throws IOException if the file cannot be written
   */
  private File makeTestFile(String name, int size) throws IOException {
    File tmpOutput = new File(name);
    FileOutputStream fos = new FileOutputStream(tmpOutput);
    PrintWriter pw = new PrintWriter(fos);
    try {
      // FIX: close in finally so the stream is not leaked if a write fails.
      for (int i = 0; i < size; ++i) {
        pw.print(i + " ");
        pw.println("abcdefghijklmnopqrstuvwxyz");
      }
      pw.flush();
    } finally {
      pw.close(); // also closes fos
    }
    return tmpOutput;
  }
}
| 8,087 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.*;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import java.util.Map;
import java.util.Iterator;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.adaptor.*;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Integration tests for the file-tailing adaptors: line framing, repeated
 * add/stop cycles on one file, duplicate adaptor names, and slurp-time
 * bookkeeping (CHUKWA-668).
 */
public class TestFileTailingAdaptors extends TestCase {
  ChunkCatcherConnector chunks; // captures chunks emitted by the agent
  Configuration conf = new Configuration();
  File baseDir, testFile; // shared 80-line test file, created once in the ctor

  public TestFileTailingAdaptors() throws IOException {
    chunks = new ChunkCatcherConnector();
    chunks.start();
    baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    // Short context-switch interval keeps the adaptor polling frequently.
    conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
    conf.set("chukwaAgent.control.port", "0"); // 0 = pick any free port
    testFile = makeTestFile("chukwaCrSepTest", 80, baseDir);
  }

  /**
   * Tails the 80-line file with CharFileTailingAdaptorUTF8 and checks that one
   * record offset is reported per line, with each record of the form
   * "&lt;n&gt; abcdefghijklmnopqrstuvwxyz\n".
   */
  public void testCrSepAdaptor() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    // Remove any adaptor left over from previous run
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    assertEquals(0, agent.adaptorCount());
    String adaptorId = agent
        .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + testFile + " 0");
    assertNotNull(adaptorId);
    assertEquals(1, agent.adaptorCount());
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    // Skip chunks of other data types that may still be in flight.
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    // Sequence ID counts bytes consumed; one record offset per line.
    assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      // Each record runs from recStart through its (inclusive) offset.
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    Thread.sleep(2000);
  }

  /**
   * Repeatedly adds and stops a FileTailingAdaptor on the same file, checking
   * each cycle delivers the whole file as one raw chunk. Intended for leak
   * hunting with a much higher iteration count (see comment on {@code tests}).
   */
  public void testRepeatedlyOnBigFile() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    int tests = 10; // SHOULD SET HIGHER AND WATCH WITH lsof to find leaks
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    for (int i = 0; i < tests; ++i) {
      if (i % 100 == 0)
        System.out.println("buzzed " + i + " times");
      assertEquals(0, agent.adaptorCount());
      agent
          .processAddCommand("add adaptor_test = filetailer.FileTailingAdaptor raw "
              + testFile.getCanonicalPath() + " 0");
      assertEquals(1, agent.adaptorCount());
      Chunk c = chunks.waitForAChunk();
      String dat = new String(c.getData());
      // File holds lines 0..79, so it starts with "0 ..." and its last line
      // "79 ..." ends with "9 abcdefghijklmnopqrstuvwxyz\n".
      assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
      assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
      assertTrue(c.getDataType().equals("raw"));
      if (agent.adaptorCount() > 0)
        agent.stopAdaptor("adaptor_test", false);
    }
    agent.shutdown();
  }

  /**
   * Adding a second adaptor with the same name ("test") on the same file must
   * not create a second adaptor instance.
   */
  public void testOffsetInAdaptorName() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    File testFile = makeTestFile("foo", 120, baseDir); // shadows the field
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    assertEquals(0, agent.adaptorCount());
    agent.processAddCommand("add test = filetailer.FileTailingAdaptor raw "
        + testFile.getCanonicalPath() + " 0");
    assertEquals(1, agent.adaptorCount());
    Thread.sleep(2000);
    // Re-issuing the identical add command must be a no-op.
    agent.processAddCommand("add test = filetailer.FileTailingAdaptor raw "
        + testFile.getCanonicalPath() + " 0");
    assertEquals(1, agent.adaptorCount());
    chunks.clear();
    agent.shutdown();
  }

  /**
   * Test that LWFTAdaptor updates lastSlurpTime so that FileTailingAdaptor does
   * not trigger an infinite loop and that slurp() is not called by
   * FileTailingAdaptor if file is not updated (see CHUKWA-668)
   *
   * @throws IOException if the test file cannot be created
   * @throws ChukwaAgent.AlreadyRunningException if an agent is already running
   * @throws InterruptedException if a sleep is interrupted
   */
  public void testSlurpTimeUpdated() throws IOException,
      ChukwaAgent.AlreadyRunningException, InterruptedException {
    ChukwaAgent agent = ChukwaAgent.getAgent(conf);
    agent.start();
    File testFile = makeTestFile("fooSlurp", 0, baseDir); // empty file
    long startTime = System.currentTimeMillis();
    String adaptorId = agent.processAddCommand("add adaptor_test ="
        + "filetailer.FileTailingAdaptor slurp " + testFile.getCanonicalPath()
        + " 0");
    FileTailingAdaptor fta = (FileTailingAdaptor) agent.getAdaptor(adaptorId);
    Thread.sleep(500);
    long initializedSlurpTimeValue = fta.lastSlurpTime;
    assertTrue(initializedSlurpTimeValue > startTime); // initialized to current
                                                       // time
    makeTestFile("fooSlurp", 2, baseDir); // rewrite the file with content
    Chunk c = chunks.waitForAChunk();
    Thread.sleep(2000);
    // lastSlurpTime has been updated because a slurp was done
    long secondSlurpTimeValue = fta.lastSlurpTime;
    assertTrue(secondSlurpTimeValue > initializedSlurpTimeValue);
    assertEquals(fta.fileReadOffset, c.getData().length);
    assertEquals(fta.fileReadOffset, fta.reader.length());
    Thread.sleep(2000);
    // ensure we don't try to slurp if file is not updated
    assertEquals(fta.lastSlurpTime, secondSlurpTimeValue);
    if (agent.adaptorCount() > 0)
      agent.stopAdaptor("adaptor_test", false);
    agent.shutdown();
  }
}
| 8,088 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import static org.apache.hadoop.chukwa.util.TempFileUtil.makeTestFile;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.junit.After;
import org.junit.Test;
/**
 * Tests timing behavior of the file-tailing adaptor: when a file still has
 * unread data, the adaptor must emit the next chunk immediately instead of
 * sleeping a full sample period between reads.
 */
public class TestFileTailer {

  private ChukwaAgent agent;
  private String adaptorId;
  private File testFile;

  /**
   * Shuts down whatever the test managed to start. Each step is null-guarded
   * so a failure during setup is not masked by a NullPointerException here.
   */
  @After
  public void tearDown() throws Exception {
    // FIX: previously dereferenced agent/adaptorId unconditionally, which
    // threw NPE (hiding the real failure) when setup aborted early.
    if (agent != null) {
      if (adaptorId != null) {
        agent.stopAdaptor(adaptorId, false);
      }
      agent.shutdown();
    }
    if (testFile != null) {
      testFile.delete();
    }
  }

  @Test
  public void testDontSleepIfHasMoreData() throws AlreadyRunningException, IOException, InterruptedException {
    int DEFAULT_SAMPLE_PERIOD_MS = 1000 * 2;
    ChukwaConfiguration cc = new ChukwaConfiguration();
    cc.setInt("chukwaAgent.fileTailingAdaptor.maxReadSize", 18); // small in order to have hasMoreData=true
                                                                 // (with 26 letters we should have 2 chunks)
    agent = ChukwaAgent.getAgent(cc);
    agent.start();
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    File baseDir = new File(System.getProperty("test.build.data", "/tmp"));
    testFile = makeTestFile("testDontSleepIfHasMoreData", 1, baseDir); // insert 26 letters on file
    long startTime = System.currentTimeMillis();
    adaptorId = agent.processAddCommand("add adaptor_test ="
        + "filetailer.FileTailingAdaptor testDontSleepIfHasMoreData "
        + testFile.getCanonicalPath() + " 0");
    chunks.waitForAChunk();
    chunks.waitForAChunk();
    long endTime = System.currentTimeMillis();
    // Both chunks must arrive within one sample period, proving the adaptor
    // did not sleep between the two reads.
    assertTrue( endTime - startTime < DEFAULT_SAMPLE_PERIOD_MS );
  }
}
| 8,089 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.*;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import java.util.Map;
import java.util.Iterator;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.adaptor.*;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import static org.apache.hadoop.chukwa.util.TempFileUtil.*;
/**
 * Tests that the char file-tailing adaptor notices log rotation: after the
 * tailed file is rewritten (smaller than before), its new content must be
 * delivered from the beginning.
 */
public class TestLogRotate extends TestCase {
  ChunkCatcherConnector chunks;

  public TestLogRotate() {
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  public void testLogRotate() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = ChukwaAgent.getAgent();
    agent.start();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile("chukwaLogRotateTest", 80);
    String adaptorId = agent
        .processAddCommand("add lr =org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + testFile + " 0");
    assertTrue(adaptorId.equals("adaptor_lr"));
    System.out.println("getting a chunk...");
    Chunk c = waitForLinesChunk();
    assertTrue(c.getSeqID() == testFile.length());
    assertRecordsMatch(c, 80);
    // Rewrite the file smaller than before: a rotation. The adaptor should
    // notice and deliver the fresh 40 lines from offset zero.
    testFile = makeTestFile("chukwaLogRotateTest", 40);
    c = waitForLinesChunk();
    // assertTrue(c.getSeqID() == testFile.length());
    assertRecordsMatch(c, 40);
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    Thread.sleep(2000);
  }

  /** Waits for the next chunk whose data type is "lines", skipping others. */
  private Chunk waitForLinesChunk() throws InterruptedException {
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    return c;
  }

  /**
   * Asserts the chunk holds exactly expectedRecords records, numbered from 0,
   * each of the form "&lt;n&gt; abcdefghijklmnopqrstuvwxyz\n".
   * (Extracted to remove the duplicated verification loop -- FIX for
   * copy-paste duplication; behavior including println output is unchanged.)
   */
  private void assertRecordsMatch(Chunk c, int expectedRecords) {
    assertTrue(c.getRecordOffsets().length == expectedRecords);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
  }
}
| 8,090 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptorBigRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import junit.framework.Assert;
import junit.framework.TestCase;
/**
 * Tests that a record larger than chukwaAgent.fileTailingAdaptor.maxReadSize
 * (record 6 below, ~520 bytes vs. a 55-byte read limit) does not stall the
 * adaptor: records following it (7 and 8) must still be delivered.
 */
public class TestFileTailingAdaptorBigRecord extends TestCase {
  ChunkCatcherConnector chunks;

  public void testBigRecord() {
    File f = null;
    try {
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
        tempDir.mkdirs();
      }
      String logFile = tempDir.getPath() + "/Chukwa-bigRecord.txt";
      f = makeTestFile(logFile);
      chunks = new ChunkCatcherConnector();
      chunks.start();
      // Remove any adaptor left over from previous run
      ChukwaConfiguration cc = new ChukwaConfiguration();
      cc.set("chukwaAgent.control.port", "0");
      // Read limit deliberately smaller than the oversized record 6.
      cc.setInt("chukwaAgent.fileTailingAdaptor.maxReadSize", 55);
      ChukwaAgent agent = ChukwaAgent.getAgent(cc);
      agent.start();
      int portno = agent.getControllerPort();
      while (portno == -1) {
        // Controller port is assigned asynchronously; poll until available.
        Thread.sleep(1000);
        portno = agent.getControllerPort();
      }
      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
      cli.removeAll();
      // sleep for some time to make sure we don't get chunk from existing
      // streams
      Thread.sleep(5000);
      String adaptorId = agent
          .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped"
              + " BigRecord " + logFile + " 0");
      assertNotNull(adaptorId);
      boolean record8Found = false;
      Chunk c = null;
      // Keep reading until record 8 shows up. If the adaptor stalls, JUnit
      // will eventually fail this test with a timeout.
      while (!record8Found) {
        // FIX: old comment claimed a three-minute wait; waitForAChunk() takes
        // no timeout argument here and blocks until a chunk arrives.
        c = chunks.waitForAChunk();
        String data = new String(c.getData());
        if (c.getDataType().equals("BigRecord")
            && data.indexOf("8 abcdefghijklmnopqrstuvwxyz") >= 0) {
          record8Found = true;
        }
      }
      agent.stopAdaptor(adaptorId, true);
      agent.shutdown();
      Thread.sleep(2000);
    } catch (Exception e) {
      // FIX: e.toString() keeps the exception class even when getMessage()
      // is null (e.g. NullPointerException).
      Assert.fail("Exception in testBigRecord: " + e);
    } finally {
      if (f != null) {
        f.delete();
      }
    }
  }

  /**
   * Writes five short numbered lines, one oversized line (record 6, 520
   * letters), and two more short lines (records 7 and 8).
   *
   * @param name path of the file to create
   * @return the created file
   * @throws IOException if the file cannot be written
   */
  private File makeTestFile(String name) throws IOException {
    File tmpOutput = new File(name);
    FileOutputStream fos = new FileOutputStream(tmpOutput);
    PrintWriter pw = new PrintWriter(fos);
    try {
      // FIX: close in finally so the stream is not leaked if a write fails.
      for (int i = 0; i < 5; ++i) {
        pw.print(i + " ");
        pw.println("abcdefghijklmnopqrstuvwxyz");
      }
      pw.print("6 ");
      for (int i = 0; i < 10; ++i) {
        pw.print("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz");
      }
      pw.print("\n");
      pw.print("7 ");
      pw.println("abcdefghijklmnopqrstuvwxyz");
      pw.print("8 ");
      pw.println("abcdefghijklmnopqrstuvwxyz");
      pw.flush();
    } finally {
      pw.close(); // also closes fos
    }
    return tmpOutput;
  }
}
| 8,091 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/TestJMSAdaptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.jms;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.activemq.ActiveMQConnection;
import javax.jms.Message;
import javax.jms.TopicConnection;
import javax.jms.TopicSession;
import javax.jms.Session;
import javax.jms.Topic;
import javax.jms.TopicPublisher;
/**
* Tests the functionality of JMSAdapter and JMSTextMessageTransformer
*/
public class TestJMSAdaptor extends TestCase implements ChunkReceiver {
String DATA_TYPE = "Test";
String MESSAGE_PAYLOAD = "Some JMS message payload";
TopicConnection connection = null;
TopicSession session = null;
TopicPublisher publisher = null;
int bytesReceived = 0;
int messagesReceived = 0;
protected void setUp() throws Exception {
connection = ActiveMQConnection.makeConnection("vm://localhost");
session = connection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
Topic topic = session.createTopic("test.topic");
publisher = session.createPublisher(topic);
messagesReceived = 0;
bytesReceived = 0;
}
protected void tearDown() throws Exception {
session.close();
connection.close();
}
public void testJMSTextMessage() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, messagesReceived);
}
public void testJMSTextMessageWithTransformer() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic -x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSTextMessageTransformer",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, messagesReceived);
}
public void testJMSTextMessageWithSelector() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE,
"vm://localhost -t test.topic -s \"foo='bar'\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
publisher.publish(message);
message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("foo", "bar");
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, messagesReceived);
}
/**
 * A compound selector ("foo='bar' and bar='foo'") must only match messages
 * carrying BOTH properties; partial matches are filtered out.
 */
public void testJMSTextMessageWithMultiWordSelector() throws Exception {
  JMSAdaptor jmsAdaptor = new JMSAdaptor();
  jmsAdaptor.parseArgs(DATA_TYPE,
      "vm://localhost -t test.topic -s \"foo='bar' and bar='foo'\"",
      AdaptorManager.NULL);
  jmsAdaptor.start("id", DATA_TYPE, 0, this);

  // No properties: filtered out.
  publisher.publish(session.createTextMessage(MESSAGE_PAYLOAD));

  // Only one of the two required properties: still filtered out.
  Message partialMatch = session.createTextMessage(MESSAGE_PAYLOAD);
  partialMatch.setStringProperty("foo", "bar");
  publisher.publish(partialMatch);

  // Both properties present: should be delivered.
  Message fullMatch = session.createTextMessage(MESSAGE_PAYLOAD);
  fullMatch.setStringProperty("foo", "bar");
  fullMatch.setStringProperty("bar", "foo");
  publisher.publish(fullMatch);

  synchronized (this) {
    wait(1000);
  }

  assertEquals("Message not received", 1, messagesReceived);
}
/**
 * ChunkReceiver callback: validates each delivered chunk against the
 * published payload and tracks running byte/message totals. The chunk's
 * seqID is expected to equal the total bytes received so far.
 */
public void add(Chunk c) {
  byte[] data = c.getData();
  bytesReceived += data.length;
  assertEquals("Unexpected data length",
      MESSAGE_PAYLOAD.length(), data.length);
  assertEquals("Unexpected data type", DATA_TYPE, c.getDataType());
  assertEquals("Chunk sequenceId should be total bytes received.",
      bytesReceived, c.getSeqID());
  assertEquals("Unexpected message payload",
      MESSAGE_PAYLOAD, new String(data));
  messagesReceived++;
}
} | 8,092 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/TestJMSMessagePropertyTransformer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.adaptor.jms;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.activemq.ActiveMQConnection;
import javax.jms.Message;
import javax.jms.TopicConnection;
import javax.jms.TopicSession;
import javax.jms.Session;
import javax.jms.Topic;
import javax.jms.TopicPublisher;
import java.util.ArrayList;
/**
* Tests the functionality JMSMessagePropertyTransformer.
*/
public class TestJMSMessagePropertyTransformer extends TestCase implements ChunkReceiver {
String DATA_TYPE = "Test";
String MESSAGE_PAYLOAD = "Some JMS message payload";
TopicConnection connection = null;
TopicSession session = null;
TopicPublisher publisher = null;
ArrayList<String> chunkPayloads;
int bytesReceived = 0;
protected void setUp() throws Exception {
connection = ActiveMQConnection.makeConnection("vm://localhost");
session = connection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
Topic topic = session.createTopic("test.topic");
publisher = session.createPublisher(topic);
chunkPayloads = new ArrayList<String>();
bytesReceived = 0;
}
protected void tearDown() throws Exception {
session.close();
connection.close();
}
public void testJMSMessageProperties() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found",
"foo_value\tbar_value\t1", chunkPayloads.get(0));
}
public void testJMSMessagePropertiesNoQuotes() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p foo,bar,num",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found",
"foo_value\tbar_value\t1", chunkPayloads.get(0));
}
public void testJMSMessagePropertiesWithDelimiter() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -d ' '\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found", "foo_value bar_value 1", chunkPayloads.get(0));
}
public void testJMSMessagePropertiesWithNoQuotesDelimiter() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -d ^^^\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found", "foo_value^^^bar_value^^^1", chunkPayloads.get(0));
}
public void testJMSMessagePropertiesWithMultiWordDelimiter() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -d '[ insert between values ]'\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found",
"foo_value[ insert between values ]bar_value[ insert between values ]1",
chunkPayloads.get(0));
}
public void testJMSPropMissingWithAllRequired() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message should not have been received", 0, chunkPayloads.size());
}
public void testJMSPropMissingWithSomeRequired() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -r foo\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message should not have been received", 0, chunkPayloads.size());
}
public void testJMSPropMissingWithSomeRequired2() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -r foo\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("foo", "foo_value");
message.setStringProperty("bat", "bat_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found", "foo_value\t\t1", chunkPayloads.get(0));
}
public void testJMSPropfoundWithSomeRequired() throws Exception {
JMSAdaptor adaptor = new JMSAdaptor();
adaptor.parseArgs(DATA_TYPE, "vm://localhost -t test.topic " +
"-x org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessagePropertyTransformer " +
"-p \"foo,bar,num -r foo\"",
AdaptorManager.NULL);
adaptor.start("id", DATA_TYPE, 0, this);
Message message = session.createTextMessage(MESSAGE_PAYLOAD);
message.setStringProperty("bar", "bar_value");
message.setStringProperty("bat", "bat_value");
message.setStringProperty("foo", "foo_value");
message.setIntProperty("num", 1);
publisher.publish(message);
synchronized(this) {
wait(1000);
}
assertEquals("Message not received", 1, chunkPayloads.size());
assertEquals("Incorrect chunk payload found", "foo_value\tbar_value\t1", chunkPayloads.get(0));
}
public void add(Chunk c) {
bytesReceived += c.getData().length;
assertEquals("Unexpected data type", DATA_TYPE, c.getDataType());
assertEquals("Chunk sequenceId should be total bytes received.",
bytesReceived, c.getSeqID());
chunkPayloads.add(new String(c.getData()));
}
} | 8,093 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.controller;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
import org.apache.hadoop.chukwa.datacollection.connector.Connector;
import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import java.io.IOException;
import java.util.Map;
import junit.framework.TestCase;
/**
 * Verifies that ChukwaAgentController can register a file-tailing adaptor
 * with a running ChukwaAgent and that the adaptor shows up in the agent's
 * adaptor list.
 */
public class TestAgentClient extends TestCase {
  Configuration config;
  ChukwaAgent agent;
  ChukwaAgentController c;
  Connector connector;

  /** Starts an agent, a controller pointed at it, and a chunk-catching connector. */
  protected void setUp() throws ChukwaAgent.AlreadyRunningException {
    config = new Configuration();
    agent = ChukwaAgent.getAgent(config);
    agent.start();
    c = new ChukwaAgentController();
    connector = new ChunkCatcherConnector();
    connector.start();
  }

  protected void tearDown() {
    connector.shutdown();
  }

  /**
   * Adds a file adaptor through the controller and asserts it is present in
   * the agent's adaptor list.
   *
   * <p>Fix: the previous version caught IOException and only printed the
   * stack trace, so a failure to reach the agent let the test pass
   * vacuously. The exception now propagates and fails the test.</p>
   *
   * @throws IOException if the controller cannot talk to the agent
   */
  public void testAddFile() throws IOException {
    String appType = "junit_addFileTest";
    String params = "testFile";
    String adaptorID = c.addFile(appType, params);
    Map<String, ChukwaAgentController.Adaptor> listResult = c.list();
    assertTrue("adaptor " + adaptorID + " missing from agent's adaptor list",
        listResult.containsKey(adaptorID));
  }
}
| 8,094 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestChukwaWriters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.writer;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import java.util.Date;
import java.text.SimpleDateFormat;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkBuilder;
import org.apache.hadoop.chukwa.datacollection.writer.localfs.LocalWriter;
import org.apache.hadoop.chukwa.datacollection.writer.parquet.ChukwaParquetWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
/**
 * Tests that ChukwaParquetWriter and LocalWriter produce equivalent archive
 * contents, and that ChukwaParquetWriter's fixed-interval rotation delay is
 * computed correctly.
 */
public class TestChukwaWriters extends TestCase{

  /**
   * Writes identical chunk lists through both writers and compares the
   * dumped archive contents.
   *
   * <p>Fixes over the previous version: the dumps are compared with
   * {@code assertEquals} instead of the fragile
   * {@code dump1.intern() == dump2.intern()} identity check; directory
   * listings are null-checked; the .done-file search is factored out.</p>
   */
  public void testWriters() {
    try {
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
        tempDir.mkdirs();
      }
      String outputDirectory = tempDir.getPath() + "/testChukwaWriters_testWriters_"
          + System.currentTimeMillis() + "/";

      Configuration confParquetWriter = new Configuration();
      confParquetWriter.set("chukwaCollector.rotateInterval", "300000");
      confParquetWriter.set("writer.hdfs.filesystem", "file:///");
      String parquetWriterOutputDir = outputDirectory + "/parquetWriter/parquetOutputDir";
      confParquetWriter.set(ChukwaParquetWriter.OUTPUT_DIR_OPT, parquetWriterOutputDir);

      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.getLocal(conf);

      ChukwaWriter parquetWriter = new ChukwaParquetWriter(confParquetWriter);

      // Two identical chunk lists, one per writer.
      List<Chunk> chunksParquetWriter = new LinkedList<Chunk>();
      List<Chunk> chunksLocalWriter = new LinkedList<Chunk>();
      for (int i = 0; i < 10; i++) {
        chunksParquetWriter.add(buildChunk(i));
        chunksLocalWriter.add(buildChunk(i));
      }

      Thread.sleep(5000);
      parquetWriter.add(chunksParquetWriter);
      parquetWriter.close();

      String parquetWriterFile = findDoneFile(parquetWriterOutputDir);
      Assert.assertNotNull("no .done file produced by parquet writer",
          parquetWriterFile);
      String parquetWriterDump = dumpArchive(fs, conf, parquetWriterFile);

      Configuration confLocalWriter = new Configuration();
      confLocalWriter.set("writer.hdfs.filesystem", "file:///");
      String localWriterOutputDir = outputDirectory + "/localWriter/localOutputDir";
      confLocalWriter.set("chukwaCollector.localOutputDir", localWriterOutputDir);
      confLocalWriter.set("chukwaCollector.rotateInterval", "300000");
      // So unit tests pass on machines with mostly-full disks.
      confLocalWriter.set("chukwaCollector.minPercentFreeDisk", "2");

      ChukwaWriter localWriter = new LocalWriter(confLocalWriter);
      localWriter.init(confLocalWriter);
      Thread.sleep(5000);
      localWriter.add(chunksLocalWriter);
      localWriter.close();

      String localWriterFile = findDoneFile(localWriterOutputDir);
      Assert.assertNotNull("no .done file produced by local writer",
          localWriterFile);
      String localWriterDump = dumpArchive(fs, conf, localWriterFile);

      Assert.assertEquals("writer archive contents differ",
          localWriterDump, parquetWriterDump);

      File fOutputDirectory = new File(outputDirectory);
      fOutputDirectory.delete();
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail("Exception in TestChukwaWriters," + e.getMessage());
    }
  }

  /** Builds the standard four-record test chunk for index i. */
  private static Chunk buildChunk(int i) {
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(("record-" + i).getBytes());
    cb.addRecord("foo".getBytes());
    cb.addRecord("bar".getBytes());
    cb.addRecord("baz".getBytes());
    return cb.getChunk();
  }

  /**
   * Returns the path of the first ".done" file in dir, or null if the
   * directory cannot be listed or contains none.
   */
  private static String findDoneFile(String dir) {
    String[] files = new File(dir).list();
    if (files == null) {
      return null;
    }
    for (String file : files) {
      if (file.endsWith(".done")) {
        return dir + File.separator + file;
      }
    }
    return null;
  }

  /**
   * Dumps every record of a parquet archive to a string.
   *
   * <p>Fix: the previous version returned inside the read loop after the
   * FIRST record, silently ignoring the rest of the archive. All records
   * are now included (an empty archive yields an empty string instead of
   * null).</p>
   */
  protected String dumpArchive(FileSystem fs, Configuration conf, String file) throws Throwable {
    AvroParquetReader<GenericRecord> reader = null;
    try {
      reader = new AvroParquetReader<GenericRecord>(conf, new Path(file));
      StringBuilder sb = new StringBuilder();
      GenericRecord record;
      while ((record = reader.read()) != null) {
        sb.append("DataType: " + record.get("dataType"));
        sb.append("StreamName: " + record.get("stream"));
        sb.append("SeqId: " + record.get("seqId"));
        sb.append("\t\t =============== ");
        sb.append("Cluster : " + record.get("tags"));
        sb.append("DataType : " + record.get("dataType"));
        sb.append("Source : " + record.get("source"));
        sb.append("Application : " + record.get("stream"));
        sb.append("SeqID : " + record.get("seqId"));
        byte[] data = ((ByteBuffer) record.get("data")).array();
        sb.append("Data : " + new String(data));
      }
      return sb.toString();
    } catch (Throwable e) {
      Assert.fail("Exception while reading ParquetFile" + e.getMessage());
      throw e;
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
  }

  /**
   * Test to check the calculation of the delay interval for rotation in
   * ParquetFileWriter. Known current timestamps are paired with the expected
   * next-rotation timestamp; the actual rotation time is currentTimestamp +
   * getDelayForFixedInterval(...). Each group exercises a different
   * rotateInterval/offsetInterval relationship, including hour-boundary
   * edge cases.
   */
  public void testFixedIntervalOffsetCalculation(){
    try {
      String tmpDir = System.getProperty("test.build.data", "/tmp");
      long ts = System.currentTimeMillis();
      String dataDir = tmpDir + "/TestChukwaWriters_" + ts;
      Configuration conf = new Configuration();
      conf.set("chukwaCollector.outputDir", dataDir + "/log/");
      ChukwaParquetWriter parquetWriter = new ChukwaParquetWriter(conf);

      // rotateInterval >> offsetInterval (5 min / 1 min)
      verifyFixedIntervalDelays(parquetWriter, 300000, 60000,
          new String[] {
              "2011/06/15 01:05:00+0000",
              "2011/06/15 01:06:00+0000",
              "2011/06/15 01:02:00+0000",
              "2011/06/15 01:04:00+0000",
              "2011/06/15 01:56:00+0000"},   // hour-boundary edge case
          new long[] {
              1308100260000L,                // 2011/06/15 01:11:00
              1308100260000L,                // 2011/06/15 01:11:00
              1308099960000L,                // 2011/06/15 01:06:00
              1308099960000L,                // 2011/06/15 01:06:00
              1308103260000L});              // 2011/06/15 02:01:00

      // rotateInterval > offsetInterval (1 min / 30 sec)
      verifyFixedIntervalDelays(parquetWriter, 60000, 30000,
          new String[] {
              "2011/06/15 01:05:00+0000",
              "2011/06/15 01:04:30+0000",
              "2011/06/15 01:05:30+0000",
              "2011/06/15 01:04:00+0000",
              "2011/06/15 01:59:30+0000"},   // hour-boundary edge case
          new long[] {
              1308099990000L,                // 2011/06/15 01:06:30
              1308099930000L,                // 2011/06/15 01:05:30
              1308099990000L,                // 2011/06/15 01:06:30
              1308099930000L,                // 2011/06/15 01:05:30
              1308103230000L});              // 2011/06/15 02:00:30

      // rotateInterval == offsetInterval (1 min / 1 min)
      verifyFixedIntervalDelays(parquetWriter, 60000, 60000,
          new String[] {
              "2011/06/15 01:02:00+0000",
              "2011/06/15 01:02:30+0000",
              "2011/06/15 01:59:30+0000"},   // hour-boundary edge case
          new long[] {
              1308099840000L,                // 2011/06/15 01:04:00
              1308099840000L,                // 2011/06/15 01:04:00
              1308103260000L});              // 2011/06/15 02:01:00

      // rotateInterval < offsetInterval (1 min / 2 min)
      verifyFixedIntervalDelays(parquetWriter, 60000, 120000,
          new String[] {
              "2011/06/15 01:02:00+0000",
              "2011/06/15 01:02:30+0000",
              "2011/06/15 01:59:30+0000"},   // hour-boundary edge case
          new long[] {
              1308099900000L,                // 2011/06/15 01:05:00
              1308099900000L,                // 2011/06/15 01:05:00
              1308103320000L});              // 2011/06/15 02:02:00
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail("Exception in TestChukwaWriters - " +
          "testFixedIntervalOffsetCalculation()," + e.getMessage());
    }
  }

  /**
   * For each timestamp string, asserts that currentTime + computed delay
   * equals the expected rotation timestamp.
   */
  private void verifyFixedIntervalDelays(ChukwaParquetWriter writer,
      long rotateInterval, long offsetInterval, String[] currentTimes,
      long[] expectedRotateTimestamps) throws Exception {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd hh:mm:ssZ");
    for (int i = 0; i < currentTimes.length; i++) {
      long current = formatter.parse(currentTimes[i]).getTime();
      long delay = writer.getDelayForFixedInterval(
          current, rotateInterval, offsetInterval);
      Assert.assertEquals("Incorrect value for delay",
          expectedRotateTimestamps[i], current + delay);
    }
  }
}
| 8,095 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestClientAck.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.writer;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.datacollection.writer.ClientAck;
/**
 * Verifies ClientAck timeout behavior: wait4Ack() with no ack must return
 * within roughly getTimeOut() and leave the ack in KO_LOCK state.
 */
public class TestClientAck extends TestCase {

  public void testWait4AckTimeOut() {
    ClientAck clientAck = new ClientAck();
    long startDate = System.currentTimeMillis();
    clientAck.wait4Ack();
    // Overshoot = elapsed time beyond the configured timeout; allow up to
    // 7 seconds of scheduling slack. (Fixed message typo "nore" -> "more".)
    long overshoot = System.currentTimeMillis() - startDate - clientAck.getTimeOut();
    Assert.assertTrue("should not wait more than " + clientAck.getTimeOut()
        + " + 7sec", overshoot < 7000);
    Assert.assertEquals(ClientAck.KO_LOCK, clientAck.getStatus());
  }
}
| 8,096 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.writer;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.Chunk;
import java.util.ArrayList;
import org.apache.hadoop.chukwa.datacollection.collector.CaptureWriter;
import org.apache.hadoop.io.IOUtils;
import java.net.*;
import java.io.*;
/**
 * End-to-end test of SocketTeeWriter inside a PipelineStageWriter pipeline:
 * chunks pushed through the pipeline are captured by CaptureWriter, and
 * clients connected to the tee's socket receive only chunks matching the
 * filter they registered (WRITABLE, RAW, or ASCII_HEADER mode).
 *
 * NOTE(review): the assertions depend on the exact request/response
 * sequence of the tee's socket protocol ("OK\n" handshake, then
 * length-prefixed payloads); statement order is significant throughout.
 */
public class TestSocketTee extends TestCase{
  public void testSocketTee() throws Exception {
    // Pipeline: SocketTeeWriter -> CaptureWriter.
    Configuration conf = new Configuration();
    conf.set("chukwa.pipeline",
        SocketTeeWriter.class.getCanonicalName()+","// note comma
        + CaptureWriter.class.getCanonicalName());
    conf.set("chukwa.writerClass",
        PipelineStageWriter.class.getCanonicalName());
    PipelineStageWriter psw = new PipelineStageWriter(conf);
    System.out.println("pipeline established; now pushing a chunk");
    ArrayList<Chunk> l = new ArrayList<Chunk>();
    l.add(new ChunkImpl("dt", "name", 1, new byte[] {'a'}, null));
    psw.add(l);
    //push a chunk through. It should get written, but the socket tee shouldn't do anything.
    assertEquals(1, CaptureWriter.outputs.size());
    //now connect and set up a filter.
    System.out.println("connecting to localhost");
    Socket s = new Socket("localhost", SocketTeeWriter.DEFAULT_PORT);
    // s.setSoTimeout(2000);
    DataOutputStream dos = new DataOutputStream (s.getOutputStream());
    // Register a WRITABLE-mode filter for datatype dt3.
    dos.write((SocketTeeWriter.WRITABLE + " datatype=dt3\n").getBytes());
    DataInputStream dis = new DataInputStream(s.getInputStream());
    System.out.println("command send");
    // Consume the 3-byte "OK\n" handshake.
    dis.readFully(new byte[3]);
    //push a chunk not matching filter -- nothing should happen.
    l = new ArrayList<Chunk>();
    l.add(new ChunkImpl("dt2", "name", 1, new byte[] {'b'}, null));
    psw.add(l);
    assertEquals(2, CaptureWriter.outputs.size());
    System.out.println("sent nonmatching chunk");
    //and now one that does match -- data should be available to read off the socket
    l = new ArrayList<Chunk>();
    l.add(new ChunkImpl("dt3", "name", 1, new byte[] {'c'}, null));
    psw.add(l);
    assertEquals(3, CaptureWriter.outputs.size());
    System.out.println("sent matching chunk");
    System.out.println("reading...");
    // The matching chunk arrives in Writable serialization form.
    ChunkImpl chunk = ChunkImpl.read(dis);
    assertTrue(chunk.getDataType().equals("dt3"));
    System.out.println(chunk);
    dis.close();
    dos.close();
    s.close();
    // Second client: RAW mode with a content regex filter; payload arrives
    // as a length-prefixed byte array.
    Socket s2 = new Socket("localhost", SocketTeeWriter.DEFAULT_PORT);
    s2.getOutputStream().write((SocketTeeWriter.RAW+" content=.*d.*\n").getBytes());
    dis = new DataInputStream(s2.getInputStream());
    dis.readFully(new byte[3]); //read "OK\n"
    l = new ArrayList<Chunk>();
    l.add(new ChunkImpl("dt3", "name", 1, new byte[] {'d'}, null));
    psw.add(l);
    assertEquals(4, CaptureWriter.outputs.size());
    int len = dis.readInt();
    assertTrue(len == 1);
    byte[] data = new byte[100];
    int read = dis.read(data);
    assertTrue(read == 1);
    assertTrue(data[0] == 'd');
    s2.close();
    dis.close();
    // Chunk pushed while no client is connected: captured but not teed.
    l = new ArrayList<Chunk>();
    l.add(new ChunkImpl("dt3", "name", 3, new byte[] {'c', 'a', 'd'}, null));
    psw.add(l);
    assertEquals(5, CaptureWriter.outputs.size());
    // Third client: ASCII_HEADER mode matching all chunks; payload is a
    // length-prefixed "host datatype stream seqId\n<data>" text record.
    Socket s3 = new Socket("localhost", SocketTeeWriter.DEFAULT_PORT);
    s3.getOutputStream().write((SocketTeeWriter.ASCII_HEADER+" all\n").getBytes());
    dis = new DataInputStream(s3.getInputStream());
    dis.readFully(new byte[3]); //read "OK\n"
    l = new ArrayList<Chunk>();
    chunk= new ChunkImpl("dataTypeFoo", "streamName", 4, new byte[] {'t','e','x','t'}, null);
    chunk.setSource("hostNameFoo");
    l.add(chunk);
    psw.add(l);
    assertEquals(6, CaptureWriter.outputs.size());
    len = dis.readInt();
    data = new byte[len];
    IOUtils.readFully(dis, data, 0, len);
    String rcvd = new String(data);
    System.out.println("got " + read+"/" +len +" bytes: " + rcvd);
    assertEquals("hostNameFoo dataTypeFoo streamName 4\ntext", rcvd);
    s3.close();
    dis.close();
  }
}
| 8,097 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestChukwaParquetWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.writer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkBuilder;
import org.apache.hadoop.chukwa.datacollection.writer.parquet.ChukwaParquetWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.apache.parquet.avro.AvroReadSupport;
import org.apache.parquet.hadoop.ParquetReader;
import org.junit.Assert;
import junit.framework.TestCase;
public class TestChukwaParquetWriter extends TestCase {
  private final static Logger LOG = Logger.getLogger(TestChukwaParquetWriter.class);

  /**
   * Builds {@code count} chunks, each carrying one serialized int (0..count-1)
   * as its record payload.
   *
   * NOTE(review): Integer.SIZE is 32 (bits, not bytes), so each record buffer is
   * 32 bytes with the int in the first 4. Harmless here because the reader side
   * below also only consumes the leading int, but Integer.BYTES was likely meant.
   *
   * @param count number of chunks to build
   * @return list of freshly built chunks
   */
  private static ArrayList<Chunk> buildIntChunks(int count) {
    ArrayList<Chunk> chunks = new ArrayList<Chunk>();
    for (int i = 0; i < count; i++) {
      ChunkBuilder c = new ChunkBuilder();
      c.addRecord(ByteBuffer.allocate(Integer.SIZE).putInt(i).array());
      chunks.add(c.getChunk());
    }
    return chunks;
  }

  /**
   * Test records are written properly: writes 10 chunks through
   * {@link ChukwaParquetWriter}, then reads every ".done" file back with a
   * {@link ParquetReader} and checks each record's payload matches the
   * original chunk data.
   */
  public void testWrite() {
    // Write 10 chunks
    ArrayList<Chunk> chunks = buildIntChunks(10);
    try {
      Configuration conf = new Configuration();
      String outputPath = System.getProperty("test.log.dir") + "/testParquet";
      conf.set("chukwaCollector.outputDir", outputPath);
      ChukwaWriter parquetWriter = new ChukwaParquetWriter(conf);
      parquetWriter.add(chunks);
      parquetWriter.close();
      FileSystem fs = FileSystem.get(conf);
      // Verify 10 chunks are written
      Path file = new Path(outputPath);
      FileStatus[] status = fs.listStatus(file);
      for (FileStatus finfo : status) {
        if (finfo.getPath().getName().contains(".done")) {
          LOG.info("File name: " + finfo.getPath().getName());
          LOG.info("File Size: " + finfo.getLen());
          ParquetReader<GenericRecord> pr =
              ParquetReader.builder(new AvroReadSupport<GenericRecord>(), finfo.getPath()).build();
          try {
            for (int i = 0; i < 10; i++) {
              GenericRecord nextRecord = pr.read();
              int expected = ByteBuffer.wrap(chunks.get(i).getData()).getInt();
              LOG.info("expected: " + expected);
              ByteBuffer content = (ByteBuffer) nextRecord.get("data");
              int actual = content.getInt();
              LOG.info("actual: " + actual);
              // BUG FIX: was assertSame, which compares autoboxed Integer
              // identity and only works for values in the -128..127 cache;
              // assertEquals compares the actual int values.
              Assert.assertEquals(expected, actual);
            }
          } finally {
            // BUG FIX: the reader was never closed (file-handle leak).
            pr.close();
          }
        }
        fs.delete(finfo.getPath(), true);
      }
    } catch (WriterException e) {
      Assert.fail(e.getMessage());
    } catch (IOException e) {
      Assert.fail(e.getMessage());
    }
  }

  /**
   * Test file rotation interval: with a 3s rotate interval, two add() calls
   * separated by a 3s sleep must produce at least two output files.
   */
  public void testRotate() {
    // Write 10 chunks
    ArrayList<Chunk> chunks = buildIntChunks(10);
    try {
      Configuration conf = new Configuration();
      String outputPath = System.getProperty("test.log.dir") + "/testParquetRotate";
      conf.set("chukwaCollector.outputDir", outputPath);
      conf.setLong("chukwaCollector.rotateInterval", 3000L);
      ChukwaWriter parquetWriter = new ChukwaParquetWriter(conf);
      for (int i = 0; i < 2; i++) {
        parquetWriter.add(chunks);
        try {
          // Sleep past the rotate interval so the writer rolls to a new file.
          Thread.sleep(3000L);
        } catch (InterruptedException e) {
          // Re-assert the interrupt flag before failing the test.
          Thread.currentThread().interrupt();
          Assert.fail(e.getMessage());
        }
      }
      parquetWriter.close();
      FileSystem fs = FileSystem.get(conf);
      // Each rotation should have produced a separate file.
      Path file = new Path(outputPath);
      FileStatus[] status = fs.listStatus(file);
      Assert.assertTrue(status.length >= 2);
    } catch (WriterException e) {
      Assert.fail(e.getMessage());
    } catch (IOException e) {
      Assert.fail(e.getMessage());
    }
  }
}
| 8,098 |
0 | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection | Create_ds/chukwa/core/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.datacollection.writer;
import java.util.ArrayList;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.writer.hbase.HBaseWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Logger;
public class TestHBaseWriter extends TestCase {
  static Logger log = Logger.getLogger(TestHBaseWriter.class);
  private HBaseTestingUtility util;
  private HBaseWriter hbw;
  private Configuration conf;
  // Table layout used by the round-trip check below.
  private byte[] columnFamily = Bytes.toBytes("TestColumnFamily");
  private byte[] qualifier = Bytes.toBytes("Key");
  private byte[] expectedValue = Bytes.toBytes("Value");
  private byte[] table = Bytes.toBytes("Test");
  // Raw chunk payload: "<timestamp> <qualifier> <value>" as parsed by TextParser.
  private byte[] test = Bytes.toBytes("1234567890 Key Value");
  private ChukwaConfiguration cc;
  long timestamp = 1234567890;

  public TestHBaseWriter() {
    cc = new ChukwaConfiguration();
  }

  /** Spins up a mini ZK + HBase cluster and creates the "Test" table. */
  public void setUp() {
    try {
      util = new HBaseTestingUtility();
      util.startMiniZKCluster();
      util.startMiniCluster();
      conf = util.getConfiguration();
      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("Test"));
      HColumnDescriptor family = new HColumnDescriptor(columnFamily);
      desc.addFamily(family);
      util.getHBaseAdmin().createTable(desc);
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
  }

  /** Tears down the mini clusters started in {@link #setUp()}. */
  public void tearDown() throws Exception {
    util.shutdownMiniCluster();
    util.shutdownMiniZKCluster();
  }

  /**
   * Writes one chunk through {@link HBaseWriter} and verifies the demuxed
   * value lands in HBase under the expected column family/qualifier.
   */
  public void testWriters() {
    ArrayList<Chunk> chunks = new ArrayList<Chunk>();
    chunks.add(new ChunkImpl("TextParser", "name", timestamp, test, null));
    try {
      cc.set("hbase.demux.package", "org.apache.chukwa.datacollection.writer.test.demux");
      cc.set("TextParser", "org.apache.hadoop.chukwa.datacollection.writer.test.demux.TextParser");
      hbw = new HBaseWriter(cc, conf);
      hbw.init(cc);
      if (hbw.add(chunks) != ChukwaWriter.COMMIT_OK) {
        Assert.fail("Commit status is not OK.");
      }
      // Compare data in HBase with the generated chunk.
      Table testTable = util.getConnection().getTable(TableName.valueOf(table));
      ResultScanner scanner = testTable.getScanner(columnFamily, qualifier);
      try {
        int rows = 0;
        for (Result res : scanner) {
          rows++;
          Assert.assertEquals(new String(expectedValue),
              new String(res.getValue(columnFamily, qualifier)));
        }
        // BUG FIX: the per-row assertion passed vacuously when the scanner
        // returned nothing; require at least one row to be present.
        Assert.assertTrue("No rows found in table " + new String(table), rows > 0);
      } finally {
        // Cleanup and return — close resources even if an assertion fails.
        scanner.close();
        testTable.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
  }
}
| 8,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.