_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q167000
RisonParser._skipString
validation
/**
 * Skips the remainder of the current Rison string value without buffering its
 * contents. Scans forward to the closing apostrophe (INT_APOSTROPHE), loading
 * more input as needed; a '!' marks an escape sequence, which is consumed via
 * _decodeEscaped so an escaped quote cannot terminate the scan prematurely.
 * Clears _tokenIncomplete since the token is fully consumed on return.
 * Reports invalid EOF if input ends before the closing quote.
 */
protected void _skipString() throws IOException, JsonParseException {
    _tokenIncomplete = false;
    // work on locals for speed; sync back to fields around any refill/escape
    int inputPtr = _inputPtr;
    int inputLen = _inputEnd;
    char[] inputBuffer = _inputBuffer;
    while (true) {
        if (inputPtr >= inputLen) {
            _inputPtr = inputPtr;
            if (!loadMore()) {
                _reportInvalidEOF(": was expecting closing quote for a string value");
            }
            inputPtr = _inputPtr;
            inputLen = _inputEnd;
        }
        char c = inputBuffer[inputPtr++];
        if (c == INT_APOSTROPHE) {
            // closing quote: publish final position and stop
            _inputPtr = inputPtr;
            break;
        } else if (c == '!') {
            // escape: let _decodeEscaped consume it, then re-sync locals
            _inputPtr = inputPtr;
            _decodeEscaped();
            inputPtr = _inputPtr;
            inputLen = _inputEnd;
        }
    }
}
java
{ "resource": "" }
q167001
RisonParser._matchToken
validation
/**
 * Verifies that the input continues with the remainder of {@code matchStr}
 * (matching from index {@code i} onward), used for keyword tokens such as
 * 'null'/'true'/'false'/NaN. On mismatch, reports an invalid token using the
 * portion matched so far. After a full match, peeks one more character: EOF
 * is fine, as is a delimiter; an identifier character means the input token
 * is actually longer than the keyword, which is also reported as invalid.
 * NOTE(review): the trailing check accepts any {@code c < '0'} or ')' —
 * presumably tuned to Rison's delimiter set; confirm against the grammar.
 */
protected void _matchToken(String matchStr, int i) throws IOException, JsonParseException {
    final int len = matchStr.length();
    do {
        if (_inputPtr >= _inputEnd) {
            if (!loadMore()) {
                _reportInvalidEOFInValue();
            }
        }
        if (_inputBuffer[_inputPtr] != matchStr.charAt(i)) {
            _reportInvalidToken(matchStr.substring(0, i), "'null', 'true', 'false' or NaN");
        }
        ++_inputPtr;
    } while (++i < len);
    // but let's also ensure we either get EOF, or non-alphanum char...
    if (_inputPtr >= _inputEnd) {
        if (!loadMore()) {
            return;
        }
    }
    char c = _inputBuffer[_inputPtr];
    if (c < '0' || c == ')') { // expected/allowed chars
        return;
    }
    // if letter, it's a problem tho
    if (IdentifierUtils.isIdCharLenient(c)) {
        _reportInvalidToken(matchStr.substring(0, i), "'null', 'true', 'false' or NaN");
    }
}
java
{ "resource": "" }
q167002
RisonParser._decodeBase64
validation
/**
 * Decodes the remainder of the current string value as base64 content and
 * returns the raw bytes. Processes the input in 4-character base64 quads:
 * whitespace is skipped only before the first character of each quad; an
 * apostrophe (the Rison string terminator) is accepted as a legitimate end of
 * content, including at the "missing padding" positions allowed when the
 * variant does not use padding (see the JACKSON-631 references below).
 * Any non-base64 character is routed through _decodeBase64Escape2, which
 * either resolves an escape to base64 bits or reports an error.
 */
protected byte[] _decodeBase64(Base64Variant b64variant) throws IOException, JsonParseException {
    ByteArrayBuilder builder = _getByteArrayBuilder();
    //main_loop:
    while (true) {
        // first, we'll skip preceding white space, if any
        char ch;
        do {
            if (_inputPtr >= _inputEnd) {
                loadMoreGuaranteed();
            }
            ch = _inputBuffer[_inputPtr++];
        } while (ch <= INT_SPACE);
        int bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            if (ch == INT_APOSTROPHE) { // reached the end, fair and square?
                return builder.toByteArray();
            }
            bits = _decodeBase64Escape2(b64variant, ch, 0);
            if (bits < 0) { // white space to skip
                continue;
            }
        }
        int decodedData = bits;
        // then second base64 char; can't get padding yet, nor ws
        if (_inputPtr >= _inputEnd) {
            loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            bits = _decodeBase64Escape2(b64variant, ch, 1);
        }
        decodedData = (decodedData << 6) | bits;
        // third base64 char; can be padding, but not ws
        if (_inputPtr >= _inputEnd) {
            loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        // First branch: can get padding (-> 1 byte)
        if (bits < 0) {
            if (bits != Base64Variant.BASE64_VALUE_PADDING) {
                // as per [JACKSON-631], could also just be 'missing' padding
                if (ch == INT_APOSTROPHE && !b64variant.usesPadding()) {
                    decodedData >>= 4;
                    builder.append(decodedData);
                    return builder.toByteArray();
                }
                bits = _decodeBase64Escape2(b64variant, ch, 2);
            }
            if (bits == Base64Variant.BASE64_VALUE_PADDING) {
                // Ok, must get more padding chars, then
                if (_inputPtr >= _inputEnd) {
                    loadMoreGuaranteed();
                }
                ch = _inputBuffer[_inputPtr++];
                if (!b64variant.usesPaddingChar(ch)) {
                    throw reportInvalidBase64Char(b64variant, ch, 3, "expected padding character '"+b64variant.getPaddingChar()+"'");
                }
                // Got 12 bits, only need 8, need to shift
                decodedData >>= 4;
                builder.append(decodedData);
                continue;
            }
            // otherwise we got escaped other char, to be processed below
        }
        // Nope, 2 or 3 bytes
        decodedData = (decodedData << 6) | bits;
        // fourth and last base64 char; can be padding, but not ws
        if (_inputPtr >= _inputEnd) {
            loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            if (bits != Base64Variant.BASE64_VALUE_PADDING) {
                // as per [JACKSON-631], could also just be 'missing' padding
                if (ch == INT_APOSTROPHE && !b64variant.usesPadding()) {
                    decodedData >>= 2;
                    builder.appendTwoBytes(decodedData);
                    return builder.toByteArray();
                }
                bits = _decodeBase64Escape2(b64variant, ch, 3);
            }
            if (bits == Base64Variant.BASE64_VALUE_PADDING) {
                // With padding we only get 2 bytes; but we have
                // to shift it a bit so it is identical to triplet
                // case with partial output.
                // 3 chars gives 3x6 == 18 bits, of which 2 are
                // dummies, need to discard:
                decodedData >>= 2;
                builder.appendTwoBytes(decodedData);
                continue;
            }
            // otherwise we got escaped other char, to be processed below
        }
        // otherwise, our triplet is now complete
        decodedData = (decodedData << 6) | bits;
        builder.appendThreeBytes(decodedData);
    }
}
java
{ "resource": "" }
q167003
IdentifierUtils.isIdStrict
validation
/**
 * Returns true if the given string is a valid strict identifier: non-empty,
 * beginning with a strict id-start character and containing only strict id
 * characters afterwards.
 */
public static boolean isIdStrict(String string) {
    final int n = string.length();
    if (n == 0) {
        return false;
    }
    boolean valid = isIdStartStrict(string.charAt(0));
    for (int idx = 1; valid && idx < n; idx++) {
        valid = isIdCharStrict(string.charAt(idx));
    }
    return valid;
}
java
{ "resource": "" }
q167004
IdentifierUtils.isIdStrict
validation
/**
 * Returns true if the {@code len} characters of {@code chars} starting at
 * {@code offset} form a valid strict identifier: non-empty, strict id-start
 * first character, strict id characters for the rest.
 */
public static boolean isIdStrict(char[] chars, int offset, int len) {
    if (len == 0) {
        return false;
    }
    if (!isIdStartStrict(chars[offset])) {
        return false;
    }
    for (int i = 1; i < len; i++) {
        if (!isIdCharStrict(chars[offset + i])) {
            return false;
        }
    }
    return true;
}
java
{ "resource": "" }
q167005
RisonGenerator._writeString
validation
/**
 * Writes a character-array slice as a Rison string body, escaping '!' and
 * the apostrophe with a leading '!'. Finds the longest run of non-escapable
 * characters, copies short runs (&lt; SHORT_WRITE) into the output buffer and
 * writes long runs straight through to the writer (after flushing), then
 * emits the escape pair for the character that stopped the scan, if any.
 */
private void _writeString(char[] text, int offset, int len) throws IOException, JsonGenerationException {
    /* Let's just find longest spans of non-escapable
     * content, and for each see if it makes sense
     * to copy them, or write through */
    len += offset; // -> len marks the end from now on
    while (offset < len) {
        int start = offset;
        while (true) {
            char c = text[offset];
            if (c == '!' || c == '\'') {
                break;
            }
            if (++offset >= len) {
                break;
            }
        }
        // Short span? Better just copy it to buffer first:
        int newAmount = offset - start;
        if (newAmount < SHORT_WRITE) {
            // Note: let's reserve room for escaped char (up to 6 chars)
            if ((_outputTail + newAmount) > _outputEnd) {
                _flushBuffer();
            }
            if (newAmount > 0) {
                System.arraycopy(text, start, _outputBuffer, _outputTail, newAmount);
                _outputTail += newAmount;
            }
        } else {
            // Nope: better just write through
            _flushBuffer();
            _writer.write(text, start, newAmount);
        }
        // Was this the end?
        if (offset >= len) { // yup
            break;
        }
        // Nope, need to escape the char.
        char c = text[offset++];
        _appendCharacterEscape('!', c);
    }
}
java
{ "resource": "" }
q167006
RisonGenerator._prependOrWrite
validation
private int _prependOrWrite(char[] buffer, int ptr, char esc) throws IOException, JsonGenerationException { if (ptr > 0) { // fits, just prepend buffer[--ptr] = esc; } else { // won't fit, write _writer.write(esc); } return ptr; }
java
{ "resource": "" }
q167007
RisonGenerator._appendCharacterEscape
validation
/**
 * Appends a two-character escape sequence (escape marker followed by the
 * escaped character) to the output buffer, flushing first when fewer than
 * two slots remain.
 */
private void _appendCharacterEscape(char esc, char ch) throws IOException, JsonGenerationException {
    // need two free slots; flush when (_outputTail + 1) would reach the end
    if (_outputEnd <= _outputTail + 1) {
        _flushBuffer();
    }
    int tail = _outputTail;
    _outputBuffer[tail] = esc;
    _outputBuffer[tail + 1] = ch;
    _outputTail = tail + 2;
}
java
{ "resource": "" }
q167008
ThriftToPig.setConversionProperties
validation
/**
 * Reads conversion-related settings from the job configuration; currently
 * only whether enums are emitted by numeric id (USE_ENUM_ID_CONF_KEY,
 * default false). A null configuration leaves the current settings untouched.
 */
public static void setConversionProperties(Configuration conf) {
    if (conf == null) {
        return;
    }
    useEnumId = conf.getBoolean(USE_ENUM_ID_CONF_KEY, false);
    LOG.debug("useEnumId is set to " + useEnumId);
}
java
{ "resource": "" }
q167009
ThriftToPig.toPigScript
validation
/**
 * Generates a skeleton Pig script that loads '$INPUT_FILES' with the given
 * loader parameterized by the Thrift class name. The expanded schema is
 * emitted commented out (" -- " prefix) purely for human readers; Pig gets
 * the authoritative schema from the loader itself (see block comment below).
 */
public static String toPigScript(Class<? extends TBase<?, ?>> thriftClass,
                                 Class<? extends LoadFunc> pigLoader) {
    StringBuilder sb = new StringBuilder();
    /* we are commenting out explicit schema specification. The schema is
     * included mainly to help the readers of the pig script. Pig learns the
     * schema directly from the loader.
     * If explicit schema is not commented, we might have surprising results
     * when a Thrift class (possibly in control of another team) changes,
     * but the Pig script is not updated. Commenting it out avoids this. */
    StringBuilder prefix = new StringBuilder(" -- ");
    sb.append("raw_data = load '$INPUT_FILES' using ")
      .append(pigLoader.getName())
      .append("('")
      .append(thriftClass.getName())
      .append("');\n")
      .append(prefix)
      .append("as ");
    prefix.append("   ");
    try {
        stringifySchema(sb, toSchema(thriftClass), DataType.TUPLE, prefix);
    } catch (FrontendException e) {
        throw new RuntimeException(e);
    }
    sb.append("\n");
    return sb.toString();
}
java
{ "resource": "" }
q167010
ThriftToPig.stringifySchema
validation
/**
 * Pretty-prints {@code schema} into {@code sb}, one field per line, with the
 * running indentation carried in {@code prefix} (two spaces pushed on entry,
 * popped via setLength before the closing bracket). Tuples render inside
 * parentheses, bags inside braces, maps as "map[&lt;inner&gt;]". A schema that
 * names itself as its own inner schema triggers an AssertionError.
 */
public static void stringifySchema(StringBuilder sb, Schema schema, byte type, StringBuilder prefix)
        throws FrontendException {
    // this is a modified version of {@link Schema#stringifySchema(StringBuilder, Schema, byte)}
    if (type == DataType.TUPLE) {
        sb.append("(") ;
    } else if (type == DataType.BAG) {
        sb.append("{") ;
    }
    // push one indent level (matches the setLength(-2) pop below)
    prefix.append("  ");
    sb.append("\n").append(prefix);
    if (schema == null) {
        sb.append("null") ;
    } else {
        boolean isFirst = true ;
        for (int i=0; i< schema.size() ;i++) {
            if (!isFirst) {
                sb.append(",\n").append(prefix);
            } else {
                isFirst = false ;
            }
            FieldSchema fs = schema.getField(i) ;
            if(fs == null) {
                sb.append("null");
                continue;
            }
            if (fs.alias != null) {
                sb.append(fs.alias);
                sb.append(": ");
            }
            if (DataType.isAtomic(fs.type)) {
                sb.append(DataType.findTypeName(fs.type)) ;
            } else if ( (fs.type == DataType.TUPLE) || (fs.type == DataType.BAG) ) {
                // safety net
                if (schema != fs.schema) {
                    stringifySchema(sb, fs.schema, fs.type, prefix) ;
                } else {
                    throw new AssertionError("Schema refers to itself " + "as inner schema") ;
                }
            } else if (fs.type == DataType.MAP) {
                sb.append(DataType.findTypeName(fs.type) + "[");
                if (fs.schema!=null)
                    stringifySchema(sb, fs.schema, fs.type, prefix);
                sb.append("]");
            } else {
                sb.append(DataType.findTypeName(fs.type)) ;
            }
        }
    }
    // pop the indent level pushed on entry
    prefix.setLength(prefix.length()-2);
    sb.append("\n").append(prefix);
    if (type == DataType.TUPLE) {
        sb.append(")") ;
    } else if (type == DataType.BAG) {
        sb.append("}") ;
    }
}
java
{ "resource": "" }
q167011
LuceneIndexRecordReader.openIndex
validation
/**
 * Opens a Lucene IndexReader over the index stored at the given HDFS path.
 */
protected IndexReader openIndex(Path path, Configuration conf) throws IOException {
    LuceneHdfsDirectory directory = new LuceneHdfsDirectory(path, path.getFileSystem(conf));
    return DirectoryReader.open(directory);
}
java
{ "resource": "" }
q167012
LuceneIndexRecordReader.getProgress
validation
/**
 * Reports progress as the fraction of indexes fully consumed plus the
 * current index's per-query progress, scaled to one index's share of the
 * total. Reports done (1.0) when there are no indexes at all.
 */
@Override
public float getProgress() {
    if (numIndexes < 1) {
        return 1.0f;
    }
    // fraction of indexes already completed
    float indexProgress = (float) currentIndexPathIter.previousIndex() / (float) numIndexes;
    // fraction of queries completed within the current index (1 when none)
    float queriesProgress = 1.0f;
    if (!queries.isEmpty()) {
        queriesProgress = (float) currentQueryIter.previousIndex() / (float) queries.size();
    }
    // scale query progress down to a single index's share of the whole job
    queriesProgress *= 1.0f / numIndexes;
    return indexProgress + queriesProgress;
}
java
{ "resource": "" }
q167013
ProtobufToPig.toTuple
validation
public Tuple toTuple(Message msg) { if (msg == null) { // Pig tuples deal gracefully with nulls. // Also, we can be called with null here in recursive calls. return null; } Descriptor msgDescriptor = msg.getDescriptorForType(); Tuple tuple = tupleFactory_.newTuple(msgDescriptor.getFields().size()); int curField = 0; try { // Walk through all the possible fields in the message. for (FieldDescriptor fieldDescriptor : msgDescriptor.getFields()) { // Get the set value, or the default value, or null. Object fieldValue = msg.getField(fieldDescriptor); if (fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE) { tuple.set(curField++, messageToTuple(fieldDescriptor, fieldValue)); } else { tuple.set(curField++, singleFieldToTuple(fieldDescriptor, fieldValue)); } } } catch (ExecException e) { LOG.warn("Could not convert msg " + msg + " to tuple", e); } return tuple; }
java
{ "resource": "" }
q167014
ProtobufToPig.messageToTuple
validation
@SuppressWarnings("unchecked") protected Object messageToTuple(FieldDescriptor fieldDescriptor, Object fieldValue) { if (fieldValue == null) { // protobufs unofficially ensures values are not null. just in case: return null; } assert fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE : "messageToTuple called with field of type " + fieldDescriptor.getType(); if (fieldDescriptor.isRepeated()) { // The protobuf contract is that if the field is repeated, then the object returned is actually a List // of the underlying datatype, which in this case is a nested message. List<Message> messageList = (List<Message>) (fieldValue != null ? fieldValue : Lists.newArrayList()); DataBag bag = new NonSpillableDataBag(messageList.size()); for (Message m : messageList) { bag.add(new ProtobufTuple(m)); } return bag; } else { return new ProtobufTuple((Message)fieldValue); } }
java
{ "resource": "" }
q167015
ProtobufToPig.singleFieldToTuple
validation
@SuppressWarnings("unchecked") protected Object singleFieldToTuple(FieldDescriptor fieldDescriptor, Object fieldValue) { assert fieldDescriptor.getType() != FieldDescriptor.Type.MESSAGE : "messageToFieldSchema called with field of type " + fieldDescriptor.getType(); if (fieldDescriptor.isRepeated()) { // The protobuf contract is that if the field is repeated, then the object returned is actually a List // of the underlying datatype, which in this case is a "primitive" like int, float, String, etc. // We have to make a single-item tuple out of it to put it in the bag. List<Object> fieldValueList = (List<Object>) (fieldValue != null ? fieldValue : Collections.emptyList()); DataBag bag = new NonSpillableDataBag(fieldValueList.size()); for (Object singleFieldValue : fieldValueList) { Object nonEnumFieldValue = coerceToPigTypes(fieldDescriptor, singleFieldValue); Tuple innerTuple = tupleFactory_.newTuple(1); try { innerTuple.set(0, nonEnumFieldValue); } catch (ExecException e) { // not expected throw new RuntimeException(e); } bag.add(innerTuple); } return bag; } else { return coerceToPigTypes(fieldDescriptor, fieldValue); } }
java
{ "resource": "" }
q167016
ProtobufToPig.coerceToPigTypes
validation
/**
 * Coerces protobuf-specific values into Pig-friendly types: enum values
 * become their name strings, booleans become 1/0 integers (Pig has no
 * boolean here), byte strings become DataByteArray. Everything else —
 * including null — passes through unchanged.
 */
private Object coerceToPigTypes(FieldDescriptor fieldDescriptor, Object fieldValue) {
    if (fieldValue == null) {
        return null;
    }
    switch (fieldDescriptor.getType()) {
        case ENUM:
            return ((EnumValueDescriptor) fieldValue).getName();
        case BOOL:
            // Integer.valueOf replaces the deprecated new Integer(...) boxing
            // constructor; values are cached, behavior is identical.
            return Integer.valueOf(((Boolean) fieldValue) ? 1 : 0);
        case BYTES:
            return new DataByteArray(((ByteString) fieldValue).toByteArray());
        default:
            return fieldValue;
    }
}
java
{ "resource": "" }
q167017
ProtobufToPig.toSchema
validation
public Schema toSchema(Descriptor msgDescriptor) { Schema schema = new Schema(); try { // Walk through all the possible fields in the message. for (FieldDescriptor fieldDescriptor : msgDescriptor.getFields()) { if (fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE) { schema.add(messageToFieldSchema(fieldDescriptor)); } else { schema.add(singleFieldToFieldSchema(fieldDescriptor)); } } } catch (FrontendException e) { LOG.warn("Could not convert descriptor " + msgDescriptor + " to schema", e); } return schema; }
java
{ "resource": "" }
q167018
ProtobufToPig.messageToFieldSchema
validation
/**
 * Maps a message-typed protobuf field to a Pig FieldSchema: a tuple for a
 * singular message, or a bag of "&lt;name&gt;_tuple" tuples for a repeated one.
 */
private FieldSchema messageToFieldSchema(FieldDescriptor fieldDescriptor) throws FrontendException {
    assert fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE :
        "messageToFieldSchema called with field of type " + fieldDescriptor.getType();
    String name = fieldDescriptor.getName();
    Schema innerSchema = toSchema(fieldDescriptor.getMessageType());
    if (!fieldDescriptor.isRepeated()) {
        return new FieldSchema(name, innerSchema, DataType.TUPLE);
    }
    // repeated message -> bag of per-element tuples
    Schema tupleSchema = new Schema();
    tupleSchema.add(new FieldSchema(name + "_tuple", innerSchema, DataType.TUPLE));
    return new FieldSchema(name, tupleSchema, DataType.BAG);
}
java
{ "resource": "" }
q167019
ProtobufToPig.singleFieldToFieldSchema
validation
/**
 * Maps a non-message protobuf field to a Pig FieldSchema: the plain Pig
 * type for a singular field, or a "&lt;name&gt;_bag" of "&lt;name&gt;_tuple"
 * 1-tuples for a repeated one.
 */
private FieldSchema singleFieldToFieldSchema(FieldDescriptor fieldDescriptor) throws FrontendException {
    assert fieldDescriptor.getType() != FieldDescriptor.Type.MESSAGE :
        "singleFieldToFieldSchema called with field of type " + fieldDescriptor.getType();
    String name = fieldDescriptor.getName();
    if (!fieldDescriptor.isRepeated()) {
        return new FieldSchema(name, null, getPigDataType(fieldDescriptor));
    }
    // repeated primitive -> bag { name_tuple: (name: type) }
    Schema itemSchema = new Schema();
    itemSchema.add(new FieldSchema(name, null, getPigDataType(fieldDescriptor)));
    Schema itemTupleSchema = new Schema();
    itemTupleSchema.add(new FieldSchema(name + "_tuple", itemSchema, DataType.TUPLE));
    return new FieldSchema(name + "_bag", itemTupleSchema, DataType.BAG);
}
java
{ "resource": "" }
q167020
ProtobufToPig.toPigScript
validation
/**
 * Generates a Pig load statement for the given protobuf descriptor,
 * including the fully expanded nested schema inside the "as (...)" clause.
 */
public String toPigScript(Descriptor msgDescriptor, String loaderClassName) {
    final int initialTabOffset = 3;
    // StringBuilder replaces the old StringBuffer: this buffer is
    // method-local, so the synchronization StringBuffer pays for is wasted.
    StringBuilder sb = new StringBuilder();
    sb.append("raw_data = load '$INPUT_FILES' using " + loaderClassName + "()").append("\n");
    sb.append(tabs(initialTabOffset)).append("as (").append("\n");
    sb.append(toPigScriptInternal(msgDescriptor, initialTabOffset));
    sb.append(tabs(initialTabOffset)).append(");").append("\n").append("\n");
    return sb.toString();
}
java
{ "resource": "" }
q167021
ProtobufToPig.toPigScriptInternal
validation
private StringBuffer toPigScriptInternal(Descriptor msgDescriptor, int numTabs) { StringBuffer sb = new StringBuffer(); try { // Walk through all the possible fields in the message. for (FieldDescriptor fieldDescriptor : msgDescriptor.getFields()) { // We have to add a comma after every line EXCEPT for the last, or Pig gets mad. boolean isLast = (fieldDescriptor == msgDescriptor.getFields().get(msgDescriptor.getFields().size() - 1)); if (fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE) { sb.append(messageToPigScript(fieldDescriptor, numTabs + 1, isLast)); } else { sb.append(singleFieldToPigScript(fieldDescriptor, numTabs + 1, isLast)); } } } catch (FrontendException e) { LOG.warn("Could not convert descriptor " + msgDescriptor + " to pig script", e); } return sb; }
java
{ "resource": "" }
q167022
ProtobufToPig.messageToPigScript
validation
/**
 * Renders a message-typed field for the generated Pig script: a singular
 * message becomes "name: tuple ( ... )", a repeated one becomes
 * "name: bag { name_tuple: tuple ( ... ) }". A trailing comma is added
 * unless this is the last field at its level.
 */
private StringBuffer messageToPigScript(FieldDescriptor fieldDescriptor, int numTabs, boolean isLast)
        throws FrontendException {
    assert fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE :
        "messageToPigScript called with field of type " + fieldDescriptor.getType();
    String name = fieldDescriptor.getName();
    String trailer = isLast ? "" : ",";
    StringBuffer sb = new StringBuffer();
    if (fieldDescriptor.isRepeated()) {
        sb.append(tabs(numTabs)).append(name).append(": bag {").append("\n");
        sb.append(tabs(numTabs + 1)).append(name).append("_tuple: tuple (").append("\n");
        sb.append(toPigScriptInternal(fieldDescriptor.getMessageType(), numTabs + 2));
        sb.append(tabs(numTabs + 1)).append(")").append("\n");
        sb.append(tabs(numTabs)).append("}").append(trailer).append("\n");
    } else {
        sb.append(tabs(numTabs)).append(name).append(": tuple (").append("\n");
        sb.append(toPigScriptInternal(fieldDescriptor.getMessageType(), numTabs + 1));
        sb.append(tabs(numTabs)).append(")").append(trailer).append("\n");
    }
    return sb;
}
java
{ "resource": "" }
q167023
ProtobufToPig.singleFieldToPigScript
validation
/**
 * Renders a non-message field for the generated Pig script: a singular
 * field becomes "name: type", a repeated one becomes
 * "name_bag: bag { name_tuple: tuple ( name: type ) }". A trailing comma
 * is added unless this is the last field at its level.
 */
private StringBuffer singleFieldToPigScript(FieldDescriptor fieldDescriptor, int numTabs, boolean isLast)
        throws FrontendException {
    assert fieldDescriptor.getType() != FieldDescriptor.Type.MESSAGE :
        "singleFieldToPigScript called with field of type " + fieldDescriptor.getType();
    String name = fieldDescriptor.getName();
    String trailer = isLast ? "" : ",";
    StringBuffer sb = new StringBuffer();
    if (fieldDescriptor.isRepeated()) {
        sb.append(tabs(numTabs)).append(name).append("_bag: bag {").append("\n");
        sb.append(tabs(numTabs + 1)).append(name).append("_tuple: tuple (").append("\n");
        sb.append(tabs(numTabs + 2)).append(name).append(": ").append(getPigScriptDataType(fieldDescriptor)).append("\n");
        sb.append(tabs(numTabs + 1)).append(")").append("\n");
        sb.append(tabs(numTabs)).append("}").append(trailer).append("\n");
    } else {
        sb.append(tabs(numTabs)).append(name).append(": ");
        sb.append(getPigScriptDataType(fieldDescriptor)).append(trailer).append("\n");
    }
    return sb;
}
java
{ "resource": "" }
q167024
TStructDescriptor.getInstance
validation
/**
 * Returns the cached descriptor for the given Thrift class, building and
 * caching one on first request. All access is serialized on structMap.
 */
public static TStructDescriptor getInstance(Class<? extends TBase<?, ?>> tClass) {
    synchronized (structMap) {
        TStructDescriptor desc = structMap.get(tClass);
        if (desc == null) {
            desc = new TStructDescriptor();
            desc.tClass = tClass;
            // NOTE: the descriptor is registered BEFORE build() runs so that
            // recursive/self-referencing struct definitions resolve to this
            // same instance instead of recursing indefinitely.
            structMap.put(tClass, desc);
            desc.build(tClass);
        }
        return desc;
    }
}
java
{ "resource": "" }
q167025
TStructDescriptor.extractEnumMap
validation
/**
 * Builds an immutable name-to-constant map for a Thrift-generated enum,
 * keyed by each constant's toString() form.
 */
static private Map<String, TEnum> extractEnumMap(Class<? extends TEnum> enumClass) {
    ImmutableMap.Builder<String, TEnum> builder = ImmutableMap.builder();
    TEnum[] constants = enumClass.getEnumConstants();
    for (TEnum constant : constants) {
        builder.put(constant.toString(), constant);
    }
    return builder.build();
}
java
{ "resource": "" }
q167026
LuceneIndexOutputFormat.newIndexDirFilter
validation
/**
 * Creates a PathFilter that accepts only paths named "index-*", excluding
 * plain files (per the configuration) and hidden paths.
 */
public static PathFilter newIndexDirFilter(Configuration conf) {
    PathFilter indexNameFilter = new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().startsWith("index-");
        }
    };
    return new PathFilters.CompositePathFilter(
        PathFilters.newExcludeFilesFilter(conf),
        PathFilters.EXCLUDE_HIDDEN_PATHS_FILTER,
        indexNameFilter
    );
}
java
{ "resource": "" }
q167027
Protobufs.useDynamicProtoMessage
validation
/**
 * Returns true when dynamic protobuf messages should be used: either no
 * concrete message class was supplied, or the supplied class is
 * DynamicMessage itself (compared by canonical name).
 */
public static boolean useDynamicProtoMessage(Class<?> protoClass) {
    if (protoClass == null) {
        return true;
    }
    String dynamicName = DynamicMessage.class.getCanonicalName();
    return protoClass.getCanonicalName().equals(dynamicName);
}
java
{ "resource": "" }
q167028
Protobufs.getTypeRef
validation
/**
 * Builds a TypeRef for the protobuf Message class with the given name.
 * The anonymous subclass (the trailing {}) is deliberate: it is how TypeRef
 * captures generic type information at runtime.
 */
public static<M extends Message> TypeRef<M> getTypeRef(String protoClassName) {
    return new TypeRef<M>(getProtobufClass(protoClassName)){};
}
java
{ "resource": "" }
q167029
LzoRecordReader.getProgress
validation
/**
 * Reports the fraction of this split consumed so far, clamped to 1.0.
 * A zero-length split reports 0.
 */
@Override
public float getProgress() {
    if (start_ == end_) {
        // degenerate split: nothing to read
        return 0.0f;
    }
    float fraction = (pos_ - start_) / (float) (end_ - start_);
    return Math.min(1.0f, fraction);
}
java
{ "resource": "" }
q167030
BinaryWritable.serialize
validation
private byte[] serialize() { if (messageBytes == null && message != null) { checkConverter(); messageBytes = converter.toBytes(message); if (messageBytes == null) { // should we throw an IOException instead? LOG.warn("Could not serialize " + message.getClass()); } else { message = null; // so that message and messageBytes don't go out of // sync. } } return messageBytes; }
java
{ "resource": "" }
q167031
DeprecatedInputFormatWrapper.setInputFormat
validation
/**
 * Installs this wrapper as the job's old-api input format and records which
 * real (new-api) input format it should delegate to.
 */
public static void setInputFormat(Class<?> realInputFormatClass, Configuration conf) {
    // the wrapper is what the old mapred API instantiates...
    conf.setClass("mapred.input.format.class",
                  DeprecatedInputFormatWrapper.class,
                  org.apache.hadoop.mapred.InputFormat.class);
    // ...and this key tells it where to forward the real work
    HadoopUtils.setClassConf(conf, CLASS_CONF_KEY, realInputFormatClass);
}
java
{ "resource": "" }
q167032
HadoopUtils.setClassConf
validation
/**
 * Stores the class's name under the given configuration key, refusing to
 * silently overwrite a different previously-registered class for that key.
 *
 * @throws RuntimeException if a different class name is already registered
 *         under {@code configKey}
 */
public static void setClassConf(Configuration conf, String configKey, Class<?> clazz) {
    String existingClass = conf.get(configKey);
    String className = clazz.getName();
    if (existingClass != null && !existingClass.equals(className)) {
        // Message fixed: this utility is generic, not thrift-specific —
        // the old text misleadingly said "thriftClass".
        throw new RuntimeException(
            "Already registered a different class for " + configKey +
            ". old: " + existingClass +
            " new: " + className);
    }
    conf.set(configKey, className);
}
java
{ "resource": "" }
q167033
HadoopUtils.writeStringListToConfAsJson
validation
/**
 * Serializes a non-null list of strings into the configuration under the
 * given key as a JSON array.
 */
public static void writeStringListToConfAsJson(String key, List<String> list, Configuration conf) {
    Preconditions.checkNotNull(list);
    String json = JSONArray.toJSONString(list);
    conf.set(key, json);
}
java
{ "resource": "" }
q167034
HadoopUtils.readStringListFromConfAsJson
validation
/**
 * Reads a JSON-array-encoded list of strings from the configuration.
 * Returns null when the key is unset.
 */
@SuppressWarnings("unchecked")
public static List<String> readStringListFromConfAsJson(String key, Configuration conf) {
    String json = conf.get(key);
    if (json == null) {
        return null;
    }
    JSONArray parsed = (JSONArray) JSONValue.parse(json);
    return Lists.<String>newArrayList(parsed);
}
java
{ "resource": "" }
q167035
HadoopUtils.writeStringListToConfAsBase64
validation
/**
 * Serializes a non-null list of strings into the configuration as a
 * comma-separated sequence of base64-encoded entries (encoding makes the
 * entries safe against commas and other special characters).
 */
public static void writeStringListToConfAsBase64(String key, List<String> list, Configuration conf) {
    Preconditions.checkNotNull(list);
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (String entry : list) {
        if (!first) {
            sb.append(',');
        }
        first = false;
        byte[] bytes = Base64.encodeBase64(entry.getBytes(Charsets.UTF_8), false);
        sb.append(new String(bytes, Charsets.UTF_8));
    }
    conf.set(key, sb.toString());
}
java
{ "resource": "" }
q167036
HadoopUtils.readStringListFromConfAsBase64
validation
/**
 * Reads a comma-separated, base64-encoded list of strings from the
 * configuration (the inverse of writeStringListToConfAsBase64).
 * Returns null when the key is unset.
 */
@SuppressWarnings("unchecked")
public static List<String> readStringListFromConfAsBase64(String key, Configuration conf) {
    String b64List = conf.get(key);
    if (b64List == null) {
        return null;
    }
    List<String> strings = Lists.newArrayList();
    for (String encoded : COMMA_SPLITTER.split(b64List)) {
        byte[] decoded = Base64.decodeBase64(encoded.getBytes(Charsets.UTF_8));
        strings.add(new String(decoded, Charsets.UTF_8));
    }
    return strings;
}
java
{ "resource": "" }
q167037
ThriftUtils.verifyAncestry
validation
/**
 * Asserts that the given class is a Thrift-generated type (extends TBase).
 * Before throwing, checks classloader consistency — a mismatch there is a
 * common root cause of a seemingly impossible "not a Thrift class" failure.
 */
private static void verifyAncestry(Class<?> tClass) {
    if (TBase.class.isAssignableFrom(tClass)) {
        return;
    }
    Utils.ensureClassLoaderConsistency(TBase.class, tClass.getClassLoader());
    throw new ClassCastException(tClass.getName() + " is not a Thrift class");
}
java
{ "resource": "" }
q167038
ThriftUtils.getFieldValue
validation
/**
 * Reads an instance field named {@code fieldName} from
 * {@code containingObject}, cast to {@code fieldClass}. Delegates to the
 * (class, instance) overload.
 */
public static <M> M getFieldValue(Object containingObject, String fieldName, Class<M> fieldClass) {
    Class<?> containingClass = containingObject.getClass();
    return getFieldValue(containingClass, containingObject, fieldName, fieldClass);
}
java
{ "resource": "" }
q167039
ThriftUtils.getFieldValue
validation
/**
 * Reads a static field named {@code fieldName} from {@code containingClass},
 * cast to {@code fieldClass}. Delegates with a null instance since static
 * fields need no receiver.
 */
public static <M> M getFieldValue(Class<?> containingClass, String fieldName, Class<M> fieldClass) {
    // null instance: static field lookup
    return getFieldValue(containingClass, null, fieldName, fieldClass);
}
java
{ "resource": "" }
q167040
ThriftUtils.getFieldValueType
validation
/**
 * Maps a Thrift field's wire type to the Java class its value carries:
 * boxed primitives for scalars, String for strings, the collection
 * interfaces for containers, and the generated classes for enums/structs.
 * Returns null for STOP, VOID, and any unrecognized type.
 */
public static Class<?> getFieldValueType(Field field) {
    switch (field.getType()) {
        case TType.BOOL:   return Boolean.class;
        case TType.BYTE:   return Byte.class;
        case TType.DOUBLE: return Double.class;
        case TType.I16:    return Short.class;
        case TType.I32:    return Integer.class;
        case TType.I64:    return Long.class;
        case TType.STRING: return String.class;
        case TType.LIST:   return List.class;
        case TType.MAP:    return Map.class;
        case TType.SET:    return Set.class;
        case TType.ENUM:   return field.getEnumClass();
        case TType.STRUCT: return field.gettStructDescriptor().getThriftClass();
        case TType.STOP:   // no value type
        case TType.VOID:   // no value type
        default:
            return null;
    }
}
java
{ "resource": "" }
q167041
ThriftUtils.writeFieldNoTag
validation
/**
 * Writes {@code value} to the protocol without a field tag. Containers are
 * written inline: a map writes key type, value type, size, then alternating
 * key/value elements; a list or set writes element type, size, then the
 * elements. All scalar and struct values delegate to writeSingleFieldNoTag.
 * A null value writes nothing at all.
 */
public static void writeFieldNoTag(TProtocol proto, Field field, Object value) throws TException {
    if (value == null) {
        return;
    }
    Field innerField = null;
    switch (field.getType()) {
    case TType.LIST:
        innerField = field.getListElemField();
        break;
    case TType.SET:
        innerField = field.getSetElemField();
        break;
    case TType.MAP:
        innerField = field.getMapKeyField();
        break;
    default:
        // scalar or struct: single-value write, nothing more to do
        writeSingleFieldNoTag(proto, field, value);
        return;
    }
    // a map or a collection:
    if (field.getType() == TType.MAP) {
        Field valueField = field.getMapValueField();
        Map<?, ?> map = (Map<?, ?>)value;
        // header: key type, value type, entry count
        proto.writeByte(innerField.getType());
        proto.writeByte(valueField.getType());
        proto.writeI32(map.size());
        for(Entry<?, ?> entry : map.entrySet()) {
            writeSingleFieldNoTag(proto, innerField, entry.getKey());
            writeSingleFieldNoTag(proto, valueField, entry.getValue());
        }
    } else { // SET or LIST
        Collection<?> coll = (Collection<?>)value;
        // header: element type, element count
        proto.writeByte(innerField.getType());
        proto.writeI32(coll.size());
        for(Object v : coll) {
            writeSingleFieldNoTag(proto, innerField, v);
        }
    }
}
java
{ "resource": "" }
q167042
PigToProtobuf.tupleToMessage
validation
/**
 * Converts a Pig tuple into a protobuf message using the builder's full
 * declared field list. Delegates to the (builder, fields, tuple) overload.
 */
public static Message tupleToMessage(Builder builder, Tuple tuple) {
    List<FieldDescriptor> fields = builder.getDescriptorForType().getFields();
    return tupleToMessage(builder, fields, tuple);
}
java
{ "resource": "" }
q167043
PigToProtobuf.tupleFieldToSingleField
validation
private static Object tupleFieldToSingleField(FieldDescriptor fieldDescriptor, Object tupleField) { // type convertion should match with ProtobufToPig.getPigScriptDataType switch (fieldDescriptor.getType()) { case ENUM: return toEnumValueDescriptor(fieldDescriptor, (String) tupleField); case BOOL: return Boolean.valueOf((Integer)tupleField != 0); case BYTES: return ByteString.copyFrom(((DataByteArray)tupleField).get()); default: return tupleField; } }
java
{ "resource": "" }
q167044
PigToProtobuf.addField
validation
/**
 * Appends a field with the given name, number, and type to the dynamic
 * message descriptor being built.
 */
private static void addField(DescriptorProto.Builder builder, String name, int fieldId, Type type) {
    FieldDescriptorProto field = FieldDescriptorProto.newBuilder()
        .setName(name)
        .setNumber(fieldId)
        .setType(type)
        .build();
    builder.addField(field);
}
java
{ "resource": "" }
q167045
PigToProtobuf.pigTypeToProtoType
validation
/**
 * Maps a simple Pig type id to the corresponding protobuf field type.
 *
 * @throws IllegalArgumentException for any non-simple Pig type (tuple, bag,
 *         map, ...) — only scalars are representable here
 */
private static Type pigTypeToProtoType(byte pigTypeId) {
    switch (pigTypeId) {
        case DataType.BOOLEAN:
            return Type.TYPE_BOOL;
        case DataType.INTEGER:
            return Type.TYPE_INT32;
        case DataType.LONG:
            return Type.TYPE_INT64;
        case DataType.FLOAT:
            return Type.TYPE_FLOAT;
        case DataType.DOUBLE:
            return Type.TYPE_DOUBLE;
        case DataType.CHARARRAY:
            return Type.TYPE_STRING;
        case DataType.BYTEARRAY:
            return Type.TYPE_BYTES;
        default:
            throw new IllegalArgumentException("Unsupported Pig type passed (" + pigTypeId +
                ") where a simple type is expected while converting Pig to a dynamic Protobuf");
    }
}
java
{ "resource": "" }
q167046
PigToThrift.toThrift
validation
/**
 * Converts a Pig tuple to a Thrift object of the type described by tDesc,
 * mapping tuple position i to thrift field i. Extra tuple entries or thrift
 * fields are ignored; null tuple entries are skipped, leaving the thrift
 * defaults. A per-field failure is rethrown with context, truncating long
 * values for readability.
 * NOTE(review): the error message claims to show the "tuple value" but
 * actually stringifies the partially-built thrift object (tObj), and the
 * {@code tObj == null} check is dead since newTInstance never returns null —
 * confirm intent before changing either.
 */
@SuppressWarnings("unchecked")
private static TBase<?, ?> toThrift(TStructDescriptor tDesc, Tuple tuple) {
    int size = tDesc.getFields().size();
    int tupleSize = tuple.size();
    @SuppressWarnings("rawtypes")
    TBase tObj = newTInstance(tDesc.getThriftClass());
    for(int i = 0; i<size && i<tupleSize; i++) {
        Object pObj;
        try {
            pObj = tuple.get(i);
        } catch (ExecException e) {
            throw new RuntimeException(e);
        }
        if (pObj != null) {
            Field field = tDesc.getFieldAt(i);
            try {
                tObj.setFieldValue(field.getFieldIdEnum(), toThriftValue(field, pObj));
            } catch (Exception e) {
                // build a truncated description of the object for the error
                String value = String.valueOf(tObj);
                final int max_length = 100;
                if (max_length < value.length()) {
                    value = value.substring(0, max_length - 3) + "...";
                }
                String type = tObj == null ? "unknown" : tObj.getClass().getName();
                throw new RuntimeException(String.format(
                    "Failed to set field '%s' using tuple value '%s' of type '%s' at index %d",
                    field.getName(), value, type, i), e);
            }
        }
        // if tDesc is a union, at least one field needs to be non-null.
        // user is responsible for ensuring that.
    }
    return tObj;
}
java
{ "resource": "" }
q167047
PigToThrift.toThriftValue
validation
/**
 * Converts a single Pig value to the Thrift representation dictated by the
 * field's type: Pig integers are narrowed to bool/byte/short where needed,
 * tuples become structs, bags become sets/lists, maps and enums convert
 * recursively, and the remaining standard types (I32, I64, DOUBLE, ...)
 * pass through untouched. On any conversion failure — typically a schema
 * mismatch — the error is logged and null is returned rather than thrown,
 * so the caller simply leaves the thrift field unset.
 */
@SuppressWarnings("unchecked")
public static Object toThriftValue(Field thriftField, Object pigValue) {
    try {
        switch (thriftField.getType()) {
        case TType.BOOL:
            return Boolean.valueOf(((Integer)pigValue) != 0);
        case TType.BYTE :
            return ((Integer)pigValue).byteValue();
        case TType.I16 :
            return Short.valueOf(((Integer)pigValue).shortValue());
        case TType.STRING:
            return toStringType(pigValue);
        case TType.STRUCT:
            return toThrift(thriftField.gettStructDescriptor(), (Tuple)pigValue);
        case TType.MAP:
            return toThriftMap(thriftField, (Map<String, Object>)pigValue);
        case TType.SET:
            return toThriftSet(thriftField.getSetElemField(), (DataBag) pigValue);
        case TType.LIST:
            return toThriftList(thriftField.getListElemField(), (DataBag)pigValue);
        case TType.ENUM:
            return toThriftEnum(thriftField, (String) pigValue);
        default:
            // standard types : I32, I64, DOUBLE, etc.
            return pigValue;
        }
    } catch (Exception e) {
        // mostly a schema mismatch.
        LOG.warn(String.format(
            "Failed to set field '%s' of type '%s' with value '%s' of type '%s'",
            thriftField.getName(), ThriftUtils.getFieldValueType(thriftField).getName(),
            pigValue, pigValue.getClass().getName()), e);
    }
    return null;
}
java
{ "resource": "" }
q167048
PigToThrift.newTInstance
validation
private static TBase<?, ?> newTInstance(Class<?> tClass) { try { return (TBase<?, ?>) tClass.newInstance(); } catch (Exception e) { // not expected. throw new RuntimeException(e); } }
java
{ "resource": "" }
q167049
DelegateCombineFileInputFormat.setCombinedInputFormatDelegate
validation
/**
 * Records {@code clazz} in the configuration as the input format that
 * DelegateCombineFileInputFormat should wrap and delegate to.
 *
 * @param conf configuration to update
 * @param clazz the real input format class to delegate to
 */
public static void setCombinedInputFormatDelegate(Configuration conf,
    Class<? extends InputFormat> clazz) {
  HadoopUtils.setClassConf(conf, COMBINED_INPUT_FORMAT_DELEGATE, clazz);
}
java
{ "resource": "" }
q167050
LuceneIndexInputFormat.findSplits
validation
/**
 * Locates every Lucene index directory nested under the configured input
 * paths and creates one split per index, sized by the directory's total
 * bytes. Returned in a PriorityQueue so callers can combine smallest-first.
 *
 * @param conf job configuration used to resolve filesystems
 * @return a priority queue with one split per discovered index directory
 * @throws IOException on filesystem errors
 */
protected PriorityQueue<LuceneIndexInputSplit> findSplits(Configuration conf) throws IOException {
  PriorityQueue<LuceneIndexInputSplit> splits = new PriorityQueue<LuceneIndexInputSplit>();
  List<Path> indexDirs = Lists.newLinkedList();
  // find all indexes nested under all the input paths
  // (which happen to be directories themselves)
  for (Path path : inputPaths) {
    HdfsUtils.collectPaths(path, path.getFileSystem(conf), indexDirPathFilter, indexDirs);
  }
  // compute the size of each index
  // and create a single split per index
  for (Path indexDir : indexDirs) {
    long size = HdfsUtils.getDirectorySize(indexDir, indexDir.getFileSystem(conf));
    splits.add(new LuceneIndexInputSplit(Lists.newLinkedList(Arrays.asList(indexDir)), size));
  }
  return splits;
}
java
{ "resource": "" }
q167051
LuceneIndexInputFormat.setInputPaths
validation
/**
 * Records the given input paths in the configuration under INPUT_PATHS_KEY,
 * fully qualifying each against its own filesystem so the strings round-trip
 * even if the default filesystem differs at read time.
 *
 * @param paths non-null, non-empty list of input paths
 * @param conf configuration to update
 * @throws IOException if a path's filesystem cannot be resolved
 * @throws NullPointerException if paths is null
 * @throws IllegalArgumentException if paths is empty
 */
public static void setInputPaths(List<Path> paths, Configuration conf) throws IOException {
  Preconditions.checkNotNull(paths);
  Preconditions.checkArgument(!paths.isEmpty());
  String[] pathStrs = new String[paths.size()];
  int i = 0;
  for (Path p : paths) {
    FileSystem fs = p.getFileSystem(conf);
    pathStrs[i++] = fs.makeQualified(p).toString();
  }
  conf.setStrings(INPUT_PATHS_KEY, pathStrs);
}
java
{ "resource": "" }
q167052
LuceneIndexInputFormat.getInputPaths
validation
/**
 * Reads back the input paths recorded by {@code setInputPaths}.
 *
 * @param conf configuration holding INPUT_PATHS_KEY
 * @return the configured input paths
 * @throws NullPointerException if setInputPaths was never called
 */
public static Path[] getInputPaths(Configuration conf) {
  String[] pathStrs = Preconditions.checkNotNull(conf.getStrings(INPUT_PATHS_KEY),
      "You must call LuceneIndexInputFormat.setInputPaths()");
  Path[] paths = new Path[pathStrs.length];
  int index = 0;
  for (String pathStr : pathStrs) {
    paths[index++] = new Path(pathStr);
  }
  return paths;
}
java
{ "resource": "" }
q167053
CompositeInputSplit.add
validation
/**
 * Adds a child split to this composite, accumulating its length and
 * invalidating the cached location hints so getLocations() recomputes them.
 *
 * @param split the split to include
 * @throws IOException if the split's length cannot be read
 * @throws InterruptedException if interrupted while reading the length
 */
public void add(InputSplit split) throws IOException, InterruptedException {
  splits.add(split);
  totalSplitSizes += split.getLength();
  locations = null; // force getLocations() to recompute
}
java
{ "resource": "" }
q167054
CompositeInputSplit.getLocations
validation
/**
 * Returns location hints for this composite split: the hosts that appear
 * most often among the child splits' own hints.
 *
 * If fewer than five distinct hosts are seen, all are returned; otherwise
 * the five most frequent are chosen. The result is cached until add()
 * invalidates it.
 *
 * @return host names to prefer when scheduling this split
 * @throws IOException if a child split fails to report its locations
 * @throws InterruptedException if interrupted while querying a child split
 */
public String[] getLocations() throws IOException, InterruptedException {
  if (locations == null) {
    // Count how many child splits list each host.
    Map<String, Integer> hosts = new HashMap<String, Integer>();
    for (InputSplit s : splits) {
      String[] hints = s.getLocations();
      if (hints != null) {
        for (String host : hints) {
          Integer value = hosts.get(host);
          if (value == null) {
            value = 0;
          }
          value++;
          hosts.put(host, value);
        }
      }
    }
    if (hosts.size() < 5) {
      locations = hosts.keySet().toArray(new String[hosts.size()]);
    } else {
      // Rank hosts by descending count (the comparator negates the natural
      // ordering) and keep the top five.
      Queue<Pair<String, Integer>> queue =
          new PriorityQueue<Pair<String, Integer>>(hosts.size(),
              new Comparator<Pair<String, Integer>>() {
        public int compare(Pair<String, Integer> o1, Pair<String, Integer> o2) {
          return -o1.getSecond().compareTo(o2.getSecond());
        }
      });
      for (Map.Entry<String, Integer> entry : hosts.entrySet()) {
        queue.add(new Pair<String, Integer>(entry.getKey(), entry.getValue()));
      }
      locations = new String[] {
          queue.remove().getFirst(),
          queue.remove().getFirst(),
          queue.remove().getFirst(),
          queue.remove().getFirst(),
          queue.remove().getFirst() };
    }
  }
  return locations;
}
java
{ "resource": "" }
q167055
PigTokenHelper.evaluateDelimiter
validation
public static byte evaluateDelimiter(String inputDelimiter) { if (inputDelimiter.length() == 1) { return inputDelimiter.getBytes()[0]; } else if (inputDelimiter.length() > 1 && inputDelimiter.charAt(0) == '\\') { switch (inputDelimiter.charAt(1)) { case 't': return (byte)'\t'; case 'x': case 'u': return Integer.valueOf(inputDelimiter.substring(2)).byteValue(); default: throw new IllegalArgumentException("Unknown delimiter " + inputDelimiter); } } else { throw new IllegalArgumentException("LzoTokenizedStorage delimeter must be a single character"); } }
java
{ "resource": "" }
q167056
ResourceSchemaUtil.createResourceFieldSchema
validation
/**
 * Builds a Pig ResourceFieldSchema mirroring a RequiredField, recursing into
 * any sub-fields so nested projections keep their structure.
 *
 * @param field the pushed-down projection field
 * @return an equivalent ResourceFieldSchema (with nested schema if present)
 * @throws IOException if schema construction fails
 */
public static ResourceFieldSchema createResourceFieldSchema(RequiredField field) throws IOException {
  ResourceFieldSchema schema = new ResourceFieldSchema()
      .setName(field.getAlias())
      .setType(field.getType());
  List<RequiredField> subFields = field.getSubFields();
  if (subFields != null && !subFields.isEmpty()) {
    ResourceFieldSchema[] subSchemaFields = new ResourceFieldSchema[subFields.size()];
    int i = 0;
    for (RequiredField subField : subFields) {
      // depth-first: each sub-field may itself carry sub-fields
      subSchemaFields[i++] = createResourceFieldSchema(subField);
    }
    ResourceSchema subSchema = new ResourceSchema();
    subSchema.setFields(subSchemaFields);
    schema.setSchema(subSchema);
  }
  return schema;
}
java
{ "resource": "" }
q167057
ProtobufComparator.readFully
validation
/**
 * Copies the remainder of {@code in} into {@code out} using the caller's
 * scratch buffer, converting any IOException into a RuntimeException.
 *
 * @param in source stream, read to exhaustion
 * @param out destination buffer
 * @param buffer scratch space reused between reads
 * @throws RuntimeException wrapping any IOException from the stream
 */
public static void readFully(InputStream in, ByteArrayOutputStream out, byte[] buffer) {
  try {
    for (int count = in.read(buffer, 0, buffer.length);
         count != -1;
         count = in.read(buffer, 0, buffer.length)) {
      out.write(buffer, 0, count);
    }
    out.flush();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
java
{ "resource": "" }
q167058
LzoBaseRegexLoader.getNext
validation
/**
 * Reads lines until one matches the loader's regex and returns it as a tuple
 * of the regex's capture groups (null groups become empty strings). Returns
 * null when the reader is absent or input is exhausted without a match.
 *
 * Counters record how many lines matched and how many were skipped.
 *
 * @return a tuple with one field per capture group, or null when exhausted
 * @throws IOException / ExecException if reading is interrupted
 */
@Override
public Tuple getNext() throws IOException {
  if (reader == null) {
    return null;
  }
  Pattern pattern = getPattern();
  Matcher matcher = pattern.matcher("");
  Object lineObj;
  String line;
  Tuple t = null;
  // Read lines until a match is found, making sure there's no reading past the
  // end of the assigned byte range.
  try {
    while (reader.nextKeyValue()) {
      lineObj = reader.getCurrentValue();
      if (lineObj == null) {
        break;
      }
      line = lineObj.toString();
      // reuse the matcher across lines instead of recompiling
      matcher = matcher.reset(line);
      // Increment counters for the number of matched and unmatched lines.
      if (matcher.find()) {
        incrCounter(LzoBaseRegexLoaderCounters.MatchedRegexLines, 1L);
        t = tupleFactory_.newTuple(matcher.groupCount());
        for (int i = 1; i <= matcher.groupCount(); i++) {
          if(matcher.group(i) != null) {
            t.set(i - 1, matcher.group(i));
          } else {
            t.set(i - 1, "");
          }
        }
        break;
      } else {
        incrCounter(LzoBaseRegexLoaderCounters.UnmatchedRegexLines, 1L);
        // TODO: stop doing this, as it can slow down the job.
        LOG.debug("No match for line " + line);
      }
      // If the read has walked beyond the end of the split, move on.
    }
  } catch (InterruptedException e) {
    int errCode = 6018;
    String errMsg = "Error while reading input";
    throw new ExecException(errMsg, errCode, PigException.REMOTE_ENVIRONMENT, e);
  }
  return t;
}
java
{ "resource": "" }
q167059
Codecs.createStandardBase64
validation
/**
 * Creates a commons-codec Base64 that emits a single unbroken line.
 *
 * commons-codec 1.4 changed the no-arg Base64() so that encode() chunks
 * output with a newline every 76 characters; Base64(0) disables that. The
 * int constructor is invoked reflectively so this also runs against older
 * codec versions that lack it, in which case the swallowed exceptions are
 * intentional and we fall back to the no-arg constructor.
 *
 * @return a Base64 codec that does not insert line breaks
 */
public static Base64 createStandardBase64() {
  /* with constructor Base64() in commons-codec-1.4
   * encode() inserts a newline after every 76 characters.
   * Base64(0) disables that incompatibility.
   */
  try {
    return Base64.class.getConstructor(int.class).newInstance(0);
  } catch (SecurityException e) {      // deliberate fall-through to fallback
  } catch (NoSuchMethodException e) {  // pre-1.4 codec: no int constructor
  } catch (IllegalArgumentException e) {
  } catch (InstantiationException e) {
  } catch (IllegalAccessException e) {
  } catch (InvocationTargetException e) {
  }
  return new Base64();
}
java
{ "resource": "" }
q167060
ProtobufConverter.newInstance
validation
/**
 * Static factory: builds a ProtobufConverter for the given generated Message
 * class. The anonymous TypeRef subclass captures the concrete type M.
 *
 * @param protoClass the protobuf message class to convert
 * @return a converter bound to protoClass
 */
public static <M extends Message> ProtobufConverter<M> newInstance(Class<M> protoClass) {
  return new ProtobufConverter<M>(new TypeRef<M>(protoClass){});
}
java
{ "resource": "" }
q167061
BinaryBlockReader.readNext
validation
/**
 * Reads the next record from the block stream and deserializes it into the
 * given writable.
 *
 * @param writable destination for the decoded message
 * @return true if a record was read, false at end of input
 * @throws IOException on read or decode errors
 */
public boolean readNext(BinaryWritable<M> writable) throws IOException {
  byte[] blob = readNextProtoBytes();
  if (blob != null) {
    writable.set(protoConverter_.fromBytes(blob));
    return true;
  }
  return false;
}
java
{ "resource": "" }
q167062
BinaryBlockReader.readNextProtoBytes
validation
/**
 * Returns the serialized bytes of the next record, advancing to a new block
 * when the current one is exhausted. Zero-length records are skipped when
 * skipEmptyRecords is set.
 *
 * @return the next record's bytes, or null when no blocks remain
 * @throws IOException on read errors
 */
public byte[] readNextProtoBytes() throws IOException {
  while (true) {
    if (!setupNewBlockIfNeeded()) {
      return null; // end of input
    }
    // records are consumed front-to-back out of the current block
    int blobIndex = curBlobs_.size() - numLeftToReadThisBlock_;
    numLeftToReadThisBlock_--;
    byte[] blob = curBlobs_.get(blobIndex).toByteArray();
    if (blob.length == 0 && skipEmptyRecords) {
      continue;
    }
    return blob;
  }
}
java
{ "resource": "" }
q167063
BinaryBlockReader.readNextProtoBytes
validation
/**
 * Reads the next record's raw bytes into a BytesWritable.
 *
 * @param writable destination buffer, overwritten with the record's bytes
 * @return true if a record was read, false at end of input
 * @throws IOException on read errors
 */
public boolean readNextProtoBytes(BytesWritable writable) throws IOException {
  byte[] blob = readNextProtoBytes();
  if (blob != null) {
    writable.set(blob, 0, blob.length);
    return true;
  }
  return false;
}
java
{ "resource": "" }
q167064
HadoopCompat.newTaskAttemptContext
validation
/**
 * Creates a TaskAttemptContext via the reflectively resolved constructor
 * (TASK_CONTEXT_CONSTRUCTOR), which keeps this call working across Hadoop
 * versions with differing context APIs.
 *
 * @param conf job configuration
 * @param taskAttemptId id of the task attempt
 * @return a new TaskAttemptContext
 */
public static TaskAttemptContext newTaskAttemptContext(
    Configuration conf, TaskAttemptID taskAttemptId) {
  return (TaskAttemptContext)
      newInstance(TASK_CONTEXT_CONSTRUCTOR, conf, taskAttemptId);
}
java
{ "resource": "" }
q167065
HadoopCompat.newMapContext
validation
/**
 * Creates a MapContext via the reflectively resolved constructor
 * (MAP_CONTEXT_CONSTRUCTOR), keeping this call working across Hadoop
 * versions with differing context APIs.
 *
 * @return a new MapContext wired to the given reader, writer, committer,
 *         reporter and split
 */
public static MapContext newMapContext(Configuration conf,
    TaskAttemptID taskAttemptID,
    RecordReader recordReader,
    RecordWriter recordWriter,
    OutputCommitter outputCommitter,
    StatusReporter statusReporter,
    InputSplit inputSplit) {
  return (MapContext) newInstance(MAP_CONTEXT_CONSTRUCTOR,
      conf, taskAttemptID, recordReader, recordWriter, outputCommitter,
      statusReporter, inputSplit);
}
java
{ "resource": "" }
q167066
ThriftWritable.newInstance
validation
/**
 * Static factory: builds a ThriftWritable for the given generated Thrift
 * class. The anonymous TypeRef subclass captures the concrete type M.
 *
 * @param tClass the generated Thrift class
 * @return a writable bound to tClass
 */
public static <M extends TBase<?, ?>> ThriftWritable<M> newInstance(Class<M> tClass) {
  return new ThriftWritable<M>(new TypeRef<M>(tClass){});
}
java
{ "resource": "" }
q167067
TypeRef.newInstance
validation
/**
 * Creates a new instance of T via its no-arg constructor, resolving and
 * caching the constructor on first use.
 *
 * Note: the lazy init is unsynchronized; concurrent first calls may each
 * look up the constructor, which is harmless duplicate work.
 *
 * @return a new instance of T
 * @throws NoSuchMethodException if T lacks a public no-arg constructor
 * @throws IllegalAccessException if the constructor is inaccessible
 * @throws InvocationTargetException if the constructor itself throws
 * @throws InstantiationException if T is abstract or an interface
 */
@SuppressWarnings("unchecked")
public T newInstance() throws NoSuchMethodException, IllegalAccessException,
    InvocationTargetException, InstantiationException {
  if (constructor_ == null) {
    constructor_ = getRawClass().getConstructor();
  }
  return (T)constructor_.newInstance();
}
java
{ "resource": "" }
q167068
TypeRef.safeNewInstance
validation
/**
 * Like {@link #newInstance()} but converts the reflective checked exceptions
 * into an unchecked IllegalArgumentException.
 *
 * @return a new instance of T created via its no-arg constructor
 * @throws IllegalArgumentException if T cannot be reflectively instantiated
 */
public T safeNewInstance() {
  try {
    return newInstance();
  } catch (NoSuchMethodException | IllegalAccessException
      | InvocationTargetException | InstantiationException e) {
    // multi-catch collapses the four identical wrap-and-rethrow branches
    throw new IllegalArgumentException(e);
  }
}
java
{ "resource": "" }
q167069
ThriftToDynamicProto.getBuilder
validation
/**
 * Returns a fresh protobuf builder for the dynamic message type derived from
 * the given Thrift class. The cached prototype builder is cloned so callers
 * can mutate freely.
 *
 * NOTE(review): if the class was never registered, messageBuilderMap.get
 * returns null and clone() throws NPE — confirm callers always register the
 * type first.
 *
 * @param thriftClass a Thrift class previously mapped to a proto type
 * @return a clone of the prototype builder for that type
 */
public Message.Builder getBuilder(Class<? extends TBase<?, ?>> thriftClass) {
  return messageBuilderMap.get(protoMessageType(thriftClass)).clone();
}
java
{ "resource": "" }
q167070
ThriftToDynamicProto.mapEntryProtoBuilder
validation
/**
 * Returns a fresh builder for the synthetic key/value entry message that
 * represents the given Thrift map field (cloned from the cached prototype).
 *
 * @param descriptor descriptor of the struct owning the field
 * @param field the map-typed Thrift field
 * @return a clone of the cached entry-message builder
 */
private Message.Builder mapEntryProtoBuilder(TStructDescriptor descriptor, Field field) {
  return messageBuilderMap.get(mapProtoMessageType(descriptor, field)).clone();
}
java
{ "resource": "" }
q167071
ThriftToDynamicProto.mapDescriptorProtoBuilder
validation
/**
 * Builds the DescriptorProto for the synthetic two-field (key, value)
 * message that models a Thrift map field in protobuf.
 *
 * @param field the map-typed Thrift field
 * @param typeName name to give the synthetic message type
 * @return a descriptor builder with key and value fields typed from the map
 * @throws DescriptorValidationException if the field types cannot be mapped
 */
private DescriptorProtos.DescriptorProto.Builder mapDescriptorProtoBuilder(
    Field field, String typeName) throws DescriptorValidationException {
  DescriptorProtos.DescriptorProto.Builder mapBuilder =
      DescriptorProtos.DescriptorProto.newBuilder().setName(typeName);
  Field keyField = field.getMapKeyField();
  Field valueField = field.getMapValueField();
  DescriptorProtos.FieldDescriptorProto.Builder keyBuilder = mapKeyProtoBuilder();
  DescriptorProtos.FieldDescriptorProto.Builder valueBuilder = mapValueProtoBuilder();
  setBuilderTypeFromField(keyField, keyBuilder);
  setBuilderTypeFromField(valueField, valueBuilder);
  mapBuilder.addField(keyBuilder.build());
  mapBuilder.addField(valueBuilder.build());
  return mapBuilder;
}
java
{ "resource": "" }
q167072
ThriftToDynamicProto.resolveMessageTypeName
validation
/**
 * Returns the proto message type name for a Thrift struct, lazily creating
 * and registering a descriptor builder (and mapping its schema) the first
 * time the type is seen.
 *
 * NOTE(review): the builder is registered in the map before doSchemaMapping
 * runs — presumably so self-referencing structs resolve; confirm.
 *
 * @param descriptor the Thrift struct descriptor
 * @return the proto message type name
 * @throws DescriptorValidationException if schema mapping fails
 */
private String resolveMessageTypeName(TStructDescriptor descriptor)
    throws DescriptorValidationException {
  String typeName = protoMessageType(descriptor.getThriftClass());
  // Anytime we have a new message typeName, we make sure that we have a builder for it.
  // If not, we create one.
  DescriptorProtos.DescriptorProto.Builder builder = descriptorBuilderMap.get(typeName);
  if (builder == null) {
    builder = DescriptorProtos.DescriptorProto.newBuilder();
    builder.setName(typeName);
    descriptorBuilderMap.put(typeName, builder);
    doSchemaMapping(builder, descriptor);
  }
  return typeName;
}
java
{ "resource": "" }
q167073
ThriftToDynamicProto.convert
validation
/**
 * Converts a Thrift object to its dynamic protobuf Message equivalent.
 *
 * @param thriftObj the Thrift object; must not be null
 * @return the converted Message
 * @throws NullPointerException if thriftObj is null
 */
@SuppressWarnings("unchecked")
public Message convert(T thriftObj) {
  return doConvert((TBase<?, ?>) Preconditions.checkNotNull(thriftObj,
      "Can not convert a null object"));
}
java
{ "resource": "" }
q167074
ThriftToDynamicProto.doConvert
validation
/**
 * Core conversion: walks every field of the Thrift object and copies the set
 * ones into a protobuf builder for the corresponding dynamic message type.
 *
 * Unset fields are skipped (so protobuf defaults don't leak in), as are
 * nested-object fields when supportNestedObjects is off. STRUCT fields
 * recurse via doConvert; everything else goes through convertField.
 *
 * @param thriftObj the Thrift object to convert, may be null
 * @return the built Message, or null if thriftObj is null
 */
@SuppressWarnings("unchecked")
public <F extends TFieldIdEnum> Message doConvert(TBase<?, F> thriftObj) {
  if (thriftObj == null) {
    return null;
  }
  Class<TBase<?, F>> clazz = (Class<TBase<?, F>>) thriftObj.getClass();
  checkState(clazz);
  Message.Builder builder = getBuilder(clazz);
  TStructDescriptor fieldDesc = TStructDescriptor.getInstance(clazz);
  int fieldId = 0;
  for (Field tField : fieldDesc.getFields()) {
    // don't want to carry over default values from unset fields
    if (!thriftObj.isSet((F) tField.getFieldIdEnum())
        || (!supportNestedObjects && hasNestedObject(tField))) {
      fieldId++;
      continue;
    }
    // recurse into the object if it's a struct, otherwise just add the field
    if (supportNestedObjects && tField.getType() == TType.STRUCT) {
      TBase<?, ?> fieldValue = (TBase<?, ?>) fieldDesc.getFieldValue(fieldId++, thriftObj);
      Message message = doConvert(fieldValue);
      if (message != null) {
        FieldDescriptor protoFieldDesc = builder.getDescriptorForType().findFieldByName(
            tField.getName());
        builder.setField(protoFieldDesc, message);
      }
    } else {
      fieldId = convertField(thriftObj, builder, fieldDesc, fieldId, tField);
    }
  }
  return builder.build();
}
java
{ "resource": "" }
q167075
ThriftToDynamicProto.buildMapEntryMessage
validation
/**
 * Populates one synthetic map-entry message: converts the key and value
 * (recursing into doConvert for struct types, sanitizing raw types
 * otherwise) and sets them on the entry builder's key/value fields.
 *
 * @param mapBuilder builder for the synthetic entry message
 * @param field the map-typed Thrift field being converted
 * @param mapKey the Thrift-side key
 * @param mapValue the Thrift-side value
 * @return the built entry message
 */
private Message buildMapEntryMessage(Message.Builder mapBuilder, Field field,
    Object mapKey, Object mapValue) {
  FieldDescriptor keyFieldDescriptor =
      mapBuilder.getDescriptorForType().findFieldByName(MAP_KEY_FIELD_NAME);
  FieldDescriptor valueFieldDescriptor =
      mapBuilder.getDescriptorForType().findFieldByName(MAP_VALUE_FIELD_NAME);
  boolean isKeyStruct = field.getMapKeyField().isStruct();
  boolean isValueStruct = field.getMapValueField().isStruct();
  Object convertedKey;
  if (isKeyStruct) {
    convertedKey = doConvert((TBase<?, ?>) mapKey);
  } else {
    convertedKey = sanitizeRawValue(mapKey, field.getMapKeyField());
  }
  Object convertedValue;
  if (isValueStruct) {
    convertedValue = doConvert((TBase<?, ?>) mapValue);
  } else {
    convertedValue = sanitizeRawValue(mapValue, field.getMapValueField());
  }
  mapBuilder.setField(keyFieldDescriptor, convertedKey);
  mapBuilder.setField(valueFieldDescriptor, convertedValue);
  return mapBuilder.build();
}
java
{ "resource": "" }
q167076
ThriftToDynamicProto.mapProtoMessageType
validation
/**
 * Derives the synthetic proto message type name for a Thrift map field:
 * "&lt;enclosing message type&gt;_&lt;field name&gt;".
 *
 * @param descriptor descriptor of the struct owning the field
 * @param field the map-typed Thrift field
 * @return the derived type name
 */
private String mapProtoMessageType(TStructDescriptor descriptor, Field field) {
  String enclosingType = protoMessageType(descriptor.getThriftClass());
  return enclosingType + "_" + field.getName();
}
java
{ "resource": "" }
q167077
HdfsUtils.walkPath
validation
/**
 * Recursively walks {@code path}, invoking the visitor on every path the
 * filter accepts. Directories are descended into regardless of whether the
 * filter accepted them.
 *
 * @param path root of the walk
 * @param fs filesystem the path lives on
 * @param filter decides which paths the visitor sees
 * @param visitor callback invoked per accepted path
 * @throws IOException on filesystem errors
 */
public static void walkPath(Path path, FileSystem fs, PathFilter filter,
    PathVisitor visitor) throws IOException {
  FileStatus fileStatus = fs.getFileStatus(path);
  if (filter.accept(path)) {
    visitor.visit(fileStatus);
  }
  if (fileStatus.isDir()) {
    FileStatus[] children = fs.listStatus(path);
    for (FileStatus childStatus : children) {
      walkPath(childStatus.getPath(), fs, filter, visitor);
    }
  }
}
java
{ "resource": "" }
q167078
HdfsUtils.collectPaths
validation
/**
 * Walks {@code path} and appends every filter-accepted path to the
 * accumulator list, in visit order.
 *
 * @param path root of the walk
 * @param fs filesystem the path lives on
 * @param filter decides which paths are collected
 * @param accumulator receives the accepted paths
 * @throws IOException on filesystem errors
 */
public static void collectPaths(Path path, FileSystem fs, PathFilter filter,
    final List<Path> accumulator) throws IOException {
  walkPath(path, fs, filter, new PathVisitor() {
    @Override
    public void visit(FileStatus fileStatus) {
      accumulator.add(fileStatus.getPath());
    }
  });
}
java
{ "resource": "" }
q167079
HdfsUtils.getDirectorySize
validation
/**
 * Sums the sizes of all files under {@code path} that pass the given filter;
 * directories themselves are excluded from the tally by composing in an
 * exclude-directories filter.
 *
 * @param path root directory
 * @param fs filesystem the path lives on
 * @param filter additional filter on which files count toward the total
 * @return total size in bytes of the matching files
 * @throws IOException on filesystem errors
 */
public static long getDirectorySize(Path path, FileSystem fs, PathFilter filter)
    throws IOException {
  PathSizeVisitor visitor = new PathSizeVisitor();
  PathFilter composite = new PathFilters.CompositePathFilter(
      PathFilters.newExcludeDirectoriesFilter(fs.getConf()),
      filter);
  walkPath(path, fs, composite, visitor);
  return visitor.getSize();
}
java
{ "resource": "" }
q167080
RCFileOutputFormat.setColumnNumber
validation
/**
 * Records the number of columns RCFile writers should emit in the job
 * configuration.
 *
 * @param conf configuration to update
 * @param columnNum number of columns; must be positive
 * @throws IllegalArgumentException if {@code columnNum} is not positive
 */
public static void setColumnNumber(Configuration conf, int columnNum) {
  // A bare 'assert' is disabled unless the JVM runs with -ea, so an invalid
  // column count used to slip through silently; validate unconditionally.
  if (columnNum <= 0) {
    throw new IllegalArgumentException("columnNum must be > 0, got: " + columnNum);
  }
  conf.setInt(RCFile.COLUMN_NUMBER_CONF_STR, columnNum);
}
java
{ "resource": "" }
q167081
CombinedSequenceFile.updateJobConfForLocalSettings
validation
/**
 * Copies any locally-scoped compression settings into the corresponding
 * MapReduce output-compression keys, leaving unset options untouched.
 *
 * @param conf job configuration to read from and update in place
 */
private static void updateJobConfForLocalSettings(JobConf conf) {
  // local key -> MapReduce key, applied in the original order
  String[][] keyPairs = {
      { COMPRESS_ENABLE, MR_COMPRESS_ENABLE },
      { COMPRESS_TYPE, MR_COMPRESS_TYPE },
      { COMPRESS_CODEC, MR_COMPRESS_CODEC },
  };
  for (String[] pair : keyPairs) {
    String localValue = conf.get(pair[0]);
    if (localValue != null) {
      conf.set(pair[1], localValue);
    }
  }
}
java
{ "resource": "" }
q167082
ProtobufReflectionUtil.parseMethodFor
validation
/**
 * Looks up the static {@code parseDelimitedFrom(InputStream)} method on the
 * given protobuf Message class, rethrowing lookup failures unchecked.
 *
 * @param klass the generated protobuf message class
 * @return the resolved parse method
 * @throws RuntimeException if the method is missing or inaccessible
 */
public static Method parseMethodFor(Class<Message> klass) {
  final Class<?>[] signature = { InputStream.class };
  try {
    return klass.getMethod("parseDelimitedFrom", signature);
  } catch (SecurityException e) {
    throw new RuntimeException(e);
  } catch (NoSuchMethodException e) {
    throw new RuntimeException(e);
  }
}
java
{ "resource": "" }
q167083
ProtobufReflectionUtil.parseMessage
validation
/**
 * Invokes a previously resolved static parseDelimitedFrom(InputStream)
 * method to read one delimited message, rethrowing reflective failures
 * unchecked.
 *
 * @param parseMethod the static parse method (see parseMethodFor)
 * @param in stream positioned at a delimited message
 * @return the parsed Message
 */
public static Message parseMessage(Method parseMethod, InputStream in) {
  try {
    // static method, so the receiver argument is null
    return (Message) parseMethod.invoke(null, in);
  } catch (IllegalArgumentException e) {
    throw new RuntimeException(e);
  } catch (IllegalAccessException e) {
    throw new RuntimeException(e);
  } catch (InvocationTargetException e) {
    throw new RuntimeException(e);
  }
}
java
{ "resource": "" }
q167084
ProtobufReflectionUtil.parseMessage
validation
/**
 * Convenience overload: resolves the parse method for the class, then reads
 * one delimited message from the stream.
 *
 * @param klass the generated protobuf message class
 * @param in stream positioned at a delimited message
 * @return the parsed Message
 */
public static Message parseMessage(Class<Message> klass, InputStream in) {
  Method parseMethod = parseMethodFor(klass);
  return parseMessage(parseMethod, in);
}
java
{ "resource": "" }
q167085
LzoJsonStorage.putNext
validation
/**
 * Writes one tuple as a JSON line. The tuple's first field must be a map,
 * which becomes the JSON object; when a key whitelist was configured, only
 * whitelisted keys are emitted.
 *
 * @param tuple tuple whose first field is a Map of the record's values
 * @throws IOException if the underlying writer fails or is interrupted
 */
@Override
@SuppressWarnings("unchecked")
public void putNext(Tuple tuple) throws IOException {
  json.clear(); // the JSON object is reused across calls
  if (tuple != null && tuple.size() >= 1) {
    Map<String, Object> map = (Map<String, Object>) tuple.get(0);
    if (keysToKeep_ == null) {
      json.putAll(map);
    } else {
      for (Map.Entry<String, Object> entry : map.entrySet()) {
        if (keysToKeep_.contains(entry.getKey())) {
          json.put(entry.getKey(), entry.getValue());
        }
      }
    }
  }
  try {
    writer.write(null, new Text(json.toString()));
  } catch (InterruptedException e) {
    // Under what circumstances does this happen?
    throw new IOException(e);
  }
}
java
{ "resource": "" }
q167086
LzoProtobufB64LineOutputFormat.setClassConf
validation
/**
 * Records the protobuf class this output format should serialize, keyed by
 * the output format class itself.
 *
 * @param protoClass the message class that will be written
 * @param jobConf configuration to update
 */
public static <M extends Message> void setClassConf(Class<M> protoClass, Configuration jobConf) {
  Protobufs.setClassConf(jobConf, LzoProtobufB64LineOutputFormat.class, protoClass);
}
java
{ "resource": "" }
q167087
MultiInputFormat.setTypeRef
validation
/**
 * Resolves the class named by CLASS_CONF_KEY in the job configuration and
 * captures it in a TypeRef for later record deserialization.
 *
 * @param conf configuration carrying the class name
 * @throws RuntimeException if the key is unset or the class cannot be loaded
 */
private void setTypeRef(Configuration conf) {
  String className = conf.get(CLASS_CONF_KEY);
  if (className == null) {
    throw new RuntimeException(CLASS_CONF_KEY + " is not set");
  }
  Class<?> clazz = null;
  try {
    clazz = conf.getClassByName(className);
  } catch (ClassNotFoundException e) {
    // this is a class-lookup failure, not an instantiation failure; the old
    // message ("failed to instantiate") misdirected debugging.
    throw new RuntimeException("failed to load class '" + className + "'", e);
  }
  typeRef = new TypeRef<M>(clazz){};
}
java
{ "resource": "" }
q167088
AbstractThriftBinaryProtocol.checkContainerSize
validation
/**
 * Validates a container (list/set/map) size decoded off the wire.
 *
 * Rejects negative sizes outright, and — when read-length checking is
 * enabled — sizes larger than the bytes remaining in the message, guarding
 * against corrupt or malicious length prefixes causing huge allocations.
 *
 * @param size the decoded container size
 * @throws TProtocolException if the size is negative or exceeds the
 *         remaining message length
 */
protected void checkContainerSize(int size) throws TProtocolException {
  if (size < 0) {
    throw new TProtocolException("Negative container size: " + size);
  }
  if (checkReadLength_ && (readLength_ - size) < 0) {
    throw new TProtocolException("Remaining message length is " + readLength_ +
        " but container size in underlying TTransport is set to at least: " + size);
  }
}
java
{ "resource": "" }
q167089
StreamSearcher.setPattern
validation
/**
 * Installs a new search pattern: takes a defensive copy of the caller's
 * bytes, resizes the border table to pattern length + 1, and rebuilds it
 * via preProcess().
 *
 * @param pattern the byte sequence to search for
 */
public void setPattern(byte[] pattern) {
  byte[] copy = new byte[pattern.length];
  System.arraycopy(pattern, 0, copy, 0, pattern.length);
  pattern_ = copy;
  borders_ = new int[pattern_.length + 1];
  preProcess();
}
java
{ "resource": "" }
q167090
Strings.underscore
validation
public static String underscore(String word) { String firstPattern = "([A-Z]+)([A-Z][a-z])"; String secondPattern = "([a-z\\d])([A-Z])"; String replacementPattern = "$1_$2"; // Replace package separator with slash. word = word.replaceAll("\\.", "/"); // Replace $ with two underscores for inner classes. word = word.replaceAll("\\$", "__"); // Replace capital letter with _ plus lowercase letter. word = word.replaceAll(firstPattern, replacementPattern); word = word.replaceAll(secondPattern, replacementPattern); word = word.replace('-', '_'); word = word.toLowerCase(); return word; }
java
{ "resource": "" }
q167091
Strings.ordinalize
validation
/**
 * Appends the English ordinal suffix to a number: 1 -> "1st", 2 -> "2nd",
 * 11 -> "11th", 22 -> "22nd", and so on.
 *
 * @param n the number to ordinalize
 * @return the number followed by its ordinal suffix
 */
public static String ordinalize(int n) {
  String suffix;
  int lastTwo = n % 100;
  if (lastTwo >= 11 && lastTwo <= 13) {
    // 11th, 12th, 13th are irregular despite ending in 1, 2, 3.
    suffix = "th";
  } else {
    switch (n % 10) {
    case 1: suffix = "st"; break;
    case 2: suffix = "nd"; break;
    case 3: suffix = "rd"; break;
    default: suffix = "th"; break;
    }
  }
  return String.valueOf(n) + suffix;
}
java
{ "resource": "" }
q167092
ProtobufWritable.newInstance
validation
/**
 * Static factory: builds a ProtobufWritable for the given message class.
 * The anonymous TypeRef subclass captures the concrete type M.
 *
 * @param tClass the protobuf message class
 * @return a writable bound to tClass
 */
public static <M extends Message> ProtobufWritable<M> newInstance(Class<M> tClass) {
  return new ProtobufWritable<M>(new TypeRef<M>(tClass){});
}
java
{ "resource": "" }
q167093
LzoW3CLogInputFormat.newInstance
validation
/**
 * Creates an input format whose record readers load their W3C field
 * definitions from the given file; the file name is captured by the
 * anonymous subclasses.
 *
 * @param fieldDefinitionFile path to the field definition file
 * @return an input format bound to that definition file
 */
public static LzoW3CLogInputFormat newInstance(final String fieldDefinitionFile) {
  return new LzoW3CLogInputFormat() {
    @Override
    public RecordReader<LongWritable, MapWritable> createRecordReader(InputSplit split,
        TaskAttemptContext context) throws IOException, InterruptedException {
      RecordReader<LongWritable, MapWritable> reader = new LzoW3CLogRecordReader() {
        @Override
        protected String getFieldDefinitionFile() {
          return fieldDefinitionFile;
        }
      };
      // readers are initialized here rather than by the framework caller
      reader.initialize(split, context);
      return reader;
    }
  };
}
java
{ "resource": "" }
q167094
DeprecatedOutputFormatWrapper.setOutputFormat
validation
/**
 * Installs this wrapper as the job's old-API output format and records the
 * real (new-API) output format it should delegate to.
 *
 * @param realOutputFormatClass the mapreduce-API output format to wrap
 * @param conf configuration to update
 */
public static void setOutputFormat(Class<?> realOutputFormatClass, Configuration conf) {
  conf.setClass("mapred.output.format.class", DeprecatedOutputFormatWrapper.class,
      org.apache.hadoop.mapred.OutputFormat.class);
  HadoopUtils.setClassConf(conf, CLASS_CONF_KEY, realOutputFormatClass);
}
java
{ "resource": "" }
q167095
Inflection.match
validation
/**
 * Tests whether this inflection's pattern occurs anywhere in the word,
 * honoring the case-insensitivity flag.
 *
 * NOTE(review): the Pattern is recompiled on every call; if this shows up
 * in profiles, compile once and cache.
 *
 * @param word the word to test
 * @return true if the pattern is found in the word
 */
public boolean match(String word) {
  int flags = ignoreCase_ ? Pattern.CASE_INSENSITIVE : 0;
  return Pattern.compile(pattern_, flags).matcher(word).find();
}
java
{ "resource": "" }
q167096
Inflection.replace
validation
/**
 * Applies this inflection to the word, replacing every occurrence of the
 * pattern with the configured replacement (which may reference capture
 * groups), honoring the case-insensitivity flag.
 *
 * NOTE(review): the Pattern is recompiled on every call; cache if hot.
 *
 * @param word the word to transform
 * @return the word with all pattern matches replaced
 */
public String replace(String word) {
  int flags = ignoreCase_ ? Pattern.CASE_INSENSITIVE : 0;
  return Pattern.compile(pattern_, flags).matcher(word).replaceAll(replacement_);
}
java
{ "resource": "" }
q167097
Inflection.pluralize
validation
/**
 * Returns the plural form of an English word by applying the first matching
 * pluralization rule; uncountable and unmatched words come back unchanged.
 *
 * @param word the singular word
 * @return the pluralized word, or the input if no rule applies
 */
public static String pluralize(String word) {
  // Uncountable words have no plural form.
  if (isUncountable(word)) {
    return word;
  }
  for (Inflection inflection : plurals_) {
    if (inflection.match(word)) {
      return inflection.replace(word);
    }
  }
  return word;
}
java
{ "resource": "" }
q167098
Inflection.isUncountable
validation
/**
 * Checks (case-insensitively) whether a word is on the uncountable list and
 * therefore has no distinct plural form.
 *
 * @param word the word to test
 * @return true if the word is uncountable
 */
public static boolean isUncountable(String word) {
  for (String uncountable : uncountables_) {
    if (uncountable.equalsIgnoreCase(word)) {
      return true;
    }
  }
  return false;
}
java
{ "resource": "" }
q167099
LzoOutputFormat.getOutputStream
validation
/**
 * Opens the task's LZO-compressed work file (with the standard .lzo
 * extension), returning an output stream that also maintains the LZO index.
 *
 * @param job the task context
 * @return an indexed LZO output stream for the task's work file
 * @throws IOException on filesystem errors
 * @throws InterruptedException if interrupted while creating the file
 */
protected DataOutputStream getOutputStream(TaskAttemptContext job)
    throws IOException, InterruptedException {
  return LzoUtils.getIndexedLzoOutputStream(
      HadoopCompat.getConfiguration(job),
      getDefaultWorkFile(job, LzopCodec.DEFAULT_LZO_EXTENSION));
}
java
{ "resource": "" }