_id stringlengths 2 7 | title stringlengths 3 140 | partition stringclasses 3
values | text stringlengths 73 34.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q172200 | ThreadHealthReporter.register | test | public boolean register(String threadName) {
// Registers a health gauge for the given thread name.
// Returns false without side effects if that name is already registered.
if(threadToGaugeMap.containsKey(threadName)) {
return false;
}
// Create the gauge, publish it in the metrics registry, then cache it locally.
ThreadHealthReportGauge threadHealthReportGauge = new ThreadHealthReportGauge();
MetricsConfigurator.createGauge(metrics, getHealthGaugeName(threadName), threadHealthReportGauge, name, rev);
threadToGaugeMap.put(threadName, threadHealthReportGauge);
return true;
} | java | {
"resource": ""
} |
q172201 | RuntimeInfo.storeControlHubConfigs | test | public static void storeControlHubConfigs(
RuntimeInfo runtimeInfo,
Map<String, String> newConfigs
) throws IOException {
// Merges the given key/value pairs into the Control Hub override properties
// file under the data directory. A null value removes the key entirely.
// NOTE(review): FileReader/FileWriter use the platform default charset —
// confirm this is intended for this properties file.
File configFile = new File(runtimeInfo.getDataDir(), SCH_CONF_OVERRIDE);
Properties properties = new Properties();
// Load existing properties from disk if the file exists
if(configFile.exists()) {
try (FileReader reader = new FileReader(configFile)) {
properties.load(reader);
}
}
// Propagate updated configuration
for(Map.Entry<String, String> entry : newConfigs.entrySet()) {
if(entry.getValue() == null) {
properties.remove(entry.getKey());
} else {
properties.setProperty(entry.getKey(), entry.getValue());
}
}
// Store the new updated configuration back to disk
try(FileWriter writer = new FileWriter(configFile)) {
properties.store(writer, null);
}
} | java | {
"resource": ""
} |
q172202 | LdapLoginModule.credentialLogin | test | protected boolean credentialLogin(Object webCredential) throws LoginException
{
// Checks the supplied credential against the current user and records the
// outcome in the module's authenticated flag; returns that flag.
boolean credResult = getCurrentUser().checkCredential(webCredential);
setAuthenticated(credResult);
if (!credResult){
LOG.warn("Authentication failed - Possibly the user password is wrong");
}
return isAuthenticated();
} | java | {
"resource": ""
} |
q172203 | PipelineConfigurationUpgrader.upgradeIfNecessary | test | public PipelineConfiguration upgradeIfNecessary(
StageLibraryTask library,
PipelineConfiguration pipelineConf,
List<Issue> issues
) {
// Upgrades the pipeline configuration in two phases: schema first, then data.
// Any failure is reported through the (initially empty) issues list and makes
// this method return null instead of a configuration.
Preconditions.checkArgument(issues.isEmpty(), "Given list of issues must be empty.");
boolean upgrade;
// Firstly upgrading schema if needed, then data
upgrade = needsSchemaUpgrade(pipelineConf, issues);
if(upgrade && issues.isEmpty()) {
pipelineConf = upgradeSchema(library, pipelineConf, issues);
}
// Something went wrong with the schema upgrade
if(!issues.isEmpty()) {
return null;
}
// Upgrading data if needed
upgrade = needsUpgrade(library, pipelineConf, issues);
if (upgrade && issues.isEmpty()) {
//we try to upgrade only if we have all defs for the pipelineConf
pipelineConf = upgrade(library, pipelineConf, issues);
}
return (issues.isEmpty()) ? pipelineConf : null;
} | java | {
"resource": ""
} |
q172204 | PipelineConfigurationUpgrader.upgradeIfNeeded | test | static StageConfiguration upgradeIfNeeded(StageLibraryTask library, StageConfiguration conf, List<Issue> issues) {
// Convenience overload: resolves the stage definition from the library
// (non-erroring lookup) and delegates to the 4-arg variant.
return upgradeIfNeeded(
library,
library.getStage(conf.getLibrary(), conf.getStageName(), false),
conf,
issues
);
} | java | {
"resource": ""
} |
q172205 | PipelineConfigurationUpgrader.upgradeIfNeeded | test | static StageConfiguration upgradeIfNeeded(StageLibraryTask library, StageDefinition def, StageConfiguration conf, List<Issue> issues) {
// Upgrades the stage configuration itself and then each of its services.
// Unexpected exceptions are converted into a CONTAINER_0900 issue rather
// than propagated; the (possibly mutated) configuration is always returned.
IssueCreator issueCreator = IssueCreator.getStage(conf.getInstanceName());
int fromVersion = conf.getStageVersion();
int toVersion = def.getVersion();
try {
// Firstly upgrade stage itself (register any new services)
upgradeStageIfNeeded(def, conf, issueCreator, issues);
// And then upgrade all it's services
conf.getServices().forEach(serviceConf -> upgradeServicesIfNeeded(
library,
conf,
serviceConf,
issueCreator.forService(serviceConf.getService().getName()),
issues
));
} catch (Exception ex) {
LOG.error("Unknown exception during upgrade: " + ex, ex);
issues.add(issueCreator.create(
ContainerError.CONTAINER_0900,
fromVersion,
toVersion,
ex.toString()
));
}
return conf;
} | java | {
"resource": ""
} |
q172206 | PipelineConfigurationUpgrader.upgradeServicesIfNeeded | test | private static ServiceConfiguration upgradeServicesIfNeeded(
StageLibraryTask library,
StageConfiguration stageConf,
ServiceConfiguration conf,
IssueCreator issueCreator,
List<Issue> issues
) {
// Upgrades a single service configuration to the version declared by its
// ServiceDefinition, recording failures as issues rather than throwing.
ServiceDefinition def = library.getServiceDefinition(conf.getService(), false);
if (def == null) {
issues.add(issueCreator.create(ContainerError.CONTAINER_0903, conf.getService().getName()));
// Bug fix: previously execution fell through and dereferenced the null
// definition (def.getVersion() below), throwing NPE instead of reporting
// the missing-service issue cleanly.
return conf;
}
int fromVersion = conf.getServiceVersion();
int toVersion = def.getVersion();
// In case we don't need an upgrade
if(!needsUpgrade(toVersion, fromVersion, issueCreator, issues)) {
return conf;
}
// Preserve the caller's context class loader across the upgrade call.
ClassLoader cl = Thread.currentThread().getContextClassLoader();
try {
LOG.warn("Upgrading service instance from version '{}' to version '{}'", conf.getServiceVersion(), def.getVersion());
UpgradeContext upgradeContext = new UpgradeContext(
"",
def.getName(),
stageConf.getInstanceName(),
fromVersion,
toVersion
);
List<Config> configs = def.getUpgrader().upgrade(conf.getConfiguration(), upgradeContext);
// Services must not register further services during their own upgrade.
if(!upgradeContext.registeredServices.isEmpty()) {
throw new StageException(ContainerError.CONTAINER_0904);
}
conf.setServiceVersion(toVersion);
conf.setConfig(configs);
} catch (StageException ex) {
issues.add(issueCreator.create(ex.getErrorCode(), ex.getParams()));
} finally {
Thread.currentThread().setContextClassLoader(cl);
}
return conf;
} | java | {
"resource": ""
} |
q172207 | PipelineConfigurationUpgrader.upgradeStageIfNeeded | test | static private void upgradeStageIfNeeded(
StageDefinition def,
StageConfiguration conf,
IssueCreator issueCreator,
List<Issue> issues
) {
// Upgrades a stage configuration to the version declared by its definition,
// running the upgrader under the stage's own class loader and propagating
// any services the upgrader registered back into the StageConfiguration.
int fromVersion = conf.getStageVersion();
int toVersion = def.getVersion();
// In case we don't need an upgrade.
// Consistency fix: use the issueCreator passed by the caller instead of
// rebuilding an identical one from the instance name.
if(!needsUpgrade(toVersion, fromVersion, issueCreator, issues)) {
return;
}
// Save the caller's context class loader so it can be restored in finally.
ClassLoader cl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(def.getStageClassLoader());
LOG.warn("Upgrading stage instance '{}' from version '{}' to version '{}'", conf.getInstanceName(), fromVersion, toVersion);
UpgradeContext upgradeContext = new UpgradeContext(
def.getLibrary(),
def.getName(),
conf.getInstanceName(),
fromVersion,
toVersion
);
List<Config> configs = def.getUpgrader().upgrade(conf.getConfiguration(), upgradeContext);
conf.setStageVersion(def.getVersion());
conf.setConfig(configs);
// Propagate newly registered services to the StageConfiguration
if(!upgradeContext.registeredServices.isEmpty()) {
List<ServiceConfiguration> services = new ArrayList<>();
services.addAll(conf.getServices());
// Version -1 is special to note that this version has been created by stage and not by the service itself
upgradeContext.registeredServices
.forEach((s, c) -> services.add(new ServiceConfiguration(s, -1, c)));
conf.setServices(services);
}
} catch (StageException ex) {
issues.add(issueCreator.create(ex.getErrorCode(), ex.getParams()));
} finally {
Thread.currentThread().setContextClassLoader(cl);
}
} | java | {
"resource": ""
} |
q172208 | FieldValueReplacerProcessor.getFieldsToNull | test | private List<String> getFieldsToNull(List<NullReplacerConditionalConfig> nullReplacerConditionalConfigs, Set<String> fieldsThatDoNotExist, Set<String> fieldPaths, Record record) throws OnRecordErrorException {
// Resolves, per conditional config, the concrete field paths to null out.
// Paths that match no field are accumulated into fieldsThatDoNotExist (an
// out-parameter); a config's paths are only included when its condition is
// empty or evaluates to true. EL evaluation failures become record errors.
//Gather in this all fields to null
List<String> fieldsToNull = new ArrayList<>();
for (NullReplacerConditionalConfig nullReplacerConditionalConfig : nullReplacerConditionalConfigs) {
List<String> fieldNamesToNull = nullReplacerConditionalConfig.fieldsToNull;
//Gather fieldsPathsToNull for this nullReplacerConditionalConfig
List<String> fieldPathsToNull = new ArrayList<>();
//Gather existing paths for each nullReplacerConditionalConfig
//And if field does not exist gather them in fieldsThatDoNotExist
for (String fieldNameToNull : fieldNamesToNull) {
try {
final List<String> matchingPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(
fieldNameToNull,
fieldPathEval,
fieldPathVars,
record,
fieldPaths
);
if (matchingPaths.isEmpty()) {
// FieldPathExpressionUtil.evaluateMatchingFieldPaths does NOT return the supplied param in its result
// regardless, like FieldRegexUtil#getMatchingFieldPaths did, so we add manually here
fieldsThatDoNotExist.add(fieldNameToNull);
} else {
for (String matchingField : matchingPaths) {
if (record.has(matchingField)) {
fieldPathsToNull.add(matchingField);
} else {
fieldsThatDoNotExist.add(matchingField);
}
}
}
} catch (ELEvalException e) {
LOG.error("Error evaluating condition: " + nullReplacerConditionalConfig.condition, e);
throw new OnRecordErrorException(record, Errors.VALUE_REPLACER_07, fieldNameToNull, e.toString(), e);
}
}
//Now evaluate the condition in nullReplacerConditionalConfig
//If it empty or condition evaluates to true, add all the gathered fields in fieldsPathsToNull
// for this nullReplacerConditionalConfig to fieldsToNull
try {
boolean evaluatedCondition = true;
//If it is empty we assume it is true.
if (!StringUtils.isEmpty(nullReplacerConditionalConfig.condition)) {
evaluatedCondition = nullConditionELEval.eval(nullConditionELVars, nullReplacerConditionalConfig.condition, Boolean.class);
}
if (evaluatedCondition) {
fieldsToNull.addAll(fieldPathsToNull);
}
} catch (ELEvalException e) {
LOG.error("Error evaluating condition: " + nullReplacerConditionalConfig.condition, e);
throw new OnRecordErrorException(record, Errors.VALUE_REPLACER_06, nullReplacerConditionalConfig.condition, e.toString());
}
}
return fieldsToNull;
} | java | {
"resource": ""
} |
q172209 | SystemPackage.isSystemClass | test | private static boolean isSystemClass(String name, List<String> packageList) {
// Decides whether a class/resource name matches the system-package list.
// Entries ending in "." match packages; exact-length matches are classes;
// longer names match only when the next char is '$' (nested classes).
// A "-" prefix on an entry negates it and wins immediately over any
// earlier positive match.
boolean result = false;
if (packageList != null) {
String canonicalName = ClassLoaderUtil.canonicalizeClassOrResource(name);
for (String c : packageList) {
boolean shouldInclude = true;
if (c.startsWith("-")) {
c = c.substring(1);
shouldInclude = false;
}
if (canonicalName.startsWith(c)) {
if ( c.endsWith(".") // package
|| canonicalName.length() == c.length() // class
|| canonicalName.length() > c.length() // nested
&& canonicalName.charAt(c.length()) == '$' ) {
if (shouldInclude) {
result = true;
} else {
// Exclusion entries short-circuit: a later include cannot undo them.
return false;
}
}
}
}
}
return result;
} | java | {
"resource": ""
} |
q172210 | XEvictingQueue.add | test | @Override public boolean add(E e) {
// Appends an element, evicting the oldest entry when the queue is full.
// Always reports success, per the evicting-queue contract.
checkNotNull(e); // check before removing
// A zero-capacity queue silently drops every element.
if (maxSize != 0) {
// Evict the head only when we are at capacity, then append.
if (size() == maxSize) {
delegate.remove();
}
delegate.add(e);
}
return true;
} | java | {
"resource": ""
} |
q172211 | XEvictingQueue.addAndGetEvicted | test | public E addAndGetEvicted(E e) {
// Like add(), but hands the displaced head (if any) back to the caller.
checkNotNull(e); // check before removing
// Capacity zero: nothing is stored and nothing can be evicted.
if (maxSize == 0) {
return null;
}
// Evict the head only when full; remember it so the caller can inspect it.
final E displaced = (size() == maxSize) ? delegate.remove() : null;
delegate.add(e);
return displaced;
} | java | {
"resource": ""
} |
q172212 | OptionalLoadingCache.valueOrDefault | test | private Optional<Value> valueOrDefault(Key key, Optional<Value> value) {
// Substitutes the configured default for a missing value; optionally evicts
// the negative cache entry so a later lookup will retry the loader.
if (!value.isPresent()) {
if (!cacheMissingValues) {
delegate.invalidate(key);
}
return defaultValue;
}
// Cached hit: hand it straight back.
return value;
} | java | {
"resource": ""
} |
q172213 | PreviewPipelineRunner.addReportedErrorsIfNeeded | test | private List<StageOutput> addReportedErrorsIfNeeded(List<StageOutput> snapshotsOfAllStagesOutput) {
// Rebuilds each stage's snapshot with the errors reported for that stage
// instance merged in, then clears the reported-error map. Synchronized so
// concurrent error reporting cannot race the drain.
synchronized (this.reportedErrors) {
if(reportedErrors.isEmpty()) {
return snapshotsOfAllStagesOutput;
}
try {
return snapshotsOfAllStagesOutput.stream()
.map(so -> new StageOutput(
so.getInstanceName(),
so.getOutput(),
so.getErrorRecords(),
reportedErrors.get(so.getInstanceName()),
so.getEventRecords()
))
.collect(Collectors.toList());
} finally {
// Drain the map even if building the snapshot list throws.
reportedErrors.clear();
}
}
} | java | {
"resource": ""
} |
q172214 | ProtobufTypeUtil.getDescriptor | test | public static Descriptors.Descriptor getDescriptor(
ProtoConfigurableEntity.Context context,
String protoDescriptorFile,
String messageType,
Map<String, Set<Descriptors.FieldDescriptor>> messageTypeToExtensionMap,
Map<String, Object> defaultValueMap
) throws StageException {
// Loads a compiled FileDescriptorSet from the resources directory, resolves
// the descriptor for messageType, and fills the two out-parameter maps with
// per-type extensions and default values as a side effect.
File descriptorFileHandle = new File(context.getResourcesDirectory(), protoDescriptorFile);
try(
FileInputStream fin = new FileInputStream(descriptorFileHandle);
) {
DescriptorProtos.FileDescriptorSet set = DescriptorProtos.FileDescriptorSet.parseFrom(fin);
// Iterate over all the file descriptor set computed above and cache dependencies and all encountered
// file descriptors
// this map holds all the dependencies that a given file descriptor has.
// This cached map will be looked up while building FileDescriptor instances
Map<String, Set<Descriptors.FileDescriptor>> fileDescriptorDependentsMap = new HashMap<>();
// All encountered FileDescriptor instances cached based on their name.
Map<String, Descriptors.FileDescriptor> fileDescriptorMap = new HashMap<>();
ProtobufTypeUtil.getAllFileDescriptors(set, fileDescriptorDependentsMap, fileDescriptorMap);
// Get the descriptor for the expected message type
Descriptors.Descriptor descriptor = ProtobufTypeUtil.getDescriptor(set, fileDescriptorMap, protoDescriptorFile, messageType);
// Compute and cache all extensions defined for each message type
ProtobufTypeUtil.populateDefaultsAndExtensions(fileDescriptorMap, messageTypeToExtensionMap, defaultValueMap);
return descriptor;
} catch (FileNotFoundException e) {
throw new StageException(Errors.PROTOBUF_06, descriptorFileHandle.getAbsolutePath(), e);
} catch (IOException e) {
throw new StageException(Errors.PROTOBUF_08, e.toString(), e);
}
} | java | {
"resource": ""
} |
q172215 | ProtobufTypeUtil.getAllFileDescriptors | test | public static void getAllFileDescriptors(
DescriptorProtos.FileDescriptorSet set,
Map<String, Set<Descriptors.FileDescriptor>> dependenciesMap,
Map<String, Descriptors.FileDescriptor> fileDescriptorMap
) throws StageException {
// Builds a FileDescriptor for every proto file in the set, resolving and
// caching each file's dependency set first. Validation failures are wrapped
// as PROTOBUF_07 stage errors.
List<DescriptorProtos.FileDescriptorProto> fileList = set.getFileList();
try {
for (DescriptorProtos.FileDescriptorProto fdp : fileList) {
if (!fileDescriptorMap.containsKey(fdp.getName())) {
Set<Descriptors.FileDescriptor> dependencies = dependenciesMap.get(fdp.getName());
if (dependencies == null) {
dependencies = new LinkedHashSet<>();
// NOTE: the empty set is put into the map BEFORE recursing into
// getDependencies — presumably this guards against revisiting the
// same file during recursive resolution; do not reorder.
dependenciesMap.put(fdp.getName(), dependencies);
dependencies.addAll(getDependencies(dependenciesMap, fileDescriptorMap, fdp, set));
}
Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(
fdp,
dependencies.toArray(new Descriptors.FileDescriptor[dependencies.size()])
);
fileDescriptorMap.put(fdp.getName(), fileDescriptor);
}
}
} catch (Descriptors.DescriptorValidationException e) {
throw new StageException(Errors.PROTOBUF_07, e.getDescription(), e);
}
} | java | {
"resource": ""
} |
q172216 | ProtobufTypeUtil.populateDefaultsAndExtensions | test | public static void populateDefaultsAndExtensions(
Map<String, Descriptors.FileDescriptor> fileDescriptorMap,
Map<String, Set<Descriptors.FieldDescriptor>> typeToExtensionMap,
Map<String, Object> defaultValueMap
) {
// Walks every file descriptor, caching (a) the extensions declared per
// containing message type and (b) default values for extension fields,
// then recurses into each message type for field-level defaults.
for (Descriptors.FileDescriptor f : fileDescriptorMap.values()) {
// go over every file descriptor and look for extensions and default values of those extensions
for (Descriptors.FieldDescriptor fieldDescriptor : f.getExtensions()) {
String containingType = fieldDescriptor.getContainingType().getFullName();
// computeIfAbsent replaces the manual get/null-check/put dance;
// LinkedHashSet keeps extension insertion order.
Set<Descriptors.FieldDescriptor> fieldDescriptors =
typeToExtensionMap.computeIfAbsent(containingType, k -> new LinkedHashSet<>());
fieldDescriptors.add(fieldDescriptor);
if (fieldDescriptor.hasDefaultValue()) {
defaultValueMap.put(containingType + "." + fieldDescriptor.getName(), fieldDescriptor.getDefaultValue());
}
}
// go over messages within file descriptor and look for all fields and extensions and their defaults
for (Descriptors.Descriptor d : f.getMessageTypes()) {
addDefaultsAndExtensions(typeToExtensionMap, defaultValueMap, d);
}
}
} | java | {
"resource": ""
} |
q172217 | ProtobufTypeUtil.getDescriptor | test | public static Descriptors.Descriptor getDescriptor(
DescriptorProtos.FileDescriptorSet set,
Map<String, Descriptors.FileDescriptor> fileDescriptorMap,
String descriptorFile,
String qualifiedMessageType
) throws StageException {
// Locates the Descriptor for a (possibly package-qualified) message type
// within the descriptor set; throws PROTOBUF_00 when no proto file in the
// set declares the type.
// find the FileDescriptorProto which contains the message type
// IF cannot find, then bail out
String packageName = null;
String messageType = qualifiedMessageType;
int lastIndex = qualifiedMessageType.lastIndexOf('.');
if (lastIndex != -1) {
packageName = qualifiedMessageType.substring(0, lastIndex);
messageType = qualifiedMessageType.substring(lastIndex + 1);
}
DescriptorProtos.FileDescriptorProto file = getFileDescProtoForMsgType(packageName, messageType, set);
if (file == null) {
// could not find the message type from all the proto files contained in the descriptor file
throw new StageException(Errors.PROTOBUF_00, qualifiedMessageType, descriptorFile);
}
// finally get the FileDescriptor for the message type
// NOTE(review): assumes fileDescriptorMap was populated for every file in
// the set (e.g. via getAllFileDescriptors); a missing entry would NPE here
// — confirm callers guarantee this.
Descriptors.FileDescriptor fileDescriptor = fileDescriptorMap.get(file.getName());
// create builder using the FileDescriptor
// this can only find the top level message types
return fileDescriptor.findMessageTypeByName(messageType);
} | java | {
"resource": ""
} |
q172218 | ProtobufTypeUtil.protobufToSdcField | test | public static Field protobufToSdcField(
Record record,
String fieldPath,
Descriptors.Descriptor descriptor,
Map<String, Set<Descriptors.FieldDescriptor>> messageTypeToExtensionMap,
Object message
) throws DataParserException {
// Converts a DynamicMessage into an SDC list-map Field: one entry per field
// declared in the proto, plus any extensions set on the message. Unknown
// fields are serialized, base64-encoded, and stashed in a record header
// attribute keyed by field path.
LinkedHashMap<String, Field> sdcRecordMapFieldValue = new LinkedHashMap<>();
// get all the expected fields from the proto file
Map<String, Descriptors.FieldDescriptor> protobufFields = new LinkedHashMap<>();
for (Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields()) {
protobufFields.put(fieldDescriptor.getName(), fieldDescriptor);
}
// get all fields in the read message
Map<Descriptors.FieldDescriptor, Object> values = ((DynamicMessage) message).getAllFields();
// for every field present in the proto definition create an sdc field.
for (Descriptors.FieldDescriptor fieldDescriptor : protobufFields.values()) {
Object value = values.get(fieldDescriptor);
sdcRecordMapFieldValue.put(
fieldDescriptor.getName(),
createField(record, fieldPath, fieldDescriptor, messageTypeToExtensionMap, value)
);
}
// handle applicable extensions for this message type
if (messageTypeToExtensionMap.containsKey(descriptor.getFullName())) {
for (Descriptors.FieldDescriptor fieldDescriptor : messageTypeToExtensionMap.get(descriptor.getFullName())) {
if (values.containsKey(fieldDescriptor)) {
Object value = values.get(fieldDescriptor);
sdcRecordMapFieldValue.put(
fieldDescriptor.getName(),
createField(record, fieldPath, fieldDescriptor, messageTypeToExtensionMap, value)
);
}
}
}
// handle unknown fields
// unknown fields can go into the record header
UnknownFieldSet unknownFields = ((DynamicMessage) message).getUnknownFields();
if (!unknownFields.asMap().isEmpty()) {
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
try {
unknownFields.writeDelimitedTo(bOut);
bOut.flush();
bOut.close();
} catch (IOException e) {
throw new DataParserException(Errors.PROTOBUF_10, e.toString(), e);
}
// Root messages use "/" as the header-attribute path suffix.
String path = fieldPath.isEmpty() ? FORWARD_SLASH : fieldPath;
byte[] bytes = org.apache.commons.codec.binary.Base64.encodeBase64(bOut.toByteArray());
record.getHeader().setAttribute(PROTOBUF_UNKNOWN_FIELDS_PREFIX + path, new String(bytes, StandardCharsets.UTF_8));
}
return Field.createListMap(sdcRecordMapFieldValue);
} | java | {
"resource": ""
} |
q172219 | ProtobufTypeUtil.sdcFieldToProtobufMsg | test | public static DynamicMessage sdcFieldToProtobufMsg(
Record record,
Descriptors.Descriptor desc,
Map<String, Set<Descriptors.FieldDescriptor>> messageTypeToExtensionMap,
Map<String, Object> defaultValueMap
) throws DataGeneratorException {
// Public entry point: converts the record's root field (empty field path)
// via the recursive private overload.
return sdcFieldToProtobufMsg(record, record.get(), "", desc, messageTypeToExtensionMap, defaultValueMap);
} | java | {
"resource": ""
} |
q172220 | ProtobufTypeUtil.sdcFieldToProtobufMsg | test | private static DynamicMessage sdcFieldToProtobufMsg(
Record record,
Field field,
String fieldPath,
Descriptors.Descriptor desc,
Map<String, Set<Descriptors.FieldDescriptor>> messageTypeToExtensionMap,
Map<String, Object> defaultValueMap
) throws DataGeneratorException {
// Recursively builds a DynamicMessage from an SDC map Field, dispatching
// per field descriptor to map/repeated/scalar handlers and re-attaching any
// unknown fields previously stashed in the record header for this path.
if (field == null) {
return null;
}
// compute all fields to look for including extensions
DynamicMessage.Builder builder = DynamicMessage.newBuilder(desc);
List<Descriptors.FieldDescriptor> fields = new ArrayList<>();
fields.addAll(desc.getFields());
if (messageTypeToExtensionMap.containsKey(desc.getFullName())) {
fields.addAll(messageTypeToExtensionMap.get(desc.getFullName()));
}
// root field is always a Map in a record representing protobuf data
Map<String, Field> valueAsMap = field.getValueAsMap();
for (Descriptors.FieldDescriptor f : fields) {
Field mapField = valueAsMap.get(f.getName());
// Repeated field
if (f.isMapField()) {
handleMapField(record, mapField, fieldPath, messageTypeToExtensionMap, defaultValueMap, f, builder);
} else if (f.isRepeated()) {
// Absent repeated fields are simply skipped (left empty).
if (mapField != null) {
handleRepeatedField(
record,
mapField,
fieldPath,
messageTypeToExtensionMap,
defaultValueMap,
f,
builder
);
}
} else {
// non repeated field
handleNonRepeatedField(
record,
valueAsMap,
fieldPath,
messageTypeToExtensionMap,
defaultValueMap,
desc,
f,
builder
);
}
}
// if record has unknown fields for this field path, handle it
try {
handleUnknownFields(record, fieldPath, builder);
} catch (IOException e) {
throw new DataGeneratorException(Errors.PROTOBUF_05, e.toString(), e);
}
return builder.build();
} | java | {
"resource": ""
} |
q172221 | JerseyClientUtil.upgradeToJerseyConfigBean | test | public static void upgradeToJerseyConfigBean(List<Config> configs) {
// Moves HTTP-client-related configs from the "conf." namespace into the new
// "conf.client." bean and seeds the new transfer-encoding config.
List<Config> configsToAdd = new ArrayList<>();
List<Config> configsToRemove = new ArrayList<>();
List<String> movedConfigs = ImmutableList.of(
"conf.requestTimeoutMillis",
"conf.numThreads",
"conf.authType",
"conf.oauth",
"conf.basicAuth",
"conf.useProxy",
"conf.proxy",
"conf.sslConfig"
);
for (Config config : configs) {
if (hasPrefixIn(movedConfigs, config.getName())) {
configsToRemove.add(config);
// Bug fix: rewrite only the leading "conf." prefix. The previous
// String.replace("conf.", "conf.client.") rewrote EVERY occurrence of
// "conf." in the name, not just the prefix.
configsToAdd.add(new Config("conf.client." + config.getName().substring("conf.".length()), config.getValue()));
}
}
configsToAdd.add(new Config("conf.client.transferEncoding", RequestEntityProcessing.CHUNKED));
configs.removeAll(configsToRemove);
configs.addAll(configsToAdd);
} | java | {
"resource": ""
} |
q172222 | KuduUtils.checkConnection | test | public static void checkConnection(AsyncKuduClient kuduClient,
Context context,
String KUDU_MASTER,
final List<Stage.ConfigIssue> issues
){
// Validates connectivity to the Kudu master by issuing a blocking
// list-tables call; any failure is reported as a KUDU_00 config issue
// rather than thrown.
try {
kuduClient.getTablesList().join();
} catch (Exception ex) {
issues.add(
context.createConfigIssue(
Groups.KUDU.name(),
KuduLookupConfig.CONF_PREFIX + KUDU_MASTER ,
Errors.KUDU_00,
ex.toString(),
ex
)
);
}
} | java | {
"resource": ""
} |
q172223 | KuduUtils.convertFromKuduType | test | public static Field.Type convertFromKuduType(Type kuduType){
// Maps a Kudu column type to the corresponding SDC Field.Type.
// Throws UnsupportedOperationException for types not listed here.
switch(kuduType) {
case BINARY: return Field.Type.BYTE_ARRAY;
case BOOL: return Field.Type.BOOLEAN;
case DOUBLE: return Field.Type.DOUBLE;
case FLOAT: return Field.Type.FLOAT;
case INT8: return Field.Type.BYTE;
case INT16: return Field.Type.SHORT;
case INT32: return Field.Type.INTEGER;
case INT64: return Field.Type.LONG;
case STRING: return Field.Type.STRING;
case UNIXTIME_MICROS: return Field.Type.DATETIME;
default:
// DECIMAL is matched by name — presumably so this compiles against
// Kudu client versions whose Type enum lacks DECIMAL; TODO confirm.
if ("DECIMAL".equals(kuduType.name())) {
return Field.Type.DECIMAL;
}
throw new UnsupportedOperationException("Unknown data type: " + kuduType.getName());
}
} | java | {
"resource": ""
} |
q172224 | KuduUtils.createField | test | public static Field createField(RowResult result, String fieldName, Type type) throws StageException {
// Extracts the named column from a Kudu RowResult as an SDC Field of the
// matching type; unhandled types raise KUDU_10.
switch (type) {
case INT8:
return Field.create(Field.Type.BYTE, result.getByte(fieldName));
case INT16:
return Field.create(Field.Type.SHORT, result.getShort(fieldName));
case INT32:
return Field.create(Field.Type.INTEGER, result.getInt(fieldName));
case INT64:
return Field.create(Field.Type.LONG, result.getLong(fieldName));
case BINARY:
try {
return Field.create(Field.Type.BYTE_ARRAY, result.getBinary(fieldName));
} catch (IllegalArgumentException ex) {
// Binary extraction failures become per-record errors, not stage errors.
throw new OnRecordErrorException(Errors.KUDU_35, fieldName);
}
case STRING:
return Field.create(Field.Type.STRING, result.getString(fieldName));
case BOOL:
return Field.create(Field.Type.BOOLEAN, result.getBoolean(fieldName));
case FLOAT:
return Field.create(Field.Type.FLOAT, result.getFloat(fieldName));
case DOUBLE:
return Field.create(Field.Type.DOUBLE, result.getDouble(fieldName));
case UNIXTIME_MICROS:
//UNIXTIME_MICROS is in microsecond; Date wants milliseconds, hence /1000
return Field.create(Field.Type.DATETIME, new Date(result.getLong(fieldName)/1000L));
default:
// DECIMAL matched by name — presumably for compatibility with client
// versions lacking the enum constant; TODO confirm.
if ("DECIMAL".equals(type.name())) {
return Field.create(Field.Type.DECIMAL, result.getDecimal(fieldName));
}
throw new StageException(Errors.KUDU_10, fieldName, type.getName());
}
} | java | {
q172225 | FullPipeBatch.intercept | test | private List<Record> intercept(List<Record> records, List<? extends Interceptor> interceptors) throws StageException {
// Threads the record batch through each interceptor in declared order; each
// interceptor sees the output of the previous one.
List<Record> current = records;
for (int i = 0; i < interceptors.size(); i++) {
current = interceptors.get(i).intercept(current);
}
return current;
} | java | {
q172226 | FileLine.getText | test | public String getText() {
// Lazily decodes the line from the backing buffer on first access and
// caches the result.
// NOTE(review): unsynchronized lazy init — assumed single-threaded access;
// confirm callers never share FileLine across threads.
if (line == null) {
line = new String(buffer, offsetInChunk, length, charset);
}
return line;
} | java | {
q172227 | ValidationUtil.addMissingConfigsToStage | test | public static void addMissingConfigsToStage(
StageLibraryTask stageLibrary,
StageConfiguration stageConf
) {
// Backfills every config declared by the stage definition that the stored
// configuration lacks, using the definition's default value.
StageDefinition stageDef = stageLibrary.getStage(stageConf.getLibrary(), stageConf.getStageName(), false);
if (stageDef == null) {
// Unknown stage: nothing to backfill.
return;
}
for (ConfigDefinition configDef : stageDef.getConfigDefinitions()) {
String configName = configDef.getName();
if (stageConf.getConfig(configName) != null) {
continue;
}
Object defaultValue = configDef.getDefaultValue();
LOG.warn(
"Stage '{}' missing configuration '{}', adding with '{}' as default",
stageConf.getInstanceName(),
configName,
defaultValue
);
stageConf.addConfig(new Config(configName, defaultValue));
}
} | java | {
q172228 | HttpProcessor.parseResponse | test | private Record parseResponse(InputStream response) throws StageException {
// Parses an HTTP response body into a single record. HEAD requests yield an
// empty map record; a null body yields null. Parse failures are routed to
// the configured error handler (which may or may not throw).
Record record = null;
if (conf.httpMethod == HttpMethod.HEAD) {
// Head will have no body so can't be parsed. Return an empty record.
record = getContext().createRecord("");
record.set(Field.create(new HashMap()));
} else if (response != null) {
try (DataParser parser = parserFactory.getParser("", response, "0")) {
// A response may only contain a single record, so we only parse it once.
record = parser.parse();
if (conf.dataFormat == DataFormat.TEXT) {
// Output is placed in a field "/text" so we remove it here.
record.set(record.get("/text"));
}
} catch (IOException | DataParserException e) {
errorRecordHandler.onError(Errors.HTTP_00, e.toString(), e);
}
}
return record;
} | java | {
q172229 | HttpProcessor.addResponseHeaders | test | private void addResponseHeaders(Record record, Response response) throws StageException {
// Routes the response headers to the configured output location.
switch (conf.headerOutputLocation) {
case FIELD:
writeResponseHeaderToField(record, response);
break;
case HEADER:
writeResponseHeaderToRecordHeader(response, record.getHeader());
break;
default:
// NONE: headers are intentionally discarded.
break;
}
} | java | {
q172230 | HttpProcessor.writeResponseHeaderToField | test | private void writeResponseHeaderToField(Record record, Response response) throws StageException {
// Writes the first value of each non-empty response header into a map field
// at the configured path; refuses to overwrite an existing field (HTTP_11).
if (record.has(conf.headerOutputField)) {
throw new StageException(Errors.HTTP_11, conf.headerOutputField);
}
Map<String, Field> headers = new HashMap<>(response.getStringHeaders().size());
for (Map.Entry<String, List<String>> entry : response.getStringHeaders().entrySet()) {
if (!entry.getValue().isEmpty()) {
// Only the first value of a multi-valued header is kept.
String firstValue = entry.getValue().get(0);
headers.put(entry.getKey(), Field.create(firstValue));
}
}
record.set(conf.headerOutputField, Field.create(headers));
} | java | {
q172231 | HttpProcessor.writeResponseHeaderToRecordHeader | test | private void writeResponseHeaderToRecordHeader(Response response, Record.Header header) {
// Copies the first value of every non-empty response header into the record
// header, namespaced with the configured attribute prefix.
response.getStringHeaders().entrySet().stream()
.filter(entry -> !entry.getValue().isEmpty())
.forEach(entry ->
header.setAttribute(conf.headerAttributePrefix + entry.getKey(), entry.getValue().get(0))
);
} | java | {
q172232 | BlobStoreTaskImpl.saveMetadata | test | synchronized private void saveMetadata() throws StageException {
// Persists the in-memory metadata via write-to-temp / delete-old / rename,
// converting each step's failure into a distinct BlobStore error code.
// 0) Validate pre-conditions
if(Files.exists(newMetadataFile)) {
// A leftover temp file means a previous save was interrupted mid-flight.
throw new StageException(BlobStoreError.BLOB_STORE_0010);
}
// 1) New content is written into a new temporary file.
try (
OutputStream os = Files.newOutputStream(newMetadataFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
) {
jsonMapper.writeValue(os, metadata);
} catch (IOException e) {
throw new StageException(BlobStoreError.BLOB_STORE_0001, e.toString(), e);
}
// 2) Old metadata is dropped
// NOTE(review): a crash between the delete and the move below leaves only
// the temp file on disk — confirm recovery handles that window.
try {
if(Files.exists(metadataFile)) {
Files.delete(metadataFile);
}
} catch (IOException e) {
throw new StageException(BlobStoreError.BLOB_STORE_0011, e.toString(), e);
}
// 3) Rename from new to old is done
try {
Files.move(newMetadataFile, metadataFile);
} catch (IOException e) {
throw new StageException(BlobStoreError.BLOB_STORE_0012, e.toString(), e);
}
} | java | {
q172233 | HttpClientSource.configureClient | test | private void configureClient(List<ConfigIssue> issues) throws StageException {
// Initializes the shared client config; only captures the client and parser
// factory when initialization produced no issues.
clientCommon.init(issues, getContext());
if (issues.isEmpty()) {
client = clientCommon.getClient();
parserFactory = conf.dataFormatConfig.getParserFactory();
}
} | java | {
q172234 | HttpClientSource.parseHeadersOnly | test | String parseHeadersOnly(BatchMaker batchMaker) throws StageException {
// Emits a single empty-map record carrying only the response headers (used
// for bodiless responses, e.g. HEAD), advances the offset by one, and
// returns the serialized new offset.
HttpSourceOffset sourceOffset = new HttpSourceOffset(
getResolvedUrl(),
currentParameterHash,
System.currentTimeMillis(),
getCurrentPage()
);
Record record = getContext().createRecord(sourceOffset + "::0");
addResponseHeaders(record.getHeader());
record.set(Field.create(new HashMap()));
batchMaker.addRecord(record);
recordCount++;
incrementSourceOffset(sourceOffset, 1);
lastRequestCompletedTime = System.currentTimeMillis();
return sourceOffset.toString();
} | java | {
q172235 | HttpClientSource.incrementSourceOffset | test | private void incrementSourceOffset(HttpSourceOffset sourceOffset, int increment) {
// Advances the pagination offset according to the configured mode.
switch (conf.pagination.mode) {
case BY_PAGE:
// Page-based pagination always advances by exactly one page.
sourceOffset.incrementStartAt(1);
break;
case BY_OFFSET:
// Offset-based pagination advances by the number of records read.
sourceOffset.incrementStartAt(increment);
break;
default:
// Other modes (e.g. link-based) do not track a numeric offset.
break;
}
} | java | {
q172236 | HttpClientSource.parsePaginatedResult | test | private int parsePaginatedResult(BatchMaker batchMaker, String sourceOffset, Record record) throws
StageException {
// Explodes the list at the configured result field path into one record per
// element, optionally retaining the envelope fields, and returns the number
// of sub-records emitted. Missing path -> HTTP_12; non-list value -> HTTP_08.
int numSubRecords = 0;
if (!record.has(conf.pagination.resultFieldPath)) {
final StageException stageException = new StageException(Errors.HTTP_12, conf.pagination.resultFieldPath);
LOG.error(stageException.getMessage());
throw stageException;
}
Field resultField = record.get(conf.pagination.resultFieldPath);
if (resultField.getType() != Field.Type.LIST) {
final StageException stageException = new StageException(Errors.HTTP_08, resultField.getType());
LOG.error(stageException.getMessage());
throw stageException;
}
List<Field> results = resultField.getValueAsList();
int subRecordIdx = 0;
for (Field result : results) {
Record r = getContext().createRecord(sourceOffset + "::" + subRecordIdx++);
if (conf.pagination.keepAllFields) {
// Keep the full envelope but replace the result list with this element.
r.set(record.get().clone());
r.set(conf.pagination.resultFieldPath, result);
} else {
r.set(result);
}
addResponseHeaders(r.getHeader());
batchMaker.addRecord(r);
++numSubRecords;
}
// Link-field pagination decides "more pages" elsewhere; otherwise an empty
// result list signals the end.
if (conf.pagination.mode != PaginationMode.LINK_FIELD) {
haveMorePages = numSubRecords > 0;
}
return numSubRecords;
} | java | {
q172237 | HttpClientSource.addResponseHeaders | test | private void addResponseHeaders(Record.Header header) {
// Copies HTTP response headers into the record header attributes.
// NOTE: only the FIRST value of a multi-valued header is preserved.
final MultivaluedMap<String, String> headers = getResponse().getStringHeaders();
if (headers == null) {
return;
}
for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
if (!entry.getValue().isEmpty()) {
String firstValue = entry.getValue().get(0);
header.setAttribute(entry.getKey(), firstValue);
}
}
} | java | {
"resource": ""
} |
q172238 | HttpClientSource.resolveHeaders | test | private MultivaluedMap<String, Object> resolveHeaders() throws StageException {
// Evaluates the configured request-header EL expressions and builds the
// header map for the next request. Each header key and its RAW (unresolved)
// configured value is also folded into 'hasher' — presumably to detect
// configuration changes between requests (TODO confirm against callers).
MultivaluedMap<String, Object> requestHeaders = new MultivaluedHashMap<>();
for (Map.Entry<String, String> entry : conf.headers.entrySet()) {
List<Object> header = new ArrayList<>(1);
Object resolvedValue = headerEval.eval(headerVars, entry.getValue(), String.class);
header.add(resolvedValue);
requestHeaders.put(entry.getKey(), header);
hasher.putString(entry.getKey(), Charset.forName(conf.dataFormatConfig.charset));
hasher.putString(entry.getValue(), Charset.forName(conf.dataFormatConfig.charset));
}
return requestHeaders;
} | java | {
"resource": ""
} |
q172239 | HttpClientSource.processResponse | test | private Optional<String> processResponse(long start, int maxRecords, BatchMaker batchMaker) throws
StageException {
// Turns the current HTTP response into records and returns the new source
// offset; an empty Optional means there was nothing to process (no response,
// or a non-2xx status that was routed to the error handler).
Optional<String> newSourceOffset = Optional.empty();
if (getResponse() == null) {
return newSourceOffset;
}
// Response was not in the OK range, so treat as an error
int status = getResponse().getStatus();
if (status < 200 || status >= 300) {
lastRequestCompletedTime = System.currentTimeMillis();
String reason = getResponse().getStatusInfo().getReasonPhrase();
String respString = getResponse().readEntity(String.class);
getResponse().close();
setResponse(null);
final String errorMsg = reason + " : " + respString;
LOG.warn(Errors.HTTP_01.getMessage(), status, errorMsg);
errorRecordHandler.onError(Errors.HTTP_01, status, errorMsg);
return newSourceOffset;
}
if (conf.pagination.mode == PaginationMode.LINK_HEADER) {
// Absence of a "next" link header terminates LINK_HEADER pagination.
next = getResponse().getLink("next");
if (next == null) {
haveMorePages = false;
}
}
if (getResponse().hasEntity()) {
newSourceOffset = Optional.of(parseResponse(start, maxRecords, batchMaker));
} else if ("HEAD".equals(conf.httpMethod.getLabel())) {
// Bug fix: the label must be compared with equals(), not '==' reference
// identity, which only worked by accident for interned literals.
// Handle HEAD only requests, which have no body, by creating a blank record for output with headers.
newSourceOffset = Optional.of(parseHeadersOnly(batchMaker));
}
return newSourceOffset;
} | java | {
"resource": ""
} |
q172240 | AvroConversionBaseMapper.propertyDefined | test | protected boolean propertyDefined(Configuration conf, String propertyName) {
// A property counts as defined only when it is present and differs from the
// defaults: empty string (String configs) and "-1" (integer configs).
String value = conf.get(propertyName);
if (value == null) {
return false;
}
return !(value.isEmpty() || "-1".equals(value));
} | java | {
"resource": ""
} |
q172241 | ShimUtil.convert | test | public static DataParserException convert(
com.streamsets.pipeline.lib.parser.DataParserException original
) {
// Translates a parser exception from the internal lib package into the
// shim's DataParserException, preserving the recoverable subtype (and its
// unparsed record) when present.
if(original instanceof com.streamsets.pipeline.lib.parser.RecoverableDataParserException) {
return new RecoverableDataParserException(
((com.streamsets.pipeline.lib.parser.RecoverableDataParserException) original).getUnparsedRecord(),
original.getErrorCode(),
original.getParams()
);
}
return new DataParserException(
original.getErrorCode(),
original.getParams()
);
} | java | {
"resource": ""
} |
q172242 | ShimUtil.convert | test | public static DataGeneratorException convert(
com.streamsets.pipeline.lib.generator.DataGeneratorException original
) {
// Translates a generator exception from the internal lib package into the
// shim's DataGeneratorException, carrying over error code and parameters.
return new DataGeneratorException(
original.getErrorCode(),
original.getParams()
);
} | java | {
"resource": ""
} |
q172243 | GoogleCloudCredentialsConfig.getCredentials | test | private Credentials getCredentials(Stage.Context context, List<Stage.ConfigIssue> issues) {
// Loads Google service-account credentials from the configured file path.
// A relative path is resolved against the stage's resources directory.
// On any failure a ConfigIssue is appended to 'issues' and null is returned.
Credentials credentials = null;
File credentialsFile;
if (Paths.get(path).isAbsolute()) {
credentialsFile = new File(path);
} else {
credentialsFile = new File(context.getResourcesDirectory(), path);
}
if (!credentialsFile.exists() || !credentialsFile.isFile()) {
LOG.error(GOOGLE_01.getMessage(), credentialsFile.getPath());
issues.add(context.createConfigIssue(
Groups.CREDENTIALS.name(), CONF_CREDENTIALS_CREDENTIALS_PROVIDER,
GOOGLE_01,
credentialsFile.getPath()
));
return null;
}
try (InputStream in = new FileInputStream(credentialsFile)) {
credentials = ServiceAccountCredentials.fromStream(in);
} catch (IOException | IllegalArgumentException e) {
LOG.error(GOOGLE_02.getMessage(), e);
issues.add(context.createConfigIssue(
Groups.CREDENTIALS.name(), CONF_CREDENTIALS_CREDENTIALS_PROVIDER,
GOOGLE_02
));
}
return credentials;
} | java | {
"resource": ""
} |
q172244 | PreviewApi.previewWithOverride | test | public PreviewInfoJson previewWithOverride (String pipelineId, List<StageOutputJson> stageOutputsToOverrideJson,
String rev, Integer batchSize, Integer batches, Boolean skipTargets,
String endStage, Long timeout) throws ApiException {
// Starts a pipeline preview whose stage outputs are overridden by the given
// list; POSTs to /v1/pipeline/{pipelineId}/preview with basic auth.
Object postBody = stageOutputsToOverrideJson;
byte[] postBinaryBody = null;
// verify the required parameter 'pipelineId' is set
if (pipelineId == null) {
throw new ApiException(400, "Missing the required parameter 'pipelineId' when calling previewWithOverride");
}
// verify the required parameter 'stageOutputsToOverrideJson' is set
if (stageOutputsToOverrideJson == null) {
throw new ApiException(400,
"Missing the required parameter 'stageOutputsToOverrideJson' when calling previewWithOverride");
}
// create path and map variables
String path = "/v1/pipeline/{pipelineId}/preview".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "pipelineId" + "\\}", apiClient.escapeString(pipelineId.toString()));
// query params
List<Pair> queryParams = new ArrayList<Pair>();
Map<String, String> headerParams = new HashMap<String, String>();
Map<String, Object> formParams = new HashMap<String, Object>();
queryParams.addAll(apiClient.parameterToPairs("", "rev", rev));
queryParams.addAll(apiClient.parameterToPairs("", "batchSize", batchSize));
queryParams.addAll(apiClient.parameterToPairs("", "batches", batches));
queryParams.addAll(apiClient.parameterToPairs("", "skipTargets", skipTargets));
queryParams.addAll(apiClient.parameterToPairs("", "endStage", endStage));
queryParams.addAll(apiClient.parameterToPairs("", "timeout", timeout));
final String[] accepts = {
"application/json"
};
final String accept = apiClient.selectHeaderAccept(accepts);
final String[] contentTypes = {
};
final String contentType = apiClient.selectHeaderContentType(contentTypes);
String[] authNames = new String[] { "basic" };
TypeRef returnType = new TypeRef<PreviewInfoJson>() {};
return apiClient.invokeAPI(path, "POST", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType);
} | java | {
"resource": ""
} |
q172245 | AbstractOverrunDelimitedReader.copyToBuffer | test | protected int copyToBuffer(StringBuilder s, int initialLen, int startChar, int currentChar) {
// Appends cb[startChar..currentChar) to 's', but never lets the portion of
// 's' added since 'initialLen' exceed 'maxLine' (maxLine == -1 disables the
// cap). Returns the number of characters that were dropped (the overrun),
// 0 when everything fit.
int overrun = 0;
int currentSize = s.length() - initialLen;
int readSize = currentChar - startChar;
if (maxLine > -1 && currentSize + readSize > maxLine) {
int adjustedReadSize = maxLine - currentSize;
if (adjustedReadSize > 0) {
s.append(cb, startChar, adjustedReadSize);
overrun = readSize - adjustedReadSize;
} else {
// Already at the cap: everything in this read is overrun.
overrun = readSize;
}
} else {
s.append(cb, startChar, readSize);
}
return overrun;
} | java | {
"resource": ""
} |
q172246 | ApplicationPackage.removeLogicalDuplicates | test | static void removeLogicalDuplicates(SortedSet<String> packages) {
Iterator<String> iterator = packages.iterator();
if (!iterator.hasNext()) {
return;
}
String last = iterator.next();
while (iterator.hasNext()) {
String current = iterator.next();
if (current.startsWith(last)) {
iterator.remove();
} else {
last = current;
}
}
} | java | {
"resource": ""
} |
q172247 | HdfsTarget.emptyBatch | test | protected void emptyBatch() throws StageException {
// Called for batches with no records: still advances the batch clock and
// purges idle writers (current and late) under the configured Hadoop UGI.
setBatchTime();
try {
hdfsTargetConfigBean.getUGI().doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
hdfsTargetConfigBean.getCurrentWriters().purge();
if (hdfsTargetConfigBean.getLateWriters() != null) {
hdfsTargetConfigBean.getLateWriters().purge();
}
return null;
}
});
} catch (Exception ex) {
throw throwStageException(ex);
}
} | java | {
"resource": ""
} |
q172248 | MultiLineLiveFileReader.findNextMainLine | test | int findNextMainLine(LiveFileChunk chunk, int startIdx) {
// Returns the index (>= startIdx) of the first line whose trimmed text
// matches the main-line pattern, or -1 when no such line exists.
List<FileLine> lines = chunk.getLines();
for (int i = startIdx; i < lines.size(); i++) {
if (pattern.matcher(lines.get(i).getText().trim()).matches()) {
return i;
}
}
return -1;
} | java | {
"resource": ""
} |
q172249 | MultiLineLiveFileReader.resolveChunk | test | LiveFileChunk resolveChunk(LiveFileChunk chunk) {
// Collapses continuation lines onto their preceding "main" line, buffering
// any trailing incomplete multi-line across calls in 'incompleteMultiLine'.
// Returns a new chunk of complete multi-lines, or null if none completed yet.
List<FileLine> completeLines = new ArrayList<>();
List<FileLine> chunkLines = chunk.getLines();
if (incompleteMultiLine.length() == 0) {
// Starting a fresh multi-line: remember where it begins.
incompleteMultiLineOffset = chunk.getOffset();
incompleteMultiLineTruncated = chunk.isTruncated();
}
incompleteMultiLineTruncated |= chunk.isTruncated();
int pos = 0;
int idx = findNextMainLine(chunk, pos);
// while we have main lines we keep adding/compacting into the new chunk
while (idx > -1) {
//any multi lines up to the next main line belong to the previous main line
for (int i = pos; i < idx; i++) {
incompleteMultiLine.append(chunkLines.get(i).getText());
}
// if we have incomplete lines, at this point they are a complete multiline, compact and add to new chunk lines
if (incompleteMultiLine.length() != 0) {
completeLines.add(new FileLine(incompleteMultiLineOffset, incompleteMultiLine.toString()));
incompleteMultiLineOffset += incompleteMultiLine.length();
// clear the incomplete multi lines as we just used them to create a full line
incompleteMultiLine.setLength(0);
incompleteMultiLineTruncated = false;
}
// add the current main line as incomplete as we still don't if it is a complete line
incompleteMultiLine.append(chunkLines.get(idx).getText());
// find the next main line
pos = idx + 1;
idx = findNextMainLine(chunk, pos);
}
// lets process the left over multi lines in the chunk after the last main line.
// if any they will kept to completed with lines from the next chunk.
for (int i = pos; i < chunkLines.size(); i++) {
incompleteMultiLine.append(chunkLines.get(i).getText());
}
if (completeLines.isEmpty()) {
// didn't get a complete multi line yet, we keep storing lines but return a null chunk
chunk = null;
} else {
// create a new chunk with all complete multi lines
chunk = new LiveFileChunk(chunk.getTag(), chunk.getFile(), chunk.getCharset(), completeLines,
incompleteMultiLineTruncated);
}
return chunk;
} | java | {
"resource": ""
} |
q172250 | ConnectionManager.closeConnection | test | public void closeConnection() {
// Closes and forgets the current thread's connection; also deregisters it
// from the set of connections closed during destroy() so it is not closed
// twice.
LOGGER.debug("Closing connection");
Connection connectionToRemove = threadLocalConnection.get();
jdbcUtil.closeQuietly(connectionToRemove);
if (connectionToRemove != null) {
synchronized (this) {
connectionsToCloseDuringDestroy.remove(connectionToRemove);
}
}
threadLocalConnection.set(null);
} | java | {
"resource": ""
} |
q172251 | FieldRenamerProcessor.escapeQuotedSubstring | test | private static String escapeQuotedSubstring(String input) {
// Re-quotes single-quoted segments of 'input', escaping any '|' inside them
// as "\|". Text outside quotes is copied verbatim. Uses split("'"), so
// segments alternate outside/inside quotes.
final String[] segments = input.split("'");
final StringBuilder result = new StringBuilder(input.length() * 2);
boolean insideQuotes = false;
for (String segment : segments) {
if (insideQuotes) {
result.append('\'').append(segment.replace("|", "\\|")).append('\'');
} else {
result.append(segment);
}
insideQuotes = !insideQuotes;
}
return result.toString();
} | java | {
"resource": ""
} |
q172252 | PubSubSource.getFlowControlSettings | test | private FlowControlSettings getFlowControlSettings() {
// Blocks (rather than errors) when the outstanding element limit is hit.
// The limit divides total capacity (batch size * threads) evenly across
// subscribers; note the (long) cast happens before the multiplications to
// avoid int overflow.
return FlowControlSettings.newBuilder()
.setLimitExceededBehavior(FlowController.LimitExceededBehavior.Block)
.setMaxOutstandingElementCount((long) conf.basic.maxBatchSize * conf.maxThreads / conf.advanced.numSubscribers)
.build();
} | java | {
"resource": ""
} |
q172253 | PubSubSource.getChannelProvider | test | private InstantiatingGrpcChannelProvider getChannelProvider() {
// Builds the gRPC channel provider, honoring a custom endpoint when one is
// configured (falls back to the Pub/Sub default endpoint otherwise).
return SubscriptionAdminSettings
.defaultGrpcTransportProviderBuilder()
.setMaxInboundMessageSize(MAX_INBOUND_MESSAGE_SIZE)
.setEndpoint(Strings.isNullOrEmpty(conf.advanced.customEndpoint) ? SubscriptionAdminSettings
.getDefaultEndpoint() : conf.advanced.customEndpoint)
.build();
} | java | {
"resource": ""
} |
q172254 | Producer.put | test | public Object put(OffsetAndResult<Map.Entry> batch) {
// Offers a batch to the data channel, spinning in 10ms slices while also
// draining producer-side control messages (so a consumer error is noticed
// even when the channel is full). Returns the expected offset: the key of
// the batch's last entry, or "EMPTY_BATCH" for an empty batch.
// Fails fast if a prior consumer/producer error was recorded.
if (consumerError != null) {
throw new RuntimeException(Utils.format("Consumer encountered error: {}", consumerError), consumerError);
}
if (producerError != null) {
throw new RuntimeException(Utils.format("Producer encountered error: {}", producerError), producerError);
}
try {
Object expectedOffset = "EMPTY_BATCH";
if (!batch.getResult().isEmpty()) {
expectedOffset = batch.getResult().get(batch.getResult().size() - 1).getKey(); // get the last one
}
while (!dataChannel.offer(batch, 10, TimeUnit.MILLISECONDS)) {
for (ControlChannel.Message controlMessage : controlChannel.getProducerMessages()) {
switch (controlMessage.getType()) {
case CONSUMER_ERROR:
Throwable throwable = (Throwable) controlMessage.getPayload();
consumerError = throwable;
throw new ConsumerRuntimeException(Utils.format("Consumer encountered error: {}", throwable), throwable);
default:
String msg = Utils.format("Illegal control message type: '{}'", controlMessage.getType());
throw new IllegalStateException(msg);
}
}
}
return expectedOffset;
} catch (Throwable throwable) {
// On any failure, mark the producer complete; only propagate a producer
// error over the control channel when the failure did not originate from
// the consumer (which already reported its own error).
controlChannel.producerComplete();
if (!(throwable instanceof ConsumerRuntimeException)) {
String msg = "Error caught in producer: " + throwable;
LOG.error(msg, throwable);
controlChannel.producerError(throwable);
if (producerError == null) {
producerError = throwable;
}
}
throw Throwables.propagate(throwable);
}
} | java | {
"resource": ""
} |
q172255 | GrokDictionary.compileExpression | test | public Grok compileExpression(final String expression) {
// Expands all %{NAME[:group]} references in 'expression' against the loaded
// dictionary and compiles the result into a Grok matcher. Requires the
// dictionary to have been bound first.
throwErrorIfDictionaryIsNotReady();
final String digestedExpression = digestExpressionAux(expression);
logger.debug("Digested [" + expression + "] into [" + digestedExpression + "] before compilation");
return new Grok(Pattern.compile(digestedExpression));
} | java | {
"resource": ""
} |
q172256 | GrokDictionary.digestExpressionAux | test | private String digestExpressionAux(String originalExpression) {
// Repeatedly replaces %{regexName[:groupName]} tokens with their dictionary
// regex; a ":groupName" suffix wraps the regex in a named capture group.
// Stops when no token remains or the next expansion itself still contains
// "%{" (deferred to the next call/iteration by the caller's loop).
final String PATTERN_START = "%{";
final String PATTERN_STOP = "}";
final char PATTERN_DELIMITER = ':';
while(true) {
int PATTERN_START_INDEX = originalExpression.indexOf(PATTERN_START);
int PATTERN_STOP_INDEX = originalExpression.indexOf(PATTERN_STOP, PATTERN_START_INDEX + PATTERN_START.length());
// End the loop is %{ or } is not in the current line
if (PATTERN_START_INDEX < 0 || PATTERN_STOP_INDEX < 0) {
break;
}
// Grab what's inside %{ }
String grokPattern = originalExpression.substring(PATTERN_START_INDEX + PATTERN_START.length(), PATTERN_STOP_INDEX);
// Where is the : character
int PATTERN_DELIMITER_INDEX = grokPattern.indexOf(PATTERN_DELIMITER);
String regexName = grokPattern;
String groupName = null;
if (PATTERN_DELIMITER_INDEX >= 0) {
regexName = grokPattern.substring(0, PATTERN_DELIMITER_INDEX);
groupName = grokPattern.substring(PATTERN_DELIMITER_INDEX + 1, grokPattern.length());
}
final String dictionaryValue = regexDictionary.get(regexName);
if (dictionaryValue == null) {
throw new GrokCompilationException("Missing value for regex name : " + regexName);
}
// Defer till next iteration
if (dictionaryValue.contains(PATTERN_START)) {
break;
}
String replacement = dictionaryValue;
// Named capture group
if (null != groupName) {
replacement = "(?<" + groupName + ">" + dictionaryValue + ")";
}
originalExpression = new StringBuilder(originalExpression).replace(PATTERN_START_INDEX, PATTERN_STOP_INDEX + PATTERN_STOP.length(), replacement).toString();
}
return originalExpression;
} | java | {
"resource": ""
} |
q172257 | GrokDictionary.addDictionary | test | public void addDictionary(final InputStream inputStream) {
// Loads dictionary entries from the stream, decoding as UTF-8. I/O errors
// are rethrown as GrokCompilationException. NOTE: unlike the Reader
// overload, the stream is not closed here.
try {
addDictionaryAux(new InputStreamReader(inputStream, "UTF-8"));
} catch (IOException e) {
throw new GrokCompilationException(e);
}
} | java | {
"resource": ""
} |
q172258 | GrokDictionary.addDictionary | test | public void addDictionary(Reader reader) {
// Loads dictionary entries from the reader, always closing it, and wraps
// I/O failures in GrokCompilationException.
try {
addDictionaryAux(reader);
} catch (IOException e) {
throw new GrokCompilationException(e);
} finally {
IOUtils.closeQuietly(reader);
}
} | java | {
"resource": ""
} |
q172259 | AggregatorDataProvider.stop | test | public Map<Aggregator, AggregatorData> stop() {
// Finalizes the provider: stamps every aggregator's data with the stop time
// and returns the aggregated view of the data windows. May only be called
// once, after start().
Utils.checkState(started, "Not started");
Utils.checkState(!stopped, "Already stopped");
stopped = true;
long currentTimeMillis = System.currentTimeMillis();
for(Map.Entry<Aggregator, AggregatorData> e : data.entrySet()) {
e.getValue().setTime(currentTimeMillis);
}
Map<Aggregator, AggregatorData> result = data;
result = aggregateDataWindows(result);
return result;
} | java | {
"resource": ""
} |
q172260 | AggregatorDataProvider.roll | test | public Map<Aggregator, AggregatorData> roll(long newDataWindowEndTimeMillis) {
// Rotates to a fresh data window ending at the given time: installs new
// per-aggregator data, closes the previous window with the old data, and
// appends the new window to the (sliding) window queue. Returns the
// aggregated view of the window that just ended.
Utils.checkState(started, "Not started");
Utils.checkState(!stopped, "Already stopped");
Map<Aggregator, AggregatorData> result = data;
Map<Aggregator, AggregatorData> newData = new ConcurrentHashMap<>();
for (Aggregator aggregator : aggregators) {
newData.put(aggregator, aggregator.createAggregatorData(newDataWindowEndTimeMillis));
}
data = newData;
Map<Aggregator, AggregatorData> oldData = result;
// In case of sliding window, aggregate the data windows to get the result
result = aggregateDataWindows(result);
if (currentDataWindow != null) {
currentDataWindow.setDataAndClose(oldData);
}
DataWindow newDataWindow = createDataWindow(newDataWindowEndTimeMillis);
synchronized (dataWindowQueue) {
// dataWindowList is a snapshot copy so readers never see the queue
// mid-mutation.
dataWindowQueue.add(newDataWindow);
dataWindowList = new ArrayList<>(dataWindowQueue);
}
currentDataWindow = newDataWindow;
return result;
} | java | {
"resource": ""
} |
q172261 | JSON.serialize | test | public String serialize(Object obj) throws ApiException {
// Serializes 'obj' to a JSON string via Jackson; returns null for a null
// input. Any failure is surfaced as a 400 ApiException.
try {
if (obj != null)
return mapper.writeValueAsString(obj);
else
return null;
} catch (Exception e) {
throw new ApiException(400, e.getMessage());
}
} | java | {
"resource": ""
} |
q172262 | JSON.deserialize | test | public <T> T deserialize(String body, TypeRef returnType) throws ApiException {
// Deserializes the JSON 'body' into the type described by 'returnType'.
// If parsing fails and the caller asked for String, the raw body is
// returned as-is (unchecked cast); otherwise a 500 ApiException carrying
// the body is thrown.
JavaType javaType = mapper.constructType(returnType.getType());
try {
return mapper.readValue(body, javaType);
} catch (IOException e) {
if (returnType.getType().equals(String.class))
return (T) body;
else
throw new ApiException(500, e.getMessage(), null, body);
}
} | java | {
"resource": ""
} |
q172263 | JSON.deserialize | test | public <T> T deserialize(File file, TypeRef returnType) throws ApiException {
// Deserializes the JSON content of 'file' into the type described by
// 'returnType'; any I/O or mapping failure becomes a 500 ApiException.
JavaType javaType = mapper.constructType(returnType.getType());
try {
return mapper.readValue(file, javaType);
} catch (IOException e) {
// Bug fix: error detail was the garbled string "File to read file".
throw new ApiException(500, e.getMessage(), null, "Failed to read file");
}
} | java | {
"resource": ""
} |
q172264 | SampleTarget.write | test | private void write(Record record) throws OnRecordErrorException {
// Example write path: rejects records missing /someField by routing them to
// error handling via OnRecordErrorException.
// This is a contrived example, normally you may be performing an operation that could throw
// an exception or produce an error condition. In that case you can throw an OnRecordErrorException
// to send this record to the error pipeline with some details.
if (!record.has("/someField")) {
throw new OnRecordErrorException(Errors.SAMPLE_01, record, "exception detail message.");
}
// TODO: write the records to your final destination
} | java | {
"resource": ""
} |
q172265 | HiveMetastoreUtil.resolveEL | test | public static String resolveEL(ELEval elEval, ELVars variables, String val) throws ELEvalException {
// Evaluates the EL expression 'val' with the given variables, returning the
// String result.
return elEval.eval(variables, val, String.class);
} | java | {
"resource": ""
} |
q172266 | HiveMetastoreUtil.generateInnerFieldFromTheList | test | private static <T> Field generateInnerFieldFromTheList(
LinkedHashMap<String, T> original,
String innerPairFirstFieldName,
String innerPairSecondFieldName,
boolean isSecondFieldHiveType
) throws HiveStageCheckedException {
// Converts an ordered name->value map into a LIST field of two-entry
// list-maps (name under innerPairFirstFieldName, value under
// innerPairSecondFieldName). When isSecondFieldHiveType is true, the value
// is a HiveTypeInfo rendered via its type-specific support; otherwise it is
// the value's toString(). Returns null for an empty input map.
List<Field> columnList = new LinkedList<>();
for(Map.Entry<String,T> pair: original.entrySet()) {
LinkedHashMap<String, Field> entry = new LinkedHashMap<>();
entry.put(innerPairFirstFieldName, Field.create(pair.getKey()));
if (isSecondFieldHiveType){
HiveTypeInfo hiveTypeInfo = (HiveTypeInfo) pair.getValue();
entry.put(
innerPairSecondFieldName,
hiveTypeInfo.getHiveType().getSupport().generateHiveTypeInfoFieldForMetadataRecord(hiveTypeInfo)
);
} else {
entry.put(innerPairSecondFieldName, Field.create(pair.getValue().toString())); //stored value is "INT". need to fix this
}
columnList.add(Field.createListMap(entry));
}
return !columnList.isEmpty() ? Field.create(columnList) : null;
} | java | {
"resource": ""
} |
q172267 | HiveMetastoreUtil.getTableName | test | public static String getTableName(Record metadataRecord) throws HiveStageCheckedException {
// Extracts the table name from a metadata record; fails with HIVE_17 when
// the field is absent.
String fieldPath = SEP + TABLE_FIELD;
if (!metadataRecord.has(fieldPath)) {
throw new HiveStageCheckedException(Errors.HIVE_17, TABLE_FIELD, metadataRecord);
}
return metadataRecord.get(fieldPath).getValueAsString();
} | java | {
"resource": ""
} |
q172268 | HiveMetastoreUtil.getDatabaseName | test | public static String getDatabaseName(Record metadataRecord) throws HiveStageCheckedException {
// Extracts the database name, substituting the default database for an
// empty value; fails with HIVE_17 when the field is absent.
if (metadataRecord.has(SEP + DATABASE_FIELD)) {
String dbName = metadataRecord.get(SEP + DATABASE_FIELD).getValueAsString();
return dbName.isEmpty()? DEFAULT_DBNAME : dbName;
}
throw new HiveStageCheckedException(Errors.HIVE_17, DATABASE_FIELD, metadataRecord);
} | java | {
} |
q172269 | HiveMetastoreUtil.getInternalField | test | public static boolean getInternalField(Record metadataRecord) throws HiveStageCheckedException{
// Extracts the 'internal' flag from a metadata record; fails with HIVE_17
// when the field is absent.
String fieldPath = SEP + INTERNAL_FIELD;
if (!metadataRecord.has(fieldPath)) {
throw new HiveStageCheckedException(Errors.HIVE_17, INTERNAL_FIELD, metadataRecord);
}
return metadataRecord.get(fieldPath).getValueAsBoolean();
} | java | {
"resource": ""
} |
q172270 | HiveMetastoreUtil.getLocation | test | public static String getLocation(Record metadataRecord) throws HiveStageCheckedException{
// Extracts the storage location from a metadata record; fails with HIVE_17
// when the field is absent.
String fieldPath = SEP + LOCATION_FIELD;
if (!metadataRecord.has(fieldPath)) {
throw new HiveStageCheckedException(Errors.HIVE_17, LOCATION_FIELD, metadataRecord);
}
return metadataRecord.get(fieldPath).getValueAsString();
} | java | {
} |
q172271 | HiveMetastoreUtil.getCustomLocation | test | public static boolean getCustomLocation(Record metadataRecord) throws HiveStageCheckedException{
// Extracts the custom-location flag. Records with schema version < 3
// predate the field and get the default; newer records must carry it or
// HIVE_17 is raised.
if (metadataRecord.get(SEP + VERSION).getValueAsInteger() < 3) {
return DEFAULT_CUSTOM_LOCATION;
}
if (metadataRecord.has(SEP + CUSTOM_LOCATION)) {
return metadataRecord.get(SEP + CUSTOM_LOCATION).getValueAsBoolean();
}
throw new HiveStageCheckedException(Errors.HIVE_17, CUSTOM_LOCATION, metadataRecord);
} | java | {
} |
q172272 | HiveMetastoreUtil.getAvroSchema | test | public static String getAvroSchema(Record metadataRecord) throws HiveStageCheckedException{
// Extracts the Avro schema string from a metadata record; fails with
// HIVE_17 when the field is absent.
String fieldPath = SEP + AVRO_SCHEMA;
if (!metadataRecord.has(fieldPath)) {
throw new HiveStageCheckedException(Errors.HIVE_17, AVRO_SCHEMA, metadataRecord);
}
return metadataRecord.get(fieldPath).getValueAsString();
} | java | {
} |
q172273 | HiveMetastoreUtil.getDataFormat | test | public static String getDataFormat(Record metadataRecord) throws HiveStageCheckedException {
// Extracts the data format. Version-1 records predate the field and get
// the default format; newer records must carry it or HIVE_17 is raised.
if (metadataRecord.get(SEP + VERSION).getValueAsInteger() == 1) {
return DEFAULT_DATA_FORMAT;
}
if (metadataRecord.has(SEP + DATA_FORMAT)) {
return metadataRecord.get(SEP + DATA_FORMAT).getValueAsString();
}
throw new HiveStageCheckedException(Errors.HIVE_17, DATA_FORMAT, metadataRecord);
} | java | {
"resource": ""
} |
q172274 | HiveMetastoreUtil.newSchemaMetadataFieldBuilder | test | public static Field newSchemaMetadataFieldBuilder (
String database,
String tableName,
LinkedHashMap<String, HiveTypeInfo> columnList,
LinkedHashMap<String, HiveTypeInfo> partitionTypeList,
boolean internal,
String location,
String avroSchema,
HMPDataFormat dataFormat
) throws HiveStageCheckedException {
// Builds the list-map Field for a TABLE schema-change metadata record:
// version, record type, db/table identity, location, data format, ordered
// column definitions, optional partition definitions, the internal flag,
// and the Avro schema text.
LinkedHashMap<String, Field> metadata = new LinkedHashMap<>();
metadata.put(VERSION, Field.create(SCHEMA_CHANGE_METADATA_RECORD_VERSION));
metadata.put(METADATA_RECORD_TYPE, Field.create(MetadataRecordType.TABLE.name()));
metadata.put(DATABASE_FIELD, Field.create(database));
metadata.put(TABLE_FIELD, Field.create(tableName));
metadata.put(LOCATION_FIELD, Field.create(location));
metadata.put(DATA_FORMAT, Field.create(dataFormat.name()));
//fill in column type list here
metadata.put(
COLUMNS_FIELD,
generateInnerFieldFromTheList(
columnList,
COLUMN_NAME,
TYPE_INFO,
true
)
);
//fill in partition type list here
if (partitionTypeList != null && !partitionTypeList.isEmpty()) {
metadata.put(
PARTITION_FIELD,
generateInnerFieldFromTheList(
partitionTypeList,
PARTITION_NAME,
TYPE_INFO,
true
)
);
}
metadata.put(INTERNAL_FIELD, Field.create(internal));
metadata.put(AVRO_SCHEMA, Field.create(avroSchema));
return Field.createListMap(metadata);
} | java | {
"resource": ""
} |
q172275 | HiveMetastoreUtil.validatePartitionInformation | test | public static void validatePartitionInformation(
TypeInfoCacheSupport.TypeInfo typeInfo,
LinkedHashMap<String, String> partitionValMap,
String qualifiedTableName
) throws HiveStageCheckedException {
// Verifies the record's partition names match Hive's partition columns for
// the table exactly (same names, same count); raises HIVE_27 on mismatch.
Set<String> partitionNamesInHive = typeInfo.getPartitionTypeInfo().keySet();
Set<String> partitionNames = partitionValMap.keySet();
if (!(partitionNamesInHive.size() == partitionNames.size()
&& partitionNamesInHive.containsAll(partitionNames))) {
LOG.error(Utils.format(
"Partition mismatch. In Hive: {}, In Record : {}",
partitionNamesInHive.size(),
partitionNames.size())
);
throw new HiveStageCheckedException(Errors.HIVE_27, qualifiedTableName);
}
} | java | {
"resource": ""
} |
q172276 | HiveMetastoreUtil.generatePartitionPath | test | public static String generatePartitionPath(LinkedHashMap<String, String> partitions) {
// Renders the ordered partition name/value pairs into a path suffix using
// the PARTITION_PATH format, preserving insertion order.
StringBuilder path = new StringBuilder();
partitions.forEach((name, value) -> path.append(String.format(PARTITION_PATH, name, value)));
return path.toString();
} | java | {
"resource": ""
} |
q172277 | HiveMetastoreUtil.serializeSchemaToHDFS | test | public static String serializeSchemaToHDFS(
UserGroupInformation loginUGI,
final FileSystem fs,
final String location,
final String schemaFolder,
final String databaseName,
final String tableName,
final String schemaJson
) throws StageException {
// Writes the Avro schema JSON to a uniquely named file under the schema
// folder (absolute, or relative to 'location') as the given UGI, creating
// the folder if needed. Returns the full path written. A pre-existing file
// of the same name (possible only with two concurrently writing HMS
// targets) is treated as an error. Failures surface as HIVE_18.
String folderLocation;
if (schemaFolder.startsWith(SEP)) {
folderLocation = schemaFolder;
} else {
folderLocation = location + SEP + schemaFolder;
}
final Path schemasFolderPath = new Path(folderLocation);
final String path = folderLocation + SEP + String.format(
AVRO_SCHEMA_FILE_FORMAT,
databaseName,
tableName,
UUID.randomUUID().toString()
);
try {
loginUGI.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception{
if (!fs.exists(schemasFolderPath)) {
fs.mkdirs(schemasFolderPath);
}
Path schemaFilePath = new Path(path);
//This will never happen unless two HMS targets are writing, we will error out for this
//and let user handle this via error record handling.
if (!fs.exists(schemaFilePath)) {
try (FSDataOutputStream os = fs.create(schemaFilePath)) {
byte []schemaBytes = schemaJson.getBytes("UTF-8");
os.write(schemaBytes, 0, schemaBytes.length);
}
} else {
LOG.error(Utils.format("Already schema file {} exists in HDFS", path));
throw new IOException("Already schema file exists");
}
return null;
}
});
} catch (Exception e) {
LOG.error("Error in Writing Schema to HDFS: " + e.toString(), e);
throw new StageException(Errors.HIVE_18, path, e.getMessage());
}
return path;
} | java | {
"resource": ""
} |
q172278 | JdbcGenericRecordWriter.setParameters | test | @VisibleForTesting
@SuppressWarnings("unchecked")
int setParameters(
int opCode,
SortedMap<String, String> columnsToParameters,
final Record record,
final Connection connection,
PreparedStatement statement
) throws OnRecordErrorException {
// Binds the statement's parameters for the given CDC operation: column
// values for INSERT/UPDATE, then primary-key values in the WHERE clause for
// UPDATE/DELETE. Returns the next unused 1-based parameter index.
int paramIdx = 1;
// Set columns and their value in query. No need to perform this for delete operation.
if(opCode != OperationType.DELETE_CODE) {
paramIdx = setParamsToStatement(paramIdx, statement, columnsToParameters, record, connection, opCode);
}
// Set primary keys in WHERE clause for update and delete operations
if(opCode != OperationType.INSERT_CODE){
paramIdx = setPrimaryKeys(paramIdx, record, statement, opCode);
}
return paramIdx;
} | java | {
"resource": ""
} |
q172279 | SystemApi.getConfiguration | test | public Map<String, Object> getConfiguration () throws ApiException {
// GETs /v1/system/configuration (basic auth, JSON) and returns the server
// configuration as a map.
Object postBody = null;
byte[] postBinaryBody = null;
// create path and map variables
String path = "/v1/system/configuration".replaceAll("\\{format\\}","json");
// query params
List<Pair> queryParams = new ArrayList<Pair>();
Map<String, String> headerParams = new HashMap<String, String>();
Map<String, Object> formParams = new HashMap<String, Object>();
final String[] accepts = {
"application/json"
};
final String accept = apiClient.selectHeaderAccept(accepts);
final String[] contentTypes = {
};
final String contentType = apiClient.selectHeaderContentType(contentTypes);
String[] authNames = new String[] { "basic" };
TypeRef returnType = new TypeRef<Map<String, Object>>() {};
return apiClient.invokeAPI(path, "GET", queryParams, postBody, postBinaryBody, headerParams, formParams, accept,
contentType, authNames, returnType);
} | java | {
"resource": ""
} |
q172280 | SyslogDecoder.parseRfc3164Time | test | public static long parseRfc3164Time(String ts) throws OnRecordErrorException {
// Parses an RFC 3164 syslog timestamp (month, day, time — no year) into
// epoch millis, guessing the year via an 11-months-back / 1-month-forward
// window around "now". The instant is computed at UTC offset. Unparseable
// input raises SYSLOG_10.
LocalDateTime now = LocalDateTime.now();
int year = now.getYear();
// RFC 3164 pads single-digit days with a space; collapse it for parsing.
ts = TWO_SPACES.matcher(ts).replaceFirst(" ");
LocalDateTime date;
try {
MonthDay monthDay = MonthDay.parse(ts, rfc3164Format);
LocalTime time = LocalTime.parse(ts, rfc3164Format);
// this is overly complicated because of the way Java 8 Time API works, as compared to Joda
// essentially, we just want to pull year out of "now" and set all other fields based on
// what was parsed
date = now;
// zero out millis since we aren't actually parsing those
date = date.with(ChronoField.MILLI_OF_SECOND, 0);
// set month and day of month from parsed
date = date.withMonth(monthDay.getMonthValue()).withDayOfMonth(monthDay.getDayOfMonth());
// set time fields from parsed
date = date.withHour(time.getHour()).withMinute(time.getMinute()).withSecond(time.getSecond());
} catch (DateTimeParseException e) {
throw new OnRecordErrorException(Errors.SYSLOG_10, ts, e);
}
// The RFC3164 is a bit weird date format - it contains day and month, but no year. So we have to somehow guess
// the year. The current logic is to provide a sliding window - going 11 months to the past and 1 month to the
// future. If the message is outside of this window, it will have incorrectly guessed year. We go 11 months to the
// past as we're expecting that more messages will be from the past (syslog usually contains historical data).
LocalDateTime fixed = date;
if (fixed.isAfter(now) && fixed.minusMonths(1).isAfter(now)) {
fixed = date.withYear(year - 1);
} else if (fixed.isBefore(now) && fixed.plusMonths(11).isBefore(now)) {
fixed = date.withYear(year + 1);
}
date = fixed;
return date.toInstant(ZoneOffset.UTC).toEpochMilli();
} | java | {
"resource": ""
} |
q172281 | Utils.format | test | public static String format(String template, Object... args) {
// Substitutes positional args into a "{}"-style template, caching the
// pre-split template. When fewer args than placeholders are supplied, the
// TOKEN marker is emitted for the missing positions; extra args are
// silently ignored.
String[] templateArr = TEMPLATES.get(template);
if (templateArr == null) {
// we may have a race condition here but the end result is idempotent
templateArr = prepareTemplate(template);
TEMPLATES.put(template, templateArr);
}
StringBuilder sb = new StringBuilder(template.length() * 2);
for (int i = 0; i < templateArr.length; i++) {
sb.append(templateArr[i]);
if (args != null && (i < templateArr.length - 1)) {
sb.append((i < args.length) ? args[i] : TOKEN);
}
}
return sb.toString();
} | java | {
"resource": ""
} |
q172282 | StageRuntime.startBatch | test | @Override
public final BatchContext startBatch() {
// Starts a batch via the push-source delegate while the thread-context classloader
// is temporarily switched to the main classloader; runs inside a privileged action
// so restricted stage code cannot block the classloader swap.
// Fixed: raw PrivilegedAction replaced with the typed form, eliminating the
// unchecked cast of the doPrivileged() result.
return AccessController.doPrivileged((PrivilegedAction<BatchContext>) () -> {
try {
Thread.currentThread().setContextClassLoader(mainClassLoader);
return (BatchContext) pushSourceContextDelegate.startBatch();
} finally {
// Always restore the stage's own classloader, even if startBatch() throws.
Thread.currentThread().setContextClassLoader(getDefinition().getStageClassLoader());
}
});
} | java | {
"resource": ""
} |
q172283 | SolrTarget.checkRecordContainsSolrFields | test | private boolean checkRecordContainsSolrFields(
Map<String, Field> recordFieldMap,
Record record,
List<String> solrFieldsMap,
Errors errorToThrow
) throws StageException {
// Verifies the record contains every required Solr field. On a miss, routes the
// record to error handling with the comma-joined missing field names and returns
// false; returns true when all required fields are present.
// Fixed: removed dead commented-out loop; replaced the O(record*required)
// List.contains scans and size comparison (which misfires when the required list
// contains duplicates) with a single set-difference pass.
Set<String> missingFields = new HashSet<>(solrFieldsMap);
missingFields.removeAll(recordFieldMap.keySet());
if (!missingFields.isEmpty()) {
handleError(record, errorToThrow, Joiner.on(",").join(missingFields));
return false;
}
return true;
} | java | {
"resource": ""
} |
q172284 | SolrTarget.filterAutogeneratedFieldNames | test | private List<String> filterAutogeneratedFieldNames(List<String> fieldNames) {
// Returns the given field names minus any present in autogeneratedFieldNamesMap,
// preserving the original order.
List<String> kept = new ArrayList<>();
for (String fieldName : fieldNames) {
if (!autogeneratedFieldNamesMap.contains(fieldName)) {
kept.add(fieldName);
}
}
return kept;
} | java | {
"resource": ""
} |
q172285 | SolrTarget.sendOnRecordErrorExceptionToHandler | test | private void sendOnRecordErrorExceptionToHandler(Record record, Errors error, StageException ex)
throws StageException {
// Wraps the stage exception in an OnRecordErrorException keyed by the record's
// source id and hands it to the configured error-record handler.
OnRecordErrorException wrapped = new OnRecordErrorException(
record,
error,
record.getHeader().getSourceId(),
ex.toString(),
ex
);
errorRecordHandler.onError(wrapped);
} | java | {
"resource": ""
} |
q172286 | DependencyParser.parseJarName | test | public static Optional<Dependency> parseJarName(String sourceName, String jarName) {
// Resolves a jar file name to a (name, version) Dependency: first via the
// special-case table, then by trying each known file-name pattern in order.
// Returns empty when no rule recognizes the name.
if (SPECIAL_CASES.containsKey(jarName)) {
Dependency special = SPECIAL_CASES.get(jarName);
return Optional.of(new Dependency(sourceName, special.getName(), special.getVersion()));
}
for (Pattern pattern : PATTERNS) {
Matcher matcher = pattern.matcher(jarName);
if (matcher.matches()) {
LOG.trace("Applied pattern '{}' to {}", pattern.pattern(), jarName);
return Optional.of(new Dependency(sourceName, matcher.group(1), matcher.group(2)));
}
}
// No pattern recognized this jar name.
return Optional.empty();
} | java | {
"resource": ""
} |
q172287 | DependencyParser.parseURL | test | public static Optional<Dependency> parseURL(URL url) {
// Extracts the jar file name from the URL's path and delegates to parseJarName,
// using the full URL string as the dependency's source name.
String fileName = Paths.get(url.getPath()).getFileName().toString();
return parseJarName(url.toString(), fileName);
} | java | {
"resource": ""
} |
q172288 | CouchbaseConnector.getInstance | test | public static synchronized CouchbaseConnector getInstance(BaseCouchbaseConfig config, List<Stage.ConfigIssue> issues, Stage.Context context) {
// Returns the per-runner singleton connector, creating and caching it in the
// stage-runner shared map on first call. Config is validated only on creation; if
// the issues list is non-empty after validation, no instance is cached and this
// method returns null (callers must check 'issues').
Map<String, Object> runnerSharedMap = context.getStageRunnerSharedMap();
if(runnerSharedMap.containsKey(INSTANCE)) {
LOG.debug("Using existing instance of CouchbaseConnector");
} else {
LOG.debug("CouchbaseConnector not yet instantiated. Creating new instance");
validateConfig(config, issues, context);
if(issues.isEmpty()) {
runnerSharedMap.put(INSTANCE, new CouchbaseConnector(config, issues, context));
}
}
return (CouchbaseConnector) runnerSharedMap.get(INSTANCE);
} | java | {
"resource": ""
} |
q172289 | CouchbaseConnector.close | test | public synchronized void close() {
// Idempotently releases Couchbase resources in dependency order:
// bucket first, then the cluster connection, then the shared environment.
if(!isClosed) {
if(bucket != null) {
LOG.debug("Closing Couchbase bucket");
bucket.close();
}
if(cluster != null) {
LOG.debug("Disconnecting Couchbase cluster");
cluster.disconnect();
}
if(env != null) {
LOG.debug("Shutting down Couchbase environment");
env.shutdown();
}
// Explicitly shutdown the RxJava scheduler threads. Not doing so will leak threads when a pipeline stops.
// Note: this disallows restarting scheduler threads without also explicitly calling Schedulers.start()
// LOG.debug("Stopping RxJava schedulers");
// Schedulers.shutdown();
isClosed = true;
}
} | java | {
"resource": ""
} |
q172290 | CouchbaseConnector.validateConfig | test | private static void validateConfig(BaseCouchbaseConfig config, List<Stage.ConfigIssue> issues, Stage.Context context){
// Collects configuration problems into 'issues' instead of throwing, so all
// problems can be reported at once.
// Connection settings: node list must be set, all timeouts non-negative.
if(config.couchbase.nodes == null) {
issues.add(context.createConfigIssue(Groups.COUCHBASE.name(), "config.couchbase.nodes", Errors.COUCHBASE_29));
}
if(config.couchbase.kvTimeout < 0) {
issues.add(context.createConfigIssue(Groups.COUCHBASE.name(), "config.couchbase.kvTimeout", Errors.COUCHBASE_30));
}
if(config.couchbase.connectTimeout < 0) {
issues.add(context.createConfigIssue(Groups.COUCHBASE.name(), "config.couchbase.connectTimeout", Errors.COUCHBASE_31));
}
if(config.couchbase.disconnectTimeout < 0) {
issues.add(context.createConfigIssue(Groups.COUCHBASE.name(), "config.couchbase.disconnectTimeout", Errors.COUCHBASE_32));
}
// TLS init performs its own validation, appending to the same issues list.
if(config.couchbase.tls.tlsEnabled) {
config.couchbase.tls.init(context, Groups.COUCHBASE.name(), "config.couchbase.tls.", issues);
}
// Credentials: an auth version is required; USER auth also needs name and password.
if(config.credentials.version == null) {
issues.add(context.createConfigIssue(Groups.CREDENTIALS.name(), "config.credentials.version", Errors.COUCHBASE_33));
}
if(config.credentials.version == AuthenticationType.USER) {
if(config.credentials.userName == null) {
issues.add(context.createConfigIssue(Groups.CREDENTIALS.name(), "config.credentials.userName", Errors.COUCHBASE_34));
}
if(config.credentials.userPassword == null) {
issues.add(context.createConfigIssue(Groups.CREDENTIALS.name(), "config.credentials.userPassword", Errors.COUCHBASE_35));
}
}
} | java | {
"resource": ""
} |
q172291 | OperationType.getLabelFromStringCode | test | public static String getLabelFromStringCode(String code) throws NumberFormatException {
// Parses the numeric operation code string and returns its label; rethrows a
// NumberFormatException with a clearer message when the code is not numeric.
try {
return getLabelFromIntCode(Integer.parseInt(code));
} catch (NumberFormatException ex) {
throw new NumberFormatException(
String.format("%s but received '%s'","operation code must be numeric", code)
);
}
} | java | {
"resource": ""
} |
q172292 | ConfigDefinitionExtractor.verifyDependencyExists | test | private void verifyDependencyExists(
Map<String, ConfigDefinition> definitionsMap,
ConfigDefinition def,
String dependsOnKey,
Object contextMsg
) {
// Fails fast (IllegalStateException via Preconditions) when a config definition
// declares a dependsOn key that is not among the extracted definitions.
String message = Utils.format(
"Error while processing {} ConfigDef='{}'. Dependency='{}' does not exist.",
contextMsg, def.getName(), dependsOnKey);
Preconditions.checkState(definitionsMap.containsKey(dependsOnKey), message);
} | java | {
"resource": ""
} |
q172293 | Vault.read | test | public String read(String path, String key, long delay) {
// Reads one value from a Vault secret, caching the secret per path and recording
// its lease expiry for later renewal/purging. 'delay' is a pause (millis) applied
// only after the first fetch of a path.
if (!secrets.containsKey(path)) {
VaultClient vault = new VaultClient(getConfig());
Secret secret;
try {
secret = vault.logical().read(path);
} catch (VaultException e) {
LOG.error(e.toString(), e);
throw new VaultRuntimeException(e.toString());
}
// Record the expiration date of this lease
String leaseId;
if (secret.isRenewable()) {
// Only renewable secrets seem to have a leaseId
leaseId = secret.getLeaseId();
} else {
// So for non-renewable secrets we'll store the path with an extra / so that we can purge them correctly.
leaseId = path + "/";
}
// NOTE(review): getLeaseDuration() * 1000 may overflow if the duration is an int
// with a very large value - confirm the SDK's return type/range.
leases.put(leaseId, System.currentTimeMillis() + (secret.getLeaseDuration() * 1000));
secrets.put(path, secret);
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
// Preserve the interrupt flag for callers; the read itself still completes.
Thread.currentThread().interrupt();
}
}
Map<String, Object> data = secrets.get(path).getData();
String value = getSecretValue(data, key).orElseThrow(() -> new VaultRuntimeException("Value not found for key"));
LOG.trace("CredentialStore '{}' Vault, retrieved value for key '{}'", csId, key);
return value;
} | java | {
"resource": ""
} |
q172294 | CouchbaseTarget.getOperationFromHeader | test | private WriteOperationType getOperationFromHeader(Record record, String key) {
// Maps the record's CDC operation-type header to a Couchbase write operation.
// Returns null when the record should be skipped (discarded, or already routed
// to error via handleError).
String op = record.getHeader().getAttribute(OperationType.SDC_OPERATION_TYPE);
if (op == null || op.isEmpty()) {
// No CDC header: fall back to the stage's configured default operation.
return config.defaultWriteOperation;
}
int opCode;
try {
opCode = Integer.parseInt(op);
} catch(NumberFormatException e) {
LOG.debug("Unparsable CDC operation. Sending record to error.");
handleError(record, Errors.COUCHBASE_08, e);
return null;
}
switch (opCode) {
case OperationType.INSERT_CODE:
return WriteOperationType.INSERT;
case OperationType.UPDATE_CODE:
// CDC "update" maps to a Couchbase full-document REPLACE.
return WriteOperationType.REPLACE;
case OperationType.UPSERT_CODE:
return WriteOperationType.UPSERT;
case OperationType.DELETE_CODE:
return WriteOperationType.DELETE;
default:
// Numeric but unsupported operation: behavior is configurable.
switch (config.unsupportedOperation) {
case DISCARD:
LOG.debug("Unsupported CDC operation for key: {}. Discarding record per configuration.", key);
return null;
case TOERROR:
LOG.debug("Unsupported CDC operation for key: {}. Sending record to error configuration.", key);
handleError(record, Errors.COUCHBASE_09, new RuntimeException());
return null;
default:
LOG.debug("Unsupported CDC operation for key: {}. Using default write operation per configuration.", key);
return config.defaultWriteOperation;
}
}
} | java | {
"resource": ""
} |
q172295 | CouchbaseTarget.writeDoc | test | private Observable<AbstractDocument> writeDoc(String key, int ttl, long cas, ByteArrayOutputStream baos, Record record) {
// Builds a Couchbase document from the serialized record bytes and performs the
// CDC-derived write (insert/replace/upsert/delete) with the configured durability
// and key-value timeout. Returns an empty Observable for records to be skipped.
WriteOperationType opType = getOperationFromHeader(record, key);
if(opType == null) {
// Skipped or already routed to error by getOperationFromHeader.
return Observable.empty();
}
AbstractDocument doc;
if(config.dataFormat == DataFormat.JSON) {
try {
// Decode the serialized bytes using the configured charset before JSON parsing.
doc = JsonDocument.create(key, ttl, JsonObject.fromJson(baos.toString(config.dataFormatConfig.charset)), cas);
} catch(Exception e) {
// Malformed JSON (or bad charset): route the record to error handling.
return handleError(record, Errors.COUCHBASE_10, e);
}
} else {
// Non-JSON formats are stored as raw bytes.
doc = ByteArrayDocument.create(key, ttl, baos.toByteArray(), cas);
}
switch (opType) {
case DELETE: {
LOG.debug("DELETE key: {}, TTL: {}, CAS: {}", key, ttl, cas);
return connector.bucket().remove(doc, config.persistTo, config.replicateTo)
.timeout(config.couchbase.kvTimeout, TimeUnit.MILLISECONDS);
}
case INSERT: {
LOG.debug("INSERT key: {}, TTL: {}, CAS: {}", key, ttl, cas);
return connector.bucket().insert(doc, config.persistTo, config.replicateTo)
.timeout(config.couchbase.kvTimeout, TimeUnit.MILLISECONDS);
}
case REPLACE: {
LOG.debug("REPLACE key: {}, TTL: {}, CAS: {}", key, ttl, cas);
return connector.bucket().replace(doc, config.persistTo, config.replicateTo)
.timeout(config.couchbase.kvTimeout, TimeUnit.MILLISECONDS);
}
case UPSERT: {
LOG.debug("UPSERT key: {}, TTL: {}, CAS: {}", key, ttl, cas);
return connector.bucket().upsert(doc, config.persistTo, config.replicateTo)
.timeout(config.couchbase.kvTimeout, TimeUnit.MILLISECONDS);
}
default:
return Observable.empty();
}
} | java | {
"resource": ""
} |
q172296 | CouchbaseTarget.buildSubdocMutation | test | private Observable<DocumentFragment<Mutation>> buildSubdocMutation(AsyncMutateInBuilder mutation, int ttl, long cas,
boolean upsertDoc) {
// Applies the common write options (document-upsert flag, expiry, CAS, durability)
// to the sub-document mutation, then executes it under the configured KV timeout.
AsyncMutateInBuilder configured = mutation
.upsertDocument(upsertDoc)
.withExpiry(ttl)
.withCas(cas)
.withDurability(config.persistTo, config.replicateTo);
return configured
.execute()
.timeout(config.couchbase.kvTimeout, TimeUnit.MILLISECONDS);
} | java | {
"resource": ""
} |
q172297 | H2SchemaWriter.makeAlterTableSqlString | test | @Override
protected String makeAlterTableSqlString(
String schema, String tableName, LinkedHashMap<String, JdbcTypeInfo> columnDiff
) {
// Emits one "ALTER TABLE [schema.]table ADD COLUMN name type;" statement per new
// column, newline-separated, in columnDiff's insertion order. Falls back to the
// default schema when none is given; omits the qualifier if both are null.
String effectiveSchema = (schema == null) ? getDefaultSchema() : schema;
StringBuilder statements = new StringBuilder();
for (Map.Entry<String, JdbcTypeInfo> column : columnDiff.entrySet()) {
if (statements.length() > 0) {
statements.append("\n");
}
statements
.append(ALTER_TABLE)
.append(" ");
if (effectiveSchema != null) {
statements.append(effectiveSchema).append(".");
}
statements
.append(tableName)
.append(" ")
.append("ADD COLUMN")
.append(" ")
.append(column.getKey())
.append(" ")
.append(column.getValue().toString())
.append(";");
}
return statements.toString();
} | java | {
"resource": ""
} |
q172298 | KerberosLogin.configure | test | public void configure(Map<String, ?> configs, final String loginContextName) {
// Caches the Kerberos-related SASL settings (renew window/jitter, relogin minimum,
// kinit command, service name) from the config map onto fields.
super.configure(configs, loginContextName);
this.loginContextName = loginContextName;
// NOTE(review): these casts NPE/CCE if a key is missing or mistyped - presumably
// defaults are guaranteed upstream; confirm against the config source.
this.ticketRenewWindowFactor = (Double) configs.get(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR);
this.ticketRenewJitter = (Double) configs.get(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER);
this.minTimeBeforeRelogin = (Long) configs.get(SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN);
this.kinitCmd = (String) configs.get(SaslConfigs.SASL_KERBEROS_KINIT_CMD);
this.serviceName = getServiceName(configs, loginContextName);
} | java | {
"resource": ""
} |
q172299 | Consumer.take | test | public OffsetAndResult<Map.Entry> take() {
// Blocks until the next batch is available from the data channel, draining control
// messages between polls. Returns null once the producer has completed; rethrows
// any previously recorded producer/consumer error. Requires the prior batch to
// have been committed before being called again.
if (producerError != null) {
throw new RuntimeException(Utils.format("Producer encountered error: {}", producerError), producerError);
}
if (consumerError != null) {
throw new RuntimeException(Utils.format("Consumer encountered error: {}", consumerError), consumerError);
}
try {
Utils.checkState(batchCommitted, "Cannot take messages when last batch is uncommitted");
while (running) {
for (ControlChannel.Message controlMessage : controlChannel.getConsumerMessages()) {
switch (controlMessage.getType()) {
case PRODUCER_COMPLETE:
// producer is complete, empty channel and afterwards return null
running = false;
break;
case PRODUCER_ERROR:
// Remember the producer failure so subsequent take() calls also fail fast.
running = false;
Throwable throwable = (Throwable) controlMessage.getPayload();
producerError = throwable;
throw new ProducerRuntimeException(Utils.format("Producer encountered error: {}", throwable), throwable);
default:
String msg = Utils.format("Illegal control message type: '{}'", controlMessage.getType());
throw new IllegalStateException(msg);
}
}
// Short poll so control messages are re-checked frequently while waiting for data.
OffsetAndResult<Map.Entry> batch = dataChannel.take(10, TimeUnit.MILLISECONDS);
LOG.trace("Received batch: {}", batch);
if (batch != null) {
batchCommitted = false; // got a new batch
return batch;
}
}
// Producer finished and no more data: signal end-of-stream to the caller.
LOG.trace("Returning null");
return null;
} catch (Throwable throwable) {
// Producer errors were already recorded above; record anything else as a
// consumer-side error before propagating.
if (!(throwable instanceof ProducerRuntimeException)) {
String msg = "Error caught in consumer: " + throwable;
LOG.error(msg, throwable);
error(throwable);
}
throw Throwables.propagate(throwable);
}
} | java | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.