_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q172300
Consumer.commit
test
/**
 * Commits the given offset on behalf of the consumer.
 * Marks the current batch as committed, validates that an initial offset was previously
 * recorded, forwards the commit over the control channel, and remembers the new offset.
 *
 * @param offset offset to commit; becomes the new last-committed offset on success
 */
public void commit(String offset) {
  batchCommitted = true;
  LOG.trace("Last committed offset '{}', attempting to commit '{}'", lastCommittedOffset, offset);
  // A null lastCommittedOffset means commit() was called before any offset was initialized.
  Utils.checkState(null != lastCommittedOffset, "Last committed offset cannot be null");
  controlChannel.consumerCommit(offset);
  lastCommittedOffset = offset;
}
java
{ "resource": "" }
q172301
Consumer.error
test
public void error(Throwable throwable) { if (consumerError == null) { consumerError = throwable; controlChannel.consumerError(throwable); } }
java
{ "resource": "" }
q172302
ConfigInjector.injectStage
test
/**
 * Injects the stage configuration values into the given stage instance.
 * Thin wrapper that builds a {@link StageInjectorContext} and delegates to
 * {@code injectConfigsToObject}; any problems are reported via {@code issues}.
 *
 * @param stage     the stage instance to populate
 * @param stageDef  definition describing the stage's declared configs
 * @param stageConf the configured values for this stage instance
 * @param constants pipeline constants available for EL resolution
 * @param issues    collector for validation/injection problems
 */
public void injectStage(Object stage, StageDefinition stageDef, StageConfiguration stageConf, Map<String, Object> constants, List<Issue> issues) {
  injectConfigsToObject(stage, new StageInjectorContext(stageDef, stageConf, constants, issues));
}
java
{ "resource": "" }
q172303
DefinitionsApi.getDefinitions
test
/**
 * Fetches all definitions from the {@code /v1/definitions} REST endpoint.
 *
 * @param hideStage optional filter; when non-null its name is sent as the
 *                  {@code hideStage} query parameter
 * @return the parsed definitions payload
 * @throws ApiException if the HTTP call fails or returns an error status
 */
public DefinitionsJson getDefinitions (HideStage.Type hideStage) throws ApiException {
  Object postBody = null;           // GET request: no body
  byte[] postBinaryBody = null;
  // create path and map variables
  String path = "/v1/definitions".replaceAll("\\{format\\}","json");
  // query params
  List<Pair> queryParams = new ArrayList<Pair>();
  if (hideStage != null) {
    queryParams.add(new Pair("hideStage", hideStage.name()));
  }
  Map<String, String> headerParams = new HashMap<String, String>();
  Map<String, Object> formParams = new HashMap<String, Object>();
  final String[] accepts = { "application/json" };
  final String accept = apiClient.selectHeaderAccept(accepts);
  final String[] contentTypes = { };
  final String contentType = apiClient.selectHeaderContentType(contentTypes);
  String[] authNames = new String[] { "basic" };
  TypeRef returnType = new TypeRef<DefinitionsJson>() {};
  return apiClient.invokeAPI(path, "GET", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType);
}
java
{ "resource": "" }
q172304
LineagePublisherTaskImpl.getDefinition
test
/**
 * Resolves a lineage publisher definition from configuration.
 * Reads the config entry for {@code name}, which must be of the form
 * {@code $libraryName::$publisherName}, and looks the definition up in the stage library.
 *
 * @param name logical publisher name whose definition config is looked up
 * @return the resolved definition, never null
 * @throws IllegalArgumentException if the config entry is missing or empty
 * @throws IllegalStateException    if the entry is malformed or the publisher is unknown
 */
private LineagePublisherDefinition getDefinition(String name) {
  String defConfig = LineagePublisherConstants.configDef(name);
  String publisherDefinition = configuration.get(defConfig, null);
  if(StringUtils.isEmpty(publisherDefinition)) {
    throw new IllegalArgumentException(Utils.format("Missing definition '{}'", defConfig));
  }
  // Expected format: "$libraryName::$publisherName"
  String []lineagePluginDefs = publisherDefinition.split("::");
  if(lineagePluginDefs.length != 2) {
    throw new IllegalStateException(Utils.format( "Invalid definition '{}', expected $libraryName::$publisherName", publisherDefinition ));
  }
  LineagePublisherDefinition def = stageLibraryTask.getLineagePublisherDefinition( lineagePluginDefs[0], // Library
    lineagePluginDefs[1] // Plugin name
  );
  if(def == null) {
    throw new IllegalStateException(Utils.format("Can't find publisher '{}'", publisherDefinition));
  }
  return def;
}
java
{ "resource": "" }
q172305
SQLParserUtils.formatName
test
/**
 * Normalizes a parsed SQL column name.
 * Applies {@code format} and, unless the database is case-sensitive, upper-cases
 * the identifier.
 *
 * @param columnName    raw column name as parsed
 * @param caseSensitive whether identifier case must be preserved
 * @return the normalized column name
 */
private static String formatName(String columnName, boolean caseSensitive) {
  String returnValue = format(columnName);
  if (caseSensitive) {
    return returnValue;
  }
  // Use a locale-independent upper-casing. The default-locale overload would mangle
  // identifiers under e.g. the Turkish locale ('i' -> dotted capital I), producing
  // column names that no longer match the database metadata.
  return returnValue.toUpperCase(java.util.Locale.ROOT);
}
java
{ "resource": "" }
q172306
SQLParserUtils.formatValue
test
private static String formatValue(String value) { // The value can either be null (if the IS keyword is present before it or just a NULL string with no quotes) if (value == null || NULL_STRING.equalsIgnoreCase(value)) { return null; } String returnValue = format(value); return returnValue.replaceAll("''", "'"); }
java
{ "resource": "" }
q172307
StoreApi.getPipelineInfo
test
/**
 * Fetches a pipeline's configuration from {@code /v1/pipeline/{pipelineId}}.
 *
 * @param pipelineId required pipeline id (path parameter)
 * @param rev        pipeline revision (query parameter)
 * @param get        what to fetch (query parameter)
 * @param attachment whether to return as attachment (query parameter)
 * @return the pipeline configuration payload
 * @throws ApiException with status 400 if {@code pipelineId} is null, or on HTTP failure
 */
public PipelineConfigurationJson getPipelineInfo (String pipelineId, String rev, String get, Boolean attachment) throws ApiException {
  Object postBody = null;           // GET request: no body
  byte[] postBinaryBody = null;
  // verify the required parameter 'pipelineId' is set
  if (pipelineId == null) {
    throw new ApiException(400, "Missing the required parameter 'pipelineId' when calling getPipelineInfo");
  }
  // create path and map variables
  String path = "/v1/pipeline/{pipelineId}".replaceAll("\\{format\\}","json") .replaceAll("\\{" + "pipelineId" + "\\}", apiClient.escapeString(pipelineId.toString()));
  // query params
  List<Pair> queryParams = new ArrayList<Pair>();
  Map<String, String> headerParams = new HashMap<String, String>();
  Map<String, Object> formParams = new HashMap<String, Object>();
  queryParams.addAll(apiClient.parameterToPairs("", "rev", rev));
  queryParams.addAll(apiClient.parameterToPairs("", "get", get));
  queryParams.addAll(apiClient.parameterToPairs("", "attachment", attachment));
  final String[] accepts = { "application/json" };
  final String accept = apiClient.selectHeaderAccept(accepts);
  final String[] contentTypes = { };
  final String contentType = apiClient.selectHeaderContentType(contentTypes);
  String[] authNames = new String[] { "basic" };
  TypeRef returnType = new TypeRef<PipelineConfigurationJson>() {};
  return apiClient.invokeAPI(path, "GET", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType);
}
java
{ "resource": "" }
q172308
StoreApi.createDraftPipelineFragment
test
public PipelineFragmentEnvelopeJson createDraftPipelineFragment ( String fragmentId, String description, List<StageConfigurationJson> stageInstances ) throws ApiException { Object postBody = stageInstances; byte[] postBinaryBody = null; // verify the required parameter 'pipelineId' is set if (fragmentId == null) { throw new ApiException(400, "Missing the required parameter 'fragmentId' when calling createPipelineFragment"); } // create path and map variables String path = "/v1/fragment/{fragmentId}".replaceAll("\\{format\\}","json") .replaceAll("\\{" + "fragmentId" + "\\}", apiClient.escapeString(fragmentId.toString())); // query params List<Pair> queryParams = new ArrayList<Pair>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, Object> formParams = new HashMap<String, Object>(); queryParams.addAll(apiClient.parameterToPairs("", "description", description)); queryParams.addAll(apiClient.parameterToPairs("", "draft", true)); final String[] accepts = { "application/json" }; final String accept = apiClient.selectHeaderAccept(accepts); final String[] contentTypes = { }; final String contentType = apiClient.selectHeaderContentType(contentTypes); String[] authNames = new String[] { "basic" }; TypeRef returnType = new TypeRef<PipelineFragmentEnvelopeJson>() {}; return apiClient.invokeAPI(path, "PUT", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType); }
java
{ "resource": "" }
q172309
StoreApi.getPipelines
test
public List<PipelineInfoJson> getPipelines ( String filterText, String label, int offset, int len, PipelineOrderByFields orderBy, Order order, boolean includeStatus ) throws ApiException { Object postBody = null; byte[] postBinaryBody = null; // create path and map variables String path = "/v1/pipelines".replaceAll("\\{format\\}","json"); // query params List<Pair> queryParams = new ArrayList<Pair>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, Object> formParams = new HashMap<String, Object>(); queryParams.addAll(apiClient.parameterToPairs("", "filterText", filterText)); queryParams.addAll(apiClient.parameterToPairs("", "label", label)); queryParams.addAll(apiClient.parameterToPairs("", "offset", offset)); queryParams.addAll(apiClient.parameterToPairs("", "len", len)); queryParams.addAll(apiClient.parameterToPairs("", "orderBy", orderBy)); queryParams.addAll(apiClient.parameterToPairs("", "order", order)); final String[] accepts = { "application/json" }; final String accept = apiClient.selectHeaderAccept(accepts); final String[] contentTypes = { }; final String contentType = apiClient.selectHeaderContentType(contentTypes); String[] authNames = new String[] { "basic" }; TypeRef returnType = new TypeRef<List<PipelineInfoJson>>() {}; return apiClient.invokeAPI(path, "GET", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType); }
java
{ "resource": "" }
q172310
StoreApi.importPipelineFragment
test
public PipelineFragmentEnvelopeJson importPipelineFragment ( String fragmentId, boolean draft, boolean includeLibraryDefinitions, PipelineFragmentEnvelopeJson fragmentEnvelope ) throws ApiException { Object postBody = fragmentEnvelope; byte[] postBinaryBody = null; // verify the required parameter 'fragmentId' is set if (fragmentId == null) { throw new ApiException(400, "Missing the required parameter 'fragmentId' when calling importPipelineFragment"); } // verify the required parameter 'fragmentEnvelope' is set if (fragmentEnvelope == null) { throw new ApiException( 400, "Missing the required parameter 'pipelineEnvelope' when calling importPipelineFragment" ); } // create path and map variables String path = "/v1/fragment/{fragmentId}/import".replaceAll("\\{format\\}","json") .replaceAll("\\{" + "fragmentId" + "\\}", apiClient.escapeString(fragmentId.toString())); // query params List<Pair> queryParams = new ArrayList<Pair>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, Object> formParams = new HashMap<String, Object>(); queryParams.addAll(apiClient.parameterToPairs("", "draft", draft)); queryParams.addAll(apiClient.parameterToPairs("", "includeLibraryDefinitions", includeLibraryDefinitions)); final String[] accepts = { "application/json" }; final String accept = apiClient.selectHeaderAccept(accepts); final String[] contentTypes = { "application/json" }; final String contentType = apiClient.selectHeaderContentType(contentTypes); String[] authNames = new String[] { "basic" }; TypeRef returnType = new TypeRef<PipelineFragmentEnvelopeJson>() {}; return apiClient.invokeAPI(path, "POST", queryParams, postBody, postBinaryBody, headerParams, formParams, accept, contentType, authNames, returnType); }
java
{ "resource": "" }
q172311
DataLakeGeneratorManager.getFilePath
test
/**
 * Computes the temp file path for a record in Azure Data Lake.
 * The directory comes either from the record's targetDirectory header or from
 * evaluating the directory-path EL template; duplicate and trailing slashes are removed.
 *
 * @param dirPathTemplate EL template for the directory path (ignored when the
 *                        directory is taken from the record header)
 * @param record          the record being written
 * @param recordTime      time used for time-based EL functions in the template
 * @return the temp file path produced by the output stream helper
 * @throws StageException if template resolution fails
 */
public String getFilePath( String dirPathTemplate, Record record, Date recordTime ) throws StageException {
  String dirPath;
  // get directory path
  if (dirPathTemplateInHeader) {
    dirPath = record.getHeader().getAttribute(DataLakeTarget.TARGET_DIRECTORY_HEADER);
    Utils.checkArgument(!(dirPath == null || dirPath.isEmpty()), "Directory Path cannot be null");
  } else {
    dirPath = resolvePath(dirPathTemplateEval, dirPathTemplateVars, dirPathTemplate, recordTime, record);
  }
  // SDC-5492: replace "//" to "/" in file path
  dirPath = dirPath.replaceAll("/+","/");
  if (dirPath.endsWith("/")) {
    dirPath = dirPath.substring(0, dirPath.length()-1);
  }
  return outputStreamHelper.getTempFilePath(dirPath, record, recordTime);
}
java
{ "resource": "" }
q172312
JdbcRecordReader.getOperationFromRecord
test
/**
 * Determines the JDBC operation code for a record.
 * Reads the sdc.operation.type header attribute; when absent, returns
 * {@code defaultOpCode}. When present but invalid, applies {@code unsupportedAction}:
 * SEND_TO_ERROR adds an error record (and returns -1), USE_DEFAULT falls back to the
 * default, DISCARD leaves -1 so the caller drops the record.
 *
 * @param record            record whose operation header is inspected
 * @param defaultOpCode     operation used when the header is missing (or USE_DEFAULT)
 * @param unsupportedAction what to do with an unrecognized operation value
 * @param errorRecords      sink for records routed to error
 * @return the resolved operation code, or -1 for discarded/errored records
 */
int getOperationFromRecord( Record record, int defaultOpCode, UnsupportedOperationAction unsupportedAction, List<OnRecordErrorException> errorRecords ) {
  String op = record.getHeader().getAttribute(OperationType.SDC_OPERATION_TYPE);
  int opCode = -1; // unsupported
  if (Strings.isNullOrEmpty(op)) {
    return defaultOpCode;
  }
  // Check if the operation code from header attribute is valid
  try {
    opCode = JDBCOperationType.convertToIntCode(op);
  } catch (NumberFormatException | UnsupportedOperationException ex) {
    LOG.debug( "Operation obtained from record is not supported. Handle by UnsupportedOperationAction {}. {}", unsupportedAction.getLabel(), ex );
    switch (unsupportedAction) {
      case SEND_TO_ERROR:
        LOG.debug("Sending record to error due to unsupported operation {}", op);
        errorRecords.add(new OnRecordErrorException(record, JdbcErrors.JDBC_70, op));
        break;
      case USE_DEFAULT:
        opCode = defaultOpCode;
        break;
      case DISCARD:
      default: // unknown action
        LOG.debug("Discarding record with unsupported operation {}", op);
    }
  }
  return opCode;
}
java
{ "resource": "" }
q172313
JdbcRecordReader.getFieldPath
test
/**
 * Returns the record field path mapped to the given column.
 * Simple map lookup; {@code op} is unused here — presumably kept so subclasses can
 * vary the mapping per operation (TODO confirm against overriding readers).
 *
 * @param columnName     column to look up
 * @param columnsToField column-name to field-path mapping
 * @param op             operation code (unused in this implementation)
 * @return the mapped field path, or null if the column is unmapped
 */
String getFieldPath(String columnName, Map<String, String> columnsToField, int op){
  return columnsToField.get(columnName);
}
java
{ "resource": "" }
q172314
EventBuffer.poll
test
/**
 * Retrieves the next enriched event, waiting up to the given timeout.
 *
 * @param timeout maximum time to wait
 * @param unit    unit of {@code timeout}
 * @return the next event, or null if the timeout elapsed
 * @throws StageException wrapping an InterruptedException (interrupt flag is restored)
 */
public EnrichedEvent poll(long timeout, TimeUnit unit) throws StageException {
  try {
    return queue.poll(timeout, unit);
  } catch (InterruptedException e) {
    LOG.error(Errors.MYSQL_001.getMessage(), e.toString(), e);
    // Preserve the interrupt status for callers further up the stack.
    Thread.currentThread().interrupt();
    throw new StageException(Errors.MYSQL_001, e.toString(), e);
  }
}
java
{ "resource": "" }
q172315
RunnerPool.getRunner
test
/**
 * Takes a runner from the pool, blocking until one is available.
 * Updates availability stats and the histogram regardless of outcome.
 *
 * @return an available runner
 * @throws PipelineRuntimeException if the pool was destroyed or the wait was interrupted
 */
public T getRunner() throws PipelineRuntimeException {
  validateNotDestroyed();
  try {
    return queue.take().runner;
  } catch (InterruptedException e) {
    // FIX: restore the interrupt flag — the original swallowed it, hiding the
    // interruption from callers and the thread's owner.
    Thread.currentThread().interrupt();
    throw new PipelineRuntimeException(ContainerError.CONTAINER_0801, e);
  } finally {
    runtimeStats.setAvailableRunners(queue.size());
    histogram.update(queue.size());
  }
}
java
{ "resource": "" }
q172316
RunnerPool.getIdleRunner
test
/**
 * Returns a runner that has been idle for at least {@code idleTime} milliseconds,
 * or null if none qualifies (pool empty, or head runner used too recently).
 *
 * @param idleTime minimum idle period in milliseconds
 * @return an idle runner, or null
 */
public T getIdleRunner(long idleTime) {
  // Take the first runner
  QueueItem<T> item = queue.poll();
  // All runners might be currently in use, which is fine in this case.
  if(item == null) {
    return null;
  }
  // If the runner wasn't idle for the expected time, we need to put it back to the queue (it will be added to the
  // beginning again).
  if((System.currentTimeMillis() - item.timestamp) < idleTime) {
    queue.add(item);
    return null;
  }
  // Otherwise we do have runner that hasn't been used for at least idleTime, so we can return it now
  return item.runner;
}
java
{ "resource": "" }
q172317
RunnerPool.returnRunner
test
/**
 * Returns a runner to the pool and refreshes availability stats.
 *
 * @param runner the runner being returned
 * @throws PipelineRuntimeException if the pool has been destroyed
 */
public void returnRunner(T runner) throws PipelineRuntimeException {
  validateNotDestroyed();
  // Wrap with a fresh QueueItem so the idle timestamp restarts now.
  queue.add(new QueueItem<>(runner));
  runtimeStats.setAvailableRunners(queue.size());
  histogram.update(queue.size());
}
java
{ "resource": "" }
q172318
RunnerPool.destroy
test
/**
 * Destroys the pool and verifies every runner was returned.
 *
 * @throws PipelineRuntimeException if some runners are still checked out
 */
public void destroy() throws PipelineRuntimeException {
  // Firstly set this runner as destroyed
  destroyed.set(true);
  // Validate that this thread pool have all runners back, otherwise we're missing something and that is sign of
  // a trouble.
  if(queue.size() < runtimeStats.getTotalRunners()) {
    throw new PipelineRuntimeException(ContainerError.CONTAINER_0802, queue.size(), runtimeStats.getTotalRunners());
  }
}
java
{ "resource": "" }
q172319
RunnerPool.validateNotDestroyed
test
/**
 * Guard used by pool operations: fails fast once {@code destroy()} has been called.
 *
 * @throws PipelineRuntimeException if the pool is already destroyed
 */
private void validateNotDestroyed() throws PipelineRuntimeException {
  if(destroyed.get()) {
    throw new PipelineRuntimeException(ContainerError.CONTAINER_0803, queue.size(), runtimeStats.getTotalRunners());
  }
}
java
{ "resource": "" }
q172320
KinesisUtil.checkStreamExists
test
/**
 * Verifies a Kinesis stream exists by fetching its shard count.
 * On failure a config issue is reported and 0 is returned (best effort — the caller
 * treats 0 shards / an issue as a failed validation rather than crashing).
 *
 * @param awsClientConfig AWS client configuration
 * @param conf            Kinesis stage configuration
 * @param streamName      stream to check
 * @param issues          collector for validation issues
 * @param context         stage context used to create issues
 * @return the number of shards, or 0 if the stream could not be described
 */
public static long checkStreamExists( ClientConfiguration awsClientConfig, KinesisConfigBean conf, String streamName, List<Stage.ConfigIssue> issues, Stage.Context context ) {
  long numShards = 0;
  try {
    numShards = getShardCount(awsClientConfig, conf, streamName);
  } catch (AmazonClientException|StageException e) {
    LOG.error(Errors.KINESIS_01.getMessage(), e.toString(), e);
    issues.add(context.createConfigIssue( Groups.KINESIS.name(), KINESIS_CONFIG_BEAN + ".streamName", Errors.KINESIS_01, e.toString() ));
  }
  return numShards;
}
java
{ "resource": "" }
q172321
KinesisUtil.getLastShardId
test
/**
 * Finds the id of the last shard of a Kinesis stream, paging through
 * {@code describeStream} results. The client is always shut down.
 *
 * @param awsClientConfig AWS client configuration
 * @param conf            Kinesis stage configuration
 * @param streamName      stream to inspect
 * @return the last shard id, or null if the stream reports no shards
 * @throws StageException propagated from client construction
 */
public static String getLastShardId( ClientConfiguration awsClientConfig, KinesisConfigBean conf, String streamName ) throws StageException {
  AmazonKinesis kinesisClient = getKinesisClient(awsClientConfig, conf);
  String lastShardId = null;
  try {
    StreamDescription description;
    do {
      if (lastShardId == null) {
        description = kinesisClient.describeStream(streamName).getStreamDescription();
      } else {
        // Page from the last shard seen so far.
        description = kinesisClient.describeStream(streamName, lastShardId).getStreamDescription();
      }
      int pageSize = description.getShards().size();
      if (pageSize == 0) {
        // FIX: an empty page previously caused get(-1) -> IndexOutOfBoundsException.
        break;
      }
      lastShardId = description.getShards().get(pageSize - 1).getShardId();
    } while (description.getHasMoreShards());
    return lastShardId;
  } finally {
    kinesisClient.shutdown();
  }
}
java
{ "resource": "" }
q172322
JdbcMultiRowRecordWriter.processQueue
test
/**
 * Writes all queued records (same operation, same table) using multi-row statements.
 * Records are drained in chunks of {@code maxRowsPerBatch}; a final, smaller statement
 * is generated for the remainder. Records that fail land in {@code errorRecords}.
 *
 * @param queue           records to write; drained by this method
 * @param errorRecords    sink for per-record errors
 * @param connection      open JDBC connection
 * @param maxRowsPerBatch maximum rows bound into one multi-row statement
 * @param opCode          operation code shared by all records in the queue
 * @throws StageException on non-data SQL errors
 */
private void processQueue( LinkedList<Record> queue, List<OnRecordErrorException> errorRecords, Connection connection, int maxRowsPerBatch, int opCode ) throws StageException {
  if (queue.isEmpty()) {
    return;
  }
  int rowCount = 0;
  // Assume that columns are all same for the same operation to the same table
  // If some columns are missing in record, the record goes to error.
  final Record first = queue.getFirst();
  SortedMap<String, String> columnsToParameters = recordReader.getColumnsToParameters( first, opCode, getColumnsToParameters(), opCode == OperationType.UPDATE_CODE ? getColumnsToFieldNoPK() : getColumnsToFields() );
  if (columnsToParameters.isEmpty()) {
    // no parameters found for configured columns
    if (LOG.isWarnEnabled()) {
      LOG.warn("No parameters found for record with ID {}; skipping", first.getHeader().getSourceId());
    }
    return;
  }
  String query = generateQueryForMultiRow( opCode, columnsToParameters, getPrimaryKeyColumns(),
    // the next batch will have either the max number of records, or however many are left.
    Math.min(maxRowsPerBatch, queue.size()) );
  // Need to store removed records from queue, because we might need to add newly generated columns
  // to records for Jdbc Tee Processor.
  LinkedList<Record> removed = new LinkedList<>();
  try (PreparedStatement statement = jdbcUtil.getPreparedStatement(getGeneratedColumnMappings(), query, connection)) {
    int paramIdx = 1;
    // Start processing records in queue. All records have the same operation to the same table. 
    while (!queue.isEmpty()) {
      Record r = queue.removeFirst();
      // DELETEs bind only primary keys; INSERTs bind only data columns.
      if (opCode != DELETE_CODE) {
        paramIdx = setParamsToStatement(paramIdx, statement, columnsToParameters, r, connection, opCode);
      }
      if (opCode != OperationType.INSERT_CODE) {
        paramIdx = setPrimaryKeys(paramIdx, r, statement, opCode);
      }
      removed.add(r);
      ++rowCount;
      if (rowCount == maxRowsPerBatch) {
        // time to execute the current batch
        processBatch(removed, errorRecords, statement, connection);
        // reset our counters
        rowCount = 0;
        paramIdx = 1;
        removed.clear();
      }
    }
  } catch (SQLException e) {
    handleSqlException(e, removed, errorRecords);
  }
  // Process the rest of the records that are removed from queue but haven't processed yet
  // this happens when rowCount is still less than maxRowsPerBatch.
  // This is a bit of an ugly fix as its not very DRY but sufficient until there's a larger
  // refactoring of this code.
  if (!removed.isEmpty()) {
    query = generateQueryForMultiRow( opCode, columnsToParameters, getPrimaryKeyColumns(), removed.size() // always the remainder
    );
    try (PreparedStatement statement = jdbcUtil.getPreparedStatement( getGeneratedColumnMappings(), query, connection )) {
      int paramIdx = 1;
      for (Record r : removed) {
        if (opCode != DELETE_CODE) {
          paramIdx = setParamsToStatement(paramIdx, statement, columnsToParameters, r, connection, opCode);
        }
        if (opCode != OperationType.INSERT_CODE) {
          paramIdx = setPrimaryKeys(paramIdx, r, statement, opCode);
        }
      }
      processBatch(removed, errorRecords, statement, connection);
    } catch (SQLException e) {
      handleSqlException(e, removed, errorRecords);
    }
  }
}
java
{ "resource": "" }
q172323
JdbcMultiRowRecordWriter.handleSqlException
test
/**
 * Routes a SQLException: data errors become per-record error entries for every input
 * record; anything else is delegated to the superclass (which may abort the stage).
 *
 * @param exception    the SQL failure
 * @param inputRecords records in the failed statement/batch
 * @param errors       sink for per-record errors
 * @throws StageException from the superclass for non-data errors
 */
private void handleSqlException( SQLException exception, List<Record> inputRecords, List<OnRecordErrorException> errors ) throws StageException {
  if(jdbcUtil.isDataError(getCustomDataSqlStateCodes(), getConnectionString(), exception)) {
    String formattedError = jdbcUtil.formatSqlException(exception);
    LOG.error(JdbcErrors.JDBC_89.getMessage(), formattedError);
    // Multi-row statements can't attribute the failure to one record, so all of them error.
    for(Record inputRecord : inputRecords) {
      errors.add(new OnRecordErrorException(inputRecord, JdbcErrors.JDBC_89, formattedError));
    }
    return;
  }
  super.handleSqlException(exception);
}
java
{ "resource": "" }
q172324
JdbcMultiRowRecordWriter.getColumnHash
test
/**
 * Hashes the set of resolved column parameters for a record so records with the same
 * column layout (for the same operation) can be batched together.
 *
 * @param record the record whose columns are resolved
 * @param op     operation code used during resolution
 * @return hash of the resolved column-to-parameter map
 * @throws OnRecordErrorException if column resolution fails for this record
 */
private HashCode getColumnHash(Record record, int op) throws OnRecordErrorException {
  Map<String, String> parameters = getColumnsToParameters();
  SortedMap<String, String> columnsToParameters = recordReader.getColumnsToParameters(record, op, parameters, getColumnsToFields());
  return columnHashFunction.newHasher().putObject(columnsToParameters, stringMapFunnel).hash();
}
java
{ "resource": "" }
q172325
SpoolDirUtil.compareFiles
test
/**
 * Returns true when f1 should be considered "newer" than f2 for spooling order.
 * f2 not existing counts as f1 newer. Otherwise compares max(mtime, ctime) of each
 * file, falling back to absolute-path ordering on a tie. Timestamp read failures
 * log and return false (treat f1 as not newer — best effort).
 *
 * @param fs file system abstraction
 * @param f1 candidate file
 * @param f2 reference file
 * @return true if f1 orders after f2
 */
public static boolean compareFiles(WrappedFileSystem fs, WrappedFile f1, WrappedFile f2) {
  if (!fs.exists(f2)) {
    return true;
  }
  try {
    long mtime1 = fs.getLastModifiedTime(f1);
    long mtime2 = fs.getLastModifiedTime(f2);
    long ctime1 = fs.getChangedTime(f1);
    long ctime2 = fs.getChangedTime(f2);
    // Use whichever of mtime/ctime is later for each file.
    long time1 = Math.max(mtime1, ctime1);
    long time2 = Math.max(mtime2, ctime2);
    int compares = Long.compare(time1, time2);
    if (compares != 0) {
      return compares > 0;
    }
  } catch (IOException ex) {
    LOG.error("Failed to get ctime: '{}'", f1.getFileName(), ex);
    return false;
  }
  // Same timestamps: break the tie deterministically by path.
  return f1.getAbsolutePath().compareTo(f2.getAbsolutePath()) > 0;
}
java
{ "resource": "" }
q172326
ShellExecutor.retrievePidIfFeasible
test
/**
 * Best-effort retrieval of a child process PID via reflection on the JVM's internal
 * UNIXProcess class. Returns UNDETERMINED_PID when the class is unavailable, the
 * process is of a different type, or the field is inaccessible.
 *
 * @param process the spawned process
 * @return the PID, or UNDETERMINED_PID if it cannot be determined
 */
private static int retrievePidIfFeasible(Process process) {
  // unixProcessClass/pidField are resolved elsewhere; null means this platform/JVM
  // doesn't expose the internal class at all.
  if(unixProcessClass == null) {
    return UNDETERMINED_PID;
  }
  if(!unixProcessClass.isInstance(process)) {
    LOG.debug("Do not support retrieving PID from {}", process.getClass().getName());
    return UNDETERMINED_PID;
  }
  try {
    return (int)pidField.get(process);
  } catch (IllegalAccessException e) {
    LOG.debug("Can't retrieve PID value from the field", e);
    return UNDETERMINED_PID;
  }
}
java
{ "resource": "" }
q172327
FieldHasherProcessor.validateAndExtractFieldsToHash
test
/**
 * Classifies matching field paths for hashing.
 * Paths that are hashable are returned; the rest are sorted into the supplied
 * buckets: missing from the record, unsupported (list/map) type, or null value.
 *
 * @param record                   record being inspected
 * @param fieldsDontExist          out: paths absent from the record
 * @param fieldsWithListOrMapType  out: paths whose field type is unsupported
 * @param fieldsWithNull           out: paths whose field value is null
 * @param matchingFieldsPath       candidate paths to classify
 * @return the paths that can be hashed
 */
private Set<String> validateAndExtractFieldsToHash( Record record, Set<String> fieldsDontExist, Set<String> fieldsWithListOrMapType, Set<String> fieldsWithNull, Collection<String> matchingFieldsPath ) {
  Set<String> hashablePaths = new HashSet<String>();
  for (String path : matchingFieldsPath) {
    if (!record.has(path)) {
      fieldsDontExist.add(path);
      continue;
    }
    Field field = record.get(path);
    if (UNSUPPORTED_FIELD_TYPES.contains(field.getType())) {
      fieldsWithListOrMapType.add(path);
    } else if (field.getValue() == null) {
      fieldsWithNull.add(path);
    } else {
      hashablePaths.add(path);
    }
  }
  return hashablePaths;
}
java
{ "resource": "" }
q172328
HadoopSecurityUtil.getProxyUser
test
public static UserGroupInformation getProxyUser( String user, // Hadoop user (HDFS User, HBase user, generally the to-be-impersonated user in component's configuration) Stage.Context context, // Stage context object UserGroupInformation loginUser, // Login UGI (sdc user) List<Stage.ConfigIssue> issues, // Reports errors String configGroup, // Group where "HDFS User" is present String configName // Config name of "HDFS User" ) { // Should we always impersonate current user? boolean alwaysImpersonate = context.getConfiguration().get( HadoopConfigConstants.IMPERSONATION_ALWAYS_CURRENT_USER, false ); // If so, propagate current user to "user" (the one to be impersonated) if(alwaysImpersonate) { if(!StringUtils.isEmpty(user)) { issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOP_00001)); } user = context.getUserContext().getAliasName(); } // If impersonated user is empty, simply return login UGI (no impersonation performed) if(StringUtils.isEmpty(user)) { return loginUser; } // Optionally lower case the user name boolean lowerCase = context.getConfiguration().get( HadoopConfigConstants.LOWERCASE_USER, false ); if(lowerCase) { user = user.toLowerCase(); } return UserGroupInformation.createProxyUser(user, loginUser); }
java
{ "resource": "" }
q172329
ActiveStats.roll
test
/**
 * Closes this stats period and starts a fresh one.
 * Sets this instance's end time to now and returns a new ActiveStats starting at the
 * same instant, carrying rolled usage timers and only stages with a positive multiplier.
 * NOTE(review): unlike snapshot(), the record count is not carried over — presumably
 * counts reset each period; confirm against the stats aggregation logic.
 *
 * @return the new ActiveStats for the next period
 */
public ActiveStats roll() {
  long now = System.currentTimeMillis();
  setEndTime(now);
  ActiveStats statsBean = new ActiveStats().setStartTime(now) .setDataCollectorVersion(getDataCollectorVersion()) .setDpmEnabled(isDpmEnabled()) .setUpTime(getUpTime().roll());
  statsBean.setPipelines(getPipelines().stream().map(UsageTimer::roll).collect(Collectors.toList()));
  // Drop stages that are no longer in use (multiplier 0) when rolling.
  statsBean.setStages(getStages().stream() .filter(timer -> timer.getMultiplier() > 0) .map(UsageTimer::roll) .collect(Collectors.toList()));
  return statsBean;
}
java
{ "resource": "" }
q172330
ActiveStats.snapshot
test
/**
 * Produces a point-in-time copy of these stats without ending the current period.
 * All usage timers (uptime, pipelines, stages) are snapshotted; start time and
 * record count are preserved.
 *
 * @return an independent snapshot of the current stats
 */
public ActiveStats snapshot() {
  ActiveStats snapshot = new ActiveStats().setStartTime(getStartTime()) .setDataCollectorVersion(getDataCollectorVersion()) .setDpmEnabled(isDpmEnabled()) .setUpTime(getUpTime().snapshot()) .setRecordCount(getRecordCount());
  snapshot.setPipelines(getPipelines().stream().map(UsageTimer::snapshot).collect(Collectors.toList()));
  snapshot.setStages(getStages().stream().map(UsageTimer::snapshot).collect(Collectors.toList()));
  return snapshot;
}
java
{ "resource": "" }
q172331
HdfsMetadataExecutor.ensureDirectoryExists
test
/**
 * Ensures the given HDFS directory exists, creating it (and parents) when missing.
 *
 * @param fs   file system to operate on
 * @param path directory to ensure
 * @throws IOException if the directory cannot be created
 */
private void ensureDirectoryExists(FileSystem fs, Path path) throws IOException {
  if (fs.exists(path)) {
    return; // already there, nothing to do
  }
  LOG.debug("Creating directory: {}", path);
  boolean created = fs.mkdirs(path);
  if (!created) {
    throw new IOException("Can't create directory: " + path);
  }
}
java
{ "resource": "" }
q172332
JdbcUtil.write
test
/**
 * Writes a batch, partitioning records by schema/table via the classifier and
 * delegating each partition to the generic write overload.
 *
 * @param batch                 records to write
 * @param schemaTableClassifier assigns each record a schema+table key
 * @param recordWriters         cache of per-key record writers
 * @param errorRecordHandler    handler for per-record errors
 * @param perRecord             true to write record-by-record instead of batched
 * @throws StageException propagated from the underlying writers/handler
 */
public void write( Batch batch, SchemaTableClassifier schemaTableClassifier, LoadingCache<SchemaAndTable, JdbcRecordWriter> recordWriters, ErrorRecordHandler errorRecordHandler, boolean perRecord ) throws StageException {
  Multimap<SchemaAndTable, Record> partitions = schemaTableClassifier.classify(batch);
  for (SchemaAndTable key : partitions.keySet()) {
    Iterator<Record> recordIterator = partitions.get(key).iterator();
    write(recordIterator, key, recordWriters, errorRecordHandler, perRecord);
  }
}
java
{ "resource": "" }
q172333
JdbcUtil.write
test
/**
 * Writes a batch, partitioning records by the table-name EL template and delegating
 * each partition to the generic write overload.
 *
 * @param batch              records to write
 * @param tableNameEval      EL evaluator for the table-name template
 * @param tableNameVars      EL variables for evaluation
 * @param tableNameTemplate  template producing the destination table name per record
 * @param recordWriters      cache of per-table record writers
 * @param errorRecordHandler handler for per-record errors
 * @param perRecord          true to write record-by-record instead of batched
 * @throws StageException propagated from EL evaluation or the underlying writers
 */
public void write( Batch batch, ELEval tableNameEval, ELVars tableNameVars, String tableNameTemplate, LoadingCache<String, JdbcRecordWriter> recordWriters, ErrorRecordHandler errorRecordHandler, boolean perRecord ) throws StageException {
  Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression( tableNameEval, tableNameVars, tableNameTemplate, batch );
  for (String tableName : partitions.keySet()) {
    Iterator<Record> recordIterator = partitions.get(tableName).iterator();
    write(recordIterator, tableName, recordWriters, errorRecordHandler, perRecord);
  }
}
java
{ "resource": "" }
q172334
JdbcUtil.write
test
/**
 * Writes the records for a single destination key.
 * Fetches (or creates) the writer for {@code key}; when writer creation fails, every
 * remaining record is routed to the error handler with the underlying error code.
 * Otherwise records are written per-record or batched and resulting errors forwarded.
 *
 * @param recordIterator     records destined for this key
 * @param key                destination key (table name, schema+table, ...)
 * @param recordWriters      cache of per-key record writers
 * @param errorRecordHandler handler for per-record errors
 * @param perRecord          true to write record-by-record instead of batched
 * @throws StageException propagated from the error handler or writer
 */
public <T> void write( Iterator<Record> recordIterator, T key, LoadingCache<T, JdbcRecordWriter> recordWriters, ErrorRecordHandler errorRecordHandler, boolean perRecord ) throws StageException {
  final JdbcRecordWriter jdbcRecordWriter;
  try {
    jdbcRecordWriter = recordWriters.getUnchecked(key);
  } catch (UncheckedExecutionException ex) {
    final Throwable throwable = ex.getCause();
    final ErrorCode errorCode;
    final Object[] messageParams;
    // Preserve the original StageException's code/params when available.
    if (throwable instanceof StageException) {
      StageException stageEx = (StageException) ex.getCause();
      errorCode = stageEx.getErrorCode();
      messageParams = stageEx.getParams();
    } else {
      errorCode = JdbcErrors.JDBC_301;
      messageParams = new Object[] {ex.getMessage(), ex.getCause()};
    }
    // Failed to create RecordWriter, report all as error records.
    while (recordIterator.hasNext()) {
      Record record = recordIterator.next();
      errorRecordHandler.onError(new OnRecordErrorException(record, errorCode, messageParams));
    }
    return;
  }
  List<OnRecordErrorException> errors = perRecord ? jdbcRecordWriter.writePerRecord(recordIterator) : jdbcRecordWriter.writeBatch(recordIterator);
  for (OnRecordErrorException error : errors) {
    errorRecordHandler.onError(error);
  }
}
java
{ "resource": "" }
q172335
JdbcUtil.generateNoMoreDataEvent
test
/**
 * Emits a no-more-data event in its own batch.
 * Starts a dedicated batch on the push-source context, creates and sends the event,
 * then processes the batch so the event is delivered downstream.
 *
 * @param context push-source context used to create and process the event batch
 */
public void generateNoMoreDataEvent(PushSource.Context context) {
  LOG.info("No More data to process, Triggered No More Data Event");
  BatchContext batchContext = context.startBatch();
  CommonEvents.NO_MORE_DATA.create(context, batchContext).createAndSend();
  context.processBatch(batchContext);
}
java
{ "resource": "" }
q172336
HiveMetadataProcessor.detectNewPartition
test
/**
 * Detects whether the given partition is new relative to the cache.
 * Builds a single-entry {partitionValues -> location} map and diffs it against the
 * cached partition info.
 *
 * @param partitionValues partition key values
 * @param pCache          cached partition info, may be null (cache miss)
 * @param location        partition location
 * @return the diff map when the partition is new (or the cache is empty),
 *         null when the partition is already known
 * @throws StageException propagated from cache diffing
 */
private Map<PartitionInfoCacheSupport.PartitionValues, String> detectNewPartition( PartitionInfoCacheSupport.PartitionValues partitionValues, PartitionInfoCacheSupport.PartitionInfo pCache, String location ) throws StageException{
  Map<PartitionInfoCacheSupport.PartitionValues, String> candidate = new HashMap<>();
  candidate.put(partitionValues, location);
  if (pCache == null) {
    // Nothing cached yet: the whole candidate map is the diff.
    return candidate;
  }
  Map<PartitionInfoCacheSupport.PartitionValues, String> diff = pCache.getDiff(candidate);
  return diff.isEmpty() ? null : diff;
}
java
{ "resource": "" }
q172337
HiveMetadataProcessor.updateRecordForHDFS
test
/**
 * Annotates a record's header with the HDFS routing attributes.
 * Sets the roll flag (only when rolling), the Avro schema and the target directory
 * so the downstream HDFS target knows where and how to write the record.
 *
 * @param record     record whose header is updated
 * @param roll       whether the target file should be rolled
 * @param avroSchema Avro schema to write with
 * @param location   target directory for the record
 */
@VisibleForTesting static void updateRecordForHDFS( Record record, boolean roll, String avroSchema, String location ){
  if(roll){
    record.getHeader().setAttribute(HDFS_HEADER_ROLL, "true");
  }
  record.getHeader().setAttribute(HDFS_HEADER_AVROSCHEMA, avroSchema);
  record.getHeader().setAttribute(HDFS_HEADER_TARGET_DIRECTORY, location);
  LOG.trace("Record {} will be stored in {} path: roll({}), avro schema: {}", record.getHeader().getSourceId(), location, roll, avroSchema);
}
java
{ "resource": "" }
q172338
GroupByAggregator.process
test
/**
 * Feeds a single grouped value into the underlying aggregator data.
 *
 * @param group group key the value belongs to
 * @param value value to aggregate under the group
 */
public void process(String group, T value) {
  Map<String, T> singleEntry = ImmutableMap.of(group, value);
  getData().process(singleEntry);
}
java
{ "resource": "" }
q172339
StageValidator.isSameVersion
test
/**
 * Checks whether two stage classes declare the same {@code @StageDef} version.
 *
 * @param a first stage class; must carry {@code @StageDef}
 * @param b second stage class; must carry {@code @StageDef}
 * @return true if both declare the same version
 * @throws IllegalArgumentException if either class is missing the annotation
 *         (previously this surfaced as an opaque NullPointerException)
 */
public static boolean isSameVersion(Class<? extends Stage> a, Class<? extends Stage> b) {
  StageDef aDef = a.getAnnotation(StageDef.class);
  StageDef bDef = b.getAnnotation(StageDef.class);
  if (aDef == null) {
    throw new IllegalArgumentException("Class " + a.getName() + " is missing the @StageDef annotation");
  }
  if (bDef == null) {
    throw new IllegalArgumentException("Class " + b.getName() + " is missing the @StageDef annotation");
  }
  return aDef.version() == bDef.version();
}
java
{ "resource": "" }
q172340
BootstrapMesosDriver.main
test
/**
 * Mesos driver entry point.
 * Validates MESOS_DIRECTORY and SPARK_HOME, extracts the bundled archives from the
 * uber jar into the Mesos sandbox, records the base dir as a system property, and
 * reflectively invokes BootstrapClusterStreaming.main (reflection avoids a compile-time
 * dependency on the streaming bootstrap class).
 *
 * @param args forwarded unchanged to BootstrapClusterStreaming.main
 * @throws Exception on any bootstrap failure
 */
public static void main(String[] args) throws Exception {
  BootstrapCluster.printSystemPropsEnvVariables();
  String mesosDir = System.getenv("MESOS_DIRECTORY");
  if (mesosDir == null) {
    throw new IllegalStateException("Expected the env. variable MESOS_DIRECTORY to be defined");
  }
  File mesosHomeDir = new File(mesosDir);
  String sparkDir = System.getenv("SPARK_HOME");
  if (sparkDir == null) {
    throw new IllegalStateException("Expected the env. variable SPARK_HOME to be defined");
  }
  File sparkHomeDir = new File(sparkDir);
  int processExitValue = BootstrapCluster.findAndExtractJar(mesosHomeDir, sparkHomeDir);
  if (processExitValue != 0) {
    throw new IllegalStateException( "Process extracting archives from uber jar exited abnormally; check Mesos driver stdout file");
  }
  System.setProperty("SDC_MESOS_BASE_DIR", new File(mesosHomeDir, BootstrapCluster.SDC_MESOS_BASE_DIR).getAbsolutePath());
  final Class<?> clazz = Class.forName("com.streamsets.pipeline.BootstrapClusterStreaming");
  final Method method = clazz.getMethod("main", String[].class);
  method.invoke(null, new Object[] { args });
}
java
{ "resource": "" }
q172341
HttpClientCommon.configureAuthAndBuildClient
test
/**
 * Resolves authentication credentials and builds the authenticated HTTP client.
 * For OAuth 1, resolves consumer/token credentials and configures OAuth on the client
 * builder; for DIGEST/BASIC/UNIVERSAL, resolves username/password auth. Credential
 * resolution only proceeds to configuration when no issues were reported. Finally
 * builds the client; a StageException here is unexpected (throwExceptions=false) and
 * is rethrown undeclared.
 *
 * @param clientBuilder builder to configure with authentication
 * @param issues        collector for credential-resolution problems
 */
private void configureAuthAndBuildClient( ClientBuilder clientBuilder, List<Stage.ConfigIssue> issues ) {
  if (jerseyClientConfig.authType == AuthenticationType.OAUTH) {
    String consumerKey = jerseyClientConfig.oauth.resolveConsumerKey(context,"CREDENTIALS", "conf.oauth.", issues);
    String consumerSecret = jerseyClientConfig.oauth.resolveConsumerSecret(context, "CREDENTIALS", "conf.oauth.", issues);
    String token = jerseyClientConfig.oauth.resolveToken(context, "CREDENTIALS", "conf.oauth.", issues);
    String tokenSecret = jerseyClientConfig.oauth.resolveTokenSecret(context, "CREDENTIALS", "conf.oauth.", issues);
    // Only configure OAuth if every credential resolved cleanly.
    if(issues.isEmpty()) {
      authToken = JerseyClientUtil.configureOAuth1( consumerKey, consumerSecret, token, tokenSecret, clientBuilder );
    }
  } else if (jerseyClientConfig.authType.isOneOf(AuthenticationType.DIGEST, AuthenticationType.BASIC, AuthenticationType.UNIVERSAL)) {
    String username = jerseyClientConfig.basicAuth.resolveUsername(context,"CREDENTIALS", "conf.basicAuth.", issues);
    String password = jerseyClientConfig.basicAuth.resolvePassword(context,"CREDENTIALS", "conf.basicAuth.", issues);
    if(issues.isEmpty()) {
      JerseyClientUtil.configurePasswordAuth( jerseyClientConfig.authType, username, password, clientBuilder );
    }
  }
  try {
    buildNewAuthenticatedClient(issues, false);
    clientInitialized = true;
  } catch (StageException e) {
    // should not happen, since we passed throwExceptions as false above
    ExceptionUtils.throwUndeclared(e);
  }
}
java
{ "resource": "" }
q172342
HttpClientCommon.resolveHeaders
test
/**
 * Evaluates each configured header value expression against the given record and
 * returns the resolved headers as a multivalued map suitable for a JAX-RS request.
 *
 * @param headers header name to value-expression mapping
 * @param record  record used as EL evaluation context
 * @throws StageException if an expression fails to evaluate
 */
public MultivaluedMap<String, Object> resolveHeaders(
    Map<String, String> headers,
    Record record
) throws StageException {
  RecordEL.setRecordInContext(headerVars, record);
  MultivaluedMap<String, Object> resolved = new MultivaluedHashMap<>();
  for (Map.Entry<String, String> headerEntry : headers.entrySet()) {
    Object value = headerEval.eval(headerVars, headerEntry.getValue(), String.class);
    List<Object> singleValue = new ArrayList<>(1);
    singleValue.add(value);
    resolved.put(headerEntry.getKey(), singleValue);
  }
  return resolved;
}
java
{ "resource": "" }
q172343
HttpClientCommon.getHttpMethod
test
/**
 * Resolves the HTTP method for a record: a concrete configured method is returned
 * as-is, while EXPRESSION causes the method expression to be evaluated against the
 * record and mapped to the matching {@link HttpMethod} constant.
 *
 * @throws ELEvalException if the method expression fails to evaluate
 */
public HttpMethod getHttpMethod(
    HttpMethod httpMethod,
    String methodExpression,
    Record record
) throws ELEvalException {
  if (httpMethod == HttpMethod.EXPRESSION) {
    RecordEL.setRecordInContext(methodVars, record);
    String evaluated = methodEval.eval(methodVars, methodExpression, String.class);
    return HttpMethod.valueOf(evaluated);
  }
  return httpMethod;
}
java
{ "resource": "" }
q172344
ScriptTypedNullObject.fillNullTypes
test
/**
 * Exposes all typed-null sentinel objects to scripts by binding each one under its
 * well-known name in the given bindings.
 */
public static void fillNullTypes(SimpleBindings bindings) {
  // Name/sentinel pairs, bound in one pass.
  Object[][] typedNulls = {
      {"NULL_BOOLEAN", NULL_BOOLEAN},
      {"NULL_CHAR", NULL_CHAR},
      {"NULL_BYTE", NULL_BYTE},
      {"NULL_SHORT", NULL_SHORT},
      {"NULL_INTEGER", NULL_INTEGER},
      {"NULL_LONG", NULL_LONG},
      {"NULL_FLOAT", NULL_FLOAT},
      {"NULL_DOUBLE", NULL_DOUBLE},
      {"NULL_DATE", NULL_DATE},
      {"NULL_DATETIME", NULL_DATETIME},
      {"NULL_TIME", NULL_TIME},
      {"NULL_DECIMAL", NULL_DECIMAL},
      {"NULL_BYTE_ARRAY", NULL_BYTE_ARRAY},
      {"NULL_STRING", NULL_STRING},
      {"NULL_LIST", NULL_LIST},
      {"NULL_MAP", NULL_MAP},
  };
  for (Object[] entry : typedNulls) {
    bindings.put((String) entry[0], entry[1]);
  }
}
java
{ "resource": "" }
q172345
ScriptTypedNullObject.getFieldNull
test
/**
 * Returns the value at {@code fieldPath}, substituting the typed-null sentinel when
 * the field exists but holds a null value. Returns plain null when the field itself
 * does not exist.
 */
public static Object getFieldNull(Record record, String fieldPath) {
  Field field = record.get(fieldPath);
  if (field == null) {
    return null;
  }
  Object value = field.getValue();
  return (value == null) ? getTypedNullFromField(field) : value;
}
java
{ "resource": "" }
q172346
AvroSchemaHelper.loadFromRegistry
test
/**
 * Loads a schema from the registry, preferring lookup by subject when one is given
 * and falling back to lookup by schema id otherwise.
 *
 * @throws SchemaRegistryException if the registry lookup fails
 */
public Schema loadFromRegistry(String subject, int schemaId) throws SchemaRegistryException {
  // Delegate directly: both overloads already throw SchemaRegistryException, so the
  // previous catch-and-rewrap in a new SchemaRegistryException only nested and
  // obscured the original exception without adding information.
  if (isEmpty(subject)) {
    return loadFromRegistry(schemaId);
  }
  return loadFromRegistry(subject);
}
java
{ "resource": "" }
q172347
AvroSchemaHelper.registerSchema
test
/**
 * Registers the schema under the given subject, caching the returned schema id so
 * repeated registrations of the same subject/schema avoid registry round-trips.
 *
 * @return the registry-assigned schema id
 * @throws SchemaRegistryException if registration fails
 */
public int registerSchema(Schema schema, String subject) throws SchemaRegistryException {
  try {
    // NOTE(review): cache key is subject + hashCode; two different schemas of the same
    // subject with colliding hash codes would share a cached id — confirm acceptable.
    return schemaIdCache.get(subject + schema.hashCode(), () -> registryClient.register(subject, schema));
  } catch (ExecutionException e) {
    throw new SchemaRegistryException(e);
  }
}
java
{ "resource": "" }
q172348
AvroSchemaHelper.loadFromRegistry
test
/**
 * Fetches the latest schema registered under the given subject.
 *
 * @throws SchemaRegistryException if the registry cannot be reached or the subject
 *     is unknown
 */
public Schema loadFromRegistry(String subject) throws SchemaRegistryException {
  try {
    SchemaMetadata latest = registryClient.getLatestSchemaMetadata(subject);
    int latestId = latest.getId();
    return registryClient.getByID(latestId);
  } catch (IOException | RestClientException e) {
    throw new SchemaRegistryException(e);
  }
}
java
{ "resource": "" }
q172349
AvroSchemaHelper.getSchemaIdFromSubject
test
/**
 * Returns the id of the latest schema registered under the given subject.
 *
 * @throws SchemaRegistryException if the registry cannot be reached or the subject
 *     is unknown
 */
public int getSchemaIdFromSubject(String subject) throws SchemaRegistryException {
  try {
    return registryClient.getLatestSchemaMetadata(subject).getId();
  } catch (IOException | RestClientException e) {
    throw new SchemaRegistryException(e);
  }
}
java
{ "resource": "" }
q172350
AvroSchemaHelper.loadFromRegistry
test
/**
 * Fetches the schema with the given registry id.
 *
 * @throws SchemaRegistryException if the registry cannot be reached or the id is unknown
 */
public Schema loadFromRegistry(int id) throws SchemaRegistryException {
  try {
    Schema schema = registryClient.getByID(id);
    return schema;
  } catch (IOException | RestClientException e) {
    throw new SchemaRegistryException(e);
  }
}
java
{ "resource": "" }
q172351
AvroSchemaHelper.writeSchemaId
test
/**
 * Writes the Confluent wire-format header (magic byte + 4-byte big-endian schema id)
 * to the stream when the id is positive; non-positive ids write nothing.
 *
 * @return the schema id that was passed in, unchanged
 * @throws IOException if the stream write fails
 */
public int writeSchemaId(OutputStream os, int schemaId) throws IOException {
  if (schemaId <= 0) {
    // No registered schema — emit no header.
    return schemaId;
  }
  os.write(MAGIC_BYTE);
  byte[] idBytes = ByteBuffer.allocate(ID_SIZE).putInt(schemaId).array();
  os.write(idBytes);
  return schemaId;
}
java
{ "resource": "" }
q172352
AvroSchemaHelper.detectSchemaId
test
public Optional<Integer> detectSchemaId(byte[] data) { if (data.length < 5) { return Optional.empty(); } ByteBuffer wrapped = ByteBuffer.wrap(data); // 5 == MAGIC_BYTE + ID_SIZE if (wrapped.get() != MAGIC_BYTE) { return Optional.empty(); } return Optional.of(wrapped.getInt()); }
java
{ "resource": "" }
q172353
AvroSchemaHelper.getDefaultValues
test
/**
 * Collects the default values declared anywhere in the given Avro schema into a
 * fresh mutable map.
 *
 * @throws SchemaRegistryException if the schema's defaults cannot be extracted
 */
public static Map<String, Object> getDefaultValues(Schema schema) throws SchemaRegistryException {
  try {
    return new HashMap<>(AvroTypeUtil.getDefaultValuesFromSchema(schema, new HashSet<String>()));
  } catch (IOException e) {
    throw new SchemaRegistryException(e);
  }
}
java
{ "resource": "" }
q172354
ServicesUtil.parseAll
test
/**
 * Parses a raw message payload into records using the stage's DataFormatParserService.
 * Recoverable per-record parse errors are routed to the error sink and parsing
 * continues; a fatal parse/IO error wraps the whole payload in a byte-array record
 * and sends it to error. When {@code produceSingleRecordPerMessage} is set, all parsed
 * records are collapsed into a single record whose root field is a list of the
 * individual record roots.
 */
public static List<Record> parseAll(
    Stage.Context stageContext,
    ToErrorContext toErrorContext,
    boolean produceSingleRecordPerMessage,
    String messageId,
    byte[] payload
) throws StageException {
  List<Record> records = new ArrayList<>();
  try (DataParser parser = stageContext.getService(DataFormatParserService.class).getParser(messageId, payload)) {
    Record record = null;
    do {
      try {
        record = parser.parse();
      } catch (RecoverableDataParserException e) {
        // Route just the offending record to error and keep parsing the payload.
        handleException(stageContext, toErrorContext, messageId, e, e.getUnparsedRecord());
        //Go to next record
        continue;
      }
      if (record != null) {
        records.add(record);
      }
    } while (record != null);
  } catch (IOException | DataParserException ex) {
    // Fatal failure: ship the raw payload to error as a single record.
    Record record = stageContext.createRecord(messageId);
    record.set(Field.create(payload));
    handleException(stageContext, toErrorContext, messageId, ex, record);
    return records;
  }
  if (produceSingleRecordPerMessage) {
    // Collapse all roots into one list-rooted record (reusing the first record's header).
    // NOTE(review): assumes at least one record was parsed — records.get(0) throws otherwise.
    List<Field> list = new ArrayList<>();
    for (Record record : records) {
      list.add(record.get());
    }
    Record record = records.get(0);
    record.set(Field.create(list));
    records.clear();
    records.add(record);
  }
  return records;
}
java
{ "resource": "" }
q172355
ClasspathValidatorResult.logDetails
test
/**
 * Logs a human-readable summary of classpath validation problems: artifact paths that
 * could not be parsed, and dependencies that appear in multiple versions (with the
 * source of each version). No-op when the classpath validated cleanly.
 */
public void logDetails() {
  if (isValid()) {
    return;
  }
  LOG.warn("Validation results for {}", name);
  if (!unparseablePaths.isEmpty()) {
    LOG.warn("Can't parse the following artifacts:");
    for (String path : unparseablePaths) {
      LOG.warn(" {}", path);
    }
  }
  if (!versionCollisions.isEmpty()) {
    LOG.warn("Detected colliding dependency versions:");
    // Outer map: dependency name -> (version -> dependencies declaring that version).
    for (Map.Entry<String, Map<String, List<Dependency>>> entry : versionCollisions.entrySet()) {
      LOG.warn(" Dependency {} have versions: {}", entry.getKey(), StringUtils.join(entry.getValue().keySet(), ", "));
      for (Map.Entry<String, List<Dependency>> versionEntry : entry.getValue().entrySet()) {
        LOG.warn(" Version: {}", versionEntry.getKey());
        for (Dependency dependency : versionEntry.getValue()) {
          LOG.warn(" {}", dependency.getSourceName());
        }
      }
    }
  }
}
java
{ "resource": "" }
q172356
BadRecordsHandler.getBadRecords
test
/**
 * Converts the records accumulated in the error sink into the list of records to hand
 * to the bad-records pipeline, applying the configured error record policy and stamping
 * each record with this SDC instance's and pipeline's error context.
 *
 * @param errorSink sink holding error records grouped by stage instance name
 * @return the error records ready for the bad-records handler
 */
private List<Record> getBadRecords(ErrorSink errorSink) {
  List<Record> badRecords = new ArrayList<>();
  for (Map.Entry<String, List<Record>> entry : errorSink.getErrorRecords().entrySet()) {
    for (Record record : entry.getValue()) {
      RecordImpl errorRecord;
      switch (errorRecordPolicy) {
        case ORIGINAL_RECORD:
          // Ship the record as it entered the pipeline, but carry over the error
          // metadata captured on the stage-local copy.
          errorRecord = (RecordImpl) ((RecordImpl) record).getHeader().getSourceRecord();
          errorRecord.getHeader().copyErrorFrom(record);
          break;
        case STAGE_RECORD:
          // Ship the record as the failing stage saw it.
          errorRecord = (RecordImpl) record;
          break;
        default:
          // Fixed typo in the message ("Uknown" -> "Unknown").
          throw new IllegalArgumentException("Unknown error record policy: " + errorRecordPolicy);
      }
      errorRecord.getHeader().setErrorContext(runtimeInfo.getId(), pipelineName);
      badRecords.add(errorRecord);
    }
  }
  return badRecords;
}
java
{ "resource": "" }
q172357
SchemaGenerator.init
test
/**
 * Stores the generator configuration for later use. No validation is performed at
 * init time, so this never reports configuration issues.
 */
public List<Stage.ConfigIssue> init(SchemaGeneratorConfig config, Stage.Context context) {
  this.config = config;
  return Collections.emptyList();
}
java
{ "resource": "" }
q172358
DirectedGraph.getOutwardEdgeVertices
test
/**
 * Returns the vertices reachable from {@code vertex} via its outgoing edges, or an
 * empty set when the vertex has no outgoing edges.
 */
public Collection<V> getOutwardEdgeVertices(V vertex) {
  Collection<V> neighbors = outwardEdgeVertices.get(vertex);
  if (neighbors == null) {
    return Collections.<V>emptySet();
  }
  return neighbors;
}
java
{ "resource": "" }
q172359
DirectedGraph.getInwardEdgeVertices
test
/**
 * Returns the vertices with an edge pointing at {@code vertex}, or an empty set when
 * the vertex has no incoming edges.
 */
public Collection<V> getInwardEdgeVertices(V vertex) {
  Collection<V> neighbors = inwardEdgesVertices.get(vertex);
  if (neighbors == null) {
    return Collections.<V>emptySet();
  }
  return neighbors;
}
java
{ "resource": "" }
q172360
OffsetUtil.serializeOffsetMap
test
/**
 * Serializes the offset map to its JSON string form (inverse of
 * {@code deserializeOffsetMap}).
 *
 * @throws IOException if JSON serialization fails
 */
public static String serializeOffsetMap(Map<String, String> offsetMap) throws IOException {
  return JSON_MAPPER.writeValueAsString(offsetMap);
}
java
{ "resource": "" }
q172361
OffsetUtil.deserializeOffsetMap
test
/**
 * Deserializes a JSON offset string back into its map form. A null or empty input
 * yields a fresh empty (mutable) map.
 *
 * @throws IOException if the JSON cannot be parsed
 */
@SuppressWarnings("unchecked")
public static Map<String, String> deserializeOffsetMap(String lastSourceOffset) throws IOException {
  if (lastSourceOffset == null || lastSourceOffset.isEmpty()) {
    return new HashMap<>();
  }
  return JSON_MAPPER.readValue(lastSourceOffset, Map.class);
}
java
{ "resource": "" }
q172362
OmniturePollingConsumer.queueReport
test
/**
 * Queues an Omniture report via Report.Queue using the configured report description
 * and WSSE credentials.
 *
 * @return the id of the queued report, to be polled with Report.Get
 * @throws StageException if the response is missing, unparseable, or carries an error
 */
public int queueReport() throws IOException, InterruptedException, ExecutionException, TimeoutException, StageException {
  final AsyncInvoker asyncInvoker = queueResource.request()
      .header(WSSE_HEADER, OmnitureAuthUtil.getHeader(username.get(), sharedSecret.get()))
      .async();
  LOG.debug("Queueing report using URL {} with description {}", queueResource.getUri().toURL().toString(), reportDescription);
  final Future<Response> responseFuture = asyncInvoker.post(Entity.json(reportDescription));
  Response response = responseFuture.get(responseTimeoutMillis, TimeUnit.MILLISECONDS);
  if (response == null) {
    LOG.error("Failed to get response using URL {}", queueResource.getUri().toURL().toString());
    throw new StageException(Errors.OMNITURE_01, "HTTP response was null");
  }
  LOG.debug("Received response: status {}", response.getStatus());
  ObjectMapper mapper = new ObjectMapper();
  String json = response.readEntity(String.class);
  LOG.trace("Response JSON: {}", json);
  JsonNode root = mapper.readTree(json);
  if (root == null) {
    LOG.error("Invalid JSON in response: {}", json);
    throw new StageException(Errors.OMNITURE_01, json);
  }
  // Omniture signals API-level failures in the body rather than the HTTP status.
  if (root.has("error")) {
    throw new StageException(Errors.OMNITURE_01, root.get("error_description").asText());
  }
  LOG.info("Omniture report queued");
  return root.get("reportID").asInt();
}
java
{ "resource": "" }
q172363
OmniturePollingConsumer.getReport
test
public void getReport(int reportId) throws InterruptedException, ExecutionException, TimeoutException, IOException, StageException { int waitTime = 1000; Response response = null; while (!stop) { final AsyncInvoker asyncInvoker = getResource.request() .header(WSSE_HEADER, OmnitureAuthUtil.getHeader(username.get(), sharedSecret.get())) .async(); LOG.debug("Getting report using URL {} with report ID {}", getResource.getUri().toURL().toString(), reportId); final Future<Response> responseFuture = asyncInvoker.post(Entity.json("{ \"reportID\": " + reportId + " }")); response = responseFuture.get(responseTimeoutMillis, TimeUnit.MILLISECONDS); String input = response.readEntity(String.class); ObjectMapper mapper = new ObjectMapper(); JsonNode root = mapper.readTree(input); // If the report has an error field, it means the report has not finished generating if (!root.has("error")) { boolean accepted = entityQueue.offer(input, responseTimeoutMillis, TimeUnit.MILLISECONDS); if (!accepted) { LOG.warn("Response buffer full, dropped record."); } break; } else { // Exponential backoff while making subsequent Report.Get requests if (root.get("error").textValue().equals("report_not_ready")) { waitTime *= 2; LOG.info("Report not available. Sleeping for {} seconds", waitTime / 1000); Thread.sleep(waitTime); } else { throw new StageException(Errors.OMNITURE_02, root.get("error").get("error_description").asText()); } } } response.close(); }
java
{ "resource": "" }
q172364
SdcClusterOffsetHelper.isSDCCheckPointing
test
/**
 * Returns true when either the main or the backup checkpoint file exists on the
 * filesystem, i.e. SDC has previously stored offsets for this pipeline.
 *
 * @throws RuntimeException wrapping any IOException from the existence checks
 */
public boolean isSDCCheckPointing() {
  try {
    boolean mainExists = fs.exists(checkPointFilePath);
    return mainExists || fs.exists(backupCheckPointFilePath);
  } catch (IOException ex) {
    LOG.error("Error doing isSDCCheckPointing", ex);
    throw new RuntimeException(Utils.format("Error checking exists on hdfs path: {}. Reason: {}", checkPointFilePath.toString(), ex.toString()), ex);
  }
}
java
{ "resource": "" }
q172365
SdcClusterOffsetHelper.writeOffsetsToMainOffsetFile
test
/**
 * Persists the Kafka partition-to-offset map to the main checkpoint file.
 * Write protocol: create a marker file, write the main offset file, delete the marker.
 * A surviving marker signals a possibly corrupt main file, in which case recovery
 * falls back to the backup offset file.
 */
private void writeOffsetsToMainOffsetFile(Map<Integer, Long> partitionToOffsetMap) throws IOException {
  LOG.info("Saving the following offset {} to {}", partitionToOffsetMap, checkPointFilePath);
  //Creating a marker file (overwriting if it already exists) to mark that we are going to write offsets out the offsets to the main offset file.
  try (OutputStream os = fs.create(checkPointMarkerFilePath, true)) {
    //NOOP
  }
  //If the both above passes and writing fails or leaves corrupted file we will have the back file
  try (OutputStream os = fs.create(checkPointFilePath, true)) {
    OBJECT_MAPPER.writeValue(os, new ClusterSourceOffsetJson(serializeKafkaPartitionOffset(partitionToOffsetMap), SDC_STREAMING_OFFSET_VERSION));
  }
  //If this fails we are still good, as we will start from the backup offset file. (Not optimal, but deterministic)
  boolean deleted = fs.delete(checkPointMarkerFilePath, false);
  LOG.warn("Status {} for Deleting Marker File {}", deleted, checkPointMarkerFilePath);
  //If the write fails we don't want to touch the timestamp and will error out so not doing this in finally
  lastOffsetStoredTime = System.currentTimeMillis();
}
java
{ "resource": "" }
q172366
GcsObjectPostProcessingHandler.delete
test
/**
 * Deletes the given GCS object, logging an error (but not throwing) when the delete
 * reports failure.
 */
private void delete(BlobId blobId) {
  String blobPath = String.format(BLOB_PATH_TEMPLATE, blobId.getBucket(), blobId.getName());
  LOG.debug("Deleting object '{}'", blobPath);
  if (!storage.delete(blobId)) {
    LOG.error("Cannot delete object '{}'", blobPath);
  }
}
java
{ "resource": "" }
q172367
GcsObjectPostProcessingHandler.handleError
test
/**
 * Applies the configured error-handling action to the object that failed processing:
 * delete it, archive it, or leave it untouched.
 */
void handleError(BlobId blobId) {
  switch (gcsOriginErrorConfig.errorHandlingOption) {
    case DELETE:
      delete(blobId);
      break;
    case ARCHIVE:
      handleArchive(blobId);
      break;
    case NONE:
    default:
      // Leave the object in place.
      break;
  }
}
java
{ "resource": "" }
q172368
GcsObjectPostProcessingHandler.handleArchive
test
/**
 * Archives the errored object under the configured error prefix, either copying or
 * moving it, into either the dedicated error bucket or the object's own bucket
 * depending on the archiving option.
 */
private void handleArchive(BlobId blobId) {
  String destinationPath = getDestinationPath(blobId, gcsOriginErrorConfig.errorPrefix);
  String targetBucket;
  boolean deleteSource;
  switch (gcsOriginErrorConfig.archivingOption) {
    case COPY_TO_BUCKET:
      targetBucket = gcsOriginErrorConfig.errorBucket;
      deleteSource = false;
      break;
    case MOVE_TO_BUCKET:
      targetBucket = gcsOriginErrorConfig.errorBucket;
      deleteSource = true;
      break;
    case COPY_TO_PREFIX:
      targetBucket = blobId.getBucket();
      deleteSource = false;
      break;
    case MOVE_TO_PREFIX:
      targetBucket = blobId.getBucket();
      deleteSource = true;
      break;
    default:
      return;
  }
  copy(blobId, targetBucket, destinationPath, deleteSource);
}
java
{ "resource": "" }
q172369
FileContext.getReader
test
/**
 * Lazily creates (or returns) the reader for the current live file. On a cold start
 * the persisted starting file/offset are restored; when unknown, or when the offset
 * sentinel marks the previous file as fully consumed, the directory is scanned for
 * the next file. Publishes a START file event whenever a file is opened at offset 0.
 *
 * @return the active reader, or null when no file is currently available
 */
public LiveFileReader getReader() throws IOException {
  Utils.checkState(open, "FileContext is closed");
  if (reader == null) {
    currentFile = getStartingCurrentFileName();
    long fileOffset = getStartingOffset();
    // Long.MAX_VALUE is the "done with previous file" sentinel set by releaseReader.
    boolean needsToScan = currentFile == null || fileOffset == Long.MAX_VALUE;
    if (needsToScan) {
      if (currentFile != null) {
        // we need to refresh the file in case the name changed before scanning as the scanner does not refresh
        currentFile = currentFile.refresh();
      }
      currentFile = scanner.scan(currentFile);
      fileOffset = 0;
    }
    if (currentFile != null) {
      reader = new SingleLineLiveFileReader(getRollMode(), getMultiFileInfo().getTag(), currentFile, charset, fileOffset, maxLineLength);
      // Wrap in a multi-line reader when a main-line pattern is configured.
      if (!multiFileInfo.getMultiLineMainLinePatter().isEmpty()) {
        reader = new MultiLineLiveFileReader(getMultiFileInfo().getTag(), reader, Pattern.compile(multiFileInfo.getMultiLineMainLinePatter()));
      }
      if (fileOffset == 0) {
        // file start event
        eventPublisher.publish(new FileEvent(currentFile, FileEvent.Action.START));
      }
    }
  }
  return reader;
}
java
{ "resource": "" }
q172370
FileContext.releaseReader
test
/**
 * Releases the current reader, persisting the read position for the next invocation.
 * When the current file is fully consumed (or the reader is discarded due to an
 * error), the offset sentinel Long.MAX_VALUE is stored, the corresponding file event
 * (END or ERROR) is published, and the configured post-processing action
 * (NONE/DELETE/ARCHIVE) is applied — skipped on error and in preview mode.
 *
 * @param inErrorDiscardReader true when the reader is being discarded because
 *     processing the file produced an error
 */
public void releaseReader(boolean inErrorDiscardReader) throws IOException {
  Utils.checkState(open, "FileContext is closed");
  // update starting offsets for next invocation either cold (no reader) or hot (reader)
  boolean hasNext;
  try {
    hasNext = reader != null && reader.hasNext();
  } catch (IOException ex) {
    // A failing hasNext() is treated as end-of-file for this reader.
    IOUtils.closeQuietly(reader);
    reader = null;
    hasNext = false;
  }
  boolean doneWithFile = !hasNext || inErrorDiscardReader;
  if (doneWithFile) {
    IOUtils.closeQuietly(reader);
    reader = null;
    // Using Long.MAX_VALUE to signal we reach the end of the file and next iteration should get the next file.
    setStartingCurrentFileName(currentFile);
    setStartingOffset(Long.MAX_VALUE);
    // If we failed to open the file in first place, it will be null and hence we won't do anything with it.
    if (currentFile == null) {
      return;
    }
    // File end event
    LiveFile file = currentFile.refresh();
    if (inErrorDiscardReader) {
      LOG.warn("Processing file '{}' produced an error, skipping '{}' post processing on that file", file, postProcessing);
      eventPublisher.publish(new FileEvent(file, FileEvent.Action.ERROR));
    } else {
      eventPublisher.publish(new FileEvent(file, FileEvent.Action.END));
      switch (postProcessing) {
        case NONE:
          LOG.debug("File '{}' processing completed, post processing action 'NONE'", file);
          break;
        case DELETE:
          if (!inPreviewMode) {
            try {
              Files.delete(file.getPath());
              LOG.debug("File '{}' processing completed, post processing action 'DELETED'", file);
            } catch (IOException ex) {
              throw new IOException(Utils.format("Could not delete '{}': {}", file, ex.toString()), ex);
            }
          }
          break;
        case ARCHIVE:
          if (!inPreviewMode) {
            Path fileArchive = Paths.get(archiveDir, file.getPath().toString());
            if (fileArchive == null) {
              throw new IOException("Could not find archive file");
            }
            try {
              Files.createDirectories(fileArchive.getParent());
              Files.move(file.getPath(), fileArchive);
              LOG.debug("File '{}' processing completed, post processing action 'ARCHIVED' as", file);
            } catch (IOException ex) {
              throw new IOException(Utils.format("Could not archive '{}': {}", file, ex.toString()), ex);
            }
          }
          break;
      }
    }
  } else {
    // More data remains: persist the live position so the next read resumes here.
    setStartingCurrentFileName(currentFile);
    setStartingOffset(getReader().getOffset());
  }
}
java
{ "resource": "" }
q172371
ApiClient.setDPMBaseURL
test
/**
 * Propagates the DPM base URL to the authentication handler, when both are set.
 *
 * @return this client, for call chaining
 */
public ApiClient setDPMBaseURL(String dpmBaseURL) {
  boolean canPropagate = (dpmBaseURL != null) && (authentication != null);
  if (canPropagate) {
    authentication.setDPMBaseURL(dpmBaseURL);
  }
  return this;
}
java
{ "resource": "" }
q172372
ApiClient.addDefaultHeader
test
/**
 * Adds (or overwrites) an entry in the default header map used for requests made
 * through this client.
 *
 * @return this client, for call chaining
 */
public ApiClient addDefaultHeader(String key, String value) {
  defaultHeaderMap.put(key, value);
  return this;
}
java
{ "resource": "" }
q172373
ApiClient.parseDate
test
/**
 * Parses a date string using this client's configured date format, converting the
 * checked ParseException into an unchecked RuntimeException.
 * NOTE(review): if dateFormat is a SimpleDateFormat this is not thread-safe —
 * confirm the client is not shared across threads while parsing.
 */
public Date parseDate(String str) {
  try {
    return dateFormat.parse(str);
  } catch (java.text.ParseException e) {
    throw new RuntimeException(e);
  }
}
java
{ "resource": "" }
q172374
ApiClient.escapeString
test
/**
 * Percent-encodes a string for use in a URL path segment.
 */
public String escapeString(String str) {
  try {
    String encoded = URLEncoder.encode(str, "utf8");
    // URLEncoder does query-style encoding ('+' for spaces); paths need %20.
    return encoded.replaceAll("\\+", "%20");
  } catch (UnsupportedEncodingException e) {
    // UTF-8 is always supported; fall back to the raw string just in case.
    return str;
  }
}
java
{ "resource": "" }
q172375
ApiClient.getClient
test
/**
 * Returns the (cached) JAX-RS client for the current base path, creating and
 * configuring one on first use.
 */
private Client getClient() {
  return hostMap.computeIfAbsent(basePath, path -> {
    ClientConfig config = new ClientConfig();
    config.property(ClientProperties.SUPPRESS_HTTP_COMPLIANCE_VALIDATION, true);
    Client client = ClientBuilder.newClient(config);
    client.register(new CsrfProtectionFilter("CSRF"));
    return client;
  });
}
java
{ "resource": "" }
q172376
KineticaHelper.initConnection
test
/**
 * Opens a GPUdb connection using the Kinetica stage configuration.
 *
 * @throws GPUdbException if the connection cannot be established
 */
private GPUdb initConnection(KineticaConfigBean conf) throws GPUdbException, StageException {
  return new KineticaConnectionUtils().getGPUdb(conf);
}
java
{ "resource": "" }
q172377
KineticaHelper.getTableMetadata
test
/**
 * Looks up and caches the Kinetica record type of the target table for later use.
 *
 * @throws GPUdbException if the table metadata cannot be retrieved
 */
private void getTableMetadata(GPUdb gpudb, String tableName) throws GPUdbException {
  type = new KineticaTableUtils(gpudb, tableName).getType();
}
java
{ "resource": "" }
q172378
KineticaHelper.createBulkInserter
test
/**
 * Builds the bulk inserter used to write records of the given type into Kinetica.
 *
 * @throws GPUdbException if the inserter cannot be created
 */
private BulkInserter<IndexedRecord> createBulkInserter(GPUdb gpudb, Type type, KineticaConfigBean conf) throws GPUdbException {
  return new KineticaBulkInserterUtils(gpudb, type, conf).createBulkInserter();
}
java
{ "resource": "" }
q172379
EventCreator.create
test
/**
 * Starts building a new event bound to the given stage context and event sink.
 */
public EventBuilder create(Stage.Context context, ToEventContext toEvent) {
  return new EventBuilder(context, toEvent);
}
java
{ "resource": "" }
q172380
AvroSchemaGenerator.buildSchema
test
/**
 * Builds an Avro record schema from the given field-name to field-schema map.
 * With no {@code levels} the record is named by the generator-wide {@code schemaName};
 * otherwise the first level argument is joined into the nested record's name.
 * NOTE(review): assumes levels[0] is a LinkedList&lt;String&gt; of path components —
 * the cast is unchecked; confirm all callers uphold this.
 */
public static Schema buildSchema(Map<String, Schema> fields, Object... levels) {
  List<Schema.Field> recordFields = new ArrayList<>(fields.size());
  for (Map.Entry<String, Schema> entry : fields.entrySet()) {
    recordFields.add(new Schema.Field(
        entry.getKey(),
        entry.getValue(),
        null, // Avro's Schema.Field constructor requires doc.
        entry.getValue().getJsonProp("default"))
    );
  }
  Schema recordSchema;
  if (levels.length == 0) {
    recordSchema = Schema.createRecord(schemaName, null, null, false);
  } else {
    LinkedList<String> lvl = (LinkedList<String>) levels[0];
    recordSchema = Schema.createRecord(joiner.join(lvl), null, null, false);
  }
  recordSchema.setFields(recordFields);
  return recordSchema;
}
java
{ "resource": "" }
q172381
OracleCDCOperationCode.convertFromOracleToSDCCode
test
public static int convertFromOracleToSDCCode(String code){ try { int intCode = Integer.parseInt(code); switch (intCode) { case INSERT_CODE: return OperationType.INSERT_CODE; case DELETE_CODE: return OperationType.DELETE_CODE; case UPDATE_CODE: case SELECT_FOR_UPDATE_CODE: return OperationType.UPDATE_CODE; default: //DDL_CODE throw new UnsupportedOperationException(Utils.format("Operation code {} is not supported", code)); } } catch (NumberFormatException ex) { throw new NumberFormatException("Operation code must be a numeric value. " + ex.getMessage()); } }
java
{ "resource": "" }
q172382
MapreduceUtils.addJarsToJob
test
public static void addJarsToJob(Configuration conf, Class ...klasses) { // Build set of jars that needs to be added, order doesn't matter for us and we will remove duplicates Set<String> additinonalJars = new HashSet<>(); for(Class klass : klasses) { final String jar = jarForClass(klass); LOG.info("Adding jar {} for class {}", jar, klass.getCanonicalName()); additinonalJars.add(jar); } appendJars(conf, additinonalJars); }
java
{ "resource": "" }
q172383
MapreduceUtils.addJarsToJob
test
/**
 * Adds jars matching the given patterns, resolved against this class's classloader
 * URLs, to the MapReduce job's classpath.
 *
 * @throws IllegalStateException when the classloader is not a URLClassLoader and its
 *     URLs therefore cannot be enumerated
 */
public static void addJarsToJob(Configuration conf, boolean allowMultiple, String... jarPatterns) {
  ClassLoader loader = MapreduceUtils.class.getClassLoader();
  if (loader instanceof URLClassLoader) {
    addJarsToJob(conf, allowMultiple, ((URLClassLoader) loader).getURLs(), jarPatterns);
    return;
  }
  throw new IllegalStateException(String.format(
      "ClassLoader for %s is not an instance of URLClassLoader (it is %s), and thus this method cannot be used",
      MapreduceUtils.class.getCanonicalName(),
      loader.getClass().getCanonicalName()
  ));
}
java
{ "resource": "" }
q172384
ForceSource.checkFieldOrderByList
test
/**
 * Returns true when the text of the first ORDER BY element's field equals
 * {@code fieldName}, ignoring case.
 */
private boolean checkFieldOrderByList(SOQLParser.FieldOrderByListContext fieldOrderByList, String fieldName) {
  return fieldOrderByList.fieldOrderByElement(0).fieldElement().getText().equalsIgnoreCase(fieldName);
}
java
{ "resource": "" }
q172385
ForceSource.checkConditionExpressions
test
/**
 * Recursively searches the WHERE-clause condition tree for any field expression whose
 * field text equals {@code fieldName} (case-insensitive).
 *
 * @return true when the field is referenced anywhere in the conditions
 */
private boolean checkConditionExpressions(
    SOQLParser.ConditionExpressionsContext conditionExpressions,
    String fieldName
) {
  for (SOQLParser.ConditionExpressionContext ce : conditionExpressions.conditionExpression()) {
    boolean nestedMatch = ce.conditionExpressions() != null
        && checkConditionExpressions(ce.conditionExpressions(), fieldName);
    if (nestedMatch) {
      return true;
    }
    boolean fieldMatch = ce.fieldExpression() != null
        && ce.fieldExpression().fieldElement().getText().equalsIgnoreCase(fieldName);
    if (fieldMatch) {
      return true;
    }
  }
  return false;
}
java
{ "resource": "" }
q172386
FileContextProviderUtil.getOffsetLagForFile
test
public static long getOffsetLagForFile(String fileOffsetString) throws IOException { long offset = FileContextProviderUtil.getLongOffsetFromFileOffset(fileOffsetString); //We are refreshing the live file here because we are going to get the size by using path. LiveFile file = FileContextProviderUtil.getRefreshedLiveFileFromFileOffset(fileOffsetString); long fileSizeInBytes = Files.size(file.getPath().toAbsolutePath()); return (fileSizeInBytes - offset); }
java
{ "resource": "" }
q172387
BootstrapMain.premain
test
/**
 * JVM agent entry point: captures the Instrumentation instance exactly once.
 *
 * @throws IllegalStateException if an instrumentation instance was already captured
 */
public static void premain(String args, Instrumentation instrumentation) {
  if (BootstrapMain.instrumentation != null) {
    throw new IllegalStateException("Premain method cannot be called twice (" + BootstrapMain.instrumentation + ")");
  }
  BootstrapMain.instrumentation = instrumentation;
}
java
{ "resource": "" }
q172388
BaseKafkaSource.getParallelism
test
/**
 * Lazily computes the origin parallelism as the Kafka topic's partition count.
 * NOTE(review): on failure the field is left holding the invalid (&lt;1) count, which a
 * subsequent call would return without re-validating (only 0 triggers recompute) —
 * confirm whether that is intended.
 *
 * @throws StageException if the partition count cannot be determined or is below 1
 */
public int getParallelism() throws StageException {
  if (originParallelism == 0) {
    //origin parallelism is not yet calculated
    originParallelism = kafkaValidationUtil.getPartitionCount(
        conf.metadataBrokerList,
        conf.topic,
        new HashMap<String, Object>(conf.kafkaConsumerConfigs),
        3,
        1000
    );
    if (originParallelism < 1) {
      throw new StageException(KafkaErrors.KAFKA_42, conf.topic);
    }
  }
  return originParallelism;
}
java
{ "resource": "" }
q172389
BigQueryDelegate.runQuery
test
/**
 * Synchronously runs a BigQuery query job, polling until completion or until the
 * supplied timeout elapses (the job is cancelled on timeout), and returns the first
 * page of results.
 * NOTE(review): maxTime is computed with Instant.now() while the poll loop compares
 * against Instant.now(clock); with a non-system clock these disagree — confirm intent.
 *
 * @param timeout overall timeout in milliseconds; must be at least 1000
 * @param pageSize number of rows per result page
 * @throws StageException on job error (BIGQUERY_02) or timeout (BIGQUERY_00)
 */
public TableResult runQuery(QueryJobConfiguration queryConfig, long timeout, long pageSize) throws StageException {
  checkArgument(timeout >= 1000, "Timeout must be at least one second.");
  Instant maxTime = Instant.now().plusMillis(timeout);
  // Create a job ID so that we can safely retry.
  JobId jobId = JobId.of(UUID.randomUUID().toString());
  JobInfo jobInfo = JobInfo.newBuilder(queryConfig).setJobId(jobId).build();
  Job queryJob = bigquery.create(jobInfo);
  // Check for errors
  if (queryJob == null) {
    LOG.error("Job no longer exists: {}", jobInfo);
    throw new RuntimeException("Job no longer exists: " + jobInfo);
  } else if (queryJob.getStatus().getError() != null) {
    BigQueryError error = queryJob.getStatus().getError();
    LOG.error("Query Job execution error: {}", error);
    throw new StageException(Errors.BIGQUERY_02, error);
  }
  //Should consider using .waitFor(RetryOption.totalTimeout())
  while (!queryJob.isDone()) {
    // Give up on timeout or when the 100ms sleep is interrupted.
    if (Instant.now(clock).isAfter(maxTime) || !ThreadUtil.sleep(100)) {
      if (bigquery.cancel(queryJob.getJobId())) {
        LOG.info("Job {} cancelled successfully.", queryJob.getJobId());
      } else {
        LOG.warn("Job {} not found", queryJob.getJobId());
      }
      throw new StageException(Errors.BIGQUERY_00);
    }
  }
  if (queryJob.getStatus().getError() != null) {
    String errorMsg = queryJob.getStatus().getError().toString();
    throw new StageException(Errors.BIGQUERY_02, errorMsg);
  }
  // Get the results.
  TableResult result = null;
  try {
    result = queryJob.getQueryResults(QueryResultsOption.pageSize(pageSize));
  } catch (InterruptedException e) {
    String errorMsg = e.getMessage();
    throw new StageException(Errors.BIGQUERY_02, errorMsg);
  }
  return result;
}
java
{ "resource": "" }
q172390
BigQueryDelegate.fieldsToMap
test
public LinkedHashMap<String, Field> fieldsToMap( // NOSONAR List<com.google.cloud.bigquery.Field> schema, List<FieldValue> values ) { checkState( schema.size() == values.size(), "Schema '{}' and Values '{}' sizes do not match.", schema.size(), values.size() ); LinkedHashMap<String, Field> root = new LinkedHashMap<>(); for (int i = 0; i < values.size(); i++) { FieldValue value = values.get(i); com.google.cloud.bigquery.Field field = schema.get(i); if (value.getAttribute().equals(FieldValue.Attribute.PRIMITIVE)) { root.put(field.getName(), fromPrimitiveField(field, value)); } else if (value.getAttribute().equals(FieldValue.Attribute.RECORD)) { root.put( field.getName(), Field.create(fieldsToMap(field.getSubFields(), value.getRecordValue())) ); } else if (value.getAttribute().equals(FieldValue.Attribute.REPEATED)) { root.put(field.getName(), Field.create(fromRepeatedField(field, value.getRepeatedValue()))); } } return root; }
java
{ "resource": "" }
q172391
KineticaTableUtils.getTableDescription
test
/**
 * Returns the single table description from the show-table response.
 *
 * @throws GPUdbException when the response does not contain exactly one description
 */
private List<String> getTableDescription() throws GPUdbException {
  List<List<String>> descriptions = showTableResponse.getTableDescriptions();
  boolean exactlyOne = descriptions != null && descriptions.size() == 1;
  if (!exactlyOne) {
    throw new GPUdbException("Error getting description for table " + tableName);
  }
  return descriptions.get(0);
}
java
{ "resource": "" }
q172392
KineticaTableUtils.validateTableAcceptsInserts
test
/**
 * Rejects table kinds that cannot accept direct inserts (collections, views, join
 * tables, result tables) by inspecting the table's description flags.
 *
 * @throws GPUdbException when the table is one of the rejected kinds
 */
private void validateTableAcceptsInserts() throws GPUdbException {
  // Rejected descriptor values mapped to the phrase used in the error message.
  String[][] rejectedKinds = {
      {"COLLECTION", "a Collection"},
      {"VIEW", "a View"},
      {"JOIN", "a Join Table"},
      {"RESULT_TABLE", "a Result Table"},
  };
  for (String descriptor : tableDescription) {
    for (String[] kind : rejectedKinds) {
      if (descriptor.equalsIgnoreCase(kind[0])) {
        throw new GPUdbException("Error: table " + tableName + " is " + kind[1]);
      }
    }
  }
}
java
{ "resource": "" }
q172393
KineticaTableUtils.getColumnType
test
private Class<?> getColumnType(JSONObject field) throws GPUdbException { Class<?> columnType = null; // The Avro "type" element might be an array if the type is nullable if (field.get("type") instanceof JSONArray) { JSONArray columnTypes = field.getJSONArray("type"); for (int j = 0; j < columnTypes.length(); j++) { String ct = (String) columnTypes.get(j); if (!ct.equals("null")) { columnType = getClassForType(ct); break; } } } else { columnType = getClassForType(field.getString("type")); } if (columnType == null) { throw new GPUdbException("Error getting column type for field: " + field.toString()); } return columnType; }
java
{ "resource": "" }
q172394
KineticaTableUtils.typeIsNullable
test
/**
 * Returns true when the field's Avro type is a union (JSON array) containing "null",
 * i.e. the column is nullable. Non-union types are never nullable.
 */
private boolean typeIsNullable(JSONObject field) throws GPUdbException {
  Object rawType = field.get("type");
  if (!(rawType instanceof JSONArray)) {
    return false;
  }
  JSONArray unionTypes = (JSONArray) rawType;
  for (int i = 0; i < unionTypes.length(); i++) {
    String candidate = (String) unionTypes.get(i);
    if (candidate.equals("null")) {
      return true;
    }
  }
  return false;
}
java
{ "resource": "" }
q172395
KineticaTableUtils.getTableSchema
test
/**
 * Parses the single type schema from the show-table response into a JSON object.
 *
 * @throws GPUdbException when the response does not contain exactly one schema
 */
private JSONObject getTableSchema(String tableName, ShowTableResponse showTableResponse) throws GPUdbException {
  List<String> schemas = showTableResponse.getTypeSchemas();
  if (schemas != null && schemas.size() == 1) {
    return new JSONObject(schemas.get(0));
  }
  throw new GPUdbException("Error getting schema for table " + tableName);
}
java
{ "resource": "" }
q172396
KineticaTableUtils.getColumnProperties
test
/**
 * Returns the single column-properties map from the show-table response.
 *
 * @throws GPUdbException when the response does not contain exactly one entry
 */
private Map<String, List<String>> getColumnProperties(String tableName, ShowTableResponse showTableResponse) throws GPUdbException {
  List<Map<String, List<String>>> columnPropertiesList = showTableResponse.getProperties();
  if (columnPropertiesList != null && columnPropertiesList.size() == 1) {
    return columnPropertiesList.get(0);
  }
  throw new GPUdbException("Error getting properties for table " + tableName);
}
java
{ "resource": "" }
q172397
KineticaTableUtils.getClassForType
test
/**
 * Maps a schema type name (whitespace-stripped, case-insensitive) to the Java class
 * used for that column's values.
 *
 * @throws GPUdbException for type names outside the supported set
 */
private Class<?> getClassForType(String typeName) throws GPUdbException {
  // Schema type names may contain stray spaces; normalize before comparing.
  typeName = typeName.replace(" ", "");
  if (typeName.equalsIgnoreCase(STRING_TYPE_NAME)) {
    return String.class;
  } else if (typeName.equalsIgnoreCase(LONG_TYPE_NAME)) {
    return Long.class;
  } else if (typeName.equalsIgnoreCase(INTEGER_TYPE_NAME)) {
    return Integer.class;
  } else if (typeName.equalsIgnoreCase(FLOAT_TYPE_NAME)) {
    return Float.class;
  } else if (typeName.equalsIgnoreCase(DOUBLE_TYPE_NAME)) {
    return Double.class;
  } else if (typeName.equalsIgnoreCase(BYTES_TYPE_NAME)) {
    return ByteBuffer.class;
  } else {
    throw new GPUdbException("Error: unknown type '" + typeName + "' in table schema");
  }
}
java
{ "resource": "" }
q172398
AerospikeBeanConfig.init
test
/**
 * Connects the Aerospike client to the configured hosts, polling briefly until the
 * connection is established, and reports a configuration issue when the connection
 * cannot be made (either an exception or the retry budget is exhausted).
 */
public void init(Target.Context context, List<Target.ConfigIssue> issues) {
  List<Host> hosts = getAerospikeHosts(issues, connectionString, Groups.AEROSPIKE.getLabel(), "aerospikeBeanConfig.connectionString", context);
  ClientPolicy cp = new ClientPolicy();
  try {
    client = new AerospikeClient(cp, hosts.toArray(new Host[hosts.size()]));
    int retries = 0;
    // Poll until connected, sleeping briefly between checks. The previous version had a
    // dead "if (retries > maxRetries)" branch inside this loop (unreachable given the
    // loop condition) and never reported an issue when the retries ran out.
    while (!client.isConnected() && retries <= maxRetries) {
      retries++;
      try {
        Thread.sleep(100);
      } catch (InterruptedException ignored) {
        // Preserve the interrupt flag for the caller.
        Thread.currentThread().interrupt();
      }
    }
    if (!client.isConnected()) {
      // Bug fix: report the failure instead of silently returning unconnected.
      issues.add(context.createConfigIssue(Groups.AEROSPIKE.getLabel(), "aerospikeBeanConfig.connectionString", AerospikeErrors.AEROSPIKE_03, connectionString));
    }
  } catch (AerospikeException ex) {
    issues.add(context.createConfigIssue(Groups.AEROSPIKE.getLabel(), "aerospikeBeanConfig.connectionString", AerospikeErrors.AEROSPIKE_03, connectionString));
  }
}
java
{ "resource": "" }
q172399
MqttClientTarget.getTopic
test
/**
 * Determines the MQTT topic for a record: the statically configured topic, or — when
 * runtime topic resolution is enabled — the result of evaluating the topic expression
 * against the record, validated against the allowed-topics list.
 *
 * @throws StageException if the expression resolves empty (MQTT_08), the resolved
 *     topic is not allowed (MQTT_09), or the expression fails to evaluate (MQTT_10)
 */
String getTopic(Record record) throws StageException {
  String result = publisherConf.topic;
  if (publisherConf.runtimeTopicResolution) {
    RecordEL.setRecordInContext(topicVars, record);
    try {
      result = topicEval.eval(topicVars, publisherConf.topicExpression, String.class);
      if (isEmpty(result)) {
        throw new StageException(Errors.MQTT_08, publisherConf.topicExpression, record.getHeader().getSourceId());
      }
      // Whitelist check is skipped entirely when all topics are allowed.
      if (!allowedTopics.contains(result) && !allowAllTopics) {
        throw new StageException(Errors.MQTT_09, result, record.getHeader().getSourceId());
      }
    } catch (ELEvalException e) {
      throw new StageException(
          Errors.MQTT_10,
          publisherConf.topicExpression,
          record.getHeader().getSourceId(),
          e.toString()
      );
    }
  }
  return result;
}
java
{ "resource": "" }