proj_name
stringclasses
131 values
relative_path
stringlengths
30
228
class_name
stringlengths
1
68
func_name
stringlengths
1
48
masked_class
stringlengths
78
9.82k
func_body
stringlengths
46
9.61k
len_input
int64
29
2.01k
len_output
int64
14
1.94k
total
int64
55
2.05k
relevant_context
stringlengths
0
38.4k
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/JobStepBuilder.java
JobStepBuilder
build
class JobStepBuilder extends StepBuilderHelper<JobStepBuilder> { private Job job; private JobLauncher jobLauncher; private JobParametersExtractor jobParametersExtractor; /** * Create a new builder initialized with any properties in the parent. The parent is * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public JobStepBuilder(StepBuilderHelper<?> parent) { super(parent); } /** * Provide a job to execute during the step. * @param job the job to execute * @return this for fluent chaining */ public JobStepBuilder job(Job job) { this.job = job; return this; } /** * Add a job launcher. Defaults to a simple job launcher. * @param jobLauncher the job launcher to use * @return this for fluent chaining */ public JobStepBuilder launcher(JobLauncher jobLauncher) { this.jobLauncher = jobLauncher; return this; } /** * Provide a job parameters extractor. Useful for extracting job parameters from the * parent step execution context or job parameters. * @param jobParametersExtractor the job parameters extractor to use * @return this for fluent chaining */ public JobStepBuilder parametersExtractor(JobParametersExtractor jobParametersExtractor) { this.jobParametersExtractor = jobParametersExtractor; return this; } /** * Build a step from the job provided. * @return a new job step */ public Step build() {<FILL_FUNCTION_BODY>} @Override protected JobStepBuilder self() { return this; } }
JobStep step = new JobStep(); step.setName(getName()); super.enhance(step); if (job != null) { step.setJob(job); } if (jobParametersExtractor != null) { step.setJobParametersExtractor(jobParametersExtractor); } if (jobLauncher == null) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(getJobRepository()); try { jobLauncher.afterPropertiesSet(); } catch (Exception e) { throw new StepBuilderException(e); } this.jobLauncher = jobLauncher; } step.setJobLauncher(jobLauncher); try { step.afterPropertiesSet(); } catch (Exception e) { throw new StepBuilderException(e); } return step;
434
253
687
<methods>public void <init>(java.lang.String) ,public void <init>(java.lang.String, org.springframework.batch.core.repository.JobRepository) ,public org.springframework.batch.core.step.builder.JobStepBuilder allowStartIfComplete(boolean) ,public org.springframework.batch.core.step.builder.JobStepBuilder listener(java.lang.Object) ,public org.springframework.batch.core.step.builder.JobStepBuilder listener(org.springframework.batch.core.StepExecutionListener) ,public org.springframework.batch.core.step.builder.JobStepBuilder meterRegistry(io.micrometer.core.instrument.MeterRegistry) ,public org.springframework.batch.core.step.builder.JobStepBuilder observationConvention(org.springframework.batch.core.observability.BatchStepObservationConvention) ,public org.springframework.batch.core.step.builder.JobStepBuilder observationRegistry(io.micrometer.observation.ObservationRegistry) ,public org.springframework.batch.core.step.builder.JobStepBuilder repository(org.springframework.batch.core.repository.JobRepository) ,public org.springframework.batch.core.step.builder.JobStepBuilder startLimit(int) <variables>protected final org.apache.commons.logging.Log logger,protected final non-sealed org.springframework.batch.core.step.builder.StepBuilderHelper.CommonStepProperties properties
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/PartitionStepBuilder.java
PartitionStepBuilder
build
class PartitionStepBuilder extends StepBuilderHelper<PartitionStepBuilder> { private TaskExecutor taskExecutor; private Partitioner partitioner; private static final int DEFAULT_GRID_SIZE = 6; private Step step; private PartitionHandler partitionHandler; private int gridSize = DEFAULT_GRID_SIZE; private StepExecutionSplitter splitter; private StepExecutionAggregator aggregator; private String stepName; /** * Create a new builder initialized with any properties in the parent. The parent is * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public PartitionStepBuilder(StepBuilderHelper<?> parent) { super(parent); } /** * Add a partitioner which can be used to create a {@link StepExecutionSplitter}. Use * either this or an explicit {@link #splitter(StepExecutionSplitter)} but not both. * @param workerStepName the name of the worker step (used to construct step execution * names) * @param partitioner a partitioner to use * @return this for fluent chaining */ public PartitionStepBuilder partitioner(String workerStepName, Partitioner partitioner) { this.stepName = workerStepName; this.partitioner = partitioner; return this; } /** * Provide an actual step instance to execute in parallel. If an explicit * {@link #partitionHandler(PartitionHandler)} is provided, the step is optional and * is only used to extract configuration data (name and other basic properties of a * step). * @param step a step to execute in parallel * @return this for fluent chaining */ public PartitionStepBuilder step(Step step) { this.step = step; return this; } /** * Provide a task executor to use when constructing a {@link PartitionHandler} from * the {@link #step(Step)}. Mainly used for running a step locally in parallel, but * can be used to execute remotely if the step is remote. Not used if an explicit * {@link #partitionHandler(PartitionHandler)} is provided. 
* @param taskExecutor a task executor to use when executing steps in parallel * @return this for fluent chaining */ public PartitionStepBuilder taskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; return this; } /** * Provide an explicit partition handler that will carry out the work of the partition * step. The partition handler is the main SPI for adapting a partition step to a * specific distributed computation environment. Optional if you only need local or * remote processing through the Step interface. * * @see #step(Step) for setting up a default handler that works with a local or remote * Step * @param partitionHandler a partition handler * @return this for fluent chaining */ public PartitionStepBuilder partitionHandler(PartitionHandler partitionHandler) { this.partitionHandler = partitionHandler; return this; } /** * A hint to the {@link #splitter(StepExecutionSplitter)} about how many step * executions are required. If running locally or remotely through a * {@link #taskExecutor(TaskExecutor)} determines precisely the number of step * executions in the first attempt at a partition step execution. * @param gridSize the grid size * @return this for fluent chaining */ public PartitionStepBuilder gridSize(int gridSize) { this.gridSize = gridSize; return this; } /** * Provide an explicit {@link StepExecutionSplitter} instead of having one build from * the {@link #partitioner(String, Partitioner)}. Useful if you need more control over * the splitting. * @param splitter a step execution splitter * @return this for fluent chaining */ public PartitionStepBuilder splitter(StepExecutionSplitter splitter) { this.splitter = splitter; return this; } /** * Provide a step execution aggregator for aggregating partitioned step executions * into a single result for the {@link PartitionStep} itself. Default is a simple * implementation that works in most cases. 
* @param aggregator a step execution aggregator * @return this for fluent chaining */ public PartitionStepBuilder aggregator(StepExecutionAggregator aggregator) { this.aggregator = aggregator; return this; } public Step build() {<FILL_FUNCTION_BODY>} @Override protected PartitionStepBuilder self() { return this; } protected TaskExecutor getTaskExecutor() { return taskExecutor; } protected Partitioner getPartitioner() { return partitioner; } protected Step getStep() { return step; } protected PartitionHandler getPartitionHandler() { return partitionHandler; } protected int getGridSize() { return gridSize; } protected StepExecutionSplitter getSplitter() { return splitter; } protected StepExecutionAggregator getAggregator() { return aggregator; } protected String getStepName() { return stepName; } }
PartitionStep step = new PartitionStep(); step.setName(getName()); super.enhance(step); if (partitionHandler != null) { step.setPartitionHandler(partitionHandler); } else { TaskExecutorPartitionHandler partitionHandler = new TaskExecutorPartitionHandler(); partitionHandler.setStep(this.step); if (taskExecutor == null) { taskExecutor = new SyncTaskExecutor(); } partitionHandler.setGridSize(gridSize); partitionHandler.setTaskExecutor(taskExecutor); step.setPartitionHandler(partitionHandler); } if (splitter != null) { step.setStepExecutionSplitter(splitter); } else { boolean allowStartIfComplete = isAllowStartIfComplete(); String name = stepName; if (this.step != null) { try { allowStartIfComplete = this.step.isAllowStartIfComplete(); name = this.step.getName(); } catch (Exception e) { if (logger.isInfoEnabled()) { logger.info("Ignored exception from step asking for name and allowStartIfComplete flag. " + "Using default from enclosing PartitionStep (" + name + "," + allowStartIfComplete + ")."); } } } SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(); splitter.setPartitioner(partitioner); splitter.setJobRepository(getJobRepository()); splitter.setAllowStartIfComplete(allowStartIfComplete); splitter.setStepName(name); this.splitter = splitter; step.setStepExecutionSplitter(splitter); } if (aggregator != null) { step.setStepExecutionAggregator(aggregator); } try { step.afterPropertiesSet(); } catch (Exception e) { throw new StepBuilderException(e); } return step;
1,294
531
1,825
<methods>public void <init>(java.lang.String) ,public void <init>(java.lang.String, org.springframework.batch.core.repository.JobRepository) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder allowStartIfComplete(boolean) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder listener(java.lang.Object) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder listener(org.springframework.batch.core.StepExecutionListener) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder meterRegistry(io.micrometer.core.instrument.MeterRegistry) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder observationConvention(org.springframework.batch.core.observability.BatchStepObservationConvention) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder observationRegistry(io.micrometer.observation.ObservationRegistry) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder repository(org.springframework.batch.core.repository.JobRepository) ,public org.springframework.batch.core.step.builder.PartitionStepBuilder startLimit(int) <variables>protected final org.apache.commons.logging.Log logger,protected final non-sealed org.springframework.batch.core.step.builder.StepBuilderHelper.CommonStepProperties properties
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderHelper.java
StepBuilderHelper
enhance
class StepBuilderHelper<B extends StepBuilderHelper<B>> { protected final Log logger = LogFactory.getLog(getClass()); protected final CommonStepProperties properties; /** * Create a new {@link StepBuilderHelper}. * @param name the step name * @deprecated use {@link StepBuilderHelper#StepBuilderHelper(String, JobRepository)} */ @Deprecated(since = "5.1", forRemoval = true) public StepBuilderHelper(String name) { this.properties = new CommonStepProperties(); properties.name = name; } /** * Create a new {@link StepBuilderHelper}. * @param name the step name * @param jobRepository the job repository * @since 5.1 */ public StepBuilderHelper(String name, JobRepository jobRepository) { this.properties = new CommonStepProperties(); properties.name = name; properties.jobRepository = jobRepository; } /** * Create a new builder initialized with any properties in the parent. The parent is * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ protected StepBuilderHelper(StepBuilderHelper<?> parent) { this.properties = new CommonStepProperties(parent.properties); } /** * Set the job repository * @param jobRepository the repository to set * @return this to enable fluent chaining * @deprecated use {@link StepBuilderHelper#StepBuilderHelper(String, JobRepository)} */ @Deprecated(since = "5.1", forRemoval = true) public B repository(JobRepository jobRepository) { properties.jobRepository = jobRepository; return self(); } /** * Sets the step observation convention. 
* @param observationConvention the step observation convention (optional) * @return this to enable fluent chaining * @since 5.1 */ public B observationConvention(BatchStepObservationConvention observationConvention) { properties.observationConvention = observationConvention; return self(); } public B observationRegistry(ObservationRegistry observationRegistry) { properties.observationRegistry = observationRegistry; return self(); } public B meterRegistry(MeterRegistry meterRegistry) { properties.meterRegistry = meterRegistry; return self(); } public B startLimit(int startLimit) { properties.startLimit = startLimit; return self(); } /** * Registers objects using the annotation based listener configuration. * @param listener the object that has a method configured with listener annotation * @return this for fluent chaining */ public B listener(Object listener) { Set<Method> stepExecutionListenerMethods = new HashSet<>(); stepExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeStep.class)); stepExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterStep.class)); if (stepExecutionListenerMethods.size() > 0) { StepListenerFactoryBean factory = new StepListenerFactoryBean(); factory.setDelegate(listener); properties.addStepExecutionListener((StepExecutionListener) factory.getObject()); } return self(); } public B listener(StepExecutionListener listener) { properties.addStepExecutionListener(listener); return self(); } public B allowStartIfComplete(boolean allowStartIfComplete) { properties.allowStartIfComplete = allowStartIfComplete; return self(); } protected abstract B self(); protected String getName() { return properties.name; } protected JobRepository getJobRepository() { return properties.jobRepository; } protected boolean isAllowStartIfComplete() { return properties.allowStartIfComplete != null ? 
properties.allowStartIfComplete : false; } protected void enhance(AbstractStep step) {<FILL_FUNCTION_BODY>} public static class CommonStepProperties { private List<StepExecutionListener> stepExecutionListeners = new ArrayList<>(); private int startLimit = Integer.MAX_VALUE; private Boolean allowStartIfComplete; private JobRepository jobRepository; private BatchStepObservationConvention observationConvention = new DefaultBatchStepObservationConvention(); private ObservationRegistry observationRegistry = ObservationRegistry.NOOP; private MeterRegistry meterRegistry = Metrics.globalRegistry; public CommonStepProperties() { } public CommonStepProperties(CommonStepProperties properties) { this.name = properties.name; this.startLimit = properties.startLimit; this.allowStartIfComplete = properties.allowStartIfComplete; this.jobRepository = properties.jobRepository; this.observationConvention = properties.observationConvention; this.observationRegistry = properties.observationRegistry; this.meterRegistry = properties.meterRegistry; this.stepExecutionListeners = new ArrayList<>(properties.stepExecutionListeners); } public JobRepository getJobRepository() { return jobRepository; } public void setJobRepository(JobRepository jobRepository) { this.jobRepository = jobRepository; } public BatchStepObservationConvention getObservationConvention() { return observationConvention; } public void setObservationConvention(BatchStepObservationConvention observationConvention) { this.observationConvention = observationConvention; } public ObservationRegistry getObservationRegistry() { return observationRegistry; } public void setObservationRegistry(ObservationRegistry observationRegistry) { this.observationRegistry = observationRegistry; } public MeterRegistry getMeterRegistry() { return meterRegistry; } public void setMeterRegistry(MeterRegistry meterRegistry) { this.meterRegistry = meterRegistry; } public String getName() { return name; } public void setName(String name) { this.name = 
name; } public List<StepExecutionListener> getStepExecutionListeners() { return stepExecutionListeners; } public void addStepExecutionListeners(List<StepExecutionListener> stepExecutionListeners) { this.stepExecutionListeners.addAll(stepExecutionListeners); } public void addStepExecutionListener(StepExecutionListener stepExecutionListener) { this.stepExecutionListeners.add(stepExecutionListener); } public Integer getStartLimit() { return startLimit; } public void setStartLimit(Integer startLimit) { this.startLimit = startLimit; } public Boolean getAllowStartIfComplete() { return allowStartIfComplete; } public void setAllowStartIfComplete(Boolean allowStartIfComplete) { this.allowStartIfComplete = allowStartIfComplete; } private String name; } }
step.setJobRepository(properties.getJobRepository()); BatchStepObservationConvention observationConvention = properties.getObservationConvention(); if (observationConvention != null) { step.setObservationConvention(observationConvention); } ObservationRegistry observationRegistry = properties.getObservationRegistry(); if (observationRegistry != null) { step.setObservationRegistry(observationRegistry); } MeterRegistry meterRegistry = properties.getMeterRegistry(); if (meterRegistry != null) { step.setMeterRegistry(meterRegistry); } Boolean allowStartIfComplete = properties.allowStartIfComplete; if (allowStartIfComplete != null) { step.setAllowStartIfComplete(allowStartIfComplete); } step.setStartLimit(properties.startLimit); List<StepExecutionListener> listeners = properties.stepExecutionListeners; if (!listeners.isEmpty()) { step.setStepExecutionListeners(listeners.toArray(new StepExecutionListener[0])); }
1,730
283
2,013
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/BatchListenerFactoryHelper.java
BatchListenerFactoryHelper
getListeners
class BatchListenerFactoryHelper { public static <T> List<T> getListeners(StepListener[] listeners, Class<? super T> cls) {<FILL_FUNCTION_BODY>} }
List<T> list = new ArrayList<>(); for (StepListener stepListener : listeners) { if (cls.isAssignableFrom(stepListener.getClass())) { @SuppressWarnings("unchecked") T listener = (T) stepListener; list.add(listener); } } return list;
53
92
145
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/BatchRetryTemplate.java
InnerRetryTemplate
handleRetryExhausted
class InnerRetryTemplate extends RetryTemplate { @Override protected boolean canRetry(RetryPolicy retryPolicy, RetryContext context) { BatchRetryContext batchContext = (BatchRetryContext) context; for (RetryContext nextContext : batchContext.contexts) { if (!super.canRetry(retryPolicy, nextContext)) { return false; } } return true; } @Override protected RetryContext open(RetryPolicy retryPolicy, RetryState state) { BatchRetryState batchState = (BatchRetryState) state; Collection<RetryContext> contexts = new ArrayList<>(); for (RetryState retryState : batchState.keys) { contexts.add(super.open(retryPolicy, retryState)); } return new BatchRetryContext(RetrySynchronizationManager.getContext(), contexts); } @Override protected void registerThrowable(RetryPolicy retryPolicy, RetryState state, RetryContext context, Throwable e) { BatchRetryState batchState = (BatchRetryState) state; BatchRetryContext batchContext = (BatchRetryContext) context; Iterator<RetryContext> contextIterator = batchContext.contexts.iterator(); for (RetryState retryState : batchState.keys) { RetryContext nextContext = contextIterator.next(); super.registerThrowable(retryPolicy, retryState, nextContext, e); } } @Override protected void close(RetryPolicy retryPolicy, RetryContext context, RetryState state, boolean succeeded) { BatchRetryState batchState = (BatchRetryState) state; BatchRetryContext batchContext = (BatchRetryContext) context; Iterator<RetryContext> contextIterator = batchContext.contexts.iterator(); for (RetryState retryState : batchState.keys) { RetryContext nextContext = contextIterator.next(); super.close(retryPolicy, nextContext, retryState, succeeded); } } @Override protected <T> T handleRetryExhausted(RecoveryCallback<T> recoveryCallback, RetryContext context, RetryState state) throws Throwable {<FILL_FUNCTION_BODY>} }
BatchRetryState batchState = (BatchRetryState) state; BatchRetryContext batchContext = (BatchRetryContext) context; // Accumulate exceptions to be thrown so all the keys get a crack Throwable rethrowable = null; ExhaustedRetryException exhausted = null; Iterator<RetryContext> contextIterator = batchContext.contexts.iterator(); for (RetryState retryState : batchState.keys) { RetryContext nextContext = contextIterator.next(); try { super.handleRetryExhausted(null, nextContext, retryState); } catch (ExhaustedRetryException e) { exhausted = e; } catch (Throwable e) { rethrowable = e; } } if (recoveryCallback != null) { return recoveryCallback.recover(context); } if (exhausted != null) { throw exhausted; } throw rethrowable;
610
276
886
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkMonitor.java
ChunkMonitorData
getData
class ChunkMonitorData { public int offset; public int chunkSize; public ChunkMonitorData(int offset, int chunkSize) { this.offset = offset; this.chunkSize = chunkSize; } } private static final String OFFSET = "OFFSET"; private final CompositeItemStream stream = new CompositeItemStream(); private final ThreadLocal<ChunkMonitorData> holder = new ThreadLocal<>(); private ItemReader<?> reader; public ChunkMonitor() { this.setExecutionContextName(ChunkMonitor.class.getName()); } /** * @param stream the stream to set */ public void registerItemStream(ItemStream stream) { streamsRegistered = true; this.stream.register(stream); } /** * @param reader the reader to set */ public void setItemReader(ItemReader<?> reader) { this.reader = reader; } public void incrementOffset() { ChunkMonitorData data = getData(); data.offset++; if (data.offset >= data.chunkSize) { resetOffset(); } } public int getOffset() { return getData().offset; } public void resetOffset() { getData().offset = 0; } public void setChunkSize(int chunkSize) { getData().chunkSize = chunkSize; resetOffset(); } @Override public void close() throws ItemStreamException { super.close(); holder.set(null); if (streamsRegistered) { stream.close(); } } @Override public void open(ExecutionContext executionContext) throws ItemStreamException { super.open(executionContext); if (streamsRegistered) { stream.open(executionContext); ChunkMonitorData data = new ChunkMonitorData(executionContext.getInt(getExecutionContextKey(OFFSET), 0), 0); holder.set(data); if (reader == null) { logger.warn("No ItemReader set (must be concurrent step), so ignoring offset data."); return; } for (int i = 0; i < data.offset; i++) { try { reader.read(); } catch (Exception e) { throw new ItemStreamException("Could not position reader with offset: " + data.offset, e); } } resetOffset(); } } @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); if (streamsRegistered) { 
ChunkMonitorData data = getData(); if (data.offset == 0) { // Only call the underlying update method if we are on a chunk // boundary stream.update(executionContext); executionContext.remove(getExecutionContextKey(OFFSET)); } else { executionContext.putInt(getExecutionContextKey(OFFSET), data.offset); } } } private ChunkMonitorData getData() {<FILL_FUNCTION_BODY>
ChunkMonitorData data = holder.get(); if (data == null) { if (streamsRegistered) { logger.warn("ItemStream was opened in a different thread. Restart data could be compromised."); } data = new ChunkMonitorData(0, 0); holder.set(data); } return data;
802
96
898
<methods>public non-sealed void <init>() ,public void close() ,public java.lang.String getExecutionContextKey(java.lang.String) ,public java.lang.String getName() ,public void open(org.springframework.batch.item.ExecutionContext) ,public void setName(java.lang.String) ,public void update(org.springframework.batch.item.ExecutionContext) <variables>private final org.springframework.batch.item.util.ExecutionContextUserSupport executionContextUserSupport
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedTasklet.java
ChunkOrientedTasklet
execute
class ChunkOrientedTasklet<I> implements Tasklet { private static final String INPUTS_KEY = "INPUTS"; private final ChunkProcessor<I> chunkProcessor; private final ChunkProvider<I> chunkProvider; private boolean buffering = true; private static final Log logger = LogFactory.getLog(ChunkOrientedTasklet.class); public ChunkOrientedTasklet(ChunkProvider<I> chunkProvider, ChunkProcessor<I> chunkProcessor) { this.chunkProvider = chunkProvider; this.chunkProcessor = chunkProcessor; } /** * Flag to indicate that items should be buffered once read. Defaults to true, which * is appropriate for forward-only, non-transactional item readers. Main (or only) use * case for setting this flag to false is a transactional JMS item reader. * @param buffering indicator */ public void setBuffering(boolean buffering) { this.buffering = buffering; } @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {<FILL_FUNCTION_BODY>} }
@SuppressWarnings("unchecked") Chunk<I> inputs = (Chunk<I>) chunkContext.getAttribute(INPUTS_KEY); if (inputs == null) { inputs = chunkProvider.provide(contribution); if (buffering) { chunkContext.setAttribute(INPUTS_KEY, inputs); } } chunkProcessor.process(contribution, inputs); chunkProvider.postProcess(contribution, inputs); // Allow a message coming back from the processor to say that we // are not done yet if (inputs.isBusy()) { logger.debug("Inputs still busy"); return RepeatStatus.CONTINUABLE; } chunkContext.removeAttribute(INPUTS_KEY); chunkContext.setComplete(); if (logger.isDebugEnabled()) { logger.debug("Inputs not busy, ended: " + inputs.isEnd()); } return RepeatStatus.continueIf(!inputs.isEnd());
291
274
565
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/DefaultItemFailureHandler.java
DefaultItemFailureHandler
onWriteError
class DefaultItemFailureHandler extends ItemListenerSupport<Object, Object> { protected static final Log logger = LogFactory.getLog(DefaultItemFailureHandler.class); @Override public void onReadError(Exception ex) { try { logger.error("Error encountered while reading", ex); } catch (Exception exception) { logger.error("Invalid type for logging: [" + exception + "]"); } } @Override public void onWriteError(Exception ex, Chunk<?> item) {<FILL_FUNCTION_BODY>} }
try { logger.error("Error encountered while writing item: [ " + item + "]", ex); } catch (Exception exception) { logger.error("Invalid type for logging: [" + exception + "]"); }
145
65
210
<methods>public non-sealed void <init>() <variables>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProvider.java
FaultTolerantChunkProvider
shouldSkip
class FaultTolerantChunkProvider<I> extends SimpleChunkProvider<I> { /** * Hard limit for number of read skips in the same chunk. Should be sufficiently high * that it is only encountered in a runaway step where all items are skipped before * the chunk can complete (leading to a potential heap memory problem). */ public static final int DEFAULT_MAX_SKIPS_ON_READ = 100; private SkipPolicy skipPolicy = new LimitCheckingItemSkipPolicy(); private Classifier<Throwable, Boolean> rollbackClassifier = new BinaryExceptionClassifier(true); private int maxSkipsOnRead = DEFAULT_MAX_SKIPS_ON_READ; public FaultTolerantChunkProvider(ItemReader<? extends I> itemReader, RepeatOperations repeatOperations) { super(itemReader, repeatOperations); } /** * @param maxSkipsOnRead the maximum number of skips on read */ public void setMaxSkipsOnRead(int maxSkipsOnRead) { this.maxSkipsOnRead = maxSkipsOnRead; } /** * The policy that determines whether exceptions can be skipped on read. * @param skipPolicy instance of {@link SkipPolicy} to be used by * FaultTolerantChunkProvider. */ public void setSkipPolicy(SkipPolicy skipPolicy) { this.skipPolicy = skipPolicy; } /** * Classifier to determine whether exceptions have been marked as no-rollback (as * opposed to skippable). If encountered they are simply ignored, unless also * skippable. 
* @param rollbackClassifier the rollback classifier to set */ public void setRollbackClassifier(Classifier<Throwable, Boolean> rollbackClassifier) { this.rollbackClassifier = rollbackClassifier; } @Override protected I read(StepContribution contribution, Chunk<I> chunk) throws Exception { while (true) { try { return doRead(); } catch (Exception e) { if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { // increment skip count and try again contribution.incrementReadSkipCount(); chunk.skip(e); if (chunk.getErrors().size() >= maxSkipsOnRead) { throw new SkipOverflowException("Too many skips on read"); } logger.debug("Skipping failed input", e); } else { if (rollbackClassifier.classify(e)) { throw new NonSkippableReadException("Non-skippable exception during read", e); } logger.debug("No-rollback for non-skippable exception (ignored)", e); } } } } @Override public void postProcess(StepContribution contribution, Chunk<I> chunk) { for (Exception e : chunk.getErrors()) { try { getListener().onSkipInRead(e); } catch (RuntimeException ex) { throw new SkipListenerFailedException("Fatal exception in SkipListener.", ex, e); } } } /** * Convenience method for calling process skip policy. * @param policy the skip policy * @param e the cause of the skip * @param skipCount the current skip count */ private boolean shouldSkip(SkipPolicy policy, Throwable e, long skipCount) {<FILL_FUNCTION_BODY>} }
try { return policy.shouldSkip(e, skipCount); } catch (SkipException ex) { throw ex; } catch (RuntimeException ex) { throw new SkipPolicyFailedException("Fatal exception in SkipPolicy.", ex, e); }
891
79
970
<methods>public void <init>(ItemReader<? extends I>, org.springframework.batch.repeat.RepeatOperations) ,public void postProcess(org.springframework.batch.core.StepContribution, Chunk<I>) ,public Chunk<I> provide(org.springframework.batch.core.StepContribution) throws java.lang.Exception,public void registerListener(org.springframework.batch.core.StepListener) ,public void setListeners(List<? extends org.springframework.batch.core.StepListener>) ,public void setMeterRegistry(io.micrometer.core.instrument.MeterRegistry) <variables>protected final non-sealed ItemReader<? extends I> itemReader,private final MulticasterBatchListener<I,?> listener,protected final org.apache.commons.logging.Log logger,private io.micrometer.core.instrument.MeterRegistry meterRegistry,private final non-sealed org.springframework.batch.repeat.RepeatOperations repeatOperations
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProvider.java
SimpleChunkProvider
provide
class SimpleChunkProvider<I> implements ChunkProvider<I> { protected final Log logger = LogFactory.getLog(getClass()); protected final ItemReader<? extends I> itemReader; private final MulticasterBatchListener<I, ?> listener = new MulticasterBatchListener<>(); private final RepeatOperations repeatOperations; private MeterRegistry meterRegistry = Metrics.globalRegistry; public SimpleChunkProvider(ItemReader<? extends I> itemReader, RepeatOperations repeatOperations) { this.itemReader = itemReader; this.repeatOperations = repeatOperations; } /** * Register some {@link StepListener}s with the handler. Each will get the callbacks * in the order specified at the correct stage. * @param listeners list of {@link StepListener}s. */ public void setListeners(List<? extends StepListener> listeners) { for (StepListener listener : listeners) { registerListener(listener); } } /** * Set the meter registry to use for metrics. * @param meterRegistry the meter registry * @since 5.0 */ public void setMeterRegistry(MeterRegistry meterRegistry) { this.meterRegistry = meterRegistry; } /** * Register a listener for callbacks at the appropriate stages in a process. * @param listener a {@link StepListener} */ public void registerListener(StepListener listener) { this.listener.register(listener); } /** * @return the listener */ protected MulticasterBatchListener<I, ?> getListener() { return listener; } /** * Surrounds the read call with listener callbacks. * @return the item or {@code null} if the data source is exhausted * @throws Exception is thrown if error occurs during read. 
*/ @Nullable protected final I doRead() throws Exception { try { listener.beforeRead(); I item = itemReader.read(); if (item != null) { listener.afterRead(item); } return item; } catch (Exception e) { if (logger.isDebugEnabled()) { logger.debug(e.getMessage() + " : " + e.getClass().getName()); } listener.onReadError(e); throw e; } } @Override public Chunk<I> provide(final StepContribution contribution) throws Exception {<FILL_FUNCTION_BODY>} private void stopTimer(Timer.Sample sample, StepExecution stepExecution, String status) { String fullyQualifiedMetricName = BatchMetrics.METRICS_PREFIX + "item.read"; sample.stop(BatchMetrics.createTimer(this.meterRegistry, "item.read", "Item reading duration", Tag.of(fullyQualifiedMetricName + ".job.name", stepExecution.getJobExecution().getJobInstance().getJobName()), Tag.of(fullyQualifiedMetricName + ".step.name", stepExecution.getStepName()), Tag.of(fullyQualifiedMetricName + ".status", status))); } @Override public void postProcess(StepContribution contribution, Chunk<I> chunk) { // do nothing } /** * Delegates to {@link #doRead()}. Subclasses can add additional behaviour (e.g. * exception handling). * @param contribution the current step execution contribution * @param chunk the current chunk * @return a new item for processing or {@code null} if the data source is exhausted * @throws SkipOverflowException if specifically the chunk is accumulating too much * data (e.g. skips) and it wants to force a commit. * @throws Exception if there is a generic issue */ @Nullable protected I read(StepContribution contribution, Chunk<I> chunk) throws SkipOverflowException, Exception { return doRead(); } }
final Chunk<I> inputs = new Chunk<>(); repeatOperations.iterate(context -> { I item; Timer.Sample sample = Timer.start(Metrics.globalRegistry); String status = BatchMetrics.STATUS_SUCCESS; try { item = read(contribution, inputs); } catch (SkipOverflowException e) { // read() tells us about an excess of skips by throwing an // exception status = BatchMetrics.STATUS_FAILURE; return RepeatStatus.FINISHED; } finally { stopTimer(sample, contribution.getStepExecution(), status); } if (item == null) { inputs.setEnd(); return RepeatStatus.FINISHED; } inputs.add(item); contribution.incrementReadCount(); return RepeatStatus.CONTINUABLE; }); return inputs;
998
256
1,254
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandler.java
SimpleRetryExceptionHandler
handleException
class SimpleRetryExceptionHandler implements RetryListener, ExceptionHandler { /** * Attribute key, whose existence signals an exhausted retry. */ private static final String EXHAUSTED = SimpleRetryExceptionHandler.class.getName() + ".RETRY_EXHAUSTED"; private static final Log logger = LogFactory.getLog(SimpleRetryExceptionHandler.class); final private RetryPolicy retryPolicy; final private ExceptionHandler exceptionHandler; final private BinaryExceptionClassifier fatalExceptionClassifier; /** * Create an exception handler from its mandatory properties. * @param retryPolicy the retry policy that will be under effect when an exception is * encountered * @param exceptionHandler the delegate to use if an exception actually needs to be * handled * @param fatalExceptionClasses exceptions */ public SimpleRetryExceptionHandler(RetryPolicy retryPolicy, ExceptionHandler exceptionHandler, Collection<Class<? extends Throwable>> fatalExceptionClasses) { this.retryPolicy = retryPolicy; this.exceptionHandler = exceptionHandler; this.fatalExceptionClassifier = new BinaryExceptionClassifier(fatalExceptionClasses); } /** * Check if the exception is going to be retried, and veto the handling if it is. If * retry is exhausted or the exception is on the fatal list, then handle using the * delegate. * * @see ExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, * java.lang.Throwable) */ @Override public void handleException(RepeatContext context, Throwable throwable) throws Throwable {<FILL_FUNCTION_BODY>} /** * If retry is exhausted set up some state in the context that can be used to signal * that the exception should be handled. 
* * @see org.springframework.retry.RetryListener#close(org.springframework.retry.RetryContext, * org.springframework.retry.RetryCallback, java.lang.Throwable) */ @Override public <T, E extends Throwable> void close(RetryContext context, RetryCallback<T, E> callback, Throwable throwable) { if (!retryPolicy.canRetry(context)) { if (logger.isDebugEnabled()) { logger.debug("Marking retry as exhausted: " + context); } getRepeatContext().setAttribute(EXHAUSTED, "true"); } } /** * Get the parent context (the retry is in an inner "chunk" loop and we want the * exception to be handled at the outer "step" level). * @return the {@link RepeatContext} that should hold the exhausted flag. */ private RepeatContext getRepeatContext() { RepeatContext context = RepeatSynchronizationManager.getContext(); if (context.getParent() != null) { return context.getParent(); } return context; } }
// Only bother to check the delegate exception handler if we know that // retry is exhausted if (fatalExceptionClassifier.classify(throwable) || context.hasAttribute(EXHAUSTED)) { logger.debug("Handled fatal exception"); exceptionHandler.handleException(context, throwable); } else { logger.debug("Handled non-fatal exception", throwable); }
741
107
848
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractor.java
DefaultJobParametersExtractor
getJobParameters
class DefaultJobParametersExtractor implements JobParametersExtractor { private Set<String> keys = new HashSet<>(); private boolean useAllParentParameters = true; private JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); /** * The key names to pull out of the execution context or job parameters, if they * exist. If a key doesn't exist in the execution context then the job parameters from * the enclosing job execution are tried, and if there is nothing there either then no * parameter is extracted. * @param keys the keys to set */ public void setKeys(String[] keys) { this.keys = new HashSet<>(Arrays.asList(keys)); } /** * @see JobParametersExtractor#getJobParameters(Job, StepExecution) */ @Override public JobParameters getJobParameters(Job job, StepExecution stepExecution) {<FILL_FUNCTION_BODY>} /** * setter to support switching off all parent parameters * @param useAllParentParameters if false do not include parent parameters. True if * all parent parameters need to be included. */ public void setUseAllParentParameters(boolean useAllParentParameters) { this.useAllParentParameters = useAllParentParameters; } /** * Set the {@link JobParametersConverter} to use. * @param jobParametersConverter the converter to use. Must not be {@code null}. */ public void setJobParametersConverter(@NonNull JobParametersConverter jobParametersConverter) { Assert.notNull(jobParametersConverter, "jobParametersConverter must not be null"); this.jobParametersConverter = jobParametersConverter; } }
JobParametersBuilder builder = new JobParametersBuilder(); Map<String, JobParameter<?>> jobParameters = stepExecution.getJobParameters().getParameters(); ExecutionContext executionContext = stepExecution.getExecutionContext(); if (useAllParentParameters) { for (String key : jobParameters.keySet()) { builder.addJobParameter(key, jobParameters.get(key)); } } Properties properties = new Properties(); for (String key : keys) { if (executionContext.containsKey(key)) { properties.setProperty(key, executionContext.getString(key)); } else if (jobParameters.containsKey(key)) { builder.addJobParameter(key, jobParameters.get(key)); } } builder.addJobParameters(this.jobParametersConverter.getJobParameters(properties)); return builder.toJobParameters();
403
224
627
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobStep.java
JobStep
doExecute
class JobStep extends AbstractStep { /** * The key for the job parameters in the step execution context. Needed for restarts. */ private static final String JOB_PARAMETERS_KEY = JobStep.class.getName() + ".JOB_PARAMETERS"; private Job job; private JobLauncher jobLauncher; private JobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor(); @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.state(jobLauncher != null, "A JobLauncher must be provided"); Assert.state(job != null, "A Job must be provided"); } /** * The {@link Job} to delegate to in this step. * @param job a {@link Job} */ public void setJob(Job job) { this.job = job; } /** * A {@link JobLauncher} is required to be able to run the enclosed {@link Job}. * @param jobLauncher the {@link JobLauncher} to set */ public void setJobLauncher(JobLauncher jobLauncher) { this.jobLauncher = jobLauncher; } /** * The {@link JobParametersExtractor} is used to extract * {@link JobParametersExtractor} from the {@link StepExecution} to run the * {@link Job}. By default an instance will be provided that simply copies the * {@link JobParameters} from the parent job. * @param jobParametersExtractor the {@link JobParametersExtractor} to set */ public void setJobParametersExtractor(JobParametersExtractor jobParametersExtractor) { this.jobParametersExtractor = jobParametersExtractor; } /** * Execute the job provided by delegating to the {@link JobLauncher} to prevent * duplicate executions. The job parameters will be generated by the * {@link JobParametersExtractor} provided (if any), otherwise empty. On a restart, * the job parameters will be the same as the last (failed) execution. 
* * @see AbstractStep#doExecute(StepExecution) */ @Override protected void doExecute(StepExecution stepExecution) throws Exception {<FILL_FUNCTION_BODY>} /** * Determines the {@link ExitStatus} taking into consideration the {@link ExitStatus} * from the {@link StepExecution}, which invoked the {@link JobStep}, and the * {@link JobExecution}. * @param stepExecution the {@link StepExecution} which invoked the * {@link JobExecution} * @param jobExecution the {@link JobExecution} * @return the final {@link ExitStatus} */ private ExitStatus determineStepExitStatus(StepExecution stepExecution, JobExecution jobExecution) { ExitStatus exitStatus = stepExecution.getExitStatus() != null ? stepExecution.getExitStatus() : ExitStatus.COMPLETED; return exitStatus.and(jobExecution.getExitStatus()); } }
ExecutionContext executionContext = stepExecution.getExecutionContext(); executionContext.put(STEP_TYPE_KEY, this.getClass().getName()); JobParameters jobParameters; if (executionContext.containsKey(JOB_PARAMETERS_KEY)) { jobParameters = (JobParameters) executionContext.get(JOB_PARAMETERS_KEY); } else { jobParameters = jobParametersExtractor.getJobParameters(job, stepExecution); executionContext.put(JOB_PARAMETERS_KEY, jobParameters); } JobExecution jobExecution = jobLauncher.run(job, jobParameters); stepExecution.setExitStatus(determineStepExitStatus(stepExecution, jobExecution)); if (jobExecution.getStatus().isUnsuccessful()) { // AbstractStep will take care of the step execution status throw new UnexpectedJobExecutionException("Step failure: the delegate Job failed in JobStep."); } else if (jobExecution.getStatus().equals(BatchStatus.STOPPED)) { stepExecution.setStatus(BatchStatus.STOPPED); }
739
286
1,025
<methods>public void <init>() ,public void <init>(java.lang.String) ,public void afterPropertiesSet() throws java.lang.Exception,public final void execute(org.springframework.batch.core.StepExecution) throws org.springframework.batch.core.JobInterruptedException, org.springframework.batch.core.UnexpectedJobExecutionException,public java.lang.String getName() ,public int getStartLimit() ,public boolean isAllowStartIfComplete() ,public void registerStepExecutionListener(org.springframework.batch.core.StepExecutionListener) ,public void setAllowStartIfComplete(boolean) ,public void setBeanName(java.lang.String) ,public void setJobRepository(org.springframework.batch.core.repository.JobRepository) ,public void setMeterRegistry(io.micrometer.core.instrument.MeterRegistry) ,public void setName(java.lang.String) ,public void setObservationConvention(org.springframework.batch.core.observability.BatchStepObservationConvention) ,public void setObservationRegistry(io.micrometer.observation.ObservationRegistry) ,public void setStartLimit(int) ,public void setStepExecutionListeners(org.springframework.batch.core.StepExecutionListener[]) ,public java.lang.String toString() <variables>private boolean allowStartIfComplete,private org.springframework.batch.core.repository.JobRepository jobRepository,private static final org.apache.commons.logging.Log logger,private io.micrometer.core.instrument.MeterRegistry meterRegistry,private java.lang.String name,private org.springframework.batch.core.observability.BatchStepObservationConvention observationConvention,private io.micrometer.observation.ObservationRegistry observationRegistry,private int startLimit,private final org.springframework.batch.core.listener.CompositeStepExecutionListener stepExecutionListener
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/CompositeSkipPolicy.java
CompositeSkipPolicy
shouldSkip
class CompositeSkipPolicy implements SkipPolicy { private SkipPolicy[] skipPolicies; public CompositeSkipPolicy() { this(new SkipPolicy[0]); } public CompositeSkipPolicy(SkipPolicy[] skipPolicies) { this.skipPolicies = skipPolicies; } public void setSkipPolicies(SkipPolicy[] skipPolicies) { this.skipPolicies = skipPolicies; } @Override public boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException {<FILL_FUNCTION_BODY>} }
for (SkipPolicy policy : skipPolicies) { if (policy.shouldSkip(t, skipCount)) { return true; } } return false;
155
48
203
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicy.java
LimitCheckingItemSkipPolicy
shouldSkip
class LimitCheckingItemSkipPolicy implements SkipPolicy { private long skipLimit; private Classifier<Throwable, Boolean> skippableExceptionClassifier; /** * Convenience constructor that assumes all exception types are fatal. */ public LimitCheckingItemSkipPolicy() { this(0, Collections.<Class<? extends Throwable>, Boolean>emptyMap()); } /** * @param skipLimit the number of skippable exceptions that are allowed to be skipped * @param skippableExceptions exception classes that can be skipped (non-critical) */ public LimitCheckingItemSkipPolicy(int skipLimit, Map<Class<? extends Throwable>, Boolean> skippableExceptions) { this(skipLimit, new BinaryExceptionClassifier(skippableExceptions)); } /** * @param skipLimit the number of skippable exceptions that are allowed to be skipped * @param skippableExceptionClassifier exception classifier for those that can be * skipped (non-critical) */ public LimitCheckingItemSkipPolicy(int skipLimit, Classifier<Throwable, Boolean> skippableExceptionClassifier) { this.skipLimit = skipLimit; this.skippableExceptionClassifier = skippableExceptionClassifier; } /** * The absolute number of skips (of skippable exceptions) that can be tolerated before * a failure. * @param skipLimit the skip limit to set */ public void setSkipLimit(long skipLimit) { this.skipLimit = skipLimit; } /** * The classifier that will be used to decide on skippability. If an exception * classifies as "true" then it is skippable, and otherwise not. * @param skippableExceptionClassifier the skippableExceptionClassifier to set */ public void setSkippableExceptionClassifier(Classifier<Throwable, Boolean> skippableExceptionClassifier) { this.skippableExceptionClassifier = skippableExceptionClassifier; } /** * Set up the classifier through a convenient map from throwable class to boolean * (true if skippable). * @param skippableExceptions the skippable exceptions to set */ public void setSkippableExceptionMap(Map<Class<? 
extends Throwable>, Boolean> skippableExceptions) { this.skippableExceptionClassifier = new BinaryExceptionClassifier(skippableExceptions); } /** * Given the provided exception and skip count, determine whether or not processing * should continue for the given exception. If the exception is not classified as * skippable in the classifier, false will be returned. If the exception is classified * as skippable and {@link StepExecution} skipCount is greater than the skipLimit, * then a {@link SkipLimitExceededException} will be thrown. */ @Override public boolean shouldSkip(Throwable t, long skipCount) {<FILL_FUNCTION_BODY>} }
if (skippableExceptionClassifier.classify(t)) { if (skipCount < skipLimit) { return true; } else { throw new SkipLimitExceededException(skipLimit, t); } } else { return false; }
744
79
823
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapper.java
ConfigurableSystemProcessExitCodeMapper
getExitStatus
class ConfigurableSystemProcessExitCodeMapper implements SystemProcessExitCodeMapper { public static final String ELSE_KEY = "else"; private Map<Object, ExitStatus> mappings; @Override public ExitStatus getExitStatus(int exitCode) {<FILL_FUNCTION_BODY>} /** * @param mappings <code>Integer</code> exit code keys to * {@link org.springframework.batch.core.ExitStatus} values. */ public void setMappings(Map<Object, ExitStatus> mappings) { Assert.notNull(mappings.get(ELSE_KEY), "Missing value for " + ELSE_KEY); this.mappings = mappings; } }
ExitStatus exitStatus = mappings.get(exitCode); if (exitStatus != null) { return exitStatus; } else { return mappings.get(ELSE_KEY); }
180
59
239
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapter.java
MethodInvokingTaskletAdapter
mapResult
class MethodInvokingTaskletAdapter extends AbstractMethodInvokingDelegator<Object> implements Tasklet { /** * Delegate execution to the target object and translate the return value to an * {@link ExitStatus} by invoking a method in the delegate POJO. Ignores the * {@link StepContribution} and the attributes. * * @see Tasklet#execute(StepContribution, ChunkContext) */ @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { if (getArguments() == null) { setArguments(new Object[] { contribution, chunkContext }); } contribution.setExitStatus(mapResult(invokeDelegateMethod())); return RepeatStatus.FINISHED; } /** * If the result is an {@link ExitStatus} already just return that, otherwise return * {@link ExitStatus#COMPLETED}. * @param result the value returned by the delegate method * @return an {@link ExitStatus} consistent with the result */ protected ExitStatus mapResult(Object result) {<FILL_FUNCTION_BODY>} }
if (result instanceof ExitStatus) { return (ExitStatus) result; } return ExitStatus.COMPLETED;
289
37
326
<methods>public non-sealed void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public void setArguments(java.lang.Object[]) ,public void setTargetMethod(java.lang.String) ,public void setTargetObject(java.lang.Object) <variables>private java.lang.Object[] arguments,private java.lang.String targetMethod,private java.lang.Object targetObject
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapper.java
SimpleSystemProcessExitCodeMapper
getExitStatus
class SimpleSystemProcessExitCodeMapper implements SystemProcessExitCodeMapper { @Override public ExitStatus getExitStatus(int exitCode) {<FILL_FUNCTION_BODY>} }
if (exitCode == 0) { return ExitStatus.COMPLETED; } else { return ExitStatus.FAILED; }
48
45
93
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandTasklet.java
SystemCommandTasklet
afterPropertiesSet
class SystemCommandTasklet implements StepExecutionListener, StoppableTasklet, InitializingBean { protected static final Log logger = LogFactory.getLog(SystemCommandTasklet.class); private CommandRunner commandRunner = new JvmCommandRunner(); private String[] cmdArray; private String[] environmentParams = null; private File workingDirectory = null; private SystemProcessExitCodeMapper systemProcessExitCodeMapper = new SimpleSystemProcessExitCodeMapper(); private long timeout = 0; private long checkInterval = 1000; private StepExecution execution = null; private TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); private boolean interruptOnCancel = false; private volatile boolean stopped = false; private JobExplorer jobExplorer; private boolean stoppable = false; /** * Execute system command and map its exit code to {@link ExitStatus} using * {@link SystemProcessExitCodeMapper}. */ @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { FutureTask<Integer> systemCommandTask = new FutureTask<>(() -> { Process process = commandRunner.exec(cmdArray, environmentParams, workingDirectory); return process.waitFor(); }); long t0 = System.currentTimeMillis(); taskExecutor.execute(systemCommandTask); while (true) { Thread.sleep(checkInterval);// moved to the end of the logic if (stoppable) { JobExecution jobExecution = jobExplorer .getJobExecution(chunkContext.getStepContext().getStepExecution().getJobExecutionId()); if (jobExecution.isStopping()) { stopped = true; } } if (systemCommandTask.isDone()) { Integer exitCode = systemCommandTask.get(); ExitStatus exitStatus = systemProcessExitCodeMapper.getExitStatus(exitCode); contribution.setExitStatus(exitStatus); if (ExitStatus.FAILED.equals(exitStatus)) { throw new SystemCommandException("Execution of system command failed with exit code " + exitCode); } else { return RepeatStatus.FINISHED; } } else if (System.currentTimeMillis() - t0 > timeout) { 
systemCommandTask.cancel(interruptOnCancel); throw new SystemCommandException("Execution of system command did not finish within the timeout"); } else if (execution.isTerminateOnly()) { systemCommandTask.cancel(interruptOnCancel); String command = String.join(" ", cmdArray); throw new JobInterruptedException("Job interrupted while executing system command '" + command + "'"); } else if (stopped) { systemCommandTask.cancel(interruptOnCancel); contribution.setExitStatus(ExitStatus.STOPPED); return RepeatStatus.FINISHED; } } } /** * Injection setter for the {@link CommandRunner}. * @param commandRunner {@link CommandRunner} instance to be used by * SystemCommandTasklet instance. Defaults to {@link JvmCommandRunner}. * @since 5.0 */ public void setCommandRunner(CommandRunner commandRunner) { this.commandRunner = commandRunner; } /** * Set the command to execute along with its arguments. For example: * * <pre>setCommand("myCommand", "myArg1", "myArg2");</pre> * @param command command to be executed in a separate system process. */ public void setCommand(String... command) { this.cmdArray = command; } /** * @param envp environment parameter values, inherited from parent process when not * set (or set to null). */ public void setEnvironmentParams(String[] envp) { this.environmentParams = envp; } /** * @param dir working directory of the spawned process, inherited from parent process * when not set (or set to null). 
*/ public void setWorkingDirectory(String dir) { if (dir == null) { this.workingDirectory = null; return; } this.workingDirectory = new File(dir); Assert.isTrue(workingDirectory.exists(), "working directory must exist"); Assert.isTrue(workingDirectory.isDirectory(), "working directory value must be a directory"); } @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} public void setJobExplorer(JobExplorer jobExplorer) { this.jobExplorer = jobExplorer; } /** * @param systemProcessExitCodeMapper maps system process return value to * <code>ExitStatus</code> returned by Tasklet. * {@link SimpleSystemProcessExitCodeMapper} is used by default. */ public void setSystemProcessExitCodeMapper(SystemProcessExitCodeMapper systemProcessExitCodeMapper) { this.systemProcessExitCodeMapper = systemProcessExitCodeMapper; } /** * Timeout in milliseconds. * @param timeout upper limit for how long the execution of the external program is * allowed to last. */ public void setTimeout(long timeout) { this.timeout = timeout; } /** * The time interval how often the tasklet will check for termination status. * @param checkInterval time interval in milliseconds (1 second by default). */ public void setTerminationCheckInterval(long checkInterval) { this.checkInterval = checkInterval; } /** * Get a reference to {@link StepExecution} for interrupt checks during system command * execution. */ @Override public void beforeStep(StepExecution stepExecution) { this.execution = stepExecution; } /** * Sets the task executor that will be used to execute the system command NB! Avoid * using a synchronous task executor * @param taskExecutor instance of {@link TaskExecutor}. */ public void setTaskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; } /** * If <code>true</code> tasklet will attempt to interrupt the thread executing the * system command if {@link #setTimeout(long)} has been exceeded or user interrupts * the job. 
<code>false</code> by default * @param interruptOnCancel boolean determines if process should be interrupted */ public void setInterruptOnCancel(boolean interruptOnCancel) { this.interruptOnCancel = interruptOnCancel; } /** * Will interrupt the thread executing the system command only if * {@link #setInterruptOnCancel(boolean)} has been set to true. Otherwise the * underlying command will be allowed to finish before the tasklet ends. * * @since 3.0 * @see StoppableTasklet#stop() */ @Override public void stop() { stopped = true; } }
Assert.state(commandRunner != null, "CommandRunner must be set"); Assert.state(cmdArray != null, "'cmdArray' property value must not be null"); Assert.state(!ObjectUtils.isEmpty(cmdArray), "'cmdArray' property value is required with at least 1 element"); Assert.state(StringUtils.hasText(cmdArray[0]), "'cmdArray' property value is required with at least 1 element"); Assert.state(systemProcessExitCodeMapper != null, "SystemProcessExitCodeMapper must be set"); Assert.state(timeout > 0, "timeout value must be greater than zero"); Assert.state(taskExecutor != null, "taskExecutor is required"); stoppable = jobExplorer != null;
1,734
190
1,924
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/Chunk.java
ChunkIterator
remove
class ChunkIterator implements Iterator<W> { final private Iterator<W> iterator; private W next; public ChunkIterator(List<W> items) { iterator = items.iterator(); } @Override public boolean hasNext() { return iterator.hasNext(); } @Override public W next() { next = iterator.next(); return next; } public void remove(Throwable e) { remove(); skips.add(new SkipWrapper<>(next, e)); } @Override public void remove() {<FILL_FUNCTION_BODY>} @Override public String toString() { return String.format("[items=%s, skips=%s]", items, skips); } }
if (next == null) { if (iterator.hasNext()) { next = iterator.next(); } else { return; } } iterator.remove();
221
56
277
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/KeyValueItemWriter.java
KeyValueItemWriter
afterPropertiesSet
class KeyValueItemWriter<K, V> implements ItemWriter<V>, InitializingBean { protected Converter<V, K> itemKeyMapper; protected boolean delete; @Override public void write(Chunk<? extends V> items) throws Exception { if (items == null) { return; } for (V item : items) { K key = itemKeyMapper.convert(item); writeKeyValue(key, item); } flush(); } /** * Flush items to the key/value store. * @throws Exception if unable to flush items */ protected void flush() throws Exception { } /** * Subclasses implement this method to write each item to key value store * @param key the key * @param value the item */ protected abstract void writeKeyValue(K key, V value); /** * afterPropertiesSet() hook */ protected abstract void init(); /** * Set the {@link Converter} to use to derive the key from the item * @param itemKeyMapper the {@link Converter} used to derive a key from an item. */ public void setItemKeyMapper(Converter<V, K> itemKeyMapper) { this.itemKeyMapper = itemKeyMapper; } /** * Sets the delete flag to have the item writer perform deletes * @param delete if true {@link ItemWriter} will perform deletes, if false not to * perform deletes. */ public void setDelete(boolean delete) { this.delete = delete; } @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} }
Assert.state(itemKeyMapper != null, "itemKeyMapper requires a Converter type."); init();
421
32
453
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/AbstractMethodInvokingDelegator.java
AbstractMethodInvokingDelegator
targetClassDeclaresTargetMethod
class AbstractMethodInvokingDelegator<T> implements InitializingBean { private Object targetObject; private String targetMethod; private Object[] arguments; /** * Invoker the target method with arguments set by {@link #setArguments(Object[])}. * @return object returned by invoked method * @throws Exception exception thrown when executing the delegate method. */ protected T invokeDelegateMethod() throws Exception { MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); invoker.setArguments(arguments); return doInvoke(invoker); } /** * Invokes the target method with given argument. * @param object argument for the target method * @return object returned by target method * @throws Exception exception thrown when executing the delegate method. */ protected T invokeDelegateMethodWithArgument(Object object) throws Exception { MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); invoker.setArguments(object); return doInvoke(invoker); } /** * Invokes the target method with given arguments. * @param args arguments for the invoked method * @return object returned by invoked method * @throws Exception exception thrown when executing the delegate method. */ protected T invokeDelegateMethodWithArguments(Object[] args) throws Exception { MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); invoker.setArguments(args); return doInvoke(invoker); } /** * Create a new configured instance of {@link MethodInvoker}. */ private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { HippyMethodInvoker invoker = new HippyMethodInvoker(); invoker.setTargetObject(targetObject); invoker.setTargetMethod(targetMethod); invoker.setArguments(arguments); return invoker; } /** * Prepare and invoke the invoker, rethrow checked exceptions as unchecked. 
* @param invoker configured invoker * @return return value of the invoked method */ @SuppressWarnings("unchecked") private T doInvoke(MethodInvoker invoker) throws Exception { try { invoker.prepare(); } catch (ClassNotFoundException | NoSuchMethodException e) { throw new DynamicMethodInvocationException(e); } try { return (T) invoker.invoke(); } catch (InvocationTargetException e) { if (e.getCause() instanceof Exception) { throw (Exception) e.getCause(); } else { throw new InvocationTargetThrowableWrapper(e.getCause()); } } catch (IllegalAccessException e) { throw new DynamicMethodInvocationException(e); } } @Override public void afterPropertiesSet() throws Exception { Assert.state(targetObject != null, "targetObject must not be null"); Assert.state(StringUtils.hasText(targetMethod), "targetMethod must not be empty"); Assert.state(targetClassDeclaresTargetMethod(), "target class must declare a method with matching name and parameter types"); } /** * @return true if target class declares a method matching target method name with * given number of arguments of appropriate type. */ private boolean targetClassDeclaresTargetMethod() {<FILL_FUNCTION_BODY>} /** * @param targetObject the delegate - bean id can be used to set this value in Spring * configuration */ public void setTargetObject(Object targetObject) { this.targetObject = targetObject; } /** * @param targetMethod name of the method to be invoked on * {@link #setTargetObject(Object)}. */ public void setTargetMethod(String targetMethod) { this.targetMethod = targetMethod; } /** * @param arguments arguments values for the { {@link #setTargetMethod(String)}. These * will be used only when the subclass tries to invoke the target method without * providing explicit argument values. * <p> * If arguments are set to not-null value {@link #afterPropertiesSet()} will check the * values are compatible with target method's signature. 
In case arguments are null * (not set) method signature will not be checked and it is assumed correct values * will be supplied at runtime. */ public void setArguments(Object[] arguments) { this.arguments = arguments == null ? null : Arrays.asList(arguments).toArray(); } /** * Return arguments. * @return arguments */ protected Object[] getArguments() { return arguments; } /** * @return the object on which the method will be invoked. * @since 5.1 */ protected Object getTargetObject() { return targetObject; } /** * @return the name of the method to be invoked. * @since 5.1 */ protected String getTargetMethod() { return targetMethod; } /** * Used to wrap a {@link Throwable} (not an {@link Exception}) thrown by a * reflectively-invoked delegate. * * @author Robert Kasanicky */ public static class InvocationTargetThrowableWrapper extends RuntimeException { public InvocationTargetThrowableWrapper(Throwable cause) { super(cause); } } }
MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); Method[] memberMethods = invoker.getTargetClass().getMethods(); Method[] declaredMethods = invoker.getTargetClass().getDeclaredMethods(); List<Method> allMethods = new ArrayList<>(); allMethods.addAll(Arrays.asList(memberMethods)); allMethods.addAll(Arrays.asList(declaredMethods)); String targetMethodName = invoker.getTargetMethod(); for (Method method : allMethods) { if (method.getName().equals(targetMethodName)) { Class<?>[] params = method.getParameterTypes(); if (arguments == null) { // don't check signature, assume arguments will be supplied // correctly at runtime return true; } if (arguments.length == params.length) { boolean argumentsMatchParameters = true; for (int j = 0; j < params.length; j++) { if (arguments[j] == null) { continue; } if (!(ClassUtils.isAssignableValue(params[j], arguments[j]))) { argumentsMatchParameters = false; } } if (argumentsMatchParameters) { return true; } } } } return false;
1,373
342
1,715
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/HippyMethodInvoker.java
HippyMethodInvoker
findMatchingMethod
class HippyMethodInvoker extends MethodInvoker { @Override protected Method findMatchingMethod() {<FILL_FUNCTION_BODY>} }
String targetMethod = getTargetMethod(); Object[] arguments = getArguments(); Method[] candidates = ReflectionUtils.getAllDeclaredMethods(getTargetClass()); int minTypeDiffWeight = Integer.MAX_VALUE; Method matchingMethod = null; Object[] transformedArguments = null; for (Method candidate : candidates) { if (candidate.getName().equals(targetMethod)) { Class<?>[] paramTypes = candidate.getParameterTypes(); Object[] candidateArguments = new Object[paramTypes.length]; int assignedParameterCount = 0; for (Object argument : arguments) { for (int i = 0; i < paramTypes.length; i++) { // Pick the first assignable of the right type that // matches this slot and hasn't already been filled... if (ClassUtils.isAssignableValue(paramTypes[i], argument) && candidateArguments[i] == null) { candidateArguments[i] = argument; assignedParameterCount++; break; } } } if (paramTypes.length == assignedParameterCount) { int typeDiffWeight = getTypeDifferenceWeight(paramTypes, candidateArguments); if (typeDiffWeight < minTypeDiffWeight) { minTypeDiffWeight = typeDiffWeight; matchingMethod = candidate; transformedArguments = candidateArguments; } } } } if (transformedArguments == null) { throw new IllegalArgumentException("No matching arguments found for method: " + targetMethod); } setArguments(transformedArguments); return matchingMethod;
42
410
452
<methods>public void <init>() ,public java.lang.Object[] getArguments() ,public java.lang.reflect.Method getPreparedMethod() throws java.lang.IllegalStateException,public Class<?> getTargetClass() ,public java.lang.String getTargetMethod() ,public java.lang.Object getTargetObject() ,public static int getTypeDifferenceWeight(Class<?>[], java.lang.Object[]) ,public java.lang.Object invoke() throws java.lang.reflect.InvocationTargetException, java.lang.IllegalAccessException,public boolean isPrepared() ,public void prepare() throws java.lang.ClassNotFoundException, java.lang.NoSuchMethodException,public transient void setArguments(java.lang.Object[]) ,public void setStaticMethod(java.lang.String) ,public void setTargetClass(Class<?>) ,public void setTargetMethod(java.lang.String) ,public void setTargetObject(java.lang.Object) <variables>private static final java.lang.Object[] EMPTY_ARGUMENTS,private java.lang.Object[] arguments,private java.lang.reflect.Method methodObject,private java.lang.String staticMethod,protected Class<?> targetClass,private java.lang.String targetMethod,private java.lang.Object targetObject
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/PropertyExtractingDelegatingItemWriter.java
PropertyExtractingDelegatingItemWriter
write
class PropertyExtractingDelegatingItemWriter<T> extends AbstractMethodInvokingDelegator<T> implements ItemWriter<T> { private String[] fieldsUsedAsTargetMethodArguments; /** * Extracts values from item's fields named in fieldsUsedAsTargetMethodArguments and * passes them as arguments to the delegate method. */ @Override public void write(Chunk<? extends T> items) throws Exception {<FILL_FUNCTION_BODY>} @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.state(!ObjectUtils.isEmpty(fieldsUsedAsTargetMethodArguments), "fieldsUsedAsTargetMethodArguments must not be empty"); } /** * @param fieldsUsedAsMethodArguments the values of the these item's fields will be * used as arguments for the delegate method. Nested property values are supported, * e.g. <code>address.city</code> */ public void setFieldsUsedAsTargetMethodArguments(String[] fieldsUsedAsMethodArguments) { this.fieldsUsedAsTargetMethodArguments = Arrays.asList(fieldsUsedAsMethodArguments) .toArray(new String[fieldsUsedAsMethodArguments.length]); } }
for (T item : items) { // helper for extracting property values from a bean BeanWrapper beanWrapper = new BeanWrapperImpl(item); Object[] methodArguments = new Object[fieldsUsedAsTargetMethodArguments.length]; for (int i = 0; i < fieldsUsedAsTargetMethodArguments.length; i++) { methodArguments[i] = beanWrapper.getPropertyValue(fieldsUsedAsTargetMethodArguments[i]); } invokeDelegateMethodWithArguments(methodArguments); }
303
132
435
<methods>public non-sealed void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public void setArguments(java.lang.Object[]) ,public void setTargetMethod(java.lang.String) ,public void setTargetObject(java.lang.Object) <variables>private java.lang.Object[] arguments,private java.lang.String targetMethod,private java.lang.Object targetObject
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemReader.java
AmqpItemReader
read
class AmqpItemReader<T> implements ItemReader<T> { private final AmqpTemplate amqpTemplate; private Class<? extends T> itemType; /** * Initialize the AmqpItemReader. * @param amqpTemplate the template to be used. Must not be null. */ public AmqpItemReader(final AmqpTemplate amqpTemplate) { Assert.notNull(amqpTemplate, "AmqpTemplate must not be null"); this.amqpTemplate = amqpTemplate; } @Nullable @Override @SuppressWarnings("unchecked") public T read() {<FILL_FUNCTION_BODY>} /** * Establish the itemType for the reader. * @param itemType class type that will be returned by the reader. */ public void setItemType(Class<? extends T> itemType) { Assert.notNull(itemType, "Item type cannot be null"); this.itemType = itemType; } }
if (itemType != null && itemType.isAssignableFrom(Message.class)) { return (T) amqpTemplate.receive(); } Object result = amqpTemplate.receiveAndConvert(); if (itemType != null && result != null) { Assert.state(itemType.isAssignableFrom(result.getClass()), "Received message payload of wrong type: expected [" + itemType + "]"); } return (T) result;
261
131
392
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemWriter.java
AmqpItemWriter
write
class AmqpItemWriter<T> implements ItemWriter<T> { private final AmqpTemplate amqpTemplate; private final Log log = LogFactory.getLog(getClass()); public AmqpItemWriter(final AmqpTemplate amqpTemplate) { Assert.notNull(amqpTemplate, "AmqpTemplate must not be null"); this.amqpTemplate = amqpTemplate; } @Override public void write(final Chunk<? extends T> items) throws Exception {<FILL_FUNCTION_BODY>} }
if (log.isDebugEnabled()) { log.debug("Writing to AMQP with " + items.size() + " items."); } for (T item : items) { amqpTemplate.convertAndSend(item); }
143
69
212
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/builder/AmqpItemReaderBuilder.java
AmqpItemReaderBuilder
build
class AmqpItemReaderBuilder<T> { private AmqpTemplate amqpTemplate; private Class<? extends T> itemType; /** * Establish the amqpTemplate to be used by the AmqpItemReader. * @param amqpTemplate the template to be used. * @return this instance for method chaining * @see AmqpItemReader#AmqpItemReader(AmqpTemplate) */ public AmqpItemReaderBuilder<T> amqpTemplate(AmqpTemplate amqpTemplate) { this.amqpTemplate = amqpTemplate; return this; } /** * Establish the itemType for the reader. * @param itemType class type that will be returned by the reader. * @return this instance for method chaining. * @see AmqpItemReader#setItemType(Class) */ public AmqpItemReaderBuilder<T> itemType(Class<? extends T> itemType) { this.itemType = itemType; return this; } /** * Validates and builds a {@link AmqpItemReader}. * @return a {@link AmqpItemReader} */ public AmqpItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.amqpTemplate, "amqpTemplate is required."); AmqpItemReader<T> reader = new AmqpItemReader<>(this.amqpTemplate); if (this.itemType != null) { reader.setItemType(this.itemType); } return reader;
331
91
422
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/builder/AmqpItemWriterBuilder.java
AmqpItemWriterBuilder
build
class AmqpItemWriterBuilder<T> { private AmqpTemplate amqpTemplate; /** * Establish the amqpTemplate to be used by the AmqpItemWriter. * @param amqpTemplate the template to be used. * @return this instance for method chaining * @see AmqpItemWriter#AmqpItemWriter(AmqpTemplate) */ public AmqpItemWriterBuilder<T> amqpTemplate(AmqpTemplate amqpTemplate) { this.amqpTemplate = amqpTemplate; return this; } /** * Validates and builds a {@link AmqpItemWriter}. * @return a {@link AmqpItemWriter} */ public AmqpItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.amqpTemplate, "amqpTemplate is required."); AmqpItemWriter<T> writer = new AmqpItemWriter<>(this.amqpTemplate); return writer;
212
61
273
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/avro/AvroItemReader.java
AvroItemReader
initializeReader
class AvroItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> { private boolean embeddedSchema = true; private InputStreamReader<T> inputStreamReader; private DataFileStream<T> dataFileReader; private final InputStream inputStream; private final DatumReader<T> datumReader; /** * @param resource the {@link Resource} containing objects serialized with Avro. * @param clazz the data type to be deserialized. */ public AvroItemReader(Resource resource, Class<T> clazz) { setName(ClassUtils.getShortName(AvroItemReader.class)); Assert.notNull(resource, "'resource' is required."); Assert.notNull(clazz, "'class' is required."); try { this.inputStream = resource.getInputStream(); this.datumReader = datumReaderForClass(clazz); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } } /** * @param data the {@link Resource} containing the data to be read. * @param schema the {@link Resource} containing the Avro schema. */ public AvroItemReader(Resource data, Resource schema) { setName(ClassUtils.getShortName(AvroItemReader.class)); Assert.notNull(data, "'data' is required."); Assert.state(data.exists(), "'data' " + data.getFilename() + " does not exist."); Assert.notNull(schema, "'schema' is required"); Assert.state(schema.exists(), "'schema' " + schema.getFilename() + " does not exist."); try { this.inputStream = data.getInputStream(); Schema avroSchema = new Schema.Parser().parse(schema.getInputStream()); this.datumReader = new GenericDatumReader<>(avroSchema); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } } /** * Disable or enable reading an embedded Avro schema. True by default. * @param embeddedSchema set to false to if the input does not embed an Avro schema. 
*/ public void setEmbeddedSchema(boolean embeddedSchema) { this.embeddedSchema = embeddedSchema; } @Nullable @Override protected T doRead() throws Exception { if (this.inputStreamReader != null) { return this.inputStreamReader.read(); } return this.dataFileReader.hasNext() ? this.dataFileReader.next() : null; } @Override protected void doOpen() throws Exception { initializeReader(); } @Override protected void doClose() throws Exception { if (this.inputStreamReader != null) { this.inputStreamReader.close(); return; } this.dataFileReader.close(); } private void initializeReader() throws IOException {<FILL_FUNCTION_BODY>} private InputStreamReader<T> createInputStreamReader(InputStream inputStream, DatumReader<T> datumReader) { return new InputStreamReader<>(inputStream, datumReader); } private static <T> DatumReader<T> datumReaderForClass(Class<T> clazz) { if (SpecificRecordBase.class.isAssignableFrom(clazz)) { return new SpecificDatumReader<>(clazz); } if (GenericRecord.class.isAssignableFrom(clazz)) { return new GenericDatumReader<>(); } return new ReflectDatumReader<>(clazz); } private static class InputStreamReader<T> { private final DatumReader<T> datumReader; private final BinaryDecoder binaryDecoder; private final InputStream inputStream; private InputStreamReader(InputStream inputStream, DatumReader<T> datumReader) { this.inputStream = inputStream; this.datumReader = datumReader; this.binaryDecoder = DecoderFactory.get().binaryDecoder(inputStream, null); } private T read() throws Exception { if (!this.binaryDecoder.isEnd()) { return this.datumReader.read(null, this.binaryDecoder); } return null; } private void close() { try { this.inputStream.close(); } catch (IOException e) { throw new ItemStreamException(e.getMessage(), e); } } } }
if (this.embeddedSchema) { this.dataFileReader = new DataFileStream<>(this.inputStream, this.datumReader); } else { this.inputStreamReader = createInputStreamReader(this.inputStream, this.datumReader); }
1,155
73
1,228
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/avro/AvroItemWriter.java
AvroItemWriter
open
class AvroItemWriter<T> extends AbstractItemStreamItemWriter<T> { private DataFileWriter<T> dataFileWriter; private OutputStreamWriter<T> outputStreamWriter; private final WritableResource resource; private final Resource schemaResource; private final Class<T> clazz; private boolean embedSchema = true; /** * @param resource a {@link WritableResource} to which the objects will be serialized. * @param schema a {@link Resource} containing the Avro schema. * @param clazz the data type to be serialized. */ public AvroItemWriter(WritableResource resource, Resource schema, Class<T> clazz) { this.schemaResource = schema; this.resource = resource; this.clazz = clazz; } /** * This constructor will create an ItemWriter that does not embedded Avro schema. * @param resource a {@link WritableResource} to which the objects will be serialized. * @param clazz the data type to be serialized. */ public AvroItemWriter(WritableResource resource, Class<T> clazz) { this(resource, null, clazz); embedSchema = false; } @Override public void write(Chunk<? 
extends T> items) throws Exception { items.forEach(item -> { try { if (this.dataFileWriter != null) { this.dataFileWriter.append(item); } else { this.outputStreamWriter.write(item); } } catch (Exception e) { throw new ItemStreamException(e.getMessage(), e); } }); } /** * @see org.springframework.batch.item.ItemStream#open(ExecutionContext) */ @Override public void open(ExecutionContext executionContext) {<FILL_FUNCTION_BODY>} @Override public void close() { try { if (this.dataFileWriter != null) { this.dataFileWriter.close(); } else { this.outputStreamWriter.close(); } } catch (IOException e) { throw new ItemStreamException(e.getMessage(), e); } } private void initializeWriter() throws IOException { Assert.notNull(this.resource, "'resource' is required."); Assert.notNull(this.clazz, "'class' is required."); if (this.embedSchema) { Assert.notNull(this.schemaResource, "'schema' is required."); Assert.state(this.schemaResource.exists(), "'schema' " + this.schemaResource.getFilename() + " does not exist."); Schema schema; try { schema = new Schema.Parser().parse(this.schemaResource.getInputStream()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } this.dataFileWriter = new DataFileWriter<>(datumWriterForClass(this.clazz)); this.dataFileWriter.create(schema, this.resource.getOutputStream()); } else { this.outputStreamWriter = createOutputStreamWriter(this.resource.getOutputStream(), datumWriterForClass(this.clazz)); } } private static <T> DatumWriter<T> datumWriterForClass(Class<T> clazz) { if (SpecificRecordBase.class.isAssignableFrom(clazz)) { return new SpecificDatumWriter<>(clazz); } if (GenericRecord.class.isAssignableFrom(clazz)) { return new GenericDatumWriter<>(); } return new ReflectDatumWriter<>(clazz); } private AvroItemWriter.OutputStreamWriter<T> createOutputStreamWriter(OutputStream outputStream, DatumWriter<T> datumWriter) { return new AvroItemWriter.OutputStreamWriter<>(outputStream, datumWriter); } private static class 
OutputStreamWriter<T> { private final DatumWriter<T> datumWriter; private final BinaryEncoder binaryEncoder; private final OutputStream outputStream; private OutputStreamWriter(OutputStream outputStream, DatumWriter<T> datumWriter) { this.outputStream = outputStream; this.datumWriter = datumWriter; this.binaryEncoder = EncoderFactory.get().binaryEncoder(outputStream, null); } private void write(T datum) throws Exception { this.datumWriter.write(datum, this.binaryEncoder); this.binaryEncoder.flush(); } private void close() { try { this.outputStream.close(); } catch (IOException e) { throw new ItemStreamException(e.getMessage(), e); } } } }
super.open(executionContext); try { initializeWriter(); } catch (IOException e) { throw new ItemStreamException(e.getMessage(), e); }
1,229
55
1,284
<methods>public non-sealed void <init>() <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/avro/builder/AvroItemReaderBuilder.java
AvroItemReaderBuilder
resource
class AvroItemReaderBuilder<T> { private boolean saveState = true; private String name = AvroItemReader.class.getSimpleName(); private int maxItemCount = Integer.MAX_VALUE; private int currentItemCount; private Resource schema; private Resource resource; private Class<T> type; private boolean embeddedSchema = true; /** * Configure a {@link Resource} containing Avro serialized objects. * @param resource an existing Resource. * @return The current instance of the builder. */ public AvroItemReaderBuilder<T> resource(Resource resource) {<FILL_FUNCTION_BODY>} /** * Configure an Avro {@link Schema} from a {@link Resource}. * @param schema an existing schema Resource. * @return The current instance of the builder. */ public AvroItemReaderBuilder<T> schema(Resource schema) { Assert.notNull(schema, "A 'schema' Resource is required."); Assert.state(schema.exists(), "Resource " + schema.getFilename() + " does not exist."); this.schema = schema; return this; } /** * Configure an Avro {@link Schema} from a String. * @param schemaString the schema String. * @return The current instance of the builder. */ public AvroItemReaderBuilder<T> schema(String schemaString) { Assert.hasText(schemaString, "A 'schema' is required."); this.schema = new ByteArrayResource(schemaString.getBytes()); return this; } /** * Configure a type to be deserialized. * @param type the class to be deserialized. * @return The current instance of the builder. */ public AvroItemReaderBuilder<T> type(Class<T> type) { Assert.notNull(type, "A 'type' is required."); this.type = type; return this; } /** * Disable or enable reading an embedded Avro schema. True by default. * @param embeddedSchema set to false to if the input does not contain an Avro schema. * @return The current instance of the builder. 
*/ public AvroItemReaderBuilder<T> embeddedSchema(boolean embeddedSchema) { this.embeddedSchema = embeddedSchema; return this; } /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public AvroItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public AvroItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @return The current instance of the builder. * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public AvroItemReaderBuilder<T> maxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; return this; } /** * Index for the current item. Used on restarts to indicate where to start from. * @param currentItemCount current index * @return this instance for method chaining * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public AvroItemReaderBuilder<T> currentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; return this; } /** * Build an instance of {@link AvroItemReader}. 
* @return the instance; */ public AvroItemReader<T> build() { AvroItemReader<T> avroItemReader; Assert.notNull(this.resource, "A 'resource' is required."); if (this.type != null) { avroItemReader = buildForType(); } else { avroItemReader = buildForSchema(); } avroItemReader.setSaveState(this.saveState); if (this.saveState) { Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); } avroItemReader.setName(this.name); avroItemReader.setCurrentItemCount(this.currentItemCount); avroItemReader.setMaxItemCount(this.maxItemCount); avroItemReader.setEmbeddedSchema(this.embeddedSchema); return avroItemReader; } private AvroItemReader<T> buildForType() { Assert.isNull(this.schema, "You cannot specify a schema and 'type'."); return new AvroItemReader<>(this.resource, this.type); } private AvroItemReader<T> buildForSchema() { Assert.notNull(this.schema, "'schema' is required."); return new AvroItemReader<>(this.resource, this.schema); } }
Assert.notNull(resource, "A 'resource' is required."); Assert.state(resource.exists(), "Resource " + resource.getFilename() + " does not exist."); this.resource = resource; return this;
1,447
61
1,508
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/avro/builder/AvroItemWriterBuilder.java
AvroItemWriterBuilder
resource
class AvroItemWriterBuilder<T> { private Class<T> type; private WritableResource resource; private Resource schema; private String name = AvroItemWriter.class.getSimpleName(); /** * @param resource the {@link WritableResource} used to write the serialized data. * @return The current instance of the builder. */ public AvroItemWriterBuilder<T> resource(WritableResource resource) {<FILL_FUNCTION_BODY>} /** * @param schema the Resource containing the schema JSON used to serialize the output. * @return The current instance of the builder. */ public AvroItemWriterBuilder<T> schema(Resource schema) { Assert.notNull(schema, "A 'schema' is required."); Assert.state(schema.exists(), "Resource " + schema.getFilename() + "does not exist."); this.schema = schema; return this; } /** * @param schemaString the String containing the schema JSON used to serialize the * output. * @return The current instance of the builder. */ public AvroItemWriterBuilder<T> schema(String schemaString) { Assert.hasText(schemaString, "A 'schemaString' is required."); this.schema = new ByteArrayResource(schemaString.getBytes()); return this; } /** * @param type the Class of objects to be serialized. * @return The current instance of the builder. */ public AvroItemWriterBuilder<T> type(Class<T> type) { Assert.notNull(type, "A 'type' is required."); this.type = type; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public AvroItemWriterBuilder<T> name(String name) { Assert.hasText(name, "A 'name' is required."); this.name = name; return this; } /** * Build an instance of {@link AvroItemWriter}. 
* @return the instance; */ public AvroItemWriter<T> build() { Assert.notNull(this.resource, "A 'resource' is required."); Assert.notNull(this.type, "A 'type' is required."); AvroItemWriter<T> avroItemWriter = this.schema != null ? new AvroItemWriter<>(this.resource, this.schema, this.type) : new AvroItemWriter<>(this.resource, this.type); avroItemWriter.setName(this.name); return avroItemWriter; } }
Assert.notNull(resource, "A 'resource' is required."); this.resource = resource; return this;
725
34
759
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/AbstractPaginatedDataItemReader.java
AbstractPaginatedDataItemReader
doRead
class AbstractPaginatedDataItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> { protected volatile int page = 0; protected int pageSize = 10; protected Iterator<T> results; private final Lock lock = new ReentrantLock(); /** * The number of items to be read with each page. * @param pageSize the number of items. pageSize must be greater than zero. */ public void setPageSize(int pageSize) { Assert.isTrue(pageSize > 0, "pageSize must be greater than zero"); this.pageSize = pageSize; } @Nullable @Override protected T doRead() throws Exception {<FILL_FUNCTION_BODY>} /** * Method this {@link ItemStreamReader} delegates to for the actual work of reading a * page. Each time this method is called, the resulting {@link Iterator} should * contain the items read within the next page. <br> * <br> * If the {@link Iterator} is empty or null when it is returned, this * {@link ItemReader} will assume that the input has been exhausted. * @return an {@link Iterator} containing the items within a page. */ protected abstract Iterator<T> doPageRead(); @Override protected void doOpen() throws Exception { } @Override protected void doClose() throws Exception { } @Override protected void jumpToItem(int itemLastIndex) throws Exception { this.lock.lock(); try { page = itemLastIndex / pageSize; int current = itemLastIndex % pageSize; Iterator<T> initialPage = doPageRead(); for (; current >= 0; current--) { initialPage.next(); } } finally { this.lock.unlock(); } } }
this.lock.lock(); try { if (results == null || !results.hasNext()) { results = doPageRead(); page++; if (results == null || !results.hasNext()) { return null; } } if (results.hasNext()) { return results.next(); } else { return null; } } finally { this.lock.unlock(); }
474
129
603
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemReader.java
MongoItemReader
afterPropertiesSet
class MongoItemReader<T> extends AbstractPaginatedDataItemReader<T> implements InitializingBean { protected MongoOperations template; protected Query query; protected String queryString; protected Class<? extends T> type; protected Sort sort; protected String hint; protected String fields; protected String collection; protected List<Object> parameterValues = new ArrayList<>(); public MongoItemReader() { super(); setName(ClassUtils.getShortName(MongoItemReader.class)); } /** * A Mongo Query to be used. * @param query Mongo Query to be used. */ public void setQuery(Query query) { this.query = query; } /** * Used to perform operations against the MongoDB instance. Also handles the mapping * of documents to objects. * @param template the MongoOperations instance to use * @see MongoOperations */ public void setTemplate(MongoOperations template) { this.template = template; } /** * A JSON formatted MongoDB query. Parameterization of the provided query is allowed * via ?&lt;index&gt; placeholders where the &lt;index&gt; indicates the index of the * parameterValue to substitute. * @param queryString JSON formatted Mongo query */ public void setQuery(String queryString) { this.queryString = queryString; } /** * The type of object to be returned for each {@link #read()} call. * @param type the type of object to return */ public void setTargetType(Class<? extends T> type) { this.type = type; } /** * {@link List} of values to be substituted in for each of the parameters in the * query. * @param parameterValues values */ public void setParameterValues(List<Object> parameterValues) { Assert.notNull(parameterValues, "Parameter values must not be null"); this.parameterValues = parameterValues; } /** * JSON defining the fields to be returned from the matching documents by MongoDB. * @param fields JSON string that identifies the fields to sort by. 
*/ public void setFields(String fields) { this.fields = fields; } /** * {@link Map} of property * names/{@link org.springframework.data.domain.Sort.Direction} values to sort the * input by. * @param sorts map of properties and direction to sort each. */ public void setSort(Map<String, Sort.Direction> sorts) { Assert.notNull(sorts, "Sorts must not be null"); this.sort = convertToSort(sorts); } /** * @param collection Mongo collection to be queried. */ public void setCollection(String collection) { this.collection = collection; } /** * JSON String telling MongoDB what index to use. * @param hint string indicating what index to use. */ public void setHint(String hint) { this.hint = hint; } @Override @SuppressWarnings("unchecked") protected Iterator<T> doPageRead() { if (queryString != null) { Pageable pageRequest = PageRequest.of(page, pageSize, sort); String populatedQuery = replacePlaceholders(queryString, parameterValues); Query mongoQuery; if (StringUtils.hasText(fields)) { mongoQuery = new BasicQuery(populatedQuery, fields); } else { mongoQuery = new BasicQuery(populatedQuery); } mongoQuery.with(pageRequest); if (StringUtils.hasText(hint)) { mongoQuery.withHint(hint); } if (StringUtils.hasText(collection)) { return (Iterator<T>) template.find(mongoQuery, type, collection).iterator(); } else { return (Iterator<T>) template.find(mongoQuery, type).iterator(); } } else { Pageable pageRequest = PageRequest.of(page, pageSize); query.with(pageRequest); if (StringUtils.hasText(collection)) { return (Iterator<T>) template.find(query, type, collection).iterator(); } else { return (Iterator<T>) template.find(query, type).iterator(); } } } /** * Checks mandatory properties * * @see InitializingBean#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} protected String replacePlaceholders(String input, List<Object> values) { ParameterBindingJsonReader reader = new ParameterBindingJsonReader(input, values.toArray()); 
DecoderContext decoderContext = DecoderContext.builder().build(); Document document = new ParameterBindingDocumentCodec().decode(reader, decoderContext); return document.toJson(); } protected Sort convertToSort(Map<String, Sort.Direction> sorts) { List<Sort.Order> sortValues = new ArrayList<>(sorts.size()); for (Map.Entry<String, Sort.Direction> curSort : sorts.entrySet()) { sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); } return Sort.by(sortValues); } }
Assert.state(template != null, "An implementation of MongoOperations is required."); Assert.state(type != null, "A type to convert the input into is required."); Assert.state(queryString != null || query != null, "A query is required."); if (queryString != null) { Assert.state(sort != null, "A sort is required."); }
1,394
106
1,500
<methods>public non-sealed void <init>() ,public void setPageSize(int) <variables>private final java.util.concurrent.locks.Lock lock,protected volatile int page,protected int pageSize,protected Iterator<T> results
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemReader.java
Neo4jItemReader
doPageRead
class Neo4jItemReader<T> extends AbstractPaginatedDataItemReader<T> implements InitializingBean { protected Log logger = LogFactory.getLog(getClass()); private SessionFactory sessionFactory; private String startStatement; private String returnStatement; private String matchStatement; private String whereStatement; private String orderByStatement; private Class<T> targetType; private Map<String, Object> parameterValues; /** * Optional parameters to be used in the cypher query. * @param parameterValues the parameter values to be used in the cypher query */ public void setParameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; } protected final Map<String, Object> getParameterValues() { return this.parameterValues; } /** * The start segment of the cypher query. START is prepended to the statement provided * and should <em>not</em> be included. * @param startStatement the start fragment of the cypher query. */ public void setStartStatement(String startStatement) { this.startStatement = startStatement; } /** * The return statement of the cypher query. RETURN is prepended to the statement * provided and should <em>not</em> be included * @param returnStatement the return fragment of the cypher query. */ public void setReturnStatement(String returnStatement) { this.returnStatement = returnStatement; } /** * An optional match fragment of the cypher query. MATCH is prepended to the statement * provided and should <em>not</em> be included. * @param matchStatement the match fragment of the cypher query */ public void setMatchStatement(String matchStatement) { this.matchStatement = matchStatement; } /** * An optional where fragment of the cypher query. WHERE is prepended to the statement * provided and should <em>not</em> be included. * @param whereStatement where fragment of the cypher query */ public void setWhereStatement(String whereStatement) { this.whereStatement = whereStatement; } /** * A list of properties to order the results by. 
This is required so that subsequent * page requests pull back the segment of results correctly. ORDER BY is prepended to * the statement provided and should <em>not</em> be included. * @param orderByStatement order by fragment of the cypher query. */ public void setOrderByStatement(String orderByStatement) { this.orderByStatement = orderByStatement; } protected SessionFactory getSessionFactory() { return sessionFactory; } /** * Establish the session factory for the reader. * @param sessionFactory the factory to use for the reader. */ public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } /** * The object type to be returned from each call to {@link #read()} * @param targetType the type of object to return. */ public void setTargetType(Class<T> targetType) { this.targetType = targetType; } protected final Class<T> getTargetType() { return this.targetType; } protected String generateLimitCypherQuery() { StringBuilder query = new StringBuilder(128); query.append("START ").append(startStatement); query.append(matchStatement != null ? " MATCH " + matchStatement : ""); query.append(whereStatement != null ? 
" WHERE " + whereStatement : ""); query.append(" RETURN ").append(returnStatement); query.append(" ORDER BY ").append(orderByStatement); query.append(" SKIP ").append(pageSize * page); query.append(" LIMIT ").append(pageSize); String resultingQuery = query.toString(); if (logger.isDebugEnabled()) { logger.debug(resultingQuery); } return resultingQuery; } /** * Checks mandatory properties * * @see InitializingBean#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception { Assert.state(sessionFactory != null, "A SessionFactory is required"); Assert.state(targetType != null, "The type to be returned is required"); Assert.state(StringUtils.hasText(startStatement), "A START statement is required"); Assert.state(StringUtils.hasText(returnStatement), "A RETURN statement is required"); Assert.state(StringUtils.hasText(orderByStatement), "A ORDER BY statement is required"); } @SuppressWarnings("unchecked") @Override protected Iterator<T> doPageRead() {<FILL_FUNCTION_BODY>} }
Session session = getSessionFactory().openSession(); Iterable<T> queryResults = session.query(getTargetType(), generateLimitCypherQuery(), getParameterValues()); if (queryResults != null) { return queryResults.iterator(); } else { return new ArrayList<T>().iterator(); }
1,226
88
1,314
<methods>public non-sealed void <init>() ,public void setPageSize(int) <variables>private final java.util.concurrent.locks.Lock lock,protected volatile int page,protected int pageSize,protected Iterator<T> results
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemWriter.java
Neo4jItemWriter
save
class Neo4jItemWriter<T> implements ItemWriter<T>, InitializingBean { protected static final Log logger = LogFactory.getLog(Neo4jItemWriter.class); private boolean delete = false; private SessionFactory sessionFactory; /** * Boolean flag indicating whether the writer should save or delete the item at write * time. * @param delete true if write should delete item, false if item should be saved. * Default is false. */ public void setDelete(boolean delete) { this.delete = delete; } /** * Establish the session factory that will be used to create {@link Session} instances * for interacting with Neo4j. * @param sessionFactory sessionFactory to be used. */ public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } /** * Checks mandatory properties * * @see InitializingBean#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception { Assert.state(this.sessionFactory != null, "A SessionFactory is required"); } /** * Write all items to the data store. * * @see org.springframework.batch.item.ItemWriter#write(Chunk) */ @Override public void write(Chunk<? extends T> chunk) throws Exception { if (!chunk.isEmpty()) { doWrite(chunk); } } /** * Performs the actual write using the template. This can be overridden by a subclass * if necessary. * @param items the list of items to be persisted. */ protected void doWrite(Chunk<? extends T> items) { if (delete) { delete(items); } else { save(items); } } private void delete(Chunk<? extends T> items) { Session session = this.sessionFactory.openSession(); for (T item : items) { session.delete(item); } } private void save(Chunk<? extends T> items) {<FILL_FUNCTION_BODY>} }
Session session = this.sessionFactory.openSession(); for (T item : items) { session.save(item); }
548
40
588
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemReader.java
RepositoryItemReader
createMethodInvoker
class RepositoryItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> implements InitializingBean { protected Log logger = LogFactory.getLog(getClass()); private PagingAndSortingRepository<?, ?> repository; private Sort sort; private volatile int page = 0; private int pageSize = 10; private volatile int current = 0; private List<?> arguments; private volatile List<T> results; private final Lock lock = new ReentrantLock(); private String methodName; public RepositoryItemReader() { setName(ClassUtils.getShortName(RepositoryItemReader.class)); } /** * Arguments to be passed to the data providing method. * @param arguments list of method arguments to be passed to the repository */ public void setArguments(List<?> arguments) { this.arguments = arguments; } /** * Provides ordering of the results so that order is maintained between paged queries * @param sorts the fields to sort by and the directions */ public void setSort(Map<String, Sort.Direction> sorts) { this.sort = convertToSort(sorts); } /** * @param pageSize The number of items to retrieve per page. Must be greater than 0. */ public void setPageSize(int pageSize) { this.pageSize = pageSize; } /** * The {@link org.springframework.data.repository.PagingAndSortingRepository} * implementation used to read input from. * @param repository underlying repository for input to be read from. */ public void setRepository(PagingAndSortingRepository<?, ?> repository) { this.repository = repository; } /** * Specifies what method on the repository to call. This method must take * {@link org.springframework.data.domain.Pageable} as the <em>last</em> argument. 
* @param methodName name of the method to invoke */ public void setMethodName(String methodName) { this.methodName = methodName; } @Override public void afterPropertiesSet() throws Exception { Assert.state(repository != null, "A PagingAndSortingRepository is required"); Assert.state(pageSize > 0, "Page size must be greater than 0"); Assert.state(sort != null, "A sort is required"); Assert.state(this.methodName != null && !this.methodName.isEmpty(), "methodName is required."); if (isSaveState()) { Assert.state(StringUtils.hasText(getName()), "A name is required when saveState is set to true."); } } @Nullable @Override protected T doRead() throws Exception { this.lock.lock(); try { boolean nextPageNeeded = (results != null && current >= results.size()); if (results == null || nextPageNeeded) { if (logger.isDebugEnabled()) { logger.debug("Reading page " + page); } results = doPageRead(); page++; if (results.isEmpty()) { return null; } if (nextPageNeeded) { current = 0; } } if (current < results.size()) { T curLine = results.get(current); current++; return curLine; } else { return null; } } finally { this.lock.unlock(); } } @Override protected void jumpToItem(int itemLastIndex) throws Exception { this.lock.lock(); try { page = itemLastIndex / pageSize; current = itemLastIndex % pageSize; } finally { this.lock.unlock(); } } /** * Performs the actual reading of a page via the repository. Available for overriding * as needed. 
* @return the list of items that make up the page * @throws Exception Based on what the underlying method throws or related to the * calling of the method */ @SuppressWarnings("unchecked") protected List<T> doPageRead() throws Exception { Pageable pageRequest = PageRequest.of(page, pageSize, sort); MethodInvoker invoker = createMethodInvoker(repository, methodName); List<Object> parameters = new ArrayList<>(); if (arguments != null && arguments.size() > 0) { parameters.addAll(arguments); } parameters.add(pageRequest); invoker.setArguments(parameters.toArray()); Slice<T> curPage = (Slice<T>) doInvoke(invoker); return curPage.getContent(); } @Override protected void doOpen() throws Exception { } @Override protected void doClose() throws Exception { this.lock.lock(); try { current = 0; page = 0; results = null; } finally { this.lock.unlock(); } } private Sort convertToSort(Map<String, Sort.Direction> sorts) { List<Sort.Order> sortValues = new ArrayList<>(); for (Map.Entry<String, Sort.Direction> curSort : sorts.entrySet()) { sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); } return Sort.by(sortValues); } private Object doInvoke(MethodInvoker invoker) throws Exception { try { invoker.prepare(); } catch (ClassNotFoundException | NoSuchMethodException e) { throw new DynamicMethodInvocationException(e); } try { return invoker.invoke(); } catch (InvocationTargetException e) { if (e.getCause() instanceof Exception) { throw (Exception) e.getCause(); } else { throw new InvocationTargetThrowableWrapper(e.getCause()); } } catch (IllegalAccessException e) { throw new DynamicMethodInvocationException(e); } } private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) {<FILL_FUNCTION_BODY>} }
MethodInvoker invoker = new MethodInvoker(); invoker.setTargetObject(targetObject); invoker.setTargetMethod(targetMethod); return invoker;
1,612
49
1,661
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemWriter.java
RepositoryItemWriter
createMethodInvoker
class RepositoryItemWriter<T> implements ItemWriter<T>, InitializingBean { protected static final Log logger = LogFactory.getLog(RepositoryItemWriter.class); private CrudRepository<T, ?> repository; private String methodName; /** * Specifies what method on the repository to call. This method must have the type of * object passed to this writer as the <em>sole</em> argument. * @param methodName {@link String} containing the method name. */ public void setMethodName(String methodName) { this.methodName = methodName; } /** * Set the {@link org.springframework.data.repository.CrudRepository} implementation * for persistence * @param repository the Spring Data repository to be set */ public void setRepository(CrudRepository<T, ?> repository) { this.repository = repository; } /** * Write all items to the data store via a Spring Data repository. * * @see org.springframework.batch.item.ItemWriter#write(Chunk) */ @Override public void write(Chunk<? extends T> chunk) throws Exception { if (!chunk.isEmpty()) { doWrite(chunk); } } /** * Performs the actual write to the repository. This can be overridden by a subclass * if necessary. * @param items the list of items to be persisted. * @throws Exception thrown if error occurs during writing. */ protected void doWrite(Chunk<? extends T> items) throws Exception { if (logger.isDebugEnabled()) { logger.debug("Writing to the repository with " + items.size() + " items."); } if (this.methodName == null) { this.repository.saveAll(items); return; } MethodInvoker invoker = createMethodInvoker(repository, methodName); for (T object : items) { invoker.setArguments(object); doInvoke(invoker); } } /** * Check mandatory properties - there must be a repository. 
*/ @Override public void afterPropertiesSet() throws Exception { Assert.state(repository != null, "A CrudRepository implementation is required"); if (this.methodName != null) { Assert.state(StringUtils.hasText(this.methodName), "methodName must not be empty."); } else { logger.debug("No method name provided, CrudRepository.saveAll will be used."); } } private Object doInvoke(MethodInvoker invoker) throws Exception { try { invoker.prepare(); } catch (ClassNotFoundException | NoSuchMethodException e) { throw new DynamicMethodInvocationException(e); } try { return invoker.invoke(); } catch (InvocationTargetException e) { if (e.getCause() instanceof Exception) { throw (Exception) e.getCause(); } else { throw new InvocationTargetThrowableWrapper(e.getCause()); } } catch (IllegalAccessException e) { throw new DynamicMethodInvocationException(e); } } private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) {<FILL_FUNCTION_BODY>} }
MethodInvoker invoker = new MethodInvoker(); invoker.setTargetObject(targetObject); invoker.setTargetMethod(targetMethod); return invoker;
860
49
909
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/builder/MongoItemWriterBuilder.java
MongoItemWriterBuilder
build
class MongoItemWriterBuilder<T> { private MongoOperations template; private String collection; private Mode mode = Mode.UPSERT; /** * Indicates if the items being passed to the writer are to be saved or removed from * the data store. If set to false (default), the items will be saved. If set to true, * the items will be removed. * @param delete removal indicator * @return The current instance of the builder * @see MongoItemWriter#setDelete(boolean) * @deprecated Use {@link MongoItemWriterBuilder#mode(Mode)} instead. Scheduled for * removal in v5.3 or later. */ @Deprecated(since = "5.1", forRemoval = true) public MongoItemWriterBuilder<T> delete(boolean delete) { this.mode = (delete) ? Mode.REMOVE : Mode.UPSERT; return this; } /** * Set the operating {@link Mode} to be applied by this writer. Defaults to * {@link Mode#UPSERT}. * @param mode the mode to be used. * @return The current instance of the builder * @see MongoItemWriter#setMode(Mode) * @since 5.1 */ public MongoItemWriterBuilder<T> mode(final Mode mode) { this.mode = mode; return this; } /** * Set the {@link MongoOperations} to be used to save items to be written. * @param template the template implementation to be used. * @return The current instance of the builder * @see MongoItemWriter#setTemplate(MongoOperations) */ public MongoItemWriterBuilder<T> template(MongoOperations template) { this.template = template; return this; } /** * Set the name of the Mongo collection to be written to. * @param collection the name of the collection. * @return The current instance of the builder * @see MongoItemWriter#setCollection(String) * */ public MongoItemWriterBuilder<T> collection(String collection) { this.collection = collection; return this; } /** * Validates and builds a {@link MongoItemWriter}. * @return a {@link MongoItemWriter} */ public MongoItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.template, "template is required."); MongoItemWriter<T> writer = new MongoItemWriter<>(); writer.setTemplate(this.template); writer.setMode(this.mode); writer.setCollection(this.collection); return writer;
611
82
693
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/builder/Neo4jItemWriterBuilder.java
Neo4jItemWriterBuilder
build
class Neo4jItemWriterBuilder<T> { private boolean delete = false; private SessionFactory sessionFactory; /** * Boolean flag indicating whether the writer should save or delete the item at write * time. * @param delete true if write should delete item, false if item should be saved. * Default is false. * @return The current instance of the builder * @see Neo4jItemWriter#setDelete(boolean) */ public Neo4jItemWriterBuilder<T> delete(boolean delete) { this.delete = delete; return this; } /** * Establish the session factory that will be used to create {@link Session} instances * for interacting with Neo4j. * @param sessionFactory sessionFactory to be used. * @return The current instance of the builder * @see Neo4jItemWriter#setSessionFactory(SessionFactory) */ public Neo4jItemWriterBuilder<T> sessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; return this; } /** * Validates and builds a {@link org.springframework.batch.item.data.Neo4jItemWriter}. * @return a {@link Neo4jItemWriter} */ public Neo4jItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(sessionFactory, "sessionFactory is required."); Neo4jItemWriter<T> writer = new Neo4jItemWriter<>(); writer.setDelete(this.delete); writer.setSessionFactory(this.sessionFactory); return writer;
342
74
416
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/builder/RepositoryItemReaderBuilder.java
RepositoryItemReaderBuilder
build
class RepositoryItemReaderBuilder<T> { private PagingAndSortingRepository<?, ?> repository; private Map<String, Sort.Direction> sorts; private List<?> arguments; private int pageSize = 10; private String methodName; private boolean saveState = true; private String name; private int maxItemCount = Integer.MAX_VALUE; private int currentItemCount; /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public RepositoryItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public RepositoryItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @return The current instance of the builder. * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public RepositoryItemReaderBuilder<T> maxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; return this; } /** * Index for the current item. Used on restarts to indicate where to start from. 
* @param currentItemCount current index * @return this instance for method chaining * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public RepositoryItemReaderBuilder<T> currentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; return this; } /** * Arguments to be passed to the data providing method. * @param arguments list of method arguments to be passed to the repository. * @return The current instance of the builder. * @see RepositoryItemReader#setArguments(List) */ public RepositoryItemReaderBuilder<T> arguments(List<?> arguments) { this.arguments = arguments; return this; } /** * Arguments to be passed to the data providing method. * @param arguments the method arguments to be passed to the repository. * @return The current instance of the builder. * @see RepositoryItemReader#setArguments(List) */ public RepositoryItemReaderBuilder<T> arguments(Object... arguments) { return arguments(Arrays.asList(arguments)); } /** * Provides ordering of the results so that order is maintained between paged queries. * @param sorts the fields to sort by and the directions. * @return The current instance of the builder. * @see RepositoryItemReader#setSort(Map) */ public RepositoryItemReaderBuilder<T> sorts(Map<String, Sort.Direction> sorts) { this.sorts = sorts; return this; } /** * Establish the pageSize for the generated RepositoryItemReader. * @param pageSize The number of items to retrieve per page. Must be greater than 0. * @return The current instance of the builder. * @see RepositoryItemReader#setPageSize(int) */ public RepositoryItemReaderBuilder<T> pageSize(int pageSize) { this.pageSize = pageSize; return this; } /** * The {@link org.springframework.data.repository.PagingAndSortingRepository} * implementation used to read input from. * @param repository underlying repository for input to be read from. * @return The current instance of the builder. 
* @see RepositoryItemReader#setRepository(PagingAndSortingRepository) */ public RepositoryItemReaderBuilder<T> repository(PagingAndSortingRepository<?, ?> repository) { this.repository = repository; return this; } /** * Specifies what method on the repository to call. This method must take * {@link org.springframework.data.domain.Pageable} as the <em>last</em> argument. * @param methodName name of the method to invoke. * @return The current instance of the builder. * @see RepositoryItemReader#setMethodName(String) */ public RepositoryItemReaderBuilder<T> methodName(String methodName) { this.methodName = methodName; return this; } /** * Builds the {@link RepositoryItemReader}. * @return a {@link RepositoryItemReader} */ public RepositoryItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.sorts, "sorts map is required."); Assert.notNull(this.repository, "repository is required."); Assert.isTrue(this.pageSize > 0, "Page size must be greater than 0"); Assert.hasText(this.methodName, "methodName is required."); if (this.saveState) { Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); } RepositoryItemReader<T> reader = new RepositoryItemReader<>(); reader.setArguments(this.arguments); reader.setRepository(this.repository); reader.setMethodName(this.methodName); reader.setPageSize(this.pageSize); reader.setCurrentItemCount(this.currentItemCount); reader.setMaxItemCount(this.maxItemCount); reader.setSaveState(this.saveState); reader.setSort(this.sorts); reader.setName(this.name); return reader;
1,308
274
1,582
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/builder/RepositoryItemWriterBuilder.java
RepositoryItemWriterBuilder
build
class RepositoryItemWriterBuilder<T> { private static final Log logger = LogFactory.getLog(RepositoryItemWriterBuilder.class.getName()); private CrudRepository<T, ?> repository; private String methodName; private RepositoryMethodReference repositoryMethodReference; /** * Specifies what method on the repository to call. This method must have the type of * object passed to this writer as the <em>sole</em> argument. * @param methodName the name of the method to be used for saving the item. * @return The current instance of the builder. * @see RepositoryItemWriter#setMethodName(String) */ public RepositoryItemWriterBuilder<T> methodName(String methodName) { this.methodName = methodName; return this; } /** * Set the {@link org.springframework.data.repository.CrudRepository} implementation * for persistence * @param repository the Spring Data repository to be set * @return The current instance of the builder. * @see RepositoryItemWriter#setRepository(CrudRepository) */ public RepositoryItemWriterBuilder<T> repository(CrudRepository<T, ?> repository) { this.repository = repository; return this; } /** * Specifies a repository and the type-safe method to call for the writer. The method * configured via this mechanism must take * {@link org.springframework.data.domain.Pageable} as the <em>last</em> argument. * This method can be used in place of {@link #repository(CrudRepository)}, * {@link #methodName(String)}}. * <p> * Note: The repository that is used by the repositoryMethodReference must be * non-final. * @param repositoryMethodReference of the used to get a repository and type-safe * method for use by the writer. * @return The current instance of the builder. 
* @see RepositoryItemWriter#setMethodName(String) * @see RepositoryItemWriter#setRepository(CrudRepository) * */ public RepositoryItemWriterBuilder<T> repository( RepositoryItemWriterBuilder.RepositoryMethodReference repositoryMethodReference) { this.repositoryMethodReference = repositoryMethodReference; return this; } /** * Builds the {@link RepositoryItemWriter}. * @return a {@link RepositoryItemWriter} */ @SuppressWarnings("unchecked") public RepositoryItemWriter<T> build() {<FILL_FUNCTION_BODY>} /** * Establishes a proxy that will capture a the Repository and the associated * methodName that will be used by the writer. * * @param <T> The type of repository that will be used by the writer. The class must * not be final. */ public static class RepositoryMethodReference<T> { private final RepositoryMethodInterceptor repositoryInvocationHandler; private final CrudRepository<?, ?> repository; public RepositoryMethodReference(CrudRepository<?, ?> repository) { this.repository = repository; this.repositoryInvocationHandler = new RepositoryMethodInterceptor(); } /** * The proxy returned prevents actual method execution and is only used to gather, * information about the method. * @return T is a proxy of the object passed in in the constructor */ @SuppressWarnings("unchecked") public T methodIs() { Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(this.repository.getClass()); enhancer.setCallback(this.repositoryInvocationHandler); return (T) enhancer.create(); } CrudRepository<?, ?> getRepository() { return this.repository; } String getMethodName() { return this.repositoryInvocationHandler.getMethodName(); } } private static class RepositoryMethodInterceptor implements MethodInterceptor { private String methodName; @Override public Object intercept(Object o, Method method, Object[] objects, MethodProxy methodProxy) throws Throwable { this.methodName = method.getName(); return null; } String getMethodName() { return this.methodName; } } }
if (this.repositoryMethodReference != null) { this.methodName = this.repositoryMethodReference.getMethodName(); this.repository = this.repositoryMethodReference.getRepository(); } Assert.notNull(this.repository, "repository is required."); RepositoryItemWriter<T> writer = new RepositoryItemWriter<>(); writer.setRepository(this.repository); if (this.methodName != null) { Assert.hasText(this.methodName, "methodName must not be empty."); writer.setMethodName(this.methodName); } else { logger.debug("No method name provided, CrudRepository.saveAll will be used."); } return writer;
1,073
186
1,259
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractPagingItemReader.java
AbstractPagingItemReader
jumpToItem
class AbstractPagingItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> implements InitializingBean { protected Log logger = LogFactory.getLog(getClass()); private volatile boolean initialized = false; private int pageSize = 10; private volatile int current = 0; private volatile int page = 0; protected volatile List<T> results; private final Lock lock = new ReentrantLock(); public AbstractPagingItemReader() { setName(ClassUtils.getShortName(AbstractPagingItemReader.class)); } /** * The current page number. * @return the current page */ public int getPage() { return page; } /** * The page size configured for this reader. * @return the page size */ public int getPageSize() { return pageSize; } /** * The number of rows to retrieve at a time. * @param pageSize the number of rows to fetch per page */ public void setPageSize(int pageSize) { this.pageSize = pageSize; } /** * Check mandatory properties. * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception { Assert.state(pageSize > 0, "pageSize must be greater than zero"); } @Nullable @Override protected T doRead() throws Exception { this.lock.lock(); try { if (results == null || current >= pageSize) { if (logger.isDebugEnabled()) { logger.debug("Reading page " + getPage()); } doReadPage(); page++; if (current >= pageSize) { current = 0; } } int next = current++; if (next < results.size()) { return results.get(next); } else { return null; } } finally { this.lock.unlock(); } } abstract protected void doReadPage(); @Override protected void doOpen() throws Exception { Assert.state(!initialized, "Cannot open an already opened ItemReader, call close first"); initialized = true; } @Override protected void doClose() throws Exception { this.lock.lock(); try { initialized = false; current = 0; page = 0; results = null; } finally { this.lock.unlock(); } } @Override protected void jumpToItem(int itemIndex) throws Exception 
{<FILL_FUNCTION_BODY>} }
this.lock.lock(); try { page = itemIndex / pageSize; current = itemIndex % pageSize; } finally { this.lock.unlock(); } if (logger.isDebugEnabled()) { logger.debug("Jumping to page " + getPage() + " and index " + current); }
701
99
800
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateCursorItemReader.java
HibernateCursorItemReader
doOpen
class HibernateCursorItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> implements InitializingBean { private final HibernateItemReaderHelper<T> helper = new HibernateItemReaderHelper<>(); public HibernateCursorItemReader() { setName(ClassUtils.getShortName(HibernateCursorItemReader.class)); } private ScrollableResults<? extends T> cursor; private boolean initialized = false; private int fetchSize; private Map<String, Object> parameterValues; @Override public void afterPropertiesSet() throws Exception { Assert.state(fetchSize >= 0, "fetchSize must not be negative"); helper.afterPropertiesSet(); } /** * The parameter values to apply to a query (map of name:value). * @param parameterValues the parameter values to set */ public void setParameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; } /** * A query name for an externalized query. Either this or the { * {@link #setQueryString(String) query string} or the { * {@link #setQueryProvider(HibernateQueryProvider) query provider} should be set. * @param queryName name of a hibernate named query */ public void setQueryName(String queryName) { helper.setQueryName(queryName); } /** * Fetch size used internally by Hibernate to limit amount of data fetched from * database per round trip. * @param fetchSize the fetch size to pass down to Hibernate */ public void setFetchSize(int fetchSize) { this.fetchSize = fetchSize; } /** * A query provider. Either this or the {{@link #setQueryString(String) query string} * or the {{@link #setQueryName(String) query name} should be set. * @param queryProvider Hibernate query provider */ public void setQueryProvider(HibernateQueryProvider<T> queryProvider) { helper.setQueryProvider(queryProvider); } /** * A query string in HQL. Either this or the { * {@link #setQueryProvider(HibernateQueryProvider) query provider} or the { * {@link #setQueryName(String) query name} should be set. 
* @param queryString HQL query string */ public void setQueryString(String queryString) { helper.setQueryString(queryString); } /** * The Hibernate SessionFactory to use the create a session. * @param sessionFactory the {@link SessionFactory} to set */ public void setSessionFactory(SessionFactory sessionFactory) { helper.setSessionFactory(sessionFactory); } /** * Can be set only in uninitialized state. * @param useStatelessSession <code>true</code> to use {@link StatelessSession} * <code>false</code> to use standard hibernate {@link Session} */ public void setUseStatelessSession(boolean useStatelessSession) { helper.setUseStatelessSession(useStatelessSession); } @Nullable @Override protected T doRead() throws Exception { if (cursor.next()) { return cursor.get(); } return null; } /** * Open hibernate session and create a forward-only cursor for the query. */ @Override protected void doOpen() throws Exception {<FILL_FUNCTION_BODY>} /** * Update the context and clear the session if stateful. * @param executionContext the current {@link ExecutionContext} * @throws ItemStreamException if there is a problem */ @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); helper.clear(); } /** * Wind forward through the result set to the item requested. Also clears the session * every now and then (if stateful) to avoid memory problems. The frequency of session * clearing is the larger of the fetch size (if set) and 100. * @param itemIndex the first item to read * @throws Exception if there is a problem * @see AbstractItemCountingItemStreamItemReader#jumpToItem(int) */ @Override protected void jumpToItem(int itemIndex) throws Exception { int flushSize = Math.max(fetchSize, 100); helper.jumpToItem(cursor, itemIndex, flushSize); } /** * Close the cursor and hibernate session. 
*/ @Override protected void doClose() throws Exception { if (initialized) { if (cursor != null) { cursor.close(); } helper.close(); } initialized = false; } }
Assert.state(!initialized, "Cannot open an already opened ItemReader, call close first"); cursor = helper.getForwardOnlyCursor(fetchSize, parameterValues); initialized = true;
1,213
53
1,266
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemReaderHelper.java
HibernateItemReaderHelper
readPage
class HibernateItemReaderHelper<T> implements InitializingBean { private SessionFactory sessionFactory; private String queryString = ""; private String queryName = ""; private HibernateQueryProvider<? extends T> queryProvider; private boolean useStatelessSession = true; private StatelessSession statelessSession; private Session statefulSession; /** * @param queryName name of a hibernate named query */ public void setQueryName(String queryName) { this.queryName = queryName; } /** * @param queryString HQL query string */ public void setQueryString(String queryString) { this.queryString = queryString; } /** * @param queryProvider Hibernate query provider */ public void setQueryProvider(HibernateQueryProvider<? extends T> queryProvider) { this.queryProvider = queryProvider; } /** * Can be set only in uninitialized state. * @param useStatelessSession <code>true</code> to use {@link StatelessSession} * <code>false</code> to use standard hibernate {@link Session} */ public void setUseStatelessSession(boolean useStatelessSession) { Assert.state(statefulSession == null && statelessSession == null, "The useStatelessSession flag can only be set before a session is initialized."); this.useStatelessSession = useStatelessSession; } /** * @param sessionFactory hibernate session factory */ public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } @Override public void afterPropertiesSet() throws Exception { Assert.state(sessionFactory != null, "A SessionFactory must be provided"); if (queryProvider == null) { Assert.state(sessionFactory != null, "session factory must be set"); Assert.state(StringUtils.hasText(queryString) ^ StringUtils.hasText(queryName), "queryString or queryName must be set"); } } /** * Get a cursor over all of the results, with the forward-only flag set. * @param fetchSize the fetch size to use retrieving the results * @param parameterValues the parameter values to use (or null if none). 
* @return a forward-only {@link ScrollableResults} */ public ScrollableResults<? extends T> getForwardOnlyCursor(int fetchSize, Map<String, Object> parameterValues) { Query<? extends T> query = createQuery(); if (!CollectionUtils.isEmpty(parameterValues)) { query.setProperties(parameterValues); } return query.setFetchSize(fetchSize).scroll(ScrollMode.FORWARD_ONLY); } /** * Open appropriate type of hibernate session and create the query. * @return a Hibernate Query */ public Query<? extends T> createQuery() { if (useStatelessSession) { if (statelessSession == null) { statelessSession = sessionFactory.openStatelessSession(); } if (queryProvider != null) { queryProvider.setStatelessSession(statelessSession); } else { if (StringUtils.hasText(queryName)) { return statelessSession.getNamedQuery(queryName); } else { return statelessSession.createQuery(queryString); } } } else { if (statefulSession == null) { statefulSession = sessionFactory.openSession(); } if (queryProvider != null) { queryProvider.setSession(statefulSession); } else { if (StringUtils.hasText(queryName)) { return statefulSession.getNamedQuery(queryName); } else { return statefulSession.createQuery(queryString); } } } // If queryProvider is set use it to create a query return queryProvider.createQuery(); } /** * Scroll through the results up to the item specified. * @param cursor the results to scroll over * @param itemIndex index to scroll to * @param flushInterval the number of items to scroll past before flushing */ public void jumpToItem(ScrollableResults cursor, int itemIndex, int flushInterval) { for (int i = 0; i < itemIndex; i++) { cursor.next(); if (i % flushInterval == 0 && !useStatelessSession) { statefulSession.clear(); // Clears in-memory cache } } } /** * Close the open session (stateful or otherwise). 
*/ public void close() { if (statelessSession != null) { statelessSession.close(); statelessSession = null; } if (statefulSession != null) { statefulSession.close(); statefulSession = null; } } /** * Read a page of data, clearing the existing session (if necessary) first, and * creating a new session before executing the query. * @param page the page to read (starting at 0) * @param pageSize the size of the page or maximum number of items to read * @param fetchSize the fetch size to use * @param parameterValues the parameter values to use (if any, otherwise null) * @return a collection of items */ public Collection<? extends T> readPage(int page, int pageSize, int fetchSize, Map<String, Object> parameterValues) {<FILL_FUNCTION_BODY>} /** * Clear the session if stateful. */ public void clear() { if (statefulSession != null) { statefulSession.clear(); } } }
clear(); Query<? extends T> query = createQuery(); if (!CollectionUtils.isEmpty(parameterValues)) { query.setProperties(parameterValues); } return query.setFetchSize(fetchSize).setFirstResult(page * pageSize).setMaxResults(pageSize).list();
1,457
83
1,540
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemWriter.java
HibernateItemWriter
doWrite
class HibernateItemWriter<T> implements ItemWriter<T>, InitializingBean { protected static final Log logger = LogFactory.getLog(HibernateItemWriter.class); private SessionFactory sessionFactory; private boolean clearSession = true; /** * Flag to indicate that the session should be cleared and flushed at the end of the * write (default true). * @param clearSession the flag value to set */ public void setClearSession(boolean clearSession) { this.clearSession = clearSession; } /** * Set the Hibernate SessionFactory to be used internally. * @param sessionFactory session factory to be used by the writer */ public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } /** * Check mandatory properties - there must be a sessionFactory. */ @Override public void afterPropertiesSet() { Assert.state(sessionFactory != null, "SessionFactory must be provided"); } /** * Save or update any entities not in the current hibernate session and then flush the * hibernate session. * * @see org.springframework.batch.item.ItemWriter#write(Chunk) */ @Override public void write(Chunk<? extends T> items) { doWrite(sessionFactory, items); sessionFactory.getCurrentSession().flush(); if (clearSession) { sessionFactory.getCurrentSession().clear(); } } /** * Do perform the actual write operation using Hibernate's API. This can be overridden * in a subclass if necessary. * @param sessionFactory Hibernate SessionFactory to be used * @param items the list of items to use for the write */ protected void doWrite(SessionFactory sessionFactory, Chunk<? extends T> items) {<FILL_FUNCTION_BODY>} }
if (logger.isDebugEnabled()) { logger.debug("Writing to Hibernate with " + items.size() + " items."); } Session currentSession = sessionFactory.getCurrentSession(); if (!items.isEmpty()) { long saveOrUpdateCount = 0; for (T item : items) { if (!currentSession.contains(item)) { currentSession.saveOrUpdate(item); saveOrUpdateCount++; } } if (logger.isDebugEnabled()) { logger.debug(saveOrUpdateCount + " entities saved/updated."); logger.debug((items.size() - saveOrUpdateCount) + " entities found in session."); } }
477
185
662
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernatePagingItemReader.java
HibernatePagingItemReader
doReadPage
class HibernatePagingItemReader<T> extends AbstractPagingItemReader<T> implements InitializingBean { private final HibernateItemReaderHelper<T> helper = new HibernateItemReaderHelper<>(); private Map<String, Object> parameterValues; private int fetchSize; public HibernatePagingItemReader() { setName(ClassUtils.getShortName(HibernatePagingItemReader.class)); } /** * The parameter values to apply to a query (map of name:value). * @param parameterValues the parameter values to set */ public void setParameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; } /** * A query name for an externalized query. Either this or the { * {@link #setQueryString(String) query string} or the { * {@link #setQueryProvider(HibernateQueryProvider) query provider} should be set. * @param queryName name of a hibernate named query */ public void setQueryName(String queryName) { helper.setQueryName(queryName); } /** * Fetch size used internally by Hibernate to limit amount of data fetched from * database per round trip. * @param fetchSize the fetch size to pass down to Hibernate */ public void setFetchSize(int fetchSize) { this.fetchSize = fetchSize; } /** * A query provider. Either this or the {{@link #setQueryString(String) query string} * or the {{@link #setQueryName(String) query name} should be set. * @param queryProvider Hibernate query provider */ public void setQueryProvider(HibernateQueryProvider<? extends T> queryProvider) { helper.setQueryProvider(queryProvider); } /** * A query string in HQL. Either this or the { * {@link #setQueryProvider(HibernateQueryProvider) query provider} or the { * {@link #setQueryName(String) query name} should be set. * @param queryString HQL query string */ public void setQueryString(String queryString) { helper.setQueryString(queryString); } /** * The Hibernate SessionFactory to use the create a session. 
* @param sessionFactory the {@link SessionFactory} to set */ public void setSessionFactory(SessionFactory sessionFactory) { helper.setSessionFactory(sessionFactory); } /** * Can be set only in uninitialized state. * @param useStatelessSession <code>true</code> to use {@link StatelessSession} * <code>false</code> to use standard hibernate {@link Session} */ public void setUseStatelessSession(boolean useStatelessSession) { helper.setUseStatelessSession(useStatelessSession); } @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.state(fetchSize >= 0, "fetchSize must not be negative"); helper.afterPropertiesSet(); } @Override protected void doOpen() throws Exception { super.doOpen(); } @Override protected void doReadPage() {<FILL_FUNCTION_BODY>} @Override protected void doClose() throws Exception { helper.close(); super.doClose(); } }
if (results == null) { results = new CopyOnWriteArrayList<>(); } else { results.clear(); } results.addAll(helper.readPage(getPage(), getPageSize(), fetchSize, parameterValues));
851
70
921
<methods>public void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public int getPage() ,public int getPageSize() ,public void setPageSize(int) <variables>private volatile int current,private volatile boolean initialized,private final java.util.concurrent.locks.Lock lock,protected org.apache.commons.logging.Log logger,private volatile int page,private int pageSize,protected volatile List<T> results
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcBatchItemWriter.java
JdbcBatchItemWriter
afterPropertiesSet
class JdbcBatchItemWriter<T> implements ItemWriter<T>, InitializingBean { protected static final Log logger = LogFactory.getLog(JdbcBatchItemWriter.class); protected NamedParameterJdbcOperations namedParameterJdbcTemplate; protected ItemPreparedStatementSetter<T> itemPreparedStatementSetter; protected ItemSqlParameterSourceProvider<T> itemSqlParameterSourceProvider; protected String sql; protected boolean assertUpdates = true; protected int parameterCount; protected boolean usingNamedParameters; /** * Public setter for the flag that determines whether an assertion is made that all * items cause at least one row to be updated. * @param assertUpdates the flag to set. Defaults to true; */ public void setAssertUpdates(boolean assertUpdates) { this.assertUpdates = assertUpdates; } /** * Public setter for the query string to execute on write. The parameters should * correspond to those known to the {@link ItemPreparedStatementSetter}. * @param sql the query to set */ public void setSql(String sql) { this.sql = sql; } /** * Public setter for the {@link ItemPreparedStatementSetter}. * @param preparedStatementSetter the {@link ItemPreparedStatementSetter} to set. This * is required when using traditional '?' placeholders for the SQL statement. */ public void setItemPreparedStatementSetter(ItemPreparedStatementSetter<T> preparedStatementSetter) { this.itemPreparedStatementSetter = preparedStatementSetter; } /** * Public setter for the {@link ItemSqlParameterSourceProvider}. * @param itemSqlParameterSourceProvider the {@link ItemSqlParameterSourceProvider} to * set. This is required when using named parameters for the SQL statement and the * type to be written does not implement {@link Map}. */ public void setItemSqlParameterSourceProvider(ItemSqlParameterSourceProvider<T> itemSqlParameterSourceProvider) { this.itemSqlParameterSourceProvider = itemSqlParameterSourceProvider; } /** * Public setter for the data source for injection purposes. 
* @param dataSource {@link javax.sql.DataSource} to use for querying against */ public void setDataSource(DataSource dataSource) { if (namedParameterJdbcTemplate == null) { this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); } } /** * Public setter for the {@link NamedParameterJdbcOperations}. * @param namedParameterJdbcTemplate the {@link NamedParameterJdbcOperations} to set */ public void setJdbcTemplate(NamedParameterJdbcOperations namedParameterJdbcTemplate) { this.namedParameterJdbcTemplate = namedParameterJdbcTemplate; } /** * Check mandatory properties - there must be a NamedParameterJdbcOperations and an * SQL statement plus a parameter source. */ @Override public void afterPropertiesSet() {<FILL_FUNCTION_BODY>} @SuppressWarnings("unchecked") @Override public void write(final Chunk<? extends T> chunk) throws Exception { if (!chunk.isEmpty()) { if (logger.isDebugEnabled()) { logger.debug("Executing batch with " + chunk.size() + " items."); } int[] updateCounts; if (usingNamedParameters) { if (chunk.getItems().get(0) instanceof Map && this.itemSqlParameterSourceProvider == null) { updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, chunk.getItems().toArray(new Map[chunk.size()])); } else { SqlParameterSource[] batchArgs = new SqlParameterSource[chunk.size()]; int i = 0; for (T item : chunk) { batchArgs[i++] = itemSqlParameterSourceProvider.createSqlParameterSource(item); } updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, batchArgs); } } else { updateCounts = namedParameterJdbcTemplate.getJdbcOperations() .execute(sql, (PreparedStatementCallback<int[]>) ps -> { for (T item : chunk) { itemPreparedStatementSetter.setValues(item, ps); ps.addBatch(); } return ps.executeBatch(); }); } if (assertUpdates) { for (int i = 0; i < updateCounts.length; i++) { int value = updateCounts[i]; if (value == 0) { throw new EmptyResultDataAccessException("Item " + i + " of " + updateCounts.length + " did not update any rows: [" + 
chunk.getItems().get(i) + "]", 1); } } } processUpdateCounts(updateCounts); } } /** * Extension point to post process the update counts for each item. * @param updateCounts the array of update counts for each item * @since 5.1 */ protected void processUpdateCounts(int[] updateCounts) { // No Op } }
// Fail fast when mandatory collaborators are missing.
Assert.state(namedParameterJdbcTemplate != null, "A DataSource or a NamedParameterJdbcTemplate is required.");
Assert.state(sql != null, "An SQL statement is required.");
// Scan the statement once, collecting any named parameters it declares.
List<String> namedParameters = new ArrayList<>();
parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql, namedParameters);
if (!namedParameters.isEmpty()) {
    // Mixing ':name' style with classic '?' style is ambiguous, so reject it.
    if (namedParameters.size() != parameterCount) {
        throw new InvalidDataAccessApiUsageException(
                "You can't use both named parameters and classic \"?\" placeholders: " + sql);
    }
    usingNamedParameters = true;
}
if (!usingNamedParameters) {
    // Classic '?' placeholders can only be populated through a statement setter.
    Assert.state(itemPreparedStatementSetter != null,
            "Using SQL statement with '?' placeholders requires an ItemPreparedStatementSetter");
}
1,327
216
1,543
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcCursorItemReader.java
JdbcCursorItemReader
openCursor
class JdbcCursorItemReader<T> extends AbstractCursorItemReader<T> { private PreparedStatement preparedStatement; private PreparedStatementSetter preparedStatementSetter; private String sql; private RowMapper<T> rowMapper; public JdbcCursorItemReader() { super(); setName(ClassUtils.getShortName(JdbcCursorItemReader.class)); } /** * Set the RowMapper to be used for all calls to read(). * @param rowMapper the mapper used to map each item */ public void setRowMapper(RowMapper<T> rowMapper) { this.rowMapper = rowMapper; } /** * Set the SQL statement to be used when creating the cursor. This statement should be * a complete and valid SQL statement, as it will be run directly without any * modification. * @param sql SQL statement */ public void setSql(String sql) { this.sql = sql; } /** * Set the PreparedStatementSetter to use if any parameter values that need to be set * in the supplied query. * @param preparedStatementSetter PreparedStatementSetter responsible for filling out * the statement */ public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { this.preparedStatementSetter = preparedStatementSetter; } /** * Assert that mandatory properties are set. * @throws IllegalArgumentException if either data source or SQL properties not set. */ @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.state(sql != null, "The SQL query must be provided"); Assert.state(rowMapper != null, "RowMapper must be provided"); } @Override protected void openCursor(Connection con) {<FILL_FUNCTION_BODY>} @Nullable @Override protected T readCursor(ResultSet rs, int currentRow) throws SQLException { return rowMapper.mapRow(rs, currentRow); } /** * Close the cursor and database connection. 
* @param connection to the database */ @Override protected void cleanupOnClose(Connection connection) { JdbcUtils.closeStatement(this.preparedStatement); JdbcUtils.closeConnection(connection); } @Override public String getSql() { return this.sql; } }
try { if (isUseSharedExtendedConnection()) { preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } else { preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } applyStatementSettings(preparedStatement); if (this.preparedStatementSetter != null) { preparedStatementSetter.setValues(preparedStatement); } this.rs = preparedStatement.executeQuery(); handleWarnings(preparedStatement); } catch (SQLException se) { close(); throw translateSqlException("Executing query", getSql(), se); }
596
232
828
<methods>public void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public javax.sql.DataSource getDataSource() ,public abstract java.lang.String getSql() ,public boolean isUseSharedExtendedConnection() ,public void setConnectionAutoCommit(boolean) ,public void setDataSource(javax.sql.DataSource) ,public void setDriverSupportsAbsolute(boolean) ,public void setFetchSize(int) ,public void setIgnoreWarnings(boolean) ,public void setMaxRows(int) ,public void setQueryTimeout(int) ,public void setUseSharedExtendedConnection(boolean) ,public void setVerifyCursorPosition(boolean) <variables>public static final int VALUE_NOT_SET,private java.sql.Connection con,private java.lang.Boolean connectionAutoCommit,private javax.sql.DataSource dataSource,private boolean driverSupportsAbsolute,private org.springframework.jdbc.support.SQLExceptionTranslator exceptionTranslator,private int fetchSize,private boolean ignoreWarnings,private boolean initialConnectionAutoCommit,private boolean initialized,protected final org.apache.commons.logging.Log log,private int maxRows,private int queryTimeout,protected java.sql.ResultSet rs,private boolean useSharedExtendedConnection,private boolean verifyCursorPosition
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcParameterUtils.java
JdbcParameterUtils
countParameterPlaceholders
class JdbcParameterUtils { private JdbcParameterUtils() { } /** * Count the occurrences of the character placeholder in an SQL string * <code>sql</code>. The character placeholder is not counted if it appears within a * literal, that is, surrounded by single or double quotes. This method will count * traditional placeholders in the form of a question mark ('?') as well as named * parameters indicated with a leading ':' or '&amp;'. * <p> * The code for this method is taken from an early version of the * {@link org.springframework.jdbc.core.namedparam.NamedParameterUtils} class. That * method was later removed after some refactoring, but the code is useful here for * the Spring Batch project. The code has been altered to better suite the batch * processing requirements. * @param sql String to search in. Returns 0 if the given String is <code>null</code>. * @param namedParameterHolder holder for the named parameters * @return the number of named parameter placeholders */ public static int countParameterPlaceholders(String sql, List<String> namedParameterHolder) {<FILL_FUNCTION_BODY>} /** * Determine whether a parameter name continues at the current position, that is, does * not end delimited by any whitespace character yet. * @param statement the SQL statement * @param pos the position within the statement */ private static boolean parameterNameContinues(String statement, int pos) { char character = statement.charAt(pos); return (character != ' ' && character != ',' && character != ')' && character != '"' && character != '\'' && character != '|' && character != ';' && character != '\n' && character != '\r'); } }
// A null statement trivially has no placeholders.
if (sql == null) {
    return 0;
}
boolean withinQuotes = false;
// Distinct named parameters seen so far; used both for de-duplication and to
// report the names back to the caller at the end.
Map<String, StringBuilder> namedParameters = new HashMap<>();
// Sentinel '-' means "not currently inside a quoted literal".
char currentQuote = '-';
int parameterCount = 0;
int i = 0;
while (i < sql.length()) {
    if (withinQuotes) {
        // Inside a literal: only the matching closing quote is significant.
        if (sql.charAt(i) == currentQuote) {
            withinQuotes = false;
            currentQuote = '-';
        }
    }
    else {
        if (sql.charAt(i) == '"' || sql.charAt(i) == '\'') {
            // Entering a single- or double-quoted literal; placeholders inside
            // it must not be counted.
            withinQuotes = true;
            currentQuote = sql.charAt(i);
        }
        else {
            if (sql.charAt(i) == ':' || sql.charAt(i) == '&') {
                // Possible named parameter: consume the name that follows the
                // ':' or '&' marker.
                int j = i + 1;
                StringBuilder parameter = new StringBuilder();
                while (j < sql.length() && parameterNameContinues(sql, j)) {
                    parameter.append(sql.charAt(j));
                    j++;
                }
                // j - i > 1 means at least one name character followed the marker.
                if (j - i > 1) {
                    if (!namedParameters.containsKey(parameter.toString())) {
                        // Each distinct name is counted exactly once.
                        parameterCount++;
                        namedParameters.put(parameter.toString(), parameter);
                        // Skip past the name (the loop's i++ lands on the next char).
                        // NOTE(review): this skip only happens for the FIRST
                        // occurrence of a name; repeated occurrences are re-scanned
                        // character by character. That does not change the count,
                        // but looks accidental — confirm it is intentional.
                        i = j - 1;
                    }
                }
            }
            else {
                // Classic positional placeholder.
                if (sql.charAt(i) == '?') {
                    parameterCount++;
                }
            }
        }
    }
    i++;
}
// Expose the distinct named parameters to the caller, if a holder was supplied.
if (namedParameterHolder != null) {
    namedParameterHolder.addAll(namedParameters.keySet());
}
return parameterCount;
453
441
894
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaCursorItemReader.java
JpaCursorItemReader
createQuery
class JpaCursorItemReader<T> extends AbstractItemCountingItemStreamItemReader<T> implements InitializingBean { private EntityManagerFactory entityManagerFactory; private EntityManager entityManager; private String queryString; private JpaQueryProvider queryProvider; private Map<String, Object> parameterValues; private Map<String, Object> hintValues; private Iterator<T> iterator; /** * Create a new {@link JpaCursorItemReader}. */ public JpaCursorItemReader() { setName(ClassUtils.getShortName(JpaCursorItemReader.class)); } /** * Set the JPA entity manager factory. * @param entityManagerFactory JPA entity manager factory */ public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; } /** * Set the JPA query provider. * @param queryProvider JPA query provider */ public void setQueryProvider(JpaQueryProvider queryProvider) { this.queryProvider = queryProvider; } /** * Set the JPQL query string. * @param queryString JPQL query string */ public void setQueryString(String queryString) { this.queryString = queryString; } /** * Set the parameter values to be used for the query execution. * @param parameterValues the values keyed by parameter names used in the query * string. */ public void setParameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; } /** * Set the query hint values for the JPA query. Query hints can be used to give * instructions to the JPA provider. * @param hintValues a map where each key is the name of the hint, and the * corresponding value is the hint's value. 
* @since 5.2 */ public void setHintValues(Map<String, Object> hintValues) { this.hintValues = hintValues; } @Override public void afterPropertiesSet() throws Exception { Assert.state(this.entityManagerFactory != null, "EntityManagerFactory is required"); if (this.queryProvider == null) { Assert.state(StringUtils.hasLength(this.queryString), "Query string is required when queryProvider is null"); } } @Override @SuppressWarnings("unchecked") protected void doOpen() throws Exception { this.entityManager = this.entityManagerFactory.createEntityManager(); if (this.entityManager == null) { throw new DataAccessResourceFailureException("Unable to create an EntityManager"); } if (this.queryProvider != null) { this.queryProvider.setEntityManager(this.entityManager); } Query query = createQuery(); if (this.parameterValues != null) { this.parameterValues.forEach(query::setParameter); } if (this.hintValues != null) { this.hintValues.forEach(query::setHint); } this.iterator = query.getResultStream().iterator(); } private Query createQuery() {<FILL_FUNCTION_BODY>} @Override protected T doRead() { return this.iterator.hasNext() ? this.iterator.next() : null; } @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); this.entityManager.clear(); } @Override protected void doClose() { if (this.entityManager != null) { this.entityManager.close(); } } }
if (this.queryProvider == null) { return this.entityManager.createQuery(this.queryString); } else { return this.queryProvider.createQuery(); }
920
52
972
<methods>public non-sealed void <init>() ,public void close() throws org.springframework.batch.item.ItemStreamException,public int getCurrentItemCount() ,public boolean isSaveState() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public T read() throws java.lang.Exception,public void setCurrentItemCount(int) ,public void setMaxItemCount(int) ,public void setSaveState(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException<variables>private static final java.lang.String READ_COUNT,private static final java.lang.String READ_COUNT_MAX,private int currentItemCount,private int maxItemCount,private boolean saveState
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaItemWriter.java
JpaItemWriter
doWrite
class JpaItemWriter<T> implements ItemWriter<T>, InitializingBean { protected static final Log logger = LogFactory.getLog(JpaItemWriter.class); private EntityManagerFactory entityManagerFactory; private boolean usePersist = false; private boolean clearPersistenceContext = true; /** * Set the EntityManager to be used internally. * @param entityManagerFactory the entityManagerFactory to set */ public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; } /** * Set whether the EntityManager should perform a persist instead of a merge. * @param usePersist whether to use persist instead of merge. */ public void setUsePersist(boolean usePersist) { this.usePersist = usePersist; } /** * Flag to indicate that the persistence context should be cleared and flushed at the * end of the write (default true). * @param clearPersistenceContext the flag value to set * @since 5.1 */ public void setClearPersistenceContext(boolean clearPersistenceContext) { this.clearPersistenceContext = clearPersistenceContext; } /** * Check mandatory properties - there must be an entityManagerFactory. */ @Override public void afterPropertiesSet() throws Exception { Assert.state(entityManagerFactory != null, "An EntityManagerFactory is required"); } /** * Merge all provided items that aren't already in the persistence context and then * flush the entity manager. * * @see org.springframework.batch.item.ItemWriter#write(Chunk) */ @Override public void write(Chunk<? extends T> items) { EntityManager entityManager = EntityManagerFactoryUtils.getTransactionalEntityManager(entityManagerFactory); if (entityManager == null) { throw new DataAccessResourceFailureException("Unable to obtain a transactional EntityManager"); } doWrite(entityManager, items); entityManager.flush(); if (this.clearPersistenceContext) { entityManager.clear(); } } /** * Do perform the actual write operation. This can be overridden in a subclass if * necessary. 
* @param entityManager the EntityManager to use for the operation * @param items the list of items to use for the write */ protected void doWrite(EntityManager entityManager, Chunk<? extends T> items) {<FILL_FUNCTION_BODY>} }
if (logger.isDebugEnabled()) {
    logger.debug("Writing to JPA with " + items.size() + " items.");
}
if (items.isEmpty()) {
    return;
}
// Only entities not already managed are persisted/merged; the rest are assumed
// to be tracked by the current persistence context already.
long addedToContextCount = 0;
for (T item : items) {
    if (entityManager.contains(item)) {
        continue;
    }
    if (usePersist) {
        entityManager.persist(item);
    }
    else {
        entityManager.merge(item);
    }
    addedToContextCount++;
}
if (logger.isDebugEnabled()) {
    logger.debug(addedToContextCount + " entities " + (usePersist ? " persisted." : "merged."));
    logger.debug((items.size() - addedToContextCount) + " entities found in persistence context.");
}
636
214
850
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaPagingItemReader.java
JpaPagingItemReader
doReadPage
class JpaPagingItemReader<T> extends AbstractPagingItemReader<T> { private EntityManagerFactory entityManagerFactory; private EntityManager entityManager; private final Map<String, Object> jpaPropertyMap = new HashMap<>(); private String queryString; private JpaQueryProvider queryProvider; private Map<String, Object> parameterValues; private Map<String, Object> hintValues; private boolean transacted = true;// default value public JpaPagingItemReader() { setName(ClassUtils.getShortName(JpaPagingItemReader.class)); } /** * Create a query using an appropriate query provider (entityManager OR * queryProvider). */ private Query createQuery() { if (queryProvider == null) { return entityManager.createQuery(queryString); } else { return queryProvider.createQuery(); } } public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; } /** * The parameter values to be used for the query execution. * @param parameterValues the values keyed by the parameter named used in the query * string. */ public void setParameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; } /** * Set the query hint values for the JPA query. Query hints can be used to give * instructions to the JPA provider. * @param hintValues a map where each key is the name of the hint, and the * corresponding value is the hint's value. * @since 5.2 */ public void setHintValues(Map<String, Object> hintValues) { this.hintValues = hintValues; } /** * By default (true) the EntityTransaction will be started and committed around the * read. Can be overridden (false) in cases where the JPA implementation doesn't * support a particular transaction. (e.g. Hibernate with a JTA transaction). NOTE: * may cause problems in guaranteeing the object consistency in the * EntityManagerFactory. 
* @param transacted indicator */ public void setTransacted(boolean transacted) { this.transacted = transacted; } @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); if (queryProvider == null) { Assert.state(entityManagerFactory != null, "EntityManager is required when queryProvider is null"); Assert.state(StringUtils.hasLength(queryString), "Query string is required when queryProvider is null"); } } /** * @param queryString JPQL query string */ public void setQueryString(String queryString) { this.queryString = queryString; } /** * @param queryProvider JPA query provider */ public void setQueryProvider(JpaQueryProvider queryProvider) { this.queryProvider = queryProvider; } @Override protected void doOpen() throws Exception { super.doOpen(); entityManager = entityManagerFactory.createEntityManager(jpaPropertyMap); if (entityManager == null) { throw new DataAccessResourceFailureException("Unable to obtain an EntityManager"); } // set entityManager to queryProvider, so it participates // in JpaPagingItemReader's managed transaction if (queryProvider != null) { queryProvider.setEntityManager(entityManager); } } @Override @SuppressWarnings("unchecked") protected void doReadPage() {<FILL_FUNCTION_BODY>} @Override protected void doClose() throws Exception { entityManager.close(); super.doClose(); } }
EntityTransaction tx = null;
if (transacted) {
    // Run the page read inside its own resource-local transaction, starting
    // from a flushed, empty persistence context.
    tx = entityManager.getTransaction();
    tx.begin();
    entityManager.flush();
    entityManager.clear();
}

Query query = createQuery().setFirstResult(getPage() * getPageSize()).setMaxResults(getPageSize());
if (parameterValues != null) {
    parameterValues.forEach(query::setParameter);
}
if (this.hintValues != null) {
    this.hintValues.forEach(query::setHint);
}

// Lazily create the shared results buffer on first use, otherwise recycle it.
if (results == null) {
    results = new CopyOnWriteArrayList<>();
}
else {
    results.clear();
}

if (transacted) {
    results.addAll(query.getResultList());
    tx.commit();
}
else {
    // Outside a transaction: detach each entity so the shared persistence
    // context does not keep references to the page that was just read.
    List<T> queryResult = query.getResultList();
    for (T entity : queryResult) {
        entityManager.detach(entity);
        results.add(entity);
    }
}
948
318
1,266
<methods>public void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public int getPage() ,public int getPageSize() ,public void setPageSize(int) <variables>private volatile int current,private volatile boolean initialized,private final java.util.concurrent.locks.Lock lock,protected org.apache.commons.logging.Log logger,private volatile int page,private int pageSize,protected volatile List<T> results
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/StoredProcedureItemReader.java
StoredProcedureItemReader
openCursor
class StoredProcedureItemReader<T> extends AbstractCursorItemReader<T> { private CallableStatement callableStatement; private PreparedStatementSetter preparedStatementSetter; private String procedureName; private String callString; private RowMapper<T> rowMapper; private SqlParameter[] parameters = new SqlParameter[0]; private boolean function = false; private int refCursorPosition = 0; public StoredProcedureItemReader() { super(); setName(ClassUtils.getShortName(StoredProcedureItemReader.class)); } /** * Set the RowMapper to be used for all calls to read(). * @param rowMapper the RowMapper to use to map the results */ public void setRowMapper(RowMapper<T> rowMapper) { this.rowMapper = rowMapper; } /** * Set the SQL statement to be used when creating the cursor. This statement should be * a complete and valid SQL statement, as it will be run directly without any * modification. * @param sprocedureName the SQL used to call the statement */ public void setProcedureName(String sprocedureName) { this.procedureName = sprocedureName; } /** * Set the PreparedStatementSetter to use if any parameter values that need to be set * in the supplied query. * @param preparedStatementSetter used to populate the SQL */ public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { this.preparedStatementSetter = preparedStatementSetter; } /** * Add one or more declared parameters. Used for configuring this operation when used * in a bean factory. Each parameter will specify SQL type and (optionally) the * parameter's name. * @param parameters Array containing the declared <code>SqlParameter</code> objects */ public void setParameters(SqlParameter[] parameters) { this.parameters = parameters; } /** * Set whether this stored procedure is a function. * @param function indicator */ public void setFunction(boolean function) { this.function = function; } /** * Set the parameter position of the REF CURSOR. 
Only used for Oracle and PostgreSQL * that use REF CURSORs. For any other database this should be kept as 0 which is the * default. * @param refCursorPosition The parameter position of the REF CURSOR */ public void setRefCursorPosition(int refCursorPosition) { this.refCursorPosition = refCursorPosition; } /** * Assert that mandatory properties are set. * @throws IllegalArgumentException if either data source or SQL properties not set. */ @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.state(procedureName != null, "The name of the stored procedure must be provided"); Assert.state(rowMapper != null, "RowMapper must be provided"); } @Override protected void openCursor(Connection con) {<FILL_FUNCTION_BODY>} @Nullable @Override protected T readCursor(ResultSet rs, int currentRow) throws SQLException { return rowMapper.mapRow(rs, currentRow); } /** * Close the cursor and database connection. * @param connection to the database */ @Override protected void cleanupOnClose(Connection connection) { JdbcUtils.closeStatement(this.callableStatement); JdbcUtils.closeConnection(connection); } @Override public String getSql() { if (callString != null) { return this.callString; } else { return "PROCEDURE NAME: " + procedureName; } } }
// Validate configuration before touching the database.
Assert.state(procedureName != null, "Procedure Name must not be null.");
Assert.state(refCursorPosition >= 0,
        "invalid refCursorPosition specified as " + refCursorPosition + "; it can't be "
                + "specified as a negative number.");
// NOTE(review): this condition is already implied by the previous assert (any
// non-negative value passes). Judging by its message, the intended check was
// probably 'refCursorPosition <= parameters.length' — confirm before changing,
// since the cursorSqlType logic below deliberately tolerates positions beyond
// the declared parameter list.
Assert.state(refCursorPosition == 0 || refCursorPosition > 0, "invalid refCursorPosition specified as "
        + refCursorPosition + "; there are " + parameters.length + " parameters defined.");
// Build the JDBC call string ({call ...}) from the declared parameters.
CallMetaDataContext callContext = new CallMetaDataContext();
callContext.setAccessCallParameterMetaData(false);
callContext.setProcedureName(procedureName);
callContext.setFunction(function);
callContext.initializeMetaData(getDataSource());
callContext.processParameters(Arrays.asList(parameters));
SqlParameter cursorParameter = callContext.createReturnResultSetParameter("cursor", rowMapper);
this.callString = callContext.createCallString();
if (log.isDebugEnabled()) {
    log.debug("Call string is: " + callString);
}
// Determine the SQL type of the REF CURSOR parameter, defaulting to OTHER when
// it cannot be derived from the declared parameters.
int cursorSqlType = Types.OTHER;
if (function) {
    if (cursorParameter instanceof SqlOutParameter) {
        cursorSqlType = cursorParameter.getSqlType();
    }
}
else {
    if (refCursorPosition > 0 && refCursorPosition <= parameters.length) {
        cursorSqlType = parameters[refCursorPosition - 1].getSqlType();
    }
}
try {
    if (isUseSharedExtendedConnection()) {
        // Keep the cursor open across commits when the connection is shared
        // with the transaction (extended connection support).
        callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
    }
    else {
        callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    }
    applyStatementSettings(callableStatement);
    if (this.preparedStatementSetter != null) {
        preparedStatementSetter.setValues(callableStatement);
    }
    // Functions return the cursor as out-parameter 1; procedures use the
    // configured REF CURSOR position (0 means "returned as a plain result set").
    if (function) {
        callableStatement.registerOutParameter(1, cursorSqlType);
    }
    else {
        if (refCursorPosition > 0) {
            callableStatement.registerOutParameter(refCursorPosition, cursorSqlType);
        }
    }
    boolean results = callableStatement.execute();
    if (results) {
        // Cursor came back as a regular result set.
        rs = callableStatement.getResultSet();
    }
    else {
        // Cursor came back as an out-parameter that must be unwrapped.
        if (function) {
            rs = (ResultSet) callableStatement.getObject(1);
        }
        else {
            rs = (ResultSet) callableStatement.getObject(refCursorPosition);
        }
    }
    handleWarnings(callableStatement);
}
catch (SQLException se) {
    // Release JDBC resources before propagating a translated runtime exception.
    close();
    throw translateSqlException("Executing stored procedure", getSql(), se);
}
939
766
1,705
<methods>public void <init>() ,public void afterPropertiesSet() throws java.lang.Exception,public javax.sql.DataSource getDataSource() ,public abstract java.lang.String getSql() ,public boolean isUseSharedExtendedConnection() ,public void setConnectionAutoCommit(boolean) ,public void setDataSource(javax.sql.DataSource) ,public void setDriverSupportsAbsolute(boolean) ,public void setFetchSize(int) ,public void setIgnoreWarnings(boolean) ,public void setMaxRows(int) ,public void setQueryTimeout(int) ,public void setUseSharedExtendedConnection(boolean) ,public void setVerifyCursorPosition(boolean) <variables>public static final int VALUE_NOT_SET,private java.sql.Connection con,private java.lang.Boolean connectionAutoCommit,private javax.sql.DataSource dataSource,private boolean driverSupportsAbsolute,private org.springframework.jdbc.support.SQLExceptionTranslator exceptionTranslator,private int fetchSize,private boolean ignoreWarnings,private boolean initialConnectionAutoCommit,private boolean initialized,protected final org.apache.commons.logging.Log log,private int maxRows,private int queryTimeout,protected java.sql.ResultSet rs,private boolean useSharedExtendedConnection,private boolean verifyCursorPosition
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/HibernateItemWriterBuilder.java
HibernateItemWriterBuilder
build
class HibernateItemWriterBuilder<T> { private boolean clearSession = true; private SessionFactory sessionFactory; /** * If set to false, the {@link org.hibernate.Session} will not be cleared at the end * of the chunk. * @param clearSession defaults to true * @return this instance for method chaining * @see HibernateItemWriter#setClearSession(boolean) */ public HibernateItemWriterBuilder<T> clearSession(boolean clearSession) { this.clearSession = clearSession; return this; } /** * The Hibernate {@link SessionFactory} to obtain a session from. Required. * @param sessionFactory the {@link SessionFactory} * @return this instance for method chaining * @see HibernateItemWriter#setSessionFactory(SessionFactory) */ public HibernateItemWriterBuilder<T> sessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; return this; } /** * Returns a fully built {@link HibernateItemWriter} * @return the writer */ public HibernateItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.state(this.sessionFactory != null, "SessionFactory must be provided"); HibernateItemWriter<T> writer = new HibernateItemWriter<>(); writer.setSessionFactory(this.sessionFactory); writer.setClearSession(this.clearSession); return writer;
305
81
386
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/HibernatePagingItemReaderBuilder.java
HibernatePagingItemReaderBuilder
build
class HibernatePagingItemReaderBuilder<T> { private int pageSize = 10; private Map<String, Object> parameterValues; private String queryName; private int fetchSize; private HibernateQueryProvider<? extends T> queryProvider; private String queryString; private SessionFactory sessionFactory; private boolean statelessSession = true; private boolean saveState = true; private String name; private int maxItemCount = Integer.MAX_VALUE; private int currentItemCount; /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public HibernatePagingItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public HibernatePagingItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @return The current instance of the builder. * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public HibernatePagingItemReaderBuilder<T> maxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; return this; } /** * Index for the current item. Used on restarts to indicate where to start from. 
* @param currentItemCount current index * @return this instance for method chaining * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public HibernatePagingItemReaderBuilder<T> currentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; return this; } /** * The number of records to request per page/query. Defaults to 10. Must be greater * than zero. * @param pageSize number of items * @return this instance for method chaining * @see HibernatePagingItemReader#setPageSize(int) */ public HibernatePagingItemReaderBuilder<T> pageSize(int pageSize) { this.pageSize = pageSize; return this; } /** * A map of parameter values to be set on the query. The key of the map is the name of * the parameter to be set with the value being the value to be set. * @param parameterValues map of values * @return this instance for method chaining * @see HibernatePagingItemReader#setParameterValues(Map) */ public HibernatePagingItemReaderBuilder<T> parameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; return this; } /** * The name of the Hibernate named query to be executed for this reader. * @param queryName name of the query to execute * @return this instance for method chaining * @see HibernatePagingItemReader#setQueryName(String) */ public HibernatePagingItemReaderBuilder<T> queryName(String queryName) { this.queryName = queryName; return this; } /** * Fetch size used internally by Hibernate to limit amount of data fetched from * database per round trip. * @param fetchSize number of records * @return this instance for method chaining * @see HibernatePagingItemReader#setFetchSize(int) */ public HibernatePagingItemReaderBuilder<T> fetchSize(int fetchSize) { this.fetchSize = fetchSize; return this; } /** * A query provider. This should be set only if {@link #queryString(String)} and * {@link #queryName(String)} have not been set. 
* @param queryProvider the query provider * @return this instance for method chaining * @see HibernatePagingItemReader#setQueryProvider(HibernateQueryProvider) */ public HibernatePagingItemReaderBuilder<T> queryProvider(HibernateQueryProvider<T> queryProvider) { this.queryProvider = queryProvider; return this; } /** * The HQL query string to execute. This should only be set if * {@link #queryProvider(HibernateQueryProvider)} and {@link #queryName(String)} have * not been set. * @param queryString the HQL query * @return this instance for method chaining * @see HibernatePagingItemReader#setQueryString(String) */ public HibernatePagingItemReaderBuilder<T> queryString(String queryString) { this.queryString = queryString; return this; } /** * The Hibernate {@link SessionFactory} to execute the query against. * @param sessionFactory the session factory * @return this instance for method chaining * @see HibernatePagingItemReader#setSessionFactory(SessionFactory) */ public HibernatePagingItemReaderBuilder<T> sessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; return this; } /** * Indicator for whether to use a {@link org.hibernate.StatelessSession} * (<code>true</code>) or a {@link org.hibernate.Session} (<code>false</code>). * @param useStatelessSession Defaults to false * @return this instance for method chaining * @see HibernatePagingItemReader#setUseStatelessSession(boolean) */ public HibernatePagingItemReaderBuilder<T> useStatelessSession(boolean useStatelessSession) { this.statelessSession = useStatelessSession; return this; } /** * Returns a fully constructed {@link HibernatePagingItemReader}. * @return a new {@link HibernatePagingItemReader} */ public HibernatePagingItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.sessionFactory, "A SessionFactory must be provided"); Assert.state(this.fetchSize >= 0, "fetchSize must not be negative"); if (this.saveState) { Assert.hasText(this.name, "A name is required when saveState is set to true"); } if (this.queryProvider == null) { Assert.state(StringUtils.hasText(queryString) ^ StringUtils.hasText(queryName), "queryString or queryName must be set"); } HibernatePagingItemReader<T> reader = new HibernatePagingItemReader<>(); reader.setSessionFactory(this.sessionFactory); reader.setSaveState(this.saveState); reader.setMaxItemCount(this.maxItemCount); reader.setCurrentItemCount(this.currentItemCount); reader.setName(this.name); reader.setFetchSize(this.fetchSize); reader.setParameterValues(this.parameterValues); reader.setQueryName(this.queryName); reader.setQueryProvider(this.queryProvider); reader.setQueryString(this.queryString); reader.setPageSize(this.pageSize); reader.setUseStatelessSession(this.statelessSession); return reader;
1,688
346
2,034
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/JdbcBatchItemWriterBuilder.java
JdbcBatchItemWriterBuilder
build
class JdbcBatchItemWriterBuilder<T> { private boolean assertUpdates = true; private String sql; private ItemPreparedStatementSetter<T> itemPreparedStatementSetter; private ItemSqlParameterSourceProvider<T> itemSqlParameterSourceProvider; private DataSource dataSource; private NamedParameterJdbcOperations namedParameterJdbcTemplate; private BigInteger mapped = new BigInteger("0"); /** * Configure the {@link DataSource} to be used. * @param dataSource the DataSource * @return The current instance of the builder for chaining. * @see JdbcBatchItemWriter#setDataSource(DataSource) */ public JdbcBatchItemWriterBuilder<T> dataSource(DataSource dataSource) { this.dataSource = dataSource; return this; } /** * If set to true, confirms that every insert results in the update of at least one * row in the database. Defaults to true. * @param assertUpdates boolean indicator * @return The current instance of the builder for chaining * @see JdbcBatchItemWriter#setAssertUpdates(boolean) */ public JdbcBatchItemWriterBuilder<T> assertUpdates(boolean assertUpdates) { this.assertUpdates = assertUpdates; return this; } /** * Set the SQL statement to be used for each item's updates. This is a required field. * @param sql SQL string * @return The current instance of the builder for chaining * @see JdbcBatchItemWriter#setSql(String) */ public JdbcBatchItemWriterBuilder<T> sql(String sql) { this.sql = sql; return this; } /** * Configures a {@link ItemPreparedStatementSetter} for use by the writer. This should * only be used if {@link #columnMapped()} isn't called. 
* @param itemPreparedStatementSetter The {@link ItemPreparedStatementSetter} * @return The current instance of the builder for chaining * @see JdbcBatchItemWriter#setItemPreparedStatementSetter(ItemPreparedStatementSetter) */ public JdbcBatchItemWriterBuilder<T> itemPreparedStatementSetter( ItemPreparedStatementSetter<T> itemPreparedStatementSetter) { this.itemPreparedStatementSetter = itemPreparedStatementSetter; return this; } /** * Configures a {@link ItemSqlParameterSourceProvider} for use by the writer. This * should only be used if {@link #beanMapped()} isn't called. * @param itemSqlParameterSourceProvider The {@link ItemSqlParameterSourceProvider} * @return The current instance of the builder for chaining * @see JdbcBatchItemWriter#setItemSqlParameterSourceProvider(ItemSqlParameterSourceProvider) */ public JdbcBatchItemWriterBuilder<T> itemSqlParameterSourceProvider( ItemSqlParameterSourceProvider<T> itemSqlParameterSourceProvider) { this.itemSqlParameterSourceProvider = itemSqlParameterSourceProvider; return this; } /** * The {@link NamedParameterJdbcOperations} instance to use. If one isn't provided, a * {@link DataSource} is required. * @param namedParameterJdbcOperations The template * @return The current instance of the builder for chaining */ public JdbcBatchItemWriterBuilder<T> namedParametersJdbcTemplate( NamedParameterJdbcOperations namedParameterJdbcOperations) { this.namedParameterJdbcTemplate = namedParameterJdbcOperations; return this; } /** * Creates a {@link ColumnMapItemPreparedStatementSetter} to be used as your * {@link ItemPreparedStatementSetter}. * <p> * NOTE: The item type for this {@link org.springframework.batch.item.ItemWriter} must * be castable to <code>Map&lt;String,Object&gt;&gt;</code>. 
* @return The current instance of the builder for chaining * @see ColumnMapItemPreparedStatementSetter */ public JdbcBatchItemWriterBuilder<T> columnMapped() { this.mapped = this.mapped.setBit(0); return this; } /** * Creates a {@link BeanPropertyItemSqlParameterSourceProvider} to be used as your * {@link ItemSqlParameterSourceProvider}. * @return The current instance of the builder for chaining * @see BeanPropertyItemSqlParameterSourceProvider */ public JdbcBatchItemWriterBuilder<T> beanMapped() { this.mapped = this.mapped.setBit(1); return this; } /** * Validates configuration and builds the {@link JdbcBatchItemWriter}. * @return a {@link JdbcBatchItemWriter} */ @SuppressWarnings("unchecked") public JdbcBatchItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.state(this.dataSource != null || this.namedParameterJdbcTemplate != null, "Either a DataSource or a NamedParameterJdbcTemplate is required"); Assert.notNull(this.sql, "A SQL statement is required"); int mappedValue = this.mapped.intValue(); Assert.state(mappedValue != 3, "Either an item can be mapped via db column or via bean spec, can't be both"); JdbcBatchItemWriter<T> writer = new JdbcBatchItemWriter<>(); writer.setSql(this.sql); writer.setAssertUpdates(this.assertUpdates); writer.setItemSqlParameterSourceProvider(this.itemSqlParameterSourceProvider); writer.setItemPreparedStatementSetter(this.itemPreparedStatementSetter); if (mappedValue == 1) { ((JdbcBatchItemWriter<Map<String, Object>>) writer) .setItemPreparedStatementSetter(new ColumnMapItemPreparedStatementSetter()); } else if (mappedValue == 2) { writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>()); } if (this.dataSource != null) { this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(this.dataSource); } writer.setJdbcTemplate(this.namedParameterJdbcTemplate); return writer;
1,257
372
1,629
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/JpaCursorItemReaderBuilder.java
JpaCursorItemReaderBuilder
build
class JpaCursorItemReaderBuilder<T> { private EntityManagerFactory entityManagerFactory; private String queryString; private JpaQueryProvider queryProvider; private Map<String, Object> parameterValues; private Map<String, Object> hintValues; private boolean saveState = true; private String name; private int maxItemCount = Integer.MAX_VALUE; private int currentItemCount; /** * Configure if the state of the {@link ItemStreamSupport} should be persisted within * the {@link ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public JpaCursorItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the {@link ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see ItemStreamSupport#setName(String) */ public JpaCursorItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @return The current instance of the builder. * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public JpaCursorItemReaderBuilder<T> maxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; return this; } /** * Index for the current item. Used on restarts to indicate where to start from. * @param currentItemCount current index * @return this instance for method chaining * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public JpaCursorItemReaderBuilder<T> currentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; return this; } /** * A map of parameter values to be set on the query. The key of the map is the name of * the parameter to be set with the value being the value to be set. 
* @param parameterValues map of values * @return this instance for method chaining * @see JpaCursorItemReader#setParameterValues(Map) */ public JpaCursorItemReaderBuilder<T> parameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; return this; } /** * A map of hint values to be set on the query. The key of the map is the name of the * hint to be applied, with the value being the specific setting for that hint. * @param hintValues map of query hints * @return this instance for method chaining * @see JpaCursorItemReader#setHintValues(Map) * @since 5.2 */ public JpaCursorItemReaderBuilder<T> hintValues(Map<String, Object> hintValues) { this.hintValues = hintValues; return this; } /** * A query provider. This should be set only if {@link #queryString(String)} have not * been set. * @param queryProvider the query provider * @return this instance for method chaining * @see JpaCursorItemReader#setQueryProvider(JpaQueryProvider) */ public JpaCursorItemReaderBuilder<T> queryProvider(JpaQueryProvider queryProvider) { this.queryProvider = queryProvider; return this; } /** * The JPQL query string to execute. This should only be set if * {@link #queryProvider(JpaQueryProvider)} has not been set. * @param queryString the JPQL query * @return this instance for method chaining * @see JpaCursorItemReader#setQueryString(String) */ public JpaCursorItemReaderBuilder<T> queryString(String queryString) { this.queryString = queryString; return this; } /** * The {@link EntityManagerFactory} to be used for executing the configured * {@link #queryString}. * @param entityManagerFactory {@link EntityManagerFactory} used to create * {@link jakarta.persistence.EntityManager} * @return this instance for method chaining */ public JpaCursorItemReaderBuilder<T> entityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; return this; } /** * Returns a fully constructed {@link JpaCursorItemReader}. 
* @return a new {@link JpaCursorItemReader} */ public JpaCursorItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.entityManagerFactory, "An EntityManagerFactory is required"); if (this.saveState) { Assert.hasText(this.name, "A name is required when saveState is set to true"); } if (this.queryProvider == null) { Assert.hasLength(this.queryString, "Query string is required when queryProvider is null"); } JpaCursorItemReader<T> reader = new JpaCursorItemReader<>(); reader.setEntityManagerFactory(this.entityManagerFactory); reader.setQueryProvider(this.queryProvider); reader.setQueryString(this.queryString); reader.setParameterValues(this.parameterValues); reader.setHintValues(this.hintValues); reader.setCurrentItemCount(this.currentItemCount); reader.setMaxItemCount(this.maxItemCount); reader.setSaveState(this.saveState); reader.setName(this.name); return reader;
1,221
263
1,484
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/JpaItemWriterBuilder.java
JpaItemWriterBuilder
build
class JpaItemWriterBuilder<T> { private EntityManagerFactory entityManagerFactory; private boolean usePersist = false; private boolean clearPersistenceContext = true; /** * The JPA {@link EntityManagerFactory} to obtain an entity manager from. Required. * @param entityManagerFactory the {@link EntityManagerFactory} * @return this instance for method chaining * @see JpaItemWriter#setEntityManagerFactory(EntityManagerFactory) */ public JpaItemWriterBuilder<T> entityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; return this; } /** * Set whether the entity manager should perform a persist instead of a merge. * @param usePersist defaults to false * @return this instance for method chaining * @see JpaItemWriter#setUsePersist(boolean) */ public JpaItemWriterBuilder<T> usePersist(boolean usePersist) { this.usePersist = usePersist; return this; } /** * If set to false, the {@link jakarta.persistence.EntityManager} will not be cleared * at the end of the chunk. defaults to true * @param clearPersistenceContext true if the persistence context should be cleared * after writing items, false otherwise * @return this instance for method chaining * @see org.springframework.batch.item.database.JpaItemWriter#setClearPersistenceContext(boolean) * @since 5.1 */ public JpaItemWriterBuilder<T> clearPersistenceContext(boolean clearPersistenceContext) { this.clearPersistenceContext = clearPersistenceContext; return this; } /** * Returns a fully built {@link JpaItemWriter}. * @return the writer */ public JpaItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.state(this.entityManagerFactory != null, "EntityManagerFactory must be provided"); JpaItemWriter<T> writer = new JpaItemWriter<>(); writer.setEntityManagerFactory(this.entityManagerFactory); writer.setUsePersist(this.usePersist); writer.setClearPersistenceContext(this.clearPersistenceContext); return writer;
480
103
583
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/builder/JpaPagingItemReaderBuilder.java
JpaPagingItemReaderBuilder
build
class JpaPagingItemReaderBuilder<T> { private int pageSize = 10; private EntityManagerFactory entityManagerFactory; private Map<String, Object> parameterValues; private Map<String, Object> hintValues; private boolean transacted = true; private String queryString; private JpaQueryProvider queryProvider; private boolean saveState = true; private String name; private int maxItemCount = Integer.MAX_VALUE; private int currentItemCount; /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public JpaPagingItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public JpaPagingItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @return The current instance of the builder. * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public JpaPagingItemReaderBuilder<T> maxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; return this; } /** * Index for the current item. Used on restarts to indicate where to start from. 
* @param currentItemCount current index * @return this instance for method chaining * @see org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public JpaPagingItemReaderBuilder<T> currentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; return this; } /** * The number of records to request per page/query. Defaults to 10. Must be greater * than zero. * @param pageSize number of items * @return this instance for method chaining * @see JpaPagingItemReader#setPageSize(int) */ public JpaPagingItemReaderBuilder<T> pageSize(int pageSize) { this.pageSize = pageSize; return this; } /** * A map of parameter values to be set on the query. The key of the map is the name of * the parameter to be set with the value being the value to be set. * @param parameterValues map of values * @return this instance for method chaining * @see JpaPagingItemReader#setParameterValues(Map) */ public JpaPagingItemReaderBuilder<T> parameterValues(Map<String, Object> parameterValues) { this.parameterValues = parameterValues; return this; } /** * A map of hint values to be set on the query. The key of the map is the name of the * hint to be applied, with the value being the specific setting for that hint. * @param hintValues map of query hints * @return this instance for method chaining * @see JpaPagingItemReader#setHintValues(Map) * @since 5.2 */ public JpaPagingItemReaderBuilder<T> hintValues(Map<String, Object> hintValues) { this.hintValues = hintValues; return this; } /** * A query provider. This should be set only if {@link #queryString(String)} have not * been set. * @param queryProvider the query provider * @return this instance for method chaining * @see JpaPagingItemReader#setQueryProvider(JpaQueryProvider) */ public JpaPagingItemReaderBuilder<T> queryProvider(JpaQueryProvider queryProvider) { this.queryProvider = queryProvider; return this; } /** * The HQL query string to execute. 
This should only be set if * {@link #queryProvider(JpaQueryProvider)} has not been set. * @param queryString the HQL query * @return this instance for method chaining * @see JpaPagingItemReader#setQueryString(String) */ public JpaPagingItemReaderBuilder<T> queryString(String queryString) { this.queryString = queryString; return this; } /** * Indicates if a transaction should be created around the read (true by default). Can * be set to false in cases where JPA implementation doesn't support a particular * transaction, however this may cause object inconsistency in the * EntityManagerFactory. * @param transacted defaults to true * @return this instance for method chaining * @see JpaPagingItemReader#setTransacted(boolean) */ public JpaPagingItemReaderBuilder<T> transacted(boolean transacted) { this.transacted = transacted; return this; } /** * The {@link EntityManagerFactory} to be used for executing the configured * {@link #queryString}. * @param entityManagerFactory {@link EntityManagerFactory} used to create * {@link jakarta.persistence.EntityManager} * @return this instance for method chaining */ public JpaPagingItemReaderBuilder<T> entityManagerFactory(EntityManagerFactory entityManagerFactory) { this.entityManagerFactory = entityManagerFactory; return this; } /** * Returns a fully constructed {@link JpaPagingItemReader}. * @return a new {@link JpaPagingItemReader} */ public JpaPagingItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.isTrue(this.pageSize > 0, "pageSize must be greater than zero"); Assert.notNull(this.entityManagerFactory, "An EntityManagerFactory is required"); if (this.saveState) { Assert.hasText(this.name, "A name is required when saveState is set to true"); } if (this.queryProvider == null) { Assert.hasLength(this.queryString, "Query string is required when queryProvider is null"); } JpaPagingItemReader<T> reader = new JpaPagingItemReader<>(); reader.setQueryString(this.queryString); reader.setPageSize(this.pageSize); reader.setParameterValues(this.parameterValues); reader.setHintValues(this.hintValues); reader.setEntityManagerFactory(this.entityManagerFactory); reader.setQueryProvider(this.queryProvider); reader.setTransacted(this.transacted); reader.setCurrentItemCount(this.currentItemCount); reader.setMaxItemCount(this.maxItemCount); reader.setSaveState(this.saveState); reader.setName(this.name); return reader;
1,570
320
1,890
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateNativeQueryProvider.java
HibernateNativeQueryProvider
afterPropertiesSet
class HibernateNativeQueryProvider<E> extends AbstractHibernateQueryProvider<E> { private String sqlQuery; private Class<E> entityClass; /** * <p> * Create an {@link NativeQuery} from the session provided (preferring stateless if * both are available). * </p> */ @Override @SuppressWarnings("unchecked") public NativeQuery<E> createQuery() { if (isStatelessSession()) { return getStatelessSession().createNativeQuery(sqlQuery).addEntity(entityClass); } else { return getStatefulSession().createNativeQuery(sqlQuery).addEntity(entityClass); } } public void setSqlQuery(String sqlQuery) { this.sqlQuery = sqlQuery; } public void setEntityClass(Class<E> entityClazz) { this.entityClass = entityClazz; } public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} }
Assert.state(StringUtils.hasText(sqlQuery), "Native SQL query cannot be empty"); Assert.state(entityClass != null, "Entity class cannot be NULL");
256
47
303
<methods>public non-sealed void <init>() ,public boolean isStatelessSession() ,public void setSession(org.hibernate.Session) ,public void setStatelessSession(org.hibernate.StatelessSession) <variables>private org.hibernate.Session statefulSession,private org.hibernate.StatelessSession statelessSession
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaNamedQueryProvider.java
JpaNamedQueryProvider
afterPropertiesSet
class JpaNamedQueryProvider<E> extends AbstractJpaQueryProvider { private Class<E> entityClass; private String namedQuery; @Override public Query createQuery() { return getEntityManager().createNamedQuery(this.namedQuery, this.entityClass); } /** * @param namedQuery name of a jpa named query */ public void setNamedQuery(String namedQuery) { this.namedQuery = namedQuery; } /** * @param entityClazz name of a jpa entity class */ public void setEntityClass(Class<E> entityClazz) { this.entityClass = entityClazz; } @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} }
Assert.state(StringUtils.hasText(this.namedQuery), "Named query cannot be empty"); Assert.state(this.entityClass != null, "Entity class cannot be NULL");
191
50
241
<methods>public non-sealed void <init>() ,public void setEntityManager(jakarta.persistence.EntityManager) <variables>private jakarta.persistence.EntityManager entityManager
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaNativeQueryProvider.java
JpaNativeQueryProvider
afterPropertiesSet
class JpaNativeQueryProvider<E> extends AbstractJpaQueryProvider { private Class<E> entityClass; private String sqlQuery; @Override public Query createQuery() { return getEntityManager().createNativeQuery(sqlQuery, entityClass); } public void setSqlQuery(String sqlQuery) { this.sqlQuery = sqlQuery; } public void setEntityClass(Class<E> entityClazz) { this.entityClass = entityClazz; } @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} }
Assert.state(StringUtils.hasText(sqlQuery), "Native SQL query cannot be empty"); Assert.state(entityClass != null, "Entity class cannot be NULL");
148
47
195
<methods>public non-sealed void <init>() ,public void setEntityManager(jakarta.persistence.EntityManager) <variables>private jakarta.persistence.EntityManager entityManager
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/AbstractSqlPagingQueryProvider.java
AbstractSqlPagingQueryProvider
setWhereClause
class AbstractSqlPagingQueryProvider implements PagingQueryProvider { private String selectClause; private String fromClause; private String whereClause; private Map<String, Order> sortKeys = new LinkedHashMap<>(); private String groupClause; private int parameterCount; private boolean usingNamedParameters; /** * The setter for the group by clause * @param groupClause SQL GROUP BY clause part of the SQL query string */ public void setGroupClause(String groupClause) { if (StringUtils.hasText(groupClause)) { this.groupClause = removeKeyWord("group by", groupClause); } else { this.groupClause = null; } } /** * The getter for the group by clause * @return SQL GROUP BY clause part of the SQL query string */ public String getGroupClause() { return this.groupClause; } /** * @param selectClause SELECT clause part of SQL query string */ public void setSelectClause(String selectClause) { this.selectClause = removeKeyWord("select", selectClause); } /** * @return SQL SELECT clause part of SQL query string */ protected String getSelectClause() { return selectClause; } /** * @param fromClause FROM clause part of SQL query string */ public void setFromClause(String fromClause) { this.fromClause = removeKeyWord("from", fromClause); } /** * @return SQL FROM clause part of SQL query string */ protected String getFromClause() { return fromClause; } /** * @param whereClause WHERE clause part of SQL query string */ public void setWhereClause(String whereClause) {<FILL_FUNCTION_BODY>} /** * @return SQL WHERE clause part of SQL query string */ protected String getWhereClause() { return whereClause; } /** * @param sortKeys key to use to sort and limit page content */ public void setSortKeys(Map<String, Order> sortKeys) { this.sortKeys = sortKeys; } /** * A Map&lt;String, Boolean&gt; of sort columns as the key and boolean for * ascending/descending (ascending = true). 
* @return sortKey key to use to sort and limit page content */ @Override public Map<String, Order> getSortKeys() { return sortKeys; } @Override public int getParameterCount() { return parameterCount; } @Override public boolean isUsingNamedParameters() { return usingNamedParameters; } /** * The sort key placeholder will vary depending on whether named parameters or * traditional placeholders are used in query strings. * @return place holder for sortKey. */ @Override public String getSortKeyPlaceHolder(String keyName) { return usingNamedParameters ? ":_" + keyName : "?"; } /** * Check mandatory properties. * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() */ @Override public void init(DataSource dataSource) throws Exception { Assert.notNull(dataSource, "A DataSource is required"); Assert.hasLength(selectClause, "selectClause must be specified"); Assert.hasLength(fromClause, "fromClause must be specified"); Assert.notEmpty(sortKeys, "sortKey must be specified"); StringBuilder sql = new StringBuilder(64); sql.append("SELECT ").append(selectClause); sql.append(" FROM ").append(fromClause); if (whereClause != null) { sql.append(" WHERE ").append(whereClause); } if (groupClause != null) { sql.append(" GROUP BY ").append(groupClause); } List<String> namedParameters = new ArrayList<>(); parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql.toString(), namedParameters); if (namedParameters.size() > 0) { if (parameterCount != namedParameters.size()) { throw new InvalidDataAccessApiUsageException( "You can't use both named parameters and classic \"?\" placeholders: " + sql); } usingNamedParameters = true; } } /** * Method generating the query string to be used for retrieving the first page. This * method must be implemented in sub classes. 
* @param pageSize number of rows to read per page * @return query string */ @Override public abstract String generateFirstPageQuery(int pageSize); /** * Method generating the query string to be used for retrieving the pages following * the first page. This method must be implemented in sub classes. * @param pageSize number of rows to read per page * @return query string */ @Override public abstract String generateRemainingPagesQuery(int pageSize); private String removeKeyWord(String keyWord, String clause) { String temp = clause.trim(); int length = keyWord.length(); if (temp.toLowerCase().startsWith(keyWord) && Character.isWhitespace(temp.charAt(length)) && temp.length() > length + 1) { return temp.substring(length + 1); } else { return temp; } } /** * @return sortKey key to use to sort and limit page content (without alias) */ @Override public Map<String, Order> getSortKeysWithoutAliases() { Map<String, Order> sortKeysWithoutAliases = new LinkedHashMap<>(); for (Map.Entry<String, Order> sortKeyEntry : sortKeys.entrySet()) { String key = sortKeyEntry.getKey(); int separator = key.indexOf('.'); if (separator > 0) { int columnIndex = separator + 1; if (columnIndex < key.length()) { sortKeysWithoutAliases.put(key.substring(columnIndex), sortKeyEntry.getValue()); } } else { sortKeysWithoutAliases.put(sortKeyEntry.getKey(), sortKeyEntry.getValue()); } } return sortKeysWithoutAliases; } }
if (StringUtils.hasText(whereClause)) { this.whereClause = removeKeyWord("where", whereClause); } else { this.whereClause = null; }
1,598
56
1,654
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/ColumnMapItemPreparedStatementSetter.java
ColumnMapItemPreparedStatementSetter
setValues
class ColumnMapItemPreparedStatementSetter implements ItemPreparedStatementSetter<Map<String, Object>> { @Override public void setValues(Map<String, Object> item, PreparedStatement ps) throws SQLException {<FILL_FUNCTION_BODY>} }
Assert.isInstanceOf(Map.class, item, "Input to map PreparedStatement parameters must be of type Map."); int counter = 1; for (Object value : item.values()) { StatementCreatorUtils.setParameterValue(ps, counter, SqlTypeValue.TYPE_UNKNOWN, value); counter++; }
69
89
158
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/Db2PagingQueryProvider.java
Db2PagingQueryProvider
generateRemainingPagesQuery
class Db2PagingQueryProvider extends SqlWindowingPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} @Override protected Object getSubQueryAlias() { return "AS TMP_SUB "; } private String buildLimitClause(int pageSize) { return new StringBuilder().append("FETCH FIRST ").append(pageSize).append(" ROWS ONLY").toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
169
81
250
<methods>public non-sealed void <init>() ,public java.lang.String generateFirstPageQuery(int) ,public java.lang.String generateRemainingPagesQuery(int) <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java
DefaultDataFieldMaxValueIncrementerFactory
getSupportedIncrementerTypes
class DefaultDataFieldMaxValueIncrementerFactory implements DataFieldMaxValueIncrementerFactory { private final DataSource dataSource; private String incrementerColumnName = "ID"; /** * Public setter for the column name (defaults to "ID") in the incrementer. Only used * by some platforms (Derby, HSQL, MySQL, SQL Server and Sybase), and should be fine * for use with Spring Batch meta data as long as the default batch schema hasn't been * changed. * @param incrementerColumnName the primary key column name to set */ public void setIncrementerColumnName(String incrementerColumnName) { this.incrementerColumnName = incrementerColumnName; } public DefaultDataFieldMaxValueIncrementerFactory(DataSource dataSource) { this.dataSource = dataSource; } @Override public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { DatabaseType databaseType = DatabaseType.valueOf(incrementerType.toUpperCase()); if (databaseType == DB2 || databaseType == DB2AS400) { return new Db2LuwMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == DB2ZOS) { return new Db2MainframeMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == DERBY) { return new DerbyMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); } else if (databaseType == HSQL) { return new HsqlMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); } else if (databaseType == H2) { return new H2SequenceMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == HANA) { return new HanaSequenceMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == MYSQL) { MySQLMaxValueIncrementer mySQLMaxValueIncrementer = new MySQLMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); mySQLMaxValueIncrementer.setUseNewConnection(true); return mySQLMaxValueIncrementer; } else if (databaseType == MARIADB) { return new MariaDBSequenceMaxValueIncrementer(dataSource, 
incrementerName); } else if (databaseType == ORACLE) { return new OracleSequenceMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == POSTGRES) { return new PostgresSequenceMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == SQLITE) { return new SqliteMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); } else if (databaseType == SQLSERVER) { return new SqlServerSequenceMaxValueIncrementer(dataSource, incrementerName); } else if (databaseType == SYBASE) { return new SybaseMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); } throw new IllegalArgumentException("databaseType argument was not on the approved list"); } @Override public boolean isSupportedIncrementerType(String incrementerType) { for (DatabaseType type : DatabaseType.values()) { if (type.name().equalsIgnoreCase(incrementerType)) { return true; } } return false; } @Override public String[] getSupportedIncrementerTypes() {<FILL_FUNCTION_BODY>} }
List<String> types = new ArrayList<>(); for (DatabaseType type : DatabaseType.values()) { types.add(type.name()); } return types.toArray(new String[types.size()]);
950
63
1,013
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DerbyPagingQueryProvider.java
DerbyPagingQueryProvider
init
class DerbyPagingQueryProvider extends SqlWindowingPagingQueryProvider { private static final String MINIMAL_DERBY_VERSION = "10.4.1.3"; @Override public void init(DataSource dataSource) throws Exception {<FILL_FUNCTION_BODY>} // derby version numbering is M.m.f.p [ {alpha|beta} ] see // https://db.apache.org/derby/papers/versionupgrade.html#Basic+Numbering+Scheme private boolean isDerbyVersionSupported(String version) { String[] minimalVersionParts = MINIMAL_DERBY_VERSION.split("\\."); String[] versionParts = version.split("[\\. ]"); for (int i = 0; i < minimalVersionParts.length; i++) { int minimalVersionPart = Integer.parseInt(minimalVersionParts[i]); int versionPart = Integer.parseInt(versionParts[i]); if (versionPart < minimalVersionPart) { return false; } else if (versionPart > minimalVersionPart) { return true; } } return true; } @Override protected String getOrderedQueryAlias() { return "TMP_ORDERED"; } @Override protected String getOverClause() { return ""; } @Override protected String getOverSubstituteClauseStart() { return " FROM (SELECT " + getSelectClause(); } @Override protected String getOverSubstituteClauseEnd() { return " ) AS " + getOrderedQueryAlias(); } }
super.init(dataSource); String version = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductVersion); if (!isDerbyVersionSupported(version)) { throw new InvalidDataAccessResourceUsageException( "Apache Derby version " + version + " is not supported by this class, Only version " + MINIMAL_DERBY_VERSION + " or later is supported"); }
406
111
517
<methods>public non-sealed void <init>() ,public java.lang.String generateFirstPageQuery(int) ,public java.lang.String generateRemainingPagesQuery(int) <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/HanaPagingQueryProvider.java
HanaPagingQueryProvider
generateRemainingPagesQuery
class HanaPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildLimitClause(int pageSize) { return new StringBuilder().append("LIMIT ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
130
81
211
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/HsqlPagingQueryProvider.java
HsqlPagingQueryProvider
generateRemainingPagesQuery
class HsqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildTopClause(int pageSize) { return new StringBuilder().append("TOP ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); } else { return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); }
130
83
213
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/MariaDBPagingQueryProvider.java
MariaDBPagingQueryProvider
generateRemainingPagesQuery
class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildLimitClause(int pageSize) { return new StringBuilder().append("LIMIT ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
130
81
211
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/MySqlPagingQueryProvider.java
MySqlPagingQueryProvider
generateRemainingPagesQuery
class MySqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildLimitClause(int pageSize) { return new StringBuilder().append("LIMIT ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
130
81
211
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/PostgresPagingQueryProvider.java
PostgresPagingQueryProvider
generateRemainingPagesQuery
class PostgresPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildLimitClause(int pageSize) { return new StringBuilder().append("LIMIT ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
130
81
211
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryProviderFactoryBean.java
SqlPagingQueryProviderFactoryBean
getObject
class SqlPagingQueryProviderFactoryBean implements FactoryBean<PagingQueryProvider> { private DataSource dataSource; private String databaseType; private String fromClause; private String whereClause; private String selectClause; private String groupClause; private Map<String, Order> sortKeys; private final Map<DatabaseType, AbstractSqlPagingQueryProvider> providers = new HashMap<>(); { providers.put(DB2, new Db2PagingQueryProvider()); providers.put(DB2VSE, new Db2PagingQueryProvider()); providers.put(DB2ZOS, new Db2PagingQueryProvider()); providers.put(DB2AS400, new Db2PagingQueryProvider()); providers.put(DERBY, new DerbyPagingQueryProvider()); providers.put(HSQL, new HsqlPagingQueryProvider()); providers.put(H2, new H2PagingQueryProvider()); providers.put(HANA, new HanaPagingQueryProvider()); providers.put(MYSQL, new MySqlPagingQueryProvider()); providers.put(MARIADB, new MariaDBPagingQueryProvider()); providers.put(ORACLE, new OraclePagingQueryProvider()); providers.put(POSTGRES, new PostgresPagingQueryProvider()); providers.put(SQLITE, new SqlitePagingQueryProvider()); providers.put(SQLSERVER, new SqlServerPagingQueryProvider()); providers.put(SYBASE, new SybasePagingQueryProvider()); } /** * @param groupClause SQL GROUP BY clause part of the SQL query string */ public void setGroupClause(String groupClause) { this.groupClause = groupClause; } /** * @param databaseType the databaseType to set */ public void setDatabaseType(String databaseType) { this.databaseType = databaseType; } /** * @param dataSource the dataSource to set */ public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } /** * @param fromClause the fromClause to set */ public void setFromClause(String fromClause) { this.fromClause = fromClause; } /** * @param whereClause the whereClause to set */ public void setWhereClause(String whereClause) { this.whereClause = whereClause; } /** * @param selectClause the selectClause to set */ public void setSelectClause(String 
selectClause) { this.selectClause = selectClause; } /** * @param sortKeys the sortKeys to set */ public void setSortKeys(Map<String, Order> sortKeys) { this.sortKeys = sortKeys; } public void setSortKey(String key) { Assert.doesNotContain(key, ",", "String setter is valid for a single ASC key only"); Map<String, Order> keys = new LinkedHashMap<>(); keys.put(key, Order.ASCENDING); this.sortKeys = keys; } /** * Get a {@link PagingQueryProvider} instance using the provided properties and * appropriate for the given database type. * * @see FactoryBean#getObject() */ @Override public PagingQueryProvider getObject() throws Exception {<FILL_FUNCTION_BODY>} /** * Always returns {@link PagingQueryProvider}. * * @see FactoryBean#getObjectType() */ @Override public Class<PagingQueryProvider> getObjectType() { return PagingQueryProvider.class; } /** * Always returns true. * @see FactoryBean#isSingleton() */ @Override public boolean isSingleton() { return true; } }
DatabaseType type; try { type = databaseType != null ? DatabaseType.valueOf(databaseType.toUpperCase()) : DatabaseType.fromMetaData(dataSource); } catch (MetaDataAccessException e) { throw new IllegalArgumentException( "Could not inspect meta data for database type. You have to supply it explicitly.", e); } AbstractSqlPagingQueryProvider provider = providers.get(type); Assert.state(provider != null, "Should not happen: missing PagingQueryProvider for DatabaseType=" + type); provider.setFromClause(fromClause); provider.setWhereClause(whereClause); provider.setSortKeys(sortKeys); if (StringUtils.hasText(selectClause)) { provider.setSelectClause(selectClause); } if (StringUtils.hasText(groupClause)) { provider.setGroupClause(groupClause); } provider.init(dataSource); return provider;
1,003
274
1,277
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlServerPagingQueryProvider.java
SqlServerPagingQueryProvider
generateRemainingPagesQuery
class SqlServerPagingQueryProvider extends SqlWindowingPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} @Override protected Object getSubQueryAlias() { return "AS TMP_SUB "; } private String buildTopClause(int pageSize) { return new StringBuilder().append("TOP ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); } else { return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); }
158
83
241
<methods>public non-sealed void <init>() ,public java.lang.String generateFirstPageQuery(int) ,public java.lang.String generateRemainingPagesQuery(int) <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlWindowingPagingQueryProvider.java
SqlWindowingPagingQueryProvider
generateFirstPageQuery
class SqlWindowingPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) {<FILL_FUNCTION_BODY>} protected String getOrderedQueryAlias() { return ""; } protected Object getSubQueryAlias() { return "AS TMP_SUB "; } protected Object extractTableAlias() { String alias = String.valueOf(getSubQueryAlias()); if (StringUtils.hasText(alias) && alias.toUpperCase().startsWith("AS")) { alias = alias.substring(3).trim() + "."; } return alias; } @Override public String generateRemainingPagesQuery(int pageSize) { StringBuilder sql = new StringBuilder(); sql.append("SELECT * FROM ( "); sql.append("SELECT ") .append(StringUtils.hasText(getOrderedQueryAlias()) ? getOrderedQueryAlias() + ".*, " : "*, "); sql.append("ROW_NUMBER() OVER (").append(getOverClause()); sql.append(") AS ROW_NUMBER"); sql.append(getOverSubstituteClauseStart()); sql.append(" FROM ").append(getFromClause()); if (getWhereClause() != null) { sql.append(" WHERE "); sql.append(getWhereClause()); } sql.append(getGroupClause() == null ? "" : " GROUP BY " + getGroupClause()); sql.append(getOverSubstituteClauseEnd()); sql.append(") ") .append(getSubQueryAlias()) .append("WHERE ") .append(extractTableAlias()) .append("ROW_NUMBER <= ") .append(pageSize); sql.append(" AND "); SqlPagingQueryUtils.buildSortConditions(this, sql); sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); return sql.toString(); } protected String getOverClause() { StringBuilder sql = new StringBuilder(); sql.append(" ORDER BY ").append(buildSortClause(this)); return sql.toString(); } protected String getOverSubstituteClauseStart() { return ""; } protected String getOverSubstituteClauseEnd() { return ""; } /** * Generates ORDER BY attributes based on the sort keys. * @param provider the paging query provider * @return a String that can be appended to an ORDER BY clause. 
*/ private String buildSortClause(AbstractSqlPagingQueryProvider provider) { return SqlPagingQueryUtils.buildSortClause(provider.getSortKeysWithoutAliases()); } }
StringBuilder sql = new StringBuilder(); sql.append("SELECT * FROM ( "); sql.append("SELECT ") .append(StringUtils.hasText(getOrderedQueryAlias()) ? getOrderedQueryAlias() + ".*, " : "*, "); sql.append("ROW_NUMBER() OVER (").append(getOverClause()); sql.append(") AS ROW_NUMBER"); sql.append(getOverSubstituteClauseStart()); sql.append(" FROM ") .append(getFromClause()) .append(getWhereClause() == null ? "" : " WHERE " + getWhereClause()); sql.append(getGroupClause() == null ? "" : " GROUP BY " + getGroupClause()); sql.append(getOverSubstituteClauseEnd()); sql.append(") ") .append(getSubQueryAlias()) .append("WHERE ") .append(extractTableAlias()) .append("ROW_NUMBER <= ") .append(pageSize); sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); return sql.toString();
715
299
1,014
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqliteMaxValueIncrementer.java
SqliteMaxValueIncrementer
getNextKey
class SqliteMaxValueIncrementer extends AbstractColumnMaxValueIncrementer { public SqliteMaxValueIncrementer(DataSource dataSource, String incrementerName, String columnName) { super(dataSource, incrementerName, columnName); } @Override protected long getNextKey() {<FILL_FUNCTION_BODY>} }
Connection con = DataSourceUtils.getConnection(getDataSource()); Statement stmt = null; try { stmt = con.createStatement(); DataSourceUtils.applyTransactionTimeout(stmt, getDataSource()); stmt.executeUpdate("insert into " + getIncrementerName() + " values(null)"); ResultSet rs = stmt.executeQuery("select max(rowid) from " + getIncrementerName()); if (!rs.next()) { throw new DataAccessResourceFailureException("rowid query failed after executing an update"); } long nextKey = rs.getLong(1); stmt.executeUpdate("delete from " + getIncrementerName() + " where " + getColumnName() + " < " + nextKey); return nextKey; } catch (SQLException ex) { throw new DataAccessResourceFailureException("Could not obtain rowid", ex); } finally { JdbcUtils.closeStatement(stmt); DataSourceUtils.releaseConnection(con, getDataSource()); }
92
278
370
<methods>public void <init>() ,public void <init>(javax.sql.DataSource, java.lang.String, java.lang.String) ,public void afterPropertiesSet() ,public int getCacheSize() ,public java.lang.String getColumnName() ,public void setCacheSize(int) ,public void setColumnName(java.lang.String) <variables>private int cacheSize,private java.lang.String columnName
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlitePagingQueryProvider.java
SqlitePagingQueryProvider
generateRemainingPagesQuery
class SqlitePagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} private String buildLimitClause(int pageSize) { return new StringBuilder().append("LIMIT ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); } else { return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); }
130
81
211
<methods>public non-sealed void <init>() ,public abstract java.lang.String generateFirstPageQuery(int) ,public abstract java.lang.String generateRemainingPagesQuery(int) ,public java.lang.String getGroupClause() ,public int getParameterCount() ,public java.lang.String getSortKeyPlaceHolder(java.lang.String) ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeys() ,public Map<java.lang.String,org.springframework.batch.item.database.Order> getSortKeysWithoutAliases() ,public void init(javax.sql.DataSource) throws java.lang.Exception,public boolean isUsingNamedParameters() ,public void setFromClause(java.lang.String) ,public void setGroupClause(java.lang.String) ,public void setSelectClause(java.lang.String) ,public void setSortKeys(Map<java.lang.String,org.springframework.batch.item.database.Order>) ,public void setWhereClause(java.lang.String) <variables>private java.lang.String fromClause,private java.lang.String groupClause,private int parameterCount,private java.lang.String selectClause,private Map<java.lang.String,org.springframework.batch.item.database.Order> sortKeys,private boolean usingNamedParameters,private java.lang.String whereClause
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SybasePagingQueryProvider.java
SybasePagingQueryProvider
generateRemainingPagesQuery
class SybasePagingQueryProvider extends SqlWindowingPagingQueryProvider { @Override public String generateFirstPageQuery(int pageSize) { return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); } @Override public String generateRemainingPagesQuery(int pageSize) {<FILL_FUNCTION_BODY>} @Override protected Object getSubQueryAlias() { return ""; } private String buildTopClause(int pageSize) { return new StringBuilder().append("TOP ").append(pageSize).toString(); } }
if (StringUtils.hasText(getGroupClause())) { return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); } else { return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); }
152
83
235
<methods>public non-sealed void <init>() ,public java.lang.String generateFirstPageQuery(int) ,public java.lang.String generateRemainingPagesQuery(int) <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemWriter.java
FlatFileItemWriter
afterPropertiesSet
class FlatFileItemWriter<T> extends AbstractFileItemWriter<T> { protected LineAggregator<T> lineAggregator; public FlatFileItemWriter() { this.setExecutionContextName(ClassUtils.getShortName(FlatFileItemWriter.class)); } /** * Assert that mandatory properties (lineAggregator) are set. * * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} /** * Public setter for the {@link LineAggregator}. This will be used to translate the * item into a line for output. * @param lineAggregator the {@link LineAggregator} to set */ public void setLineAggregator(LineAggregator<T> lineAggregator) { this.lineAggregator = lineAggregator; } @Override public String doWrite(Chunk<? extends T> items) { StringBuilder lines = new StringBuilder(); for (T item : items) { lines.append(this.lineAggregator.aggregate(item)).append(this.lineSeparator); } return lines.toString(); } }
Assert.state(lineAggregator != null, "A LineAggregator must be provided."); if (append) { shouldDeleteIfExists = false; }
306
46
352
<methods>public non-sealed void <init>() ,public void close() ,public void open(org.springframework.batch.item.ExecutionContext) throws org.springframework.batch.item.ItemStreamException,public void setAppendAllowed(boolean) ,public void setEncoding(java.lang.String) ,public void setFooterCallback(org.springframework.batch.item.file.FlatFileFooterCallback) ,public void setForceSync(boolean) ,public void setHeaderCallback(org.springframework.batch.item.file.FlatFileHeaderCallback) ,public void setLineSeparator(java.lang.String) ,public void setResource(org.springframework.core.io.WritableResource) ,public void setSaveState(boolean) ,public void setShouldDeleteIfEmpty(boolean) ,public void setShouldDeleteIfExists(boolean) ,public void setTransactional(boolean) ,public void update(org.springframework.batch.item.ExecutionContext) ,public void write(Chunk<? extends T>) throws java.lang.Exception<variables>public static final java.lang.String DEFAULT_CHARSET,public static final java.lang.String DEFAULT_LINE_SEPARATOR,public static final boolean DEFAULT_TRANSACTIONAL,private static final java.lang.String RESTART_DATA_NAME,private static final java.lang.String WRITTEN_STATISTICS_NAME,protected boolean append,private java.lang.String encoding,private org.springframework.batch.item.file.FlatFileFooterCallback footerCallback,private boolean forceSync,private org.springframework.batch.item.file.FlatFileHeaderCallback headerCallback,protected java.lang.String lineSeparator,protected static final org.apache.commons.logging.Log logger,private org.springframework.core.io.WritableResource resource,private boolean saveState,private boolean shouldDeleteIfEmpty,protected boolean shouldDeleteIfExists,protected OutputState state,private boolean transactional
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemReader.java
MultiResourceItemReader
read
class MultiResourceItemReader<T> extends AbstractItemStreamItemReader<T> { private static final Log logger = LogFactory.getLog(MultiResourceItemReader.class); private static final String RESOURCE_KEY = "resourceIndex"; private ResourceAwareItemReaderItemStream<? extends T> delegate; private Resource[] resources; private boolean saveState = true; private int currentResource = -1; // signals there are no resources to read -> just return null on first read private boolean noInput; private boolean strict = false; /** * In strict mode the reader will throw an exception on * {@link #open(org.springframework.batch.item.ExecutionContext)} if there are no * resources to read. * @param strict false by default */ public void setStrict(boolean strict) { this.strict = strict; } private Comparator<Resource> comparator = new Comparator<>() { /** * Compares resource filenames. */ @Override public int compare(Resource r1, Resource r2) { return r1.getFilename().compareTo(r2.getFilename()); } }; public MultiResourceItemReader() { this.setExecutionContextName(ClassUtils.getShortName(MultiResourceItemReader.class)); } /** * Reads the next item, jumping to next resource if necessary. */ @Nullable @Override public T read() throws Exception, UnexpectedInputException, ParseException {<FILL_FUNCTION_BODY>} /** * Use the delegate to read the next item, jump to next resource if current one is * exhausted. Items are appended to the buffer. 
* @return next item from input */ private T readNextItem() throws Exception { T item = readFromDelegate(); while (item == null) { currentResource++; if (currentResource >= resources.length) { return null; } delegate.close(); delegate.setResource(resources[currentResource]); delegate.open(new ExecutionContext()); item = readFromDelegate(); } return item; } private T readFromDelegate() throws Exception { T item = delegate.read(); if (item instanceof ResourceAware) { ((ResourceAware) item).setResource(resources[currentResource]); } return item; } /** * Close the {@link #setDelegate(ResourceAwareItemReaderItemStream)} reader and reset * instance variable values. */ @Override public void close() throws ItemStreamException { super.close(); if (!this.noInput) { delegate.close(); } noInput = false; } /** * Figure out which resource to start with in case of restart, open the delegate and * restore delegate's position in the resource. */ @Override public void open(ExecutionContext executionContext) throws ItemStreamException { super.open(executionContext); Assert.notNull(resources, "Resources must be set"); noInput = false; if (resources.length == 0) { if (strict) { throw new IllegalStateException( "No resources to read. Set strict=false if this is not an error condition."); } else { logger.warn("No resources to read. Set strict=true if this should be an error condition."); noInput = true; return; } } Arrays.sort(resources, comparator); if (executionContext.containsKey(getExecutionContextKey(RESOURCE_KEY))) { currentResource = executionContext.getInt(getExecutionContextKey(RESOURCE_KEY)); // context could have been saved before reading anything if (currentResource == -1) { currentResource = 0; } delegate.setResource(resources[currentResource]); delegate.open(executionContext); } else { currentResource = -1; } } /** * Store the current resource index and position in the resource. 
*/ @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); if (saveState) { executionContext.putInt(getExecutionContextKey(RESOURCE_KEY), currentResource); delegate.update(executionContext); } } /** * @param delegate reads items from single {@link Resource}. */ public void setDelegate(ResourceAwareItemReaderItemStream<? extends T> delegate) { this.delegate = delegate; } /** * Set the boolean indicating whether or not state should be saved in the provided * {@link ExecutionContext} during the {@link ItemStream} call to update. * @param saveState true to update ExecutionContext. False do not update * ExecutionContext. */ public void setSaveState(boolean saveState) { this.saveState = saveState; } /** * @param comparator used to order the injected resources, by default compares * {@link Resource#getFilename()} values. */ public void setComparator(Comparator<Resource> comparator) { this.comparator = comparator; } /** * @param resources input resources */ public void setResources(Resource[] resources) { Assert.notNull(resources, "The resources must not be null"); this.resources = Arrays.asList(resources).toArray(new Resource[resources.length]); } }
if (noInput) { return null; } // If there is no resource, then this is the first item, set the current // resource to 0 and open the first delegate. if (currentResource == -1) { currentResource = 0; delegate.setResource(resources[currentResource]); delegate.open(new ExecutionContext()); } return readNextItem();
1,414
112
1,526
<methods>public non-sealed void <init>() <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemWriter.java
MultiResourceItemWriter
open
class MultiResourceItemWriter<T> extends AbstractItemStreamItemWriter<T> { final static private String RESOURCE_INDEX_KEY = "resource.index"; final static private String CURRENT_RESOURCE_ITEM_COUNT = "resource.item.count"; private Resource resource; private ResourceAwareItemWriterItemStream<? super T> delegate; private int itemCountLimitPerResource = Integer.MAX_VALUE; private int currentResourceItemCount = 0; private int resourceIndex = 1; private ResourceSuffixCreator suffixCreator = new SimpleResourceSuffixCreator(); private boolean saveState = true; private boolean opened = false; public MultiResourceItemWriter() { this.setExecutionContextName(ClassUtils.getShortName(MultiResourceItemWriter.class)); } @Override public void write(Chunk<? extends T> items) throws Exception { if (!opened) { File file = setResourceToDelegate(); // create only if write is called file.createNewFile(); Assert.state(file.canWrite(), "Output resource " + file.getAbsolutePath() + " must be writable"); delegate.open(new ExecutionContext()); opened = true; } delegate.write(items); currentResourceItemCount += items.size(); if (currentResourceItemCount >= itemCountLimitPerResource) { delegate.close(); resourceIndex++; currentResourceItemCount = 0; setResourceToDelegate(); opened = false; } } /** * Allows customization of the suffix of the created resources based on the index. * @param suffixCreator {@link ResourceSuffixCreator} to be used by the writer. */ public void setResourceSuffixCreator(ResourceSuffixCreator suffixCreator) { this.suffixCreator = suffixCreator; } /** * After this limit is exceeded the next chunk will be written into newly created * resource. * @param itemCountLimitPerResource int item threshold used to determine when a new * resource should be created. */ public void setItemCountLimitPerResource(int itemCountLimitPerResource) { this.itemCountLimitPerResource = itemCountLimitPerResource; } /** * Delegate used for actual writing of the output. 
* @param delegate {@link ResourceAwareItemWriterItemStream} that will be used to * write the output. */ public void setDelegate(ResourceAwareItemWriterItemStream<? super T> delegate) { this.delegate = delegate; } /** * Prototype for output resources. Actual output files will be created in the same * directory and use the same name as this prototype with appended suffix (according * to {@link #setResourceSuffixCreator(ResourceSuffixCreator)}. * @param resource The prototype resource. */ public void setResource(Resource resource) { this.resource = resource; } /** * Indicates that the state of the reader will be saved after each commit. * @param saveState true the state is saved. */ public void setSaveState(boolean saveState) { this.saveState = saveState; } @Override public void close() throws ItemStreamException { super.close(); resourceIndex = 1; currentResourceItemCount = 0; if (opened) { delegate.close(); } } @Override public void open(ExecutionContext executionContext) throws ItemStreamException {<FILL_FUNCTION_BODY>} @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); if (saveState) { if (opened) { delegate.update(executionContext); } executionContext.putInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), currentResourceItemCount); executionContext.putInt(getExecutionContextKey(RESOURCE_INDEX_KEY), resourceIndex); } } /** * Create output resource (if necessary) and point the delegate to it. */ private File setResourceToDelegate() throws IOException { String path = resource.getFile().getAbsolutePath() + suffixCreator.getSuffix(resourceIndex); File file = new File(path); delegate.setResource(new FileSystemResource(file)); return file; } }
super.open(executionContext); resourceIndex = executionContext.getInt(getExecutionContextKey(RESOURCE_INDEX_KEY), 1); currentResourceItemCount = executionContext.getInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), 0); try { setResourceToDelegate(); } catch (IOException e) { throw new ItemStreamException("Couldn't assign resource", e); } if (executionContext.containsKey(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT))) { // It's a restart delegate.open(executionContext); opened = true; } else { opened = false; }
1,127
197
1,324
<methods>public non-sealed void <init>() <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourcesItemReader.java
ResourcesItemReader
read
class ResourcesItemReader extends AbstractItemStreamItemReader<Resource> { private static final String COUNT_KEY = "COUNT"; private Resource[] resources = new Resource[0]; private final AtomicInteger counter = new AtomicInteger(0); public ResourcesItemReader() { /* * Initialize the name for the key in the execution context. */ this.setExecutionContextName(getClass().getName()); } /** * The resources to serve up as items. Hint: use a pattern to configure. * @param resources the resources */ public void setResources(Resource[] resources) { this.resources = Arrays.asList(resources).toArray(new Resource[resources.length]); } /** * Increments a counter and returns the next {@link Resource} instance from the input, * or {@code null} if none remain. */ @Override @Nullable public synchronized Resource read() throws Exception {<FILL_FUNCTION_BODY>} @Override public void open(ExecutionContext executionContext) throws ItemStreamException { super.open(executionContext); counter.set(executionContext.getInt(getExecutionContextKey(COUNT_KEY), 0)); } @Override public void update(ExecutionContext executionContext) throws ItemStreamException { super.update(executionContext); executionContext.putInt(getExecutionContextKey(COUNT_KEY), counter.get()); } }
int index = counter.incrementAndGet() - 1; if (index >= resources.length) { return null; } return resources[index];
350
45
395
<methods>public non-sealed void <init>() <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleBinaryBufferedReaderFactory.java
BinaryBufferedReader
isEndOfLine
class BinaryBufferedReader extends BufferedReader { private final String ending; private final Lock lock = new ReentrantLock(); private BinaryBufferedReader(Reader in, String ending) { super(in); this.ending = ending; } @Override public String readLine() throws IOException { StringBuilder buffer; this.lock.lock(); try { int next = read(); if (next == -1) { return null; } buffer = new StringBuilder(); StringBuilder candidateEnding = new StringBuilder(); while (!isEndOfLine(buffer, candidateEnding, next)) { next = read(); } buffer.append(candidateEnding); } finally { this.lock.unlock(); } if (buffer != null && buffer.length() > 0) { return buffer.toString(); } return null; } /** * Check for end of line and accumulate a buffer for next time. * @param buffer the current line excluding the candidate ending * @param candidate a buffer containing accumulated state * @param next the next character (or -1 for end of file) * @return true if the values together signify the end of a file */ private boolean isEndOfLine(StringBuilder buffer, StringBuilder candidate, int next) {<FILL_FUNCTION_BODY>} }
if (next == -1) { return true; } char c = (char) next; if (ending.charAt(0) == c || candidate.length() > 0) { candidate.append(c); } if (candidate.length() == 0) { buffer.append(c); return false; } boolean end = ending.equals(candidate.toString()); if (end) { candidate.delete(0, candidate.length()); } else if (candidate.length() >= ending.length()) { buffer.append(candidate); candidate.delete(0, candidate.length()); } return end;
374
187
561
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/builder/FlatFileItemWriterBuilder.java
DelimitedBuilder
build
class DelimitedBuilder<T> { private final FlatFileItemWriterBuilder<T> parent; private final List<String> names = new ArrayList<>(); private String delimiter = ","; private String quoteCharacter = ""; private FieldExtractor<T> fieldExtractor; private Class<T> sourceType; protected DelimitedBuilder(FlatFileItemWriterBuilder<T> parent) { this.parent = parent; } /** * Define the delimiter for the file. * @param delimiter String used as a delimiter between fields. * @return The instance of the builder for chaining. * @see DelimitedLineAggregator#setDelimiter(String) */ public DelimitedBuilder<T> delimiter(String delimiter) { this.delimiter = delimiter; return this; } /** * Specify the type of items from which fields will be extracted. This is used to * configure the right {@link FieldExtractor} based on the given type (ie a record * or a regular class). * @param sourceType type of items from which fields will be extracted * @return The current instance of the builder. * @since 5.0 */ public DelimitedBuilder<T> sourceType(Class<T> sourceType) { this.sourceType = sourceType; return this; } /** * Define the quote character for each delimited field. Default is empty string. * @param quoteCharacter String used as a quote for the aggregate. * @return The instance of the builder for chaining. * @see DelimitedLineAggregator#setQuoteCharacter(String) * @since 5.1 */ public DelimitedBuilder<T> quoteCharacter(String quoteCharacter) { this.quoteCharacter = quoteCharacter; return this; } /** * Names of each of the fields within the fields that are returned in the order * they occur within the delimited file. These names will be used to create a * {@link BeanWrapperFieldExtractor} only if no explicit field extractor is set * via {@link DelimitedBuilder#fieldExtractor(FieldExtractor)}. * @param names names of each field * @return The parent {@link FlatFileItemWriterBuilder} * @see BeanWrapperFieldExtractor#setNames(String[]) */ public FlatFileItemWriterBuilder<T> names(String... 
names) { this.names.addAll(Arrays.asList(names)); return this.parent; } /** * Set the {@link FieldExtractor} to use to extract fields from each item. * @param fieldExtractor to use to extract fields from each item * @return The parent {@link FlatFileItemWriterBuilder} */ public FlatFileItemWriterBuilder<T> fieldExtractor(FieldExtractor<T> fieldExtractor) { this.fieldExtractor = fieldExtractor; return this.parent; } public DelimitedLineAggregator<T> build() {<FILL_FUNCTION_BODY>} }
Assert.isTrue((this.names != null && !this.names.isEmpty()) || this.fieldExtractor != null, "A list of field names or a field extractor is required"); DelimitedLineAggregator<T> delimitedLineAggregator = new DelimitedLineAggregator<>(); if (this.delimiter != null) { delimitedLineAggregator.setDelimiter(this.delimiter); } if (StringUtils.hasLength(this.quoteCharacter)) { delimitedLineAggregator.setQuoteCharacter(this.quoteCharacter); } if (this.fieldExtractor == null) { if (this.sourceType != null && this.sourceType.isRecord()) { this.fieldExtractor = new RecordFieldExtractor<>(this.sourceType); } else { BeanWrapperFieldExtractor<T> beanWrapperFieldExtractor = new BeanWrapperFieldExtractor<>(); beanWrapperFieldExtractor.setNames(this.names.toArray(new String[this.names.size()])); try { beanWrapperFieldExtractor.afterPropertiesSet(); this.fieldExtractor = beanWrapperFieldExtractor; } catch (Exception e) { throw new IllegalStateException("Unable to initialize DelimitedLineAggregator", e); } } } delimitedLineAggregator.setFieldExtractor(this.fieldExtractor); return delimitedLineAggregator;
792
382
1,174
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/builder/MultiResourceItemReaderBuilder.java
MultiResourceItemReaderBuilder
build
class MultiResourceItemReaderBuilder<T> { private ResourceAwareItemReaderItemStream<? extends T> delegate; private Resource[] resources; private boolean strict = false; private Comparator<Resource> comparator; private boolean saveState = true; private String name; /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public MultiResourceItemReaderBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public MultiResourceItemReaderBuilder<T> name(String name) { this.name = name; return this; } /** * The array of resources that the {@link MultiResourceItemReader} will use to * retrieve items. * @param resources the array of resources to use. * @return this instance for method chaining. * * @see MultiResourceItemReader#setResources(Resource[]) */ public MultiResourceItemReaderBuilder<T> resources(Resource... resources) { this.resources = resources; return this; } /** * Establishes the delegate to use for reading the resources provided. * @param delegate reads items from single {@link Resource}. * @return this instance for method chaining. * * @see MultiResourceItemReader#setDelegate(ResourceAwareItemReaderItemStream) */ public MultiResourceItemReaderBuilder<T> delegate(ResourceAwareItemReaderItemStream<? 
extends T> delegate) { this.delegate = delegate; return this; } /** * In strict mode the reader will throw an exception on * {@link MultiResourceItemReader#open(org.springframework.batch.item.ExecutionContext)} * if there are no resources to read. * @param strict false by default. * @return this instance for method chaining. * @see MultiResourceItemReader#setStrict(boolean) */ public MultiResourceItemReaderBuilder<T> setStrict(boolean strict) { this.strict = strict; return this; } /** * Used to order the injected resources, by default compares * {@link Resource#getFilename()} values. * @param comparator the comparator to use for ordering resources. * @return this instance for method chaining. * @see MultiResourceItemReader#setComparator(Comparator) */ public MultiResourceItemReaderBuilder<T> comparator(Comparator<Resource> comparator) { this.comparator = comparator; return this; } /** * Builds the {@link MultiResourceItemReader}. * @return a {@link MultiResourceItemReader} */ public MultiResourceItemReader<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.resources, "resources array is required."); Assert.notNull(this.delegate, "delegate is required."); if (this.saveState) { Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); } MultiResourceItemReader<T> reader = new MultiResourceItemReader<>(); reader.setResources(this.resources); reader.setDelegate(this.delegate); reader.setSaveState(this.saveState); reader.setStrict(this.strict); if (comparator != null) { reader.setComparator(this.comparator); } if (StringUtils.hasText(this.name)) { reader.setName(this.name); } return reader;
850
226
1,076
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/builder/MultiResourceItemWriterBuilder.java
MultiResourceItemWriterBuilder
build
class MultiResourceItemWriterBuilder<T> { private Resource resource; private ResourceAwareItemWriterItemStream<? super T> delegate; private int itemCountLimitPerResource = Integer.MAX_VALUE; private ResourceSuffixCreator suffixCreator; private boolean saveState = true; private String name; /** * Configure if the state of the * {@link org.springframework.batch.item.ItemStreamSupport} should be persisted within * the {@link org.springframework.batch.item.ExecutionContext} for restart purposes. * @param saveState defaults to true * @return The current instance of the builder. */ public MultiResourceItemWriterBuilder<T> saveState(boolean saveState) { this.saveState = saveState; return this; } /** * The name used to calculate the key within the * {@link org.springframework.batch.item.ExecutionContext}. Required if * {@link #saveState(boolean)} is set to true. * @param name name of the reader instance * @return The current instance of the builder. * @see org.springframework.batch.item.ItemStreamSupport#setName(String) */ public MultiResourceItemWriterBuilder<T> name(String name) { this.name = name; return this; } /** * Allows customization of the suffix of the created resources based on the index. * @param suffixCreator the customizable ResourceSuffixCreator to use. * @return The current instance of the builder. * @see MultiResourceItemWriter#setResourceSuffixCreator(ResourceSuffixCreator) */ public MultiResourceItemWriterBuilder<T> resourceSuffixCreator(ResourceSuffixCreator suffixCreator) { this.suffixCreator = suffixCreator; return this; } /** * After this limit is exceeded the next chunk will be written into newly created * resource. * @param itemCountLimitPerResource the max numbers of items to be written per chunk. * @return The current instance of the builder. 
* @see MultiResourceItemWriter#setItemCountLimitPerResource(int) */ public MultiResourceItemWriterBuilder<T> itemCountLimitPerResource(int itemCountLimitPerResource) { this.itemCountLimitPerResource = itemCountLimitPerResource; return this; } /** * Delegate used for actual writing of the output. * @param delegate The delegate to use for writing. * @return The current instance of the builder. * @see MultiResourceItemWriter#setDelegate(ResourceAwareItemWriterItemStream) */ public MultiResourceItemWriterBuilder<T> delegate(ResourceAwareItemWriterItemStream<? super T> delegate) { this.delegate = delegate; return this; } /** * Prototype for output resources. Actual output files will be created in the same * directory and use the same name as this prototype with appended suffix (according * to {@link MultiResourceItemWriter#setResourceSuffixCreator(ResourceSuffixCreator)}. * @param resource the prototype resource to use as the basis for creating resources. * @return The current instance of the builder. * @see MultiResourceItemWriter#setResource(Resource) */ public MultiResourceItemWriterBuilder<T> resource(Resource resource) { this.resource = resource; return this; } /** * Builds the {@link MultiResourceItemWriter}. * @return a {@link MultiResourceItemWriter} */ public MultiResourceItemWriter<T> build() {<FILL_FUNCTION_BODY>} }
Assert.notNull(this.resource, "resource is required."); Assert.notNull(this.delegate, "delegate is required."); if (this.saveState) { org.springframework.util.Assert.hasText(this.name, "A name is required when saveState is true."); } MultiResourceItemWriter<T> writer = new MultiResourceItemWriter<>(); writer.setResource(this.resource); writer.setDelegate(this.delegate); writer.setItemCountLimitPerResource(this.itemCountLimitPerResource); if (this.suffixCreator != null) { writer.setResourceSuffixCreator(this.suffixCreator); } writer.setSaveState(this.saveState); writer.setName(this.name); return writer;
901
220
1,121
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/DefaultLineMapper.java
DefaultLineMapper
afterPropertiesSet
class DefaultLineMapper<T> implements LineMapper<T>, InitializingBean { private LineTokenizer tokenizer; private FieldSetMapper<T> fieldSetMapper; @Override public T mapLine(String line, int lineNumber) throws Exception { return fieldSetMapper.mapFieldSet(tokenizer.tokenize(line)); } public void setLineTokenizer(LineTokenizer tokenizer) { this.tokenizer = tokenizer; } public void setFieldSetMapper(FieldSetMapper<T> fieldSetMapper) { this.fieldSetMapper = fieldSetMapper; } @Override public void afterPropertiesSet() {<FILL_FUNCTION_BODY>} }
Assert.state(tokenizer != null, "The LineTokenizer must be set"); Assert.state(fieldSetMapper != null, "The FieldSetMapper must be set");
171
48
219
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/JsonLineMapper.java
JsonLineMapper
mapLine
class JsonLineMapper implements LineMapper<Map<String, Object>> { private final MappingJsonFactory factory = new MappingJsonFactory(); /** * Interpret the line as a Json object and create a Map from it. * * @see LineMapper#mapLine(String, int) */ @Override public Map<String, Object> mapLine(String line, int lineNumber) throws Exception {<FILL_FUNCTION_BODY>} }
Map<String, Object> result; JsonParser parser = factory.createParser(line); @SuppressWarnings("unchecked") Map<String, Object> token = parser.readValueAs(Map.class); result = token; return result;
113
73
186
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PatternMatchingCompositeLineMapper.java
PatternMatchingCompositeLineMapper
afterPropertiesSet
class PatternMatchingCompositeLineMapper<T> implements LineMapper<T>, InitializingBean { private final PatternMatchingCompositeLineTokenizer tokenizer = new PatternMatchingCompositeLineTokenizer(); private PatternMatcher<FieldSetMapper<T>> patternMatcher; @Override public T mapLine(String line, int lineNumber) throws Exception { return patternMatcher.match(line).mapFieldSet(this.tokenizer.tokenize(line)); } @Override public void afterPropertiesSet() throws Exception {<FILL_FUNCTION_BODY>} public void setTokenizers(Map<String, LineTokenizer> tokenizers) { this.tokenizer.setTokenizers(tokenizers); } public void setFieldSetMappers(Map<String, FieldSetMapper<T>> fieldSetMappers) { Assert.isTrue(!fieldSetMappers.isEmpty(), "The 'fieldSetMappers' property must be non-empty"); this.patternMatcher = new PatternMatcher<>(fieldSetMappers); } }
this.tokenizer.afterPropertiesSet(); Assert.state(this.patternMatcher != null, "The 'patternMatcher' property must be non-null");
264
44
308
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PropertyMatches.java
PropertyMatches
buildErrorMessage
class PropertyMatches { // --------------------------------------------------------------------- // Static section // --------------------------------------------------------------------- /** Default maximum property distance: 2 */ public static final int DEFAULT_MAX_DISTANCE = 2; /** * Create PropertyMatches for the given bean property. * @param propertyName the name of the property to find possible matches for * @param beanClass the bean class to search for matches */ public static PropertyMatches forProperty(String propertyName, Class<?> beanClass) { return forProperty(propertyName, beanClass, DEFAULT_MAX_DISTANCE); } /** * Create PropertyMatches for the given bean property. * @param propertyName the name of the property to find possible matches for * @param beanClass the bean class to search for matches * @param maxDistance the maximum property distance allowed for matches */ public static PropertyMatches forProperty(String propertyName, Class<?> beanClass, int maxDistance) { return new PropertyMatches(propertyName, beanClass, maxDistance); } // --------------------------------------------------------------------- // Instance section // --------------------------------------------------------------------- private final String propertyName; private final String[] possibleMatches; /** * Create a new PropertyMatches instance for the given property. */ private PropertyMatches(String propertyName, Class<?> beanClass, int maxDistance) { this.propertyName = propertyName; this.possibleMatches = calculateMatches(BeanUtils.getPropertyDescriptors(beanClass), maxDistance); } /** * Return the calculated possible matches. */ public String[] getPossibleMatches() { return possibleMatches; } /** * Build an error message for the given invalid property name, indicating the possible * property matches. */ public String buildErrorMessage() {<FILL_FUNCTION_BODY>} /** * Generate possible property alternatives for the given property and class. 
* Internally uses the <code>getStringDistance</code> method, which in turn uses the * Levenshtein algorithm to determine the distance between two Strings. * @param propertyDescriptors the JavaBeans property descriptors to search * @param maxDistance the maximum distance to accept */ private String[] calculateMatches(PropertyDescriptor[] propertyDescriptors, int maxDistance) { List<String> candidates = new ArrayList<>(); for (PropertyDescriptor propertyDescriptor : propertyDescriptors) { if (propertyDescriptor.getWriteMethod() != null) { String possibleAlternative = propertyDescriptor.getName(); int distance = calculateStringDistance(this.propertyName, possibleAlternative); if (distance <= maxDistance) { candidates.add(possibleAlternative); } } } Collections.sort(candidates); return StringUtils.toStringArray(candidates); } /** * Calculate the distance between the given two Strings according to the Levenshtein * algorithm. * @param s1 the first String * @param s2 the second String * @return the distance value */ private int calculateStringDistance(String s1, String s2) { if (s1.length() == 0) { return s2.length(); } if (s2.length() == 0) { return s1.length(); } int d[][] = new int[s1.length() + 1][s2.length() + 1]; for (int i = 0; i <= s1.length(); i++) { d[i][0] = i; } for (int j = 0; j <= s2.length(); j++) { d[0][j] = j; } for (int i = 1; i <= s1.length(); i++) { char s_i = s1.charAt(i - 1); for (int j = 1; j <= s2.length(); j++) { int cost; char t_j = s2.charAt(j - 1); if (Character.toLowerCase(s_i) == Character.toLowerCase(t_j)) { cost = 0; } else { cost = 1; } d[i][j] = Math.min(Math.min(d[i - 1][j] + 1, d[i][j - 1] + 1), d[i - 1][j - 1] + cost); } } return d[s1.length()][s2.length()]; } }
StringBuilder buf = new StringBuilder(128); buf.append("Bean property '"); buf.append(this.propertyName); buf.append("' is not writable or has an invalid setter method. "); if (ObjectUtils.isEmpty(this.possibleMatches)) { buf.append("Does the parameter type of the setter match the return type of the getter?"); } else { buf.append("Did you mean "); for (int i = 0; i < this.possibleMatches.length; i++) { buf.append('\''); buf.append(this.possibleMatches[i]); if (i < this.possibleMatches.length - 2) { buf.append("', "); } else if (i == this.possibleMatches.length - 2) { buf.append("', or "); } } buf.append("'?"); } return buf.toString();
1,128
253
1,381
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/RecordFieldSetMapper.java
RecordFieldSetMapper
mapFieldSet
class RecordFieldSetMapper<T> implements FieldSetMapper<T> { private final SimpleTypeConverter typeConverter = new SimpleTypeConverter(); private final Constructor<T> mappedConstructor; private String[] constructorParameterNames; private Class<?>[] constructorParameterTypes; /** * Create a new {@link RecordFieldSetMapper}. * @param targetType type of mapped items */ public RecordFieldSetMapper(Class<T> targetType) { this(targetType, new DefaultConversionService()); } /** * Create a new {@link RecordFieldSetMapper}. * @param targetType type of mapped items * @param conversionService service to use to convert raw data to typed fields */ public RecordFieldSetMapper(Class<T> targetType, ConversionService conversionService) { this.typeConverter.setConversionService(conversionService); this.mappedConstructor = BeanUtils.getResolvableConstructor(targetType); if (this.mappedConstructor.getParameterCount() > 0) { this.constructorParameterNames = BeanUtils.getParameterNames(this.mappedConstructor); this.constructorParameterTypes = this.mappedConstructor.getParameterTypes(); } } @Override public T mapFieldSet(FieldSet fieldSet) {<FILL_FUNCTION_BODY>} }
Assert.isTrue(fieldSet.getFieldCount() == this.constructorParameterNames.length, "Fields count must be equal to record components count"); Assert.isTrue(fieldSet.hasNames(), "Field names must specified"); Object[] args = new Object[0]; if (this.constructorParameterNames != null && this.constructorParameterTypes != null) { args = new Object[this.constructorParameterNames.length]; for (int i = 0; i < args.length; i++) { String name = this.constructorParameterNames[i]; Class<?> type = this.constructorParameterTypes[i]; args[i] = this.typeConverter.convertIfNecessary(fieldSet.readRawString(name), type); } } return BeanUtils.instantiateClass(this.mappedConstructor, args);
324
216
540
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/DefaultRecordSeparatorPolicy.java
DefaultRecordSeparatorPolicy
preProcess
class DefaultRecordSeparatorPolicy extends SimpleRecordSeparatorPolicy { private static final String QUOTE = "\""; private static final String CONTINUATION = "\\"; private String quoteCharacter; private String continuation; /** * Default constructor. */ public DefaultRecordSeparatorPolicy() { this(QUOTE, CONTINUATION); } /** * Convenient constructor with quote character as parameter. * @param quoteCharacter value used to indicate a quoted string */ public DefaultRecordSeparatorPolicy(String quoteCharacter) { this(quoteCharacter, CONTINUATION); } /** * Convenient constructor with quote character and continuation marker as parameters. * @param quoteCharacter value used to indicate a quoted string * @param continuation value used to indicate a line continuation */ public DefaultRecordSeparatorPolicy(String quoteCharacter, String continuation) { super(); this.continuation = continuation; this.quoteCharacter = quoteCharacter; } /** * Public setter for the quoteCharacter. Defaults to double quote mark. * @param quoteCharacter the quoteCharacter to set */ public void setQuoteCharacter(String quoteCharacter) { this.quoteCharacter = quoteCharacter; } /** * Public setter for the continuation. Defaults to back slash. * @param continuation the continuation to set */ public void setContinuation(String continuation) { this.continuation = continuation; } /** * Return true if the line does not have unterminated quotes (delimited by "), and * does not end with a continuation marker ('\'). The test for the continuation marker * ignores whitespace at the end of the line. * * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#isEndOfRecord(java.lang.String) */ @Override public boolean isEndOfRecord(String line) { return !isQuoteUnterminated(line) && !isContinued(line); } /** * If we are in an unterminated quote, add a line separator. Otherwise remove the * continuation marker (plus whitespace at the end) if it is there. 
* * @see org.springframework.batch.item.file.separator.SimpleRecordSeparatorPolicy#preProcess(java.lang.String) */ @Override public String preProcess(String line) {<FILL_FUNCTION_BODY>} /** * Determine if the current line (or buffered concatenation of lines) contains an * unterminated quote, indicating that the record is continuing onto the next line. * @param line the line to check * @return true if the quote is unterminated, false otherwise */ private boolean isQuoteUnterminated(String line) { return StringUtils.countOccurrencesOf(line, quoteCharacter) % 2 != 0; } /** * Determine if the current line (or buffered concatenation of lines) ends with the * continuation marker, indicating that the record is continuing onto the next line. * @param line the line to check * @return true if the line ends with the continuation marker, false otherwise */ private boolean isContinued(String line) { if (line == null) { return false; } return line.trim().endsWith(continuation); } }
if (isQuoteUnterminated(line)) { return line + "\n"; } if (isContinued(line)) { return line.substring(0, line.lastIndexOf(continuation)); } return line;
849
67
916
<methods>public non-sealed void <init>() ,public boolean isEndOfRecord(java.lang.String) ,public java.lang.String postProcess(java.lang.String) ,public java.lang.String preProcess(java.lang.String) <variables>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SuffixRecordSeparatorPolicy.java
SuffixRecordSeparatorPolicy
isEndOfRecord
class SuffixRecordSeparatorPolicy extends DefaultRecordSeparatorPolicy { /** * Default value for record terminator suffix. */ public static final String DEFAULT_SUFFIX = ";"; private String suffix = DEFAULT_SUFFIX; private boolean ignoreWhitespace = true; /** * Lines ending in this terminator String signal the end of a record. * @param suffix suffix to indicate the end of a record */ public void setSuffix(String suffix) { this.suffix = suffix; } /** * Flag to indicate that the decision to terminate a record should ignore whitespace * at the end of the line. * @param ignoreWhitespace indicator */ public void setIgnoreWhitespace(boolean ignoreWhitespace) { this.ignoreWhitespace = ignoreWhitespace; } /** * Return true if the line ends with the specified substring. By default whitespace is * trimmed before the comparison. Also returns true if the line is null, but not if it * is empty. * * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#isEndOfRecord(java.lang.String) */ @Override public boolean isEndOfRecord(String line) {<FILL_FUNCTION_BODY>} /** * Remove the suffix from the end of the record. * * @see org.springframework.batch.item.file.separator.SimpleRecordSeparatorPolicy#postProcess(java.lang.String) */ @Override public String postProcess(String record) { if (record == null) { return null; } return record.substring(0, record.lastIndexOf(suffix)); } }
if (line == null) { return true; } String trimmed = ignoreWhitespace ? line.trim() : line; return trimmed.endsWith(suffix);
434
50
484
<methods>public void <init>() ,public void <init>(java.lang.String) ,public void <init>(java.lang.String, java.lang.String) ,public boolean isEndOfRecord(java.lang.String) ,public java.lang.String preProcess(java.lang.String) ,public void setContinuation(java.lang.String) ,public void setQuoteCharacter(java.lang.String) <variables>private static final java.lang.String CONTINUATION,private static final java.lang.String QUOTE,private java.lang.String continuation,private java.lang.String quoteCharacter
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/AbstractLineTokenizer.java
AbstractLineTokenizer
adjustTokenCountIfNecessary
class AbstractLineTokenizer implements LineTokenizer { protected String[] names = new String[0]; private boolean strict = true; private final String emptyToken = ""; private FieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(); /** * Public setter for the strict flag. If true (the default) then number of tokens in * line must match the number of tokens defined (by {@link Range}, columns, etc.) in * {@link LineTokenizer}. If false then lines with less tokens will be tolerated and * padded with empty columns, and lines with more tokens will simply be truncated. * @param strict the strict flag to set */ public void setStrict(boolean strict) { this.strict = strict; } /** * Provides access to the strict flag for subclasses if needed. * @return the strict flag value */ protected boolean isStrict() { return strict; } /** * Factory for {@link FieldSet} instances. Can be injected by clients to customize the * default number and date formats. * @param fieldSetFactory the {@link FieldSetFactory} to set */ public void setFieldSetFactory(FieldSetFactory fieldSetFactory) { this.fieldSetFactory = fieldSetFactory; } /** * Setter for column names. Optional, but if set, then all lines must have as many or * fewer tokens. * @param names names of each column */ public void setNames(String... names) { if (names == null) { this.names = null; } else { boolean valid = false; for (String name : names) { if (StringUtils.hasText(name)) { valid = true; break; } } if (valid) { this.names = Arrays.asList(names).toArray(new String[names.length]); } } } /** * @return <code>true</code> if column names have been specified * @see #setNames(String[]) */ public boolean hasNames() { if (names != null && names.length > 0) { return true; } return false; } /** * Yields the tokens resulting from the splitting of the supplied <code>line</code>. 
* @param line the line to be tokenized (can be <code>null</code>) * @return the resulting tokens */ @Override public FieldSet tokenize(@Nullable String line) { if (line == null) { line = ""; } List<String> tokens = new ArrayList<>(doTokenize(line)); // if names are set and strict flag is false if ((names.length != 0) && (!strict)) { adjustTokenCountIfNecessary(tokens); } String[] values = tokens.toArray(new String[tokens.size()]); if (names.length == 0) { return fieldSetFactory.create(values); } else if (values.length != names.length) { throw new IncorrectTokenCountException(names.length, values.length, line); } return fieldSetFactory.create(values, names); } protected abstract List<String> doTokenize(String line); /** * Adds empty tokens or truncates existing token list to match expected (configured) * number of tokens in {@link LineTokenizer}. * @param tokens - list of tokens */ private void adjustTokenCountIfNecessary(List<String> tokens) {<FILL_FUNCTION_BODY>} }
int nameLength = names.length; int tokensSize = tokens.size(); // if the number of tokens is not what expected if (nameLength != tokensSize) { if (nameLength > tokensSize) { // add empty tokens until the token list size matches // the expected number of tokens for (int i = 0; i < (nameLength - tokensSize); i++) { tokens.add(emptyToken); } } else { // truncate token list to match the number of expected tokens tokens.subList(nameLength, tokensSize).clear(); } }
915
164
1,079
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/BeanWrapperFieldExtractor.java
BeanWrapperFieldExtractor
afterPropertiesSet
class BeanWrapperFieldExtractor<T> implements FieldExtractor<T>, InitializingBean { private String[] names; /** * @param names field names to be extracted by the {@link #extract(Object)} method. */ public void setNames(String[] names) { Assert.notNull(names, "Names must be non-null"); this.names = Arrays.asList(names).toArray(new String[names.length]); } /** * @see org.springframework.batch.item.file.transform.FieldExtractor#extract(java.lang.Object) */ @Override public Object[] extract(T item) { List<Object> values = new ArrayList<>(); BeanWrapper bw = new BeanWrapperImpl(item); for (String propertyName : this.names) { values.add(bw.getPropertyValue(propertyName)); } return values.toArray(); } @Override public void afterPropertiesSet() {<FILL_FUNCTION_BODY>} }
Assert.state(names != null, "The 'names' property must be set.");
263
25
288
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSetFactory.java
DefaultFieldSetFactory
enhance
class DefaultFieldSetFactory implements FieldSetFactory { private DateFormat dateFormat; private NumberFormat numberFormat; /** * The {@link NumberFormat} to use for parsing numbers. If unset then * {@link java.util.Locale#US} will be used. * @param numberFormat the {@link NumberFormat} to use for number parsing */ public void setNumberFormat(NumberFormat numberFormat) { this.numberFormat = numberFormat; } /** * The {@link DateFormat} to use for parsing dates. If unset the default pattern is * ISO standard <code>yyyy-MM-dd</code>. * @param dateFormat the {@link DateFormat} to use for date parsing */ public void setDateFormat(DateFormat dateFormat) { this.dateFormat = dateFormat; } /** * {@inheritDoc} */ @Override public FieldSet create(String[] values, String[] names) { DefaultFieldSet fieldSet = new DefaultFieldSet(values, names); return enhance(fieldSet); } /** * {@inheritDoc} */ @Override public FieldSet create(String[] values) { DefaultFieldSet fieldSet = new DefaultFieldSet(values); return enhance(fieldSet); } private FieldSet enhance(DefaultFieldSet fieldSet) {<FILL_FUNCTION_BODY>} }
if (dateFormat != null) { fieldSet.setDateFormat(dateFormat); } if (numberFormat != null) { fieldSet.setNumberFormat(numberFormat); } return fieldSet;
348
63
411
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineAggregator.java
DelimitedLineAggregator
doAggregate
class DelimitedLineAggregator<T> extends ExtractorLineAggregator<T> { private String delimiter = ","; private String quoteCharacter = ""; /** * Public setter for the delimiter. * @param delimiter the delimiter to set */ public void setDelimiter(String delimiter) { this.delimiter = delimiter; } /** * Setter for the quote character. * @since 5.1 * @param quoteCharacter the quote character to set */ public void setQuoteCharacter(String quoteCharacter) { this.quoteCharacter = quoteCharacter; } @Override public String doAggregate(Object[] fields) {<FILL_FUNCTION_BODY>} }
return Arrays.stream(fields) .map(field -> this.quoteCharacter + field + this.quoteCharacter) .collect(Collectors.joining(this.delimiter));
193
50
243
<methods>public non-sealed void <init>() ,public java.lang.String aggregate(T) ,public void setFieldExtractor(FieldExtractor<T>) <variables>private FieldExtractor<T> fieldExtractor
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ExtractorLineAggregator.java
ExtractorLineAggregator
aggregate
class ExtractorLineAggregator<T> implements LineAggregator<T> { private FieldExtractor<T> fieldExtractor = new PassThroughFieldExtractor<>(); /** * Public setter for the field extractor responsible for splitting an input object up * into an array of objects. Defaults to {@link PassThroughFieldExtractor}. * @param fieldExtractor The field extractor to set */ public void setFieldExtractor(FieldExtractor<T> fieldExtractor) { this.fieldExtractor = fieldExtractor; } /** * Extract fields from the given item using the {@link FieldExtractor} and then * aggregate them. Any null field returned by the extractor will be replaced by an * empty String. Null items are not allowed. * * @see org.springframework.batch.item.file.transform.LineAggregator#aggregate(java.lang.Object) */ @Override public String aggregate(T item) {<FILL_FUNCTION_BODY>} /** * Aggregate provided fields into single String. * @param fields An array of the fields that must be aggregated * @return aggregated string */ protected abstract String doAggregate(Object[] fields); }
Assert.notNull(item, "Item is required"); Object[] fields = this.fieldExtractor.extract(item); // // Replace nulls with empty strings // Object[] args = new Object[fields.length]; for (int i = 0; i < fields.length; i++) { if (fields[i] == null) { args[i] = ""; } else { args[i] = fields[i]; } } return this.doAggregate(args);
303
144
447
<no_super_class>
spring-projects_spring-batch
spring-batch/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FixedLengthTokenizer.java
FixedLengthTokenizer
doTokenize
class FixedLengthTokenizer extends AbstractLineTokenizer { private Range[] ranges; private int maxRange = 0; boolean open = false; /** * Set the column ranges. Used in conjunction with the * {@link RangeArrayPropertyEditor} this property can be set in the form of a String * describing the range boundaries, e.g. "1,4,7" or "1-3,4-6,7" or "1-2,4-5,7-10". If * the last range is open then the rest of the line is read into that column * (irrespective of the strict flag setting). * * @see #setStrict(boolean) * @param ranges the column ranges expected in the input */ public void setColumns(Range... ranges) { this.ranges = Arrays.asList(ranges).toArray(new Range[ranges.length]); calculateMaxRange(ranges); } /* * Calculate the highest value within an array of ranges. The ranges aren't * necessarily in order. For example: "5-10, 1-4,11-15". Furthermore, there isn't * always a min and max, such as: "1,4-20, 22" */ private void calculateMaxRange(Range[] ranges) { if (ranges == null || ranges.length == 0) { maxRange = 0; return; } open = false; maxRange = ranges[0].getMin(); for (Range range : ranges) { int upperBound; if (range.hasMaxValue()) { upperBound = range.getMax(); } else { upperBound = range.getMin(); if (upperBound > maxRange) { open = true; } } if (upperBound > maxRange) { maxRange = upperBound; } } } /** * Yields the tokens resulting from the splitting of the supplied <code>line</code>. * @param line the line to be tokenized (can be <code>null</code>) * @return the resulting tokens (empty if the line is null) * @throws IncorrectLineLengthException if line length is greater than or less than * the max range set. */ @Override protected List<String> doTokenize(String line) {<FILL_FUNCTION_BODY>} }
List<String> tokens = new ArrayList<>(ranges.length); int lineLength; String token; lineLength = line.length(); if (lineLength < maxRange && isStrict()) { throw new IncorrectLineLengthException("Line is shorter than max range " + maxRange, maxRange, lineLength, line); } if (!open && lineLength > maxRange && isStrict()) { throw new IncorrectLineLengthException("Line is longer than max range " + maxRange, maxRange, lineLength, line); } for (Range range : ranges) { int startPos = range.getMin() - 1; int endPos = range.getMax(); if (lineLength >= endPos) { token = line.substring(startPos, endPos); } else if (lineLength >= startPos) { token = line.substring(startPos); } else { token = ""; } tokens.add(token); } return tokens;
620
279
899
<methods>public non-sealed void <init>() ,public boolean hasNames() ,public void setFieldSetFactory(org.springframework.batch.item.file.transform.FieldSetFactory) ,public transient void setNames(java.lang.String[]) ,public void setStrict(boolean) ,public org.springframework.batch.item.file.transform.FieldSet tokenize(java.lang.String) <variables>private final java.lang.String emptyToken,private org.springframework.batch.item.file.transform.FieldSetFactory fieldSetFactory,protected java.lang.String[] names,private boolean strict