Commit 76a5d12

Updated files to fit the Standard.
1 parent 39908bb commit 76a5d12

200 files changed: 2,848 additions and 4,134 deletions.

Large commits have some content hidden by default; only a subset of the 200 changed files is shown below.

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/RangeConverter.java

Lines changed: 2 additions & 3 deletions

@@ -51,9 +51,8 @@ else if (columns.length == 2) {
             return new Range(start, end);
         }
         else {
-            throw new IllegalArgumentException(String.format(
-                    "%s is in an illegal format. Ranges must be specified as startIndex-endIndex",
-                    source));
+            throw new IllegalArgumentException(String
+                    .format("%s is in an illegal format. Ranges must be specified as startIndex-endIndex", source));
         }
     }
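
For context, a hedged usage sketch (not part of this commit), assuming RangeConverter implements Spring's Converter<String, Range> as suggested by the convert logic above: a two-part startIndex-endIndex value yields a Range, anything else falls into the reformatted IllegalArgumentException.

import org.springframework.batch.item.file.transform.Range;

class RangeConverterUsageSketch {

    public static void main(String[] args) {
        RangeConverter converter = new RangeConverter();

        // A well-formed startIndex-endIndex value is parsed into a Range.
        Range columns = converter.convert("1-5");
        System.out.println(columns.getMin() + ".." + columns.getMax());

        // Anything that does not split into exactly two parts falls into the
        // else branch above and throws IllegalArgumentException.
        converter.convert("1-5-9");
    }

}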

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/SingleStepJobAutoConfiguration.java

Lines changed: 7 additions & 12 deletions

@@ -57,9 +57,8 @@ public class SingleStepJobAutoConfiguration {
     @Autowired(required = false)
     private ItemProcessor<Map<String, Object>, Map<String, Object>> itemProcessor;
 
-    public SingleStepJobAutoConfiguration(JobBuilderFactory jobBuilderFactory,
-            StepBuilderFactory stepBuilderFactory, SingleStepJobProperties properties,
-            ApplicationContext context) {
+    public SingleStepJobAutoConfiguration(JobBuilderFactory jobBuilderFactory, StepBuilderFactory stepBuilderFactory,
+            SingleStepJobProperties properties, ApplicationContext context) {
 
         validateProperties(properties);
 
@@ -72,27 +71,23 @@ private void validateProperties(SingleStepJobProperties properties) {
         Assert.hasText(properties.getJobName(), "A job name is required");
         Assert.hasText(properties.getStepName(), "A step name is required");
         Assert.notNull(properties.getChunkSize(), "A chunk size is required");
-        Assert.isTrue(properties.getChunkSize() > 0,
-                "A chunk size greater than zero is required");
+        Assert.isTrue(properties.getChunkSize() > 0, "A chunk size greater than zero is required");
     }
 
     @Bean
     @ConditionalOnMissingBean
     @ConditionalOnProperty(prefix = "spring.batch.job", name = "job-name")
-    public Job job(ItemReader<Map<String, Object>> itemReader,
-            ItemWriter<Map<String, Object>> itemWriter) {
+    public Job job(ItemReader<Map<String, Object>> itemReader, ItemWriter<Map<String, Object>> itemWriter) {
 
         SimpleStepBuilder<Map<String, Object>, Map<String, Object>> stepBuilder = this.stepBuilderFactory
                 .get(this.properties.getStepName())
-                .<Map<String, Object>, Map<String, Object>>chunk(
-                        this.properties.getChunkSize())
-                .reader(itemReader);
+                .<Map<String, Object>, Map<String, Object>>chunk(this.properties.getChunkSize()).reader(itemReader);
 
         stepBuilder.processor(this.itemProcessor);
 
         Step step = stepBuilder.writer(itemWriter).build();
 
-        return this.jobBuilderFactory.get(this.properties.getJobName()).start(step)
-                .build();
+        return this.jobBuilderFactory.get(this.properties.getJobName()).start(step).build();
     }
+
 }
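
A minimal usage sketch (assumed usage, not part of this commit): the job bean above only activates when spring.batch.job.job-name is set, and validateProperties also requires a step name and a positive chunk size (spring.batch.job.step-name and spring.batch.job.chunk-size under relaxed binding). The application then only contributes the reader and writer beans; the bean names and sample data below are illustrative.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class SingleStepJobUsageSketch {

    @Bean
    ItemReader<Map<String, Object>> itemReader() {
        // Two in-memory items stand in for a real reader.
        List<Map<String, Object>> items = Arrays.asList(
                Collections.<String, Object>singletonMap("item", "foo"),
                Collections.<String, Object>singletonMap("item", "bar"));
        return new ListItemReader<>(items);
    }

    @Bean
    ItemWriter<Map<String, Object>> itemWriter() {
        // The auto-configured step hands each chunk to this writer.
        return items -> items.forEach(System.out::println);
    }

}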

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemReaderAutoConfiguration.java

Lines changed: 15 additions & 26 deletions

@@ -58,47 +58,36 @@ public FlatFileItemReaderAutoConfiguration(FlatFileItemReaderProperties properti
     @Bean
     @ConditionalOnMissingBean
     @ConditionalOnProperty(prefix = "spring.batch.job.flatfileitemreader", name = "name")
-    public FlatFileItemReader<Map<String, Object>> itemReader(
-            @Autowired(required = false) LineTokenizer lineTokenizer,
-            @Autowired(required = false) FieldSetMapper<Map<String, Object>> fieldSetMapper,
-            @Autowired(required = false) LineMapper<Map<String, Object>> lineMapper,
-            @Autowired(required = false) LineCallbackHandler skippedLinesCallback,
-            @Autowired(required = false) RecordSeparatorPolicy recordSeparatorPolicy) {
+    public FlatFileItemReader<Map<String, Object>> itemReader(@Autowired(required = false) LineTokenizer lineTokenizer,
+            @Autowired(required = false) FieldSetMapper<Map<String, Object>> fieldSetMapper,
+            @Autowired(required = false) LineMapper<Map<String, Object>> lineMapper,
+            @Autowired(required = false) LineCallbackHandler skippedLinesCallback,
+            @Autowired(required = false) RecordSeparatorPolicy recordSeparatorPolicy) {
         FlatFileItemReaderBuilder<Map<String, Object>> mapFlatFileItemReaderBuilder = new FlatFileItemReaderBuilder<Map<String, Object>>()
                 .name(this.properties.getName()).resource(this.properties.getResource())
-                .saveState(this.properties.isSaveState())
-                .maxItemCount(this.properties.getMaxItemCount())
-                .currentItemCount(this.properties.getCurrentItemCount())
-                .strict(this.properties.isStrict())
-                .encoding(this.properties.getEncoding())
-                .linesToSkip(this.properties.getLinesToSkip())
-                .comments(this.properties.getComments()
-                        .toArray(new String[this.properties.getComments().size()]));
+                .saveState(this.properties.isSaveState()).maxItemCount(this.properties.getMaxItemCount())
+                .currentItemCount(this.properties.getCurrentItemCount()).strict(this.properties.isStrict())
+                .encoding(this.properties.getEncoding()).linesToSkip(this.properties.getLinesToSkip())
+                .comments(this.properties.getComments().toArray(new String[this.properties.getComments().size()]));
 
         mapFlatFileItemReaderBuilder.lineTokenizer(lineTokenizer);
         if (recordSeparatorPolicy != null) {
-            mapFlatFileItemReaderBuilder
-                    .recordSeparatorPolicy(recordSeparatorPolicy);
+            mapFlatFileItemReaderBuilder.recordSeparatorPolicy(recordSeparatorPolicy);
         }
         mapFlatFileItemReaderBuilder.fieldSetMapper(fieldSetMapper);
         mapFlatFileItemReaderBuilder.lineMapper(lineMapper);
         mapFlatFileItemReaderBuilder.skippedLinesCallback(skippedLinesCallback);
 
         if (this.properties.isDelimited()) {
-            mapFlatFileItemReaderBuilder.delimited()
-                    .quoteCharacter(this.properties.getQuoteCharacter())
+            mapFlatFileItemReaderBuilder.delimited().quoteCharacter(this.properties.getQuoteCharacter())
                     .delimiter(this.properties.getDelimiter())
-                    .includedFields(
-                            this.properties.getIncludedFields().toArray(new Integer[0]))
-                    .names(this.properties.getNames())
-                    .beanMapperStrict(this.properties.isParsingStrict())
+                    .includedFields(this.properties.getIncludedFields().toArray(new Integer[0]))
+                    .names(this.properties.getNames()).beanMapperStrict(this.properties.isParsingStrict())
                     .fieldSetMapper(new MapFieldSetMapper());
         }
         else if (this.properties.isFixedLength()) {
-            mapFlatFileItemReaderBuilder.fixedLength()
-                    .columns(this.properties.getRanges().toArray(new Range[0]))
-                    .names(this.properties.getNames())
-                    .fieldSetMapper(new MapFieldSetMapper())
+            mapFlatFileItemReaderBuilder.fixedLength().columns(this.properties.getRanges().toArray(new Range[0]))
+                    .names(this.properties.getNames()).fieldSetMapper(new MapFieldSetMapper())
                     .beanMapperStrict(this.properties.isParsingStrict());
         }
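
All of the @Autowired(required = false) parameters above are optional collaborators. As a hedged sketch (assumed usage, not part of this commit), an application that sets spring.batch.job.flatfileitemreader.name and skips header lines could contribute a LineCallbackHandler like the one below, which the auto-configuration would wire into the builder.

import org.springframework.batch.item.file.LineCallbackHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class FlatFileReaderCallbackSketch {

    @Bean
    LineCallbackHandler skippedLinesCallback() {
        // Invoked once for every line skipped via the linesToSkip property.
        return line -> System.out.println("Skipped header line: " + line);
    }

}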

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemReaderProperties.java

Lines changed: 2 additions & 2 deletions

@@ -73,8 +73,8 @@ public class FlatFileItemReaderProperties {
     private boolean strict = true;
 
     /**
-     * Configure the encoding used by the reader to read the input source. The default value
-     * is {@link FlatFileItemReader#DEFAULT_CHARSET}.
+     * Configure the encoding used by the reader to read the input source. The default
+     * value is {@link FlatFileItemReader#DEFAULT_CHARSET}.
     */
     private String encoding = FlatFileItemReader.DEFAULT_CHARSET;

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemWriterAutoConfiguration.java

Lines changed: 16 additions & 24 deletions

@@ -71,53 +71,45 @@ public FlatFileItemWriterAutoConfiguration(FlatFileItemWriterProperti
     public FlatFileItemWriter<Map<String, Object>> itemWriter() {
 
         if (this.properties.isDelimited() && this.properties.isFormatted()) {
-            throw new IllegalStateException(
-                    "An output file must be either delimited or formatted or a custom "
-                            + "LineAggregator must be provided. Your current configuration specifies both delimited and formatted");
+            throw new IllegalStateException("An output file must be either delimited or formatted or a custom "
+                    + "LineAggregator must be provided. Your current configuration specifies both delimited and formatted");
         }
-        else if ((this.properties.isFormatted() || this.properties.isDelimited())
-                && this.lineAggregator != null) {
-            throw new IllegalStateException("A LineAggregator must be configured if the "
-                    + "output is not formatted or delimited");
+        else if ((this.properties.isFormatted() || this.properties.isDelimited()) && this.lineAggregator != null) {
+            throw new IllegalStateException(
+                    "A LineAggregator must be configured if the " + "output is not formatted or delimited");
         }
 
         FlatFileItemWriterBuilder<Map<String, Object>> builder = new FlatFileItemWriterBuilder<Map<String, Object>>()
                 .name(this.properties.getName()).resource((WritableResource) this.properties.getResource())
-                .append(this.properties.isAppend())
-                .encoding(this.properties.getEncoding())
-                .forceSync(this.properties.isForceSync())
-                .lineSeparator(this.properties.getLineSeparator())
-                .saveState(this.properties.isSaveState())
-                .shouldDeleteIfEmpty(this.properties.isShouldDeleteIfEmpty())
+                .append(this.properties.isAppend()).encoding(this.properties.getEncoding())
+                .forceSync(this.properties.isForceSync()).lineSeparator(this.properties.getLineSeparator())
+                .saveState(this.properties.isSaveState()).shouldDeleteIfEmpty(this.properties.isShouldDeleteIfEmpty())
                 .shouldDeleteIfExists(this.properties.isShouldDeleteIfExists())
-                .transactional(this.properties.isTransactional())
-                .headerCallback(this.headerCallback).footerCallback(this.footerCallback);
+                .transactional(this.properties.isTransactional()).headerCallback(this.headerCallback)
+                .footerCallback(this.footerCallback);
 
         if (this.properties.isDelimited()) {
-            FlatFileItemWriterBuilder.DelimitedBuilder<Map<String, Object>> delimitedBuilder = builder
-                    .delimited().delimiter(this.properties.getDelimiter());
+            FlatFileItemWriterBuilder.DelimitedBuilder<Map<String, Object>> delimitedBuilder = builder.delimited()
+                    .delimiter(this.properties.getDelimiter());
 
             if (this.fieldExtractor != null) {
                 delimitedBuilder.fieldExtractor(this.fieldExtractor);
             }
             else {
-                delimitedBuilder.fieldExtractor(
-                        new MapFieldExtractor(this.properties.getNames()));
+                delimitedBuilder.fieldExtractor(new MapFieldExtractor(this.properties.getNames()));
             }
         }
         else if (this.properties.isFormatted()) {
-            FlatFileItemWriterBuilder.FormattedBuilder<Map<String, Object>> formattedBuilder = builder
-                    .formatted().format(this.properties.getFormat())
-                    .locale(this.properties.getLocale())
+            FlatFileItemWriterBuilder.FormattedBuilder<Map<String, Object>> formattedBuilder = builder.formatted()
+                    .format(this.properties.getFormat()).locale(this.properties.getLocale())
                     .maximumLength(this.properties.getMaximumLength())
                     .minimumLength(this.properties.getMinimumLength());
 
             if (this.fieldExtractor != null) {
                 formattedBuilder.fieldExtractor(this.fieldExtractor);
             }
             else {
-                formattedBuilder.fieldExtractor(
-                        new MapFieldExtractor(this.properties.getNames()));
+                formattedBuilder.fieldExtractor(new MapFieldExtractor(this.properties.getNames()));
             }
         }
         else if (this.lineAggregator != null) {
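
When neither delimited nor formatted output is configured, the builder above falls through to the lineAggregator branch, so the aggregation strategy comes from a user-supplied bean. A minimal sketch of such a bean (illustrative only, not part of this commit):

import java.util.Map;
import java.util.stream.Collectors;

import org.springframework.batch.item.file.transform.LineAggregator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class FlatFileWriterAggregatorSketch {

    @Bean
    LineAggregator<Map<String, Object>> lineAggregator() {
        // Write each item as its values joined with a pipe character.
        return item -> item.values().stream()
                .map(String::valueOf)
                .collect(Collectors.joining("|"));
    }

}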

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemWriterProperties.java

Lines changed: 15 additions & 9 deletions

@@ -37,8 +37,8 @@ public class FlatFileItemWriterProperties {
     private Resource resource;
 
     /**
-     * Configure the use of the {@code DelimitedLineAggregator} to generate the output per item.
-     * Default is {@code false}.
+     * Configure the use of the {@code DelimitedLineAggregator} to generate the output per
+     * item. Default is {@code false}.
     */
     private boolean delimited;
 
@@ -65,7 +65,7 @@ public class FlatFileItemWriterProperties {
 
     /**
     * Configure the minimum record length.
-    */
+    */
     private int minimumLength = 0;
 
     /**
@@ -74,12 +74,14 @@ public class FlatFileItemWriterProperties {
     private String delimiter = ",";
 
     /**
-     * File encoding for the output file. Defaults to {@code FlatFileItemWriter.DEFAULT_CHARSET})
+     * File encoding for the output file. Defaults to
+     * {@code FlatFileItemWriter.DEFAULT_CHARSET})
     */
     private String encoding = FlatFileItemWriter.DEFAULT_CHARSET;
 
     /**
-     * A flag indicating that changes should be force-synced to disk on flush. Defaults to {@code false}.
+     * A flag indicating that changes should be force-synced to disk on flush. Defaults to
+     * {@code false}.
     */
     private boolean forceSync = false;
 
@@ -89,12 +91,14 @@ public class FlatFileItemWriterProperties {
     private String[] names;
 
     /**
-     * Configure if the output file is found if it should be appended to. Defaults to {@code false}.
+     * Configure if the output file is found if it should be appended to. Defaults to
+     * {@code false}.
     */
     private boolean append = false;
 
     /**
-     * String used to separate lines in output. Defaults to the {@code System} property {@code line.separator}.
+     * String used to separate lines in output. Defaults to the {@code System} property
+     * {@code line.separator}.
     */
     private String lineSeparator = FlatFileItemWriter.DEFAULT_LINE_SEPARATOR;
 
@@ -117,12 +121,14 @@ public class FlatFileItemWriterProperties {
     private boolean shouldDeleteIfEmpty = false;
 
     /**
-     * Indicates whether an existing output file should be deleted on startup. Defaults to {@code true}.
+     * Indicates whether an existing output file should be deleted on startup. Defaults to
+     * {@code true}.
     */
     private boolean shouldDeleteIfExists = true;
 
     /**
-     * Indicates whether flushing the buffer should be delayed while a transaction is active. Defaults to {@code true}.
+     * Indicates whether flushing the buffer should be delayed while a transaction is
+     * active. Defaults to {@code true}.
     */
     private boolean transactional = FlatFileItemWriter.DEFAULT_TRANSACTIONAL;

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JDBCSingleStepDataSourceAutoConfiguration.java

Lines changed: 12 additions & 7 deletions

@@ -29,9 +29,10 @@
 import org.springframework.context.annotation.Primary;
 
 /**
- * Establishes the default {@link DataSource} for the Task when creating a {@link DataSource}
- * for {@link org.springframework.batch.item.database.JdbcCursorItemReader}
- * or {@link org.springframework.batch.item.database.JdbcBatchItemWriter}.
+ * Establishes the default {@link DataSource} for the Task when creating a
+ * {@link DataSource} for
+ * {@link org.springframework.batch.item.database.JdbcCursorItemReader} or
+ * {@link org.springframework.batch.item.database.JdbcBatchItemWriter}.
 *
 * @author Glenn Renfro
 * @since 3.0
@@ -44,7 +45,8 @@ public TaskConfigurer myTaskConfigurer(DataSource dataSource) {
         return new DefaultTaskConfigurer(dataSource);
     }
 
-    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true", matchIfMissing = true)
+    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true",
+            matchIfMissing = true)
     @ConditionalOnMissingBean(name = "springDataSourceProperties")
     @Bean(name = "springDataSourceProperties")
     @ConfigurationProperties("spring.datasource")
@@ -53,11 +55,14 @@ public DataSourceProperties springDataSourceProperties() {
         return new DataSourceProperties();
     }
 
-    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true", matchIfMissing = true)
+    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true",
+            matchIfMissing = true)
     @Bean(name = "springDataSource")
     @Primary
-    public DataSource dataSource(@Qualifier("springDataSourceProperties")DataSourceProperties springDataSourceProperties) {
-        DataSource dataSource = springDataSourceProperties.initializeDataSourceBuilder().build();
+    public DataSource dataSource(
+            @Qualifier("springDataSourceProperties") DataSourceProperties springDataSourceProperties) {
+        DataSource dataSource = springDataSourceProperties.initializeDataSourceBuilder().build();
         return dataSource;
     }
+
 }
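
Because springDataSourceProperties above is guarded by @ConditionalOnMissingBean, an application can point the batch reader/writer at a different database by defining a bean with the same name; the dataSource bean then builds from it via the @Qualifier shown above. A hedged sketch with illustrative connection values only:

import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class BatchDataSourceOverrideSketch {

    @Bean(name = "springDataSourceProperties")
    DataSourceProperties springDataSourceProperties() {
        DataSourceProperties properties = new DataSourceProperties();
        // Illustrative values only; normally these are bound from spring.datasource.*.
        properties.setUrl("jdbc:h2:mem:task");
        properties.setUsername("sa");
        return properties;
    }

}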

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcBatchItemWriterAutoConfiguration.java

Lines changed: 12 additions & 12 deletions

@@ -54,8 +54,7 @@
 @Import(JDBCSingleStepDataSourceAutoConfiguration.class)
 public class JdbcBatchItemWriterAutoConfiguration {
 
-    private static final Log logger = LogFactory
-            .getLog(JdbcBatchItemWriterAutoConfiguration.class);
+    private static final Log logger = LogFactory.getLog(JdbcBatchItemWriterAutoConfiguration.class);
 
     @Autowired(required = false)
     private ItemPreparedStatementSetter itemPreparedStatementSetter;
@@ -70,8 +69,7 @@ public class JdbcBatchItemWriterAutoConfiguration {
 
     private DataSource dataSource;
 
-    public JdbcBatchItemWriterAutoConfiguration(DataSource dataSource,
-            JdbcBatchItemWriterProperties properties) {
+    public JdbcBatchItemWriterAutoConfiguration(DataSource dataSource, JdbcBatchItemWriterProperties properties) {
         this.dataSource = dataSource;
         this.properties = properties;
     }
@@ -91,12 +89,10 @@ public JdbcBatchItemWriter<Map<String, Object>> itemWriter() {
         JdbcBatchItemWriterBuilder<Map<String, Object>> jdbcBatchItemWriterBuilder = new JdbcBatchItemWriterBuilder<Map<String, Object>>()
                 .dataSource(writerDataSource).sql(this.properties.getSql());
         if (this.itemPreparedStatementSetter != null) {
-            jdbcBatchItemWriterBuilder
-                    .itemPreparedStatementSetter(this.itemPreparedStatementSetter);
+            jdbcBatchItemWriterBuilder.itemPreparedStatementSetter(this.itemPreparedStatementSetter);
         }
         else if (this.itemSqlParameterSourceProvider != null) {
-            jdbcBatchItemWriterBuilder
-                    .itemSqlParameterSourceProvider(this.itemSqlParameterSourceProvider);
+            jdbcBatchItemWriterBuilder.itemSqlParameterSourceProvider(this.itemSqlParameterSourceProvider);
         }
         else {
             jdbcBatchItemWriterBuilder.columnMapped();
@@ -105,17 +101,21 @@ else if (this.itemSqlParameterSourceProvider != null) {
         return jdbcBatchItemWriterBuilder.build();
     }
 
-    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable", havingValue = "true")
+    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable",
+            havingValue = "true")
     @Bean(name = "jdbcBatchItemWriterDataSourceProperties")
     @ConfigurationProperties("jdbcbatchitemwriter.datasource")
     public DataSourceProperties jdbcBatchItemWriterDataSourceProperties() {
         return new DataSourceProperties();
     }
 
-    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable", havingValue = "true")
+    @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable",
+            havingValue = "true")
     @Bean(name = "jdbcBatchItemWriterSpringDataSource")
-    public DataSource writerDataSource(@Qualifier("jdbcBatchItemWriterDataSourceProperties") DataSourceProperties writerDataSourceProperties) {
-        DataSource result = writerDataSourceProperties.initializeDataSourceBuilder().build();
+    public DataSource writerDataSource(
+            @Qualifier("jdbcBatchItemWriterDataSourceProperties") DataSourceProperties writerDataSourceProperties) {
+        DataSource result = writerDataSourceProperties.initializeDataSourceBuilder().build();
         return result;
     }
+
 }
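
The writer falls back to column-mapped binding unless one of the optional collaborators above is present. As a hedged sketch (assumed usage, not part of this commit), an ItemSqlParameterSourceProvider bean like this exposes each map entry as a named parameter for the configured SQL statement:

import java.util.Map;

import org.springframework.batch.item.database.ItemSqlParameterSourceProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;

@Configuration
class JdbcWriterParameterSourceSketch {

    @Bean
    ItemSqlParameterSourceProvider<Map<String, Object>> itemSqlParameterSourceProvider() {
        // Each map key becomes a named parameter (:key) in the INSERT statement.
        return MapSqlParameterSource::new;
    }

}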

spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcBatchItemWriterProperties.java

Lines changed: 2 additions & 2 deletions

@@ -39,8 +39,8 @@ public class JdbcBatchItemWriterProperties {
     private String sql;
 
     /**
-     * If set to {@code true}, confirms that every insert results in the update of at least one
-     * row in the database. Defaults to {@code true}.
+     * If set to {@code true}, confirms that every insert results in the update of at
+     * least one row in the database. Defaults to {@code true}.
     */
     private boolean assertUpdates = true;
