Skip to content

Commit

Permalink
improved logging
Browse files Browse the repository at this point in the history
  • Loading branch information
jruaux committed Mar 4, 2021
1 parent c90ca6d commit 6ca98aa
Show file tree
Hide file tree
Showing 61 changed files with 1,162 additions and 1,190 deletions.
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ subprojects {
implementation project(':core')
testImplementation project(':test')
implementation 'org.slf4j:slf4j-jdk14'
implementation 'org.slf4j:log4j-over-slf4j'
}
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,17 @@
package com.redislabs.riot.db;

import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.NoArgsConstructor;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import picocli.CommandLine.Option;

import javax.sql.DataSource;

@Data
public class DatabaseOptions {
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DataSourceOptions {

@Option(names = "--driver", description = "Fully qualified name of the JDBC driver", paramLabel = "<class>")
private String driver;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,33 +2,36 @@

import com.redislabs.riot.AbstractExportCommand;
import com.redislabs.riot.processor.DataStructureMapItemProcessor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import picocli.CommandLine.Command;
import picocli.CommandLine.Mixin;

import javax.sql.DataSource;
import java.util.Map;

@Slf4j
@Command(name = "export", description = "Export to a database")
public class DatabaseExportCommand extends AbstractExportCommand<Map<String, Object>> {

@Mixin
private DatabaseExportOptions options = new DatabaseExportOptions();
private DataSourceOptions dataSourceOptions = DataSourceOptions.builder().build();
@Mixin
private DatabaseExportOptions exportOptions = DatabaseExportOptions.builder().build();

@Override
protected Flow flow() throws Exception {
DataSource dataSource = options.dataSource();
JdbcBatchItemWriterBuilder<Map<String, Object>> builder = new JdbcBatchItemWriterBuilder<>();
builder.itemSqlParameterSourceProvider(MapSqlParameterSource::new);
builder.dataSource(dataSource);
builder.sql(options.getSql());
builder.assertUpdates(options.isAssertUpdates());
log.info("Creating data source {}", dataSourceOptions);
builder.dataSource(dataSourceOptions.dataSource());
builder.sql(exportOptions.getSql());
builder.assertUpdates(exportOptions.isAssertUpdates());
JdbcBatchItemWriter<Map<String, Object>> writer = builder.build();
writer.afterPropertiesSet();
DataStructureMapItemProcessor processor = DataStructureMapItemProcessor.builder().keyRegex(options.getKeyRegex()).build();
DataStructureMapItemProcessor processor = DataStructureMapItemProcessor.builder().keyRegex(exportOptions.getKeyRegex()).build();
return flow(step(processor, writer).build());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,10 @@
import picocli.CommandLine;

@Data
@EqualsAndHashCode(callSuper = true)
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DatabaseExportOptions extends DatabaseOptions {
public class DatabaseExportOptions {

@CommandLine.Parameters(arity = "1", description = "SQL INSERT statement.", paramLabel = "SQL")
private String sql;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,34 +17,36 @@
public class DatabaseImportCommand extends AbstractImportCommand<Map<String, Object>, Map<String, Object>> {

@Mixin
private DatabaseImportOptions options = new DatabaseImportOptions();
private DataSourceOptions options = DataSourceOptions.builder().build();
@Mixin
private KeyValueProcessingOptions processingOptions = new KeyValueProcessingOptions();
private DatabaseImportOptions importOptions = DatabaseImportOptions.builder().build();
@Mixin
private KeyValueProcessingOptions processingOptions = KeyValueProcessingOptions.builder().build();

@Override
protected Flow flow() throws Exception {
DataSource dataSource = options.dataSource();
JdbcCursorItemReaderBuilder<Map<String, Object>> builder = new JdbcCursorItemReaderBuilder<>();
builder.saveState(false);
builder.dataSource(dataSource);
if (options.getFetchSize() != null) {
builder.fetchSize(options.getFetchSize());
if (importOptions.getFetchSize() != null) {
builder.fetchSize(importOptions.getFetchSize());
}
if (options.getMaxRows() != null) {
builder.maxRows(options.getMaxRows());
if (importOptions.getMaxRows() != null) {
builder.maxRows(importOptions.getMaxRows());
}
builder.name("database-reader");
if (options.getQueryTimeout() != null) {
builder.queryTimeout(options.getQueryTimeout());
if (importOptions.getQueryTimeout() != null) {
builder.queryTimeout(importOptions.getQueryTimeout());
}
builder.rowMapper(new ColumnMapRowMapper());
builder.sql(options.getSql());
builder.useSharedExtendedConnection(options.isUseSharedExtendedConnection());
builder.verifyCursorPosition(options.isVerifyCursorPosition());
builder.sql(importOptions.getSql());
builder.useSharedExtendedConnection(importOptions.isUseSharedExtendedConnection());
builder.verifyCursorPosition(importOptions.isVerifyCursorPosition());
JdbcCursorItemReader<Map<String, Object>> reader = builder.build();
reader.afterPropertiesSet();
String name = dataSource.getConnection().getMetaData().getDatabaseProductName();
return flow(step("Importing from " + name, reader).build());
return flow(step(name + "-db-import-step", "Importing from " + name, reader).build());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,10 @@
import picocli.CommandLine;

@Data
@EqualsAndHashCode(callSuper = true)
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DatabaseImportOptions extends DatabaseOptions {
public class DatabaseImportOptions {

@CommandLine.Parameters(arity = "1", description = "SQL SELECT statement", paramLabel = "SQL")
private String sql;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

import com.redislabs.riot.AbstractImportCommand;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.item.support.AbstractItemStreamItemReader;
Expand All @@ -15,16 +13,14 @@
import java.util.ArrayList;
import java.util.List;

@Slf4j
@Setter
@Command
public abstract class AbstractFileImportCommand<T> extends AbstractImportCommand<T, T> {

@CommandLine.Parameters(arity = "1..*", description = "One or more files or URLs", paramLabel = "FILE")
private String[] files = new String[0];
@Getter
@CommandLine.Mixin
private FileOptions fileOptions = new FileOptions();
private FileOptions fileOptions = FileOptions.builder().build();

@Override
protected Flow flow() throws Exception {
Expand All @@ -39,7 +35,7 @@ protected Flow flow() throws Exception {
AbstractItemStreamItemReader<T> reader = reader(file, fileType, resource);
String name = FileUtils.filename(resource);
reader.setName(name);
steps.add(step("Importing file " + name, reader).build());
steps.add(step(name + "-file-import-step", "Importing " + name, reader).build());
}
return flow(steps.toArray(new Step[0]));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,15 @@
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.json.JsonItemReader;
import org.springframework.batch.item.redis.DataStructureItemWriter;
import org.springframework.batch.item.redis.support.CommandTimeoutBuilder;
import org.springframework.batch.item.redis.support.DataStructure;
import org.springframework.batch.item.support.AbstractItemStreamItemReader;
import org.springframework.batch.item.xml.XmlItemReader;
import org.springframework.core.io.Resource;
import picocli.CommandLine;
import picocli.CommandLine.Command;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;

Expand All @@ -32,7 +34,7 @@ public class DataStructureFileImportCommand extends AbstractTransferCommand<Data
private String[] files = new String[0];
@Getter
@CommandLine.Mixin
private FileOptions fileOptions = new FileOptions();
private FileOptions fileOptions = FileOptions.builder().build();

@Override
protected Flow flow() throws Exception {
Expand All @@ -43,27 +45,29 @@ protected Flow flow() throws Exception {
FileType fileType = FileUtils.fileType(file);
Resource resource = FileUtils.inputResource(file, fileOptions);
String name = FileUtils.filename(resource);
StepBuilder<DataStructure<String>, DataStructure<String>> step = stepBuilder("Importing file " + name);
AbstractItemStreamItemReader<DataStructure<String>> reader = reader(file, fileType, resource);
AbstractItemStreamItemReader<DataStructure<String>> reader = reader(fileType, resource);
reader.setName(name);
StepBuilder<DataStructure<String>, DataStructure<String>> step = stepBuilder(name + "-datastructure-file-import-step", "Importing " + name);
steps.add(step.reader(reader).processor(processor).writer(writer()).build().build());
}
return flow(steps.toArray(new Step[0]));
}

private ItemWriter<DataStructure<String>> writer() {
if (isCluster()) {
return DataStructureItemWriter.clusterBuilder((GenericObjectPool<StatefulRedisClusterConnection<String, String>>) pool).commandTimeout(getCommandTimeout()).build();
return configureCommandTimeoutBuilder(DataStructureItemWriter.clusterBuilder((GenericObjectPool<StatefulRedisClusterConnection<String, String>>) pool)).build();
}
return DataStructureItemWriter.builder((GenericObjectPool<StatefulRedisConnection<String, String>>) pool).commandTimeout(getCommandTimeout()).build();
return configureCommandTimeoutBuilder(DataStructureItemWriter.builder((GenericObjectPool<StatefulRedisConnection<String, String>>) pool)).build();
}

@SuppressWarnings({"unchecked", "rawtypes"})
protected AbstractItemStreamItemReader<DataStructure<String>> reader(String file, FileType fileType, Resource resource) {
protected AbstractItemStreamItemReader<DataStructure<String>> reader(FileType fileType, Resource resource) {
switch (fileType) {
case JSON:
log.info("Creating JSON data structure reader for file {}", resource);
return (JsonItemReader) FileUtils.jsonReader(resource, DataStructure.class);
case XML:
log.info("Creating XML data structure reader for file {}", resource);
return (XmlItemReader) FileUtils.xmlReader(resource, DataStructure.class);
}
throw new IllegalArgumentException("Unsupported file type: " + fileType);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import com.redislabs.riot.AbstractExportCommand;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.json.JacksonJsonObjectMarshaller;
Expand All @@ -17,16 +17,17 @@

import java.io.IOException;

@Setter
@Slf4j
@Command(name = "export", description = "Export Redis data to a JSON or XML file")
public class FileExportCommand extends AbstractExportCommand<DataStructure<String>> {

@SuppressWarnings("unused")
@CommandLine.Parameters(arity = "1", description = "File path or URL", paramLabel = "FILE")
protected String file;
private String file;
@Mixin
private FileExportOptions options = new FileExportOptions();
private FileExportOptions exportOptions = FileExportOptions.builder().build();
@Mixin
private FileOptions fileOptions = new FileOptions();
private FileOptions fileOptions = FileOptions.builder().build();

@Override
protected Flow flow() throws Exception {
Expand All @@ -40,23 +41,25 @@ private ItemWriter<DataStructure<String>> writer() throws IOException {
case JSON:
JsonResourceItemWriterBuilder<DataStructure<String>> jsonWriterBuilder = new JsonResourceItemWriterBuilder<>();
jsonWriterBuilder.name("json-resource-item-writer");
jsonWriterBuilder.append(options.isAppend());
jsonWriterBuilder.append(exportOptions.isAppend());
jsonWriterBuilder.encoding(fileOptions.getEncoding());
jsonWriterBuilder.jsonObjectMarshaller(new JacksonJsonObjectMarshaller<>());
jsonWriterBuilder.lineSeparator(options.getLineSeparator());
jsonWriterBuilder.lineSeparator(exportOptions.getLineSeparator());
jsonWriterBuilder.resource(resource);
jsonWriterBuilder.saveState(false);
log.info("Creating JSON writer with {} for file {}", exportOptions, file);
return jsonWriterBuilder.build();
case XML:
XmlResourceItemWriterBuilder<DataStructure<String>> xmlWriterBuilder = new XmlResourceItemWriterBuilder<>();
xmlWriterBuilder.name("xml-resource-item-writer");
xmlWriterBuilder.append(options.isAppend());
xmlWriterBuilder.append(exportOptions.isAppend());
xmlWriterBuilder.encoding(fileOptions.getEncoding());
xmlWriterBuilder.xmlObjectMarshaller(xmlMarshaller());
xmlWriterBuilder.lineSeparator(options.getLineSeparator());
xmlWriterBuilder.rootName(options.getRootName());
xmlWriterBuilder.lineSeparator(exportOptions.getLineSeparator());
xmlWriterBuilder.rootName(exportOptions.getRootName());
xmlWriterBuilder.resource(resource);
xmlWriterBuilder.saveState(false);
log.info("Creating XML writer with {} for file {}", exportOptions, file);
return xmlWriterBuilder.build();
default:
throw new IllegalArgumentException("Unsupported file type: " + fileType);
Expand All @@ -65,7 +68,7 @@ private ItemWriter<DataStructure<String>> writer() throws IOException {

private JsonObjectMarshaller<DataStructure<String>> xmlMarshaller() {
XmlMapper mapper = new XmlMapper();
mapper.setConfig(mapper.getSerializationConfig().withRootName(options.getElementName()));
mapper.setConfig(mapper.getSerializationConfig().withRootName(exportOptions.getElementName()));
JacksonJsonObjectMarshaller<DataStructure<String>> marshaller = new JacksonJsonObjectMarshaller<>();
marshaller.setObjectMapper(mapper);
return marshaller;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,18 +1,27 @@
package com.redislabs.riot.file;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.batch.item.file.FlatFileItemWriter;
import picocli.CommandLine;

@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class FileExportOptions {

@CommandLine.Option(names = "--append", description = "Append to file if it exists")
private boolean append;
@Builder.Default
@CommandLine.Option(names = "--root", description = "XML root element tag name (default: ${DEFAULT-VALUE})", paramLabel = "<string>")
private String rootName = "root";
@Builder.Default
@CommandLine.Option(names = "--element", description = "XML element tag name (default: ${DEFAULT-VALUE})", paramLabel = "<string>")
private String elementName = "record";
@Builder.Default
@CommandLine.Option(names = "--line-sep", description = "String to separate lines (default: system default)", paramLabel = "<string>")
private String lineSeparator = FlatFileItemWriter.DEFAULT_LINE_SEPARATOR;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package com.redislabs.riot.file;

import lombok.Data;
import lombok.*;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.Range;
import picocli.CommandLine.Option;
Expand All @@ -9,8 +9,13 @@
import java.util.List;

@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class FileImportOptions {

@Getter
@Builder.Default
@Option(names = "--fields", arity = "1..*", description = "Delimited/FW field names", paramLabel = "<names>")
private List<String> names = new ArrayList<>();
@Option(names = {"-h", "--header"}, description = "Delimited/FW first line contains field names")
Expand All @@ -19,12 +24,20 @@ public class FileImportOptions {
private String delimiter;
@Option(names = "--skip", description = "Delimited/FW lines to skip at start", paramLabel = "<count>")
private Integer linesToSkip;
@Getter
@Builder.Default
@Option(names = "--include", arity = "1..*", description = "Delimited/FW field indices to include (0-based)", paramLabel = "<index>")
private List<Integer> includedFields = new ArrayList<>();
@Getter
@Builder.Default
@Option(names = "--ranges", arity = "1..*", description = "Fixed-width column ranges", paramLabel = "<int>")
private List<Range> columnRanges = new ArrayList<>();
@Getter
@Builder.Default
@Option(names = "--quote", description = "Escape character for delimited files (default: ${DEFAULT-VALUE})", paramLabel = "<char>")
private Character quoteCharacter = DelimitedLineTokenizer.DEFAULT_QUOTE_CHARACTER;
@Getter
@Builder.Default
@Option(names = "--continuation", description = "Line continuation string (default: ${DEFAULT-VALUE})", paramLabel = "<string>")
private String continuationString = "\\";

Expand Down
Loading

0 comments on commit 6ca98aa

Please sign in to comment.