Feature/ant 2636 parquet avro #26

Draft. Wants to merge 6 commits into base: develop.
68 changes: 68 additions & 0 deletions pom.xml
@@ -28,6 +28,12 @@
</scm>
<properties>
<java.version>21</java.version>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<parquet.version>1.15.0</parquet.version>
<avro.version>1.11.4</avro.version>
<hadoop.version>3.4.1</hadoop.version>

<open-api-doc.version>2.0.3</open-api-doc.version>
<sonar.organization>antaressimulatorteam</sonar.organization>
<sonar.projectKey>AntaresSimulatorTeam_antares-datamanager-back</sonar.projectKey>
@@ -162,6 +168,36 @@
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>

<!-- Parquet Avro dependency -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>${parquet.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>

</dependencies>

<build>
@@ -220,6 +256,38 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M9</version>
</plugin>

<!-- Avro Plugin -->
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>idl-protocol</goal>
<goal>schema</goal>
</goals>
</execution>
</executions>
<configuration>
<sourceDirectory>src/main/resources/avro</sourceDirectory>
<outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
<stringType>String</stringType>
<fieldVisibility>PRIVATE</fieldVisibility>
</configuration>
</plugin>

<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-compiler-plugin</artifactId>-->
<!-- <version>${maven-compiler-plugin.version}</version>-->
<!-- <configuration>-->
<!-- <source>${maven.compiler.source}</source>-->
<!-- <target>${maven.compiler.target}</target>-->
<!-- </configuration>-->
<!-- </plugin>-->
</plugins>
</build>
<repositories>
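
The avro-maven-plugin above compiles the IDL under src/main/resources/avro into Java record classes under target/generated-sources during the generate-sources phase; those generated classes are what the new utility classes below depend on. A minimal sketch to check that code generation ran and the classes are on the compile path, assuming mvn generate-sources (or a full build) has been executed; the SchemaCheck class is illustrative only and not part of this PR:

package com.rte_france.antares.datamanager_back.util;

public class SchemaCheck {
    public static void main(String[] args) {
        // getClassSchema() is the generated schema accessor that TimeSeriesWriter
        // later passes to AvroParquetWriter.withSchema(...); printing it confirms
        // the IDL was compiled as expected.
        System.out.println(TimeSeriesMatrix.getClassSchema().toString(true));
    }
}
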
New file: TimeSeriesReader.java (27 additions)
@@ -0,0 +1,27 @@
package com.rte_france.antares.datamanager_back.util;

import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.io.LocalInputFile;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;

public class TimeSeriesReader {
public static TimeSeriesMatrix readFromParquet(Path filePath) throws IOException {
Objects.requireNonNull(filePath);
Review comment (contributor, PR author): File.exists needs to be checked too.

if (Files.notExists(filePath)) {
throw new IllegalArgumentException("File " + filePath + " doesn't exist");
}
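// LocalInputFile wraps the java.nio path directly, so no Hadoop FileSystem or Configuration is needed for local reads.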

var inputFile = new LocalInputFile(filePath);
try (var reader = AvroParquetReader.<TimeSeriesMatrix>builder(inputFile).build()) {
var matrix = reader.read();
if (matrix == null) {
throw new IOException("The Parquet file is empty or does not contain a TimeSeriesMatrix");
}
return matrix;
}
}
}
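
A short usage sketch for the reader above. The getRows() and getColumns() accessors are assumed to be the standard Avro-generated getters for the rows and columns fields declared in TimeSeriesMatrix.avdl; they are not shown in this diff:

package com.rte_france.antares.datamanager_back.util;

import java.io.IOException;
import java.nio.file.Path;

public class ReaderExample {
    public static void main(String[] args) throws IOException {
        // Load a matrix from a Parquet file given on the command line.
        var matrix = TimeSeriesReader.readFromParquet(Path.of(args[0]));
        // getRows()/getColumns() are assumed Avro-generated accessors (not in this diff).
        System.out.println("rows = " + matrix.getRows().size());
        System.out.println("columns in first row = " + matrix.getRows().get(0).getColumns().size());
    }
}
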
New file: TimeSeriesWriter.java (80 additions)
@@ -0,0 +1,80 @@
package com.rte_france.antares.datamanager_back.util;

import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.io.LocalOutputFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Collectors;

public class TimeSeriesWriter {
private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesWriter.class);

public static TimeSeriesMatrix readFromTxt(Path filePath) throws IOException {
Objects.requireNonNull(filePath);
try (var lines = Files.lines(filePath)) {
var rows = lines.map(line -> {
var values = line.trim().split("\\s+");
var doubles = Arrays.stream(values)
.map(Double::parseDouble)
.toList();
return new TimeSeriesRow(doubles);
}).collect(Collectors.toList());
return new TimeSeriesMatrix(rows);
}
}

public static void writeToParquet(TimeSeriesMatrix matrix, Path outputPath) throws IOException {
Objects.requireNonNull(matrix);
Objects.requireNonNull(outputPath);
if (!outputPath.toString().endsWith(".parquet")) {
outputPath = outputPath.resolveSibling(outputPath.getFileName() + ".parquet");
}

var outputFile = new LocalOutputFile(outputPath);
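// ZSTD compression combined with byte-stream-split encoding targets the double-valued time series columns; byte-stream-split is specifically designed for floating-point data.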
try (var writer = AvroParquetWriter
.<TimeSeriesMatrix>builder(outputFile)
.withSchema(TimeSeriesMatrix.getClassSchema())
.withCompressionCodec(CompressionCodecName.ZSTD)
.withByteStreamSplitEncoding(true)
.withRowGroupSize((long) ParquetWriter.DEFAULT_BLOCK_SIZE)
.withPageSize(ParquetWriter.DEFAULT_PAGE_SIZE)
.withWriteMode(ParquetFileWriter.Mode.OVERWRITE)
.build()) {

writer.write(matrix);
}
}

public static void main(String[] args) {
try {
var matrix = TimeSeriesWriter.readFromTxt(Path.of("src/main/resources/INPUT/load/load_fr_2030-2031.txt"));

var startSerialization = System.nanoTime();
var parquetFilePath = Path.of("src/main/resources/INPUT/load/output_test.parquet");
TimeSeriesWriter.writeToParquet(matrix, parquetFilePath);
var endSerialization = System.nanoTime();
var serializationTime = (endSerialization - startSerialization) / 1_000_000_000.0;
var fileSize = Files.size(parquetFilePath);

var startDeserialization = System.nanoTime();
var deserializedMatrix = TimeSeriesReader.readFromParquet(parquetFilePath);
var endDeserialization = System.nanoTime();
var deserializationTime = (endDeserialization - startDeserialization) / 1_000_000_000.0;

LOGGER.info("Serialization time: {}", serializationTime);
LOGGER.info("Deserialization time: {}", deserializationTime);
LOGGER.info(".parquet file size (bytes): {}", fileSize);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
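
Since junit-jupiter is already declared as a test dependency in this pom, a round-trip test could complement the benchmark in main(). A sketch, reusing the all-args constructors already exercised in readFromTxt; the getRows()/getColumns() getters are assumed Avro-generated names and are not shown in this diff:

package com.rte_france.antares.datamanager_back.util;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.nio.file.Path;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

class TimeSeriesRoundTripTest {

    @TempDir
    Path tempDir;

    @Test
    void writeThenReadPreservesValues() throws Exception {
        // Build a small matrix with the constructors used in readFromTxt.
        var matrix = new TimeSeriesMatrix(List.of(
                new TimeSeriesRow(List.of(1.0, 2.0, 3.0)),
                new TimeSeriesRow(List.of(4.0, 5.0, 6.0))));

        var parquetFile = tempDir.resolve("matrix.parquet");
        TimeSeriesWriter.writeToParquet(matrix, parquetFile);

        var readBack = TimeSeriesReader.readFromParquet(parquetFile);
        // getRows()/getColumns() are assumed Avro-generated accessors.
        assertEquals(matrix.getRows().size(), readBack.getRows().size());
        assertEquals(matrix.getRows().get(0).getColumns(), readBack.getRows().get(0).getColumns());
    }
}
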
10 changes: 10 additions & 0 deletions src/main/resources/avro/TimeSeriesMatrix.avdl
@@ -0,0 +1,10 @@
@namespace("com.rte_france.antares.datamanager_back.util")
protocol TimeSeriesMatrixProtocol {
record TimeSeriesMatrix {
array<TimeSeriesRow> rows;
}

record TimeSeriesRow {
array<double> columns;
}
}
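
For reference, the idl-protocol goal turns this protocol into SpecificRecord classes (TimeSeriesMatrix, TimeSeriesRow). A construction sketch using the builder API Avro codegen typically provides; newBuilder(), setColumns() and setRows() are assumed generated names, while the all-args constructors are the variant actually used in this PR:

package com.rte_france.antares.datamanager_back.util;

import java.util.List;

public class BuilderExample {
    public static TimeSeriesMatrix sampleMatrix() {
        // newBuilder()/setColumns()/setRows() are assumed standard Avro codegen names.
        var row = TimeSeriesRow.newBuilder()
                .setColumns(List.of(0.5, 1.5))
                .build();
        return TimeSeriesMatrix.newBuilder()
                .setRows(List.of(row))
                .build();
    }
}
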