
Revert "[core] Introduce file.compression (apache#914)"
This reverts commit 9965bf0
tsreaper committed Apr 17, 2023
1 parent de458dc commit f44345b
Showing 19 changed files with 35 additions and 110 deletions.
@@ -20,6 +20,8 @@

import org.apache.paimon.fs.PositionOutputStream;

import javax.annotation.Nullable;

import java.io.IOException;

/** A factory to create {@link FormatWriter} for file. */
@@ -33,5 +35,9 @@ public interface FormatWriterFactory {
* @throws IOException Thrown if the writer cannot be opened, or if the output stream throws an
* exception.
*/
FormatWriter create(PositionOutputStream out, String compression) throws IOException;
FormatWriter create(PositionOutputStream out, @Nullable String compression) throws IOException;

default FormatWriter create(PositionOutputStream out) throws IOException {
return create(out, null);
}
}
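
After the revert, the compression argument is only a @Nullable hint: passing null, or calling the new single-argument default, leaves the choice of codec to the format implementation. A minimal caller sketch under that contract, assuming a FileFormat, RowType, FileIO and Path are already in scope (those variable names are illustrative, not part of the patch):

    // Sketch only: write rows without naming a codec; null lets the format
    // implementation fall back to its own default compression.
    FormatWriterFactory factory = fileFormat.createWriterFactory(rowType);
    PositionOutputStream out = fileIO.newOutputStream(path, false);
    FormatWriter writer = factory.create(out);   // equivalent to factory.create(out, null)
    for (InternalRow row : rows) {
        writer.addElement(row);
    }
    // finishing the writer and closing the stream are omitted here, as in the tests below
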
@@ -74,7 +74,7 @@ public void testExtract() throws Exception {
FormatWriterFactory writerFactory = format.createWriterFactory(rowType);
Path path = new Path(tempDir.toString() + "/test");
PositionOutputStream out = new LocalFileIO().newOutputStream(path, false);
FormatWriter writer = writerFactory.create(out, "LZ4");
FormatWriter writer = writerFactory.create(out);

List<GenericRow> data = createData(rowType);
for (GenericRow row : data) {
12 changes: 0 additions & 12 deletions paimon-core/src/main/java/org/apache/paimon/CoreOptions.java
@@ -113,14 +113,6 @@ public class CoreOptions implements Serializable {
+ "could be NONE, ZLIB, SNAPPY, LZO, LZ4, for parquet file format, the compression value could be "
+ "UNCOMPRESSED, SNAPPY, GZIP, LZO, BROTLI, LZ4, ZSTD.");

public static final ConfigOption<String> FILE_COMPRESSION =
key("file.compression")
.stringType()
.defaultValue("LZ4")
.withDescription(
"Default file compression format, can be overridden by "
+ FILE_COMPRESSION_PER_LEVEL.key());

public static final ConfigOption<FileFormatType> MANIFEST_FORMAT =
key("manifest.format")
.enumType(FileFormatType.class)
@@ -681,10 +673,6 @@ public Map<Integer, String> fileCompressionPerLevel() {
.collect(Collectors.toMap(e -> Integer.valueOf(e.getKey()), Map.Entry::getValue));
}

public String fileCompression() {
return options.get(FILE_COMPRESSION);
}

public int snapshotNumRetainMin() {
return options.get(SNAPSHOT_NUM_RETAINED_MIN);
}
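
With file.compression removed, the per-level option (FILE_COMPRESSION_PER_LEVEL, referenced above) is the only compression knob left in CoreOptions, and a level without an entry now falls back to the format's own default rather than to a table-wide codec. A hedged configuration sketch; the "level:codec" value syntax and building CoreOptions from a plain key/value map are assumptions, not shown in this diff:

    // Assumed usage: build CoreOptions from raw key/value pairs and read the per-level map.
    Map<String, String> conf = new HashMap<>();
    conf.put(CoreOptions.FILE_COMPRESSION_PER_LEVEL.key(), "0:LZ4,5:ZLIB"); // value syntax assumed
    CoreOptions coreOptions = new CoreOptions(conf);
    Map<Integer, String> perLevel = coreOptions.fileCompressionPerLevel();  // e.g. {0=LZ4, 5=ZLIB}
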
@@ -61,7 +61,6 @@ public class AppendOnlyWriter implements RecordWriter<InternalRow> {
private final List<DataFileMeta> compactBefore;
private final List<DataFileMeta> compactAfter;
private final LongCounter seqNumCounter;
private final String fileCompression;

private RowDataRollingFileWriter writer;

@@ -75,8 +74,7 @@ public AppendOnlyWriter(
CompactManager compactManager,
boolean forceCompact,
DataFilePathFactory pathFactory,
@Nullable CommitIncrement increment,
String fileCompression) {
@Nullable CommitIncrement increment) {
this.fileIO = fileIO;
this.schemaId = schemaId;
this.fileFormat = fileFormat;
@@ -89,7 +87,6 @@ public AppendOnlyWriter(
this.compactBefore = new ArrayList<>();
this.compactAfter = new ArrayList<>();
this.seqNumCounter = new LongCounter(maxSequenceNumber + 1);
this.fileCompression = fileCompression;

this.writer = createRollingRowWriter();

@@ -172,8 +169,7 @@ private RowDataRollingFileWriter createRollingRowWriter() {
targetFileSize,
writeSchema,
pathFactory,
seqNumCounter,
fileCompression);
seqNumCounter);
}

private void trySyncLatestCompaction(boolean blocking)
@@ -18,7 +18,6 @@

package org.apache.paimon.io;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.KeyValue;
import org.apache.paimon.KeyValueSerializer;
import org.apache.paimon.annotation.VisibleForTesting;
@@ -47,7 +46,6 @@ public class KeyValueFileWriterFactory {
private final DataFilePathFactory pathFactory;
private final long suggestedFileSize;
private final Map<Integer, String> levelCompressions;
private final String fileCompression;

private KeyValueFileWriterFactory(
FileIO fileIO,
@@ -58,8 +56,7 @@ private KeyValueFileWriterFactory(
@Nullable FileStatsExtractor fileStatsExtractor,
DataFilePathFactory pathFactory,
long suggestedFileSize,
Map<Integer, String> levelCompressions,
String fileCompression) {
Map<Integer, String> levelCompressions) {
this.fileIO = fileIO;
this.schemaId = schemaId;
this.keyType = keyType;
@@ -69,7 +66,6 @@ private KeyValueFileWriterFactory(
this.pathFactory = pathFactory;
this.suggestedFileSize = suggestedFileSize;
this.levelCompressions = levelCompressions;
this.fileCompression = fileCompression;
}

public RowType keyType() {
@@ -92,11 +88,7 @@ public RollingFileWriter<KeyValue, DataFileMeta> createRollingMergeTreeFileWrite
}

private String getCompression(int level) {
if (null == levelCompressions) {
return fileCompression;
} else {
return levelCompressions.getOrDefault(level, fileCompression);
}
return null == levelCompressions ? null : levelCompressions.get(level);
}

public RollingFileWriter<KeyValue, DataFileMeta> createRollingChangelogFileWriter(int level) {
@@ -168,18 +160,6 @@ private Builder(

public KeyValueFileWriterFactory build(
BinaryRow partition, int bucket, Map<Integer, String> levelCompressions) {
return build(
partition,
bucket,
levelCompressions,
CoreOptions.FILE_COMPRESSION.defaultValue());
}

public KeyValueFileWriterFactory build(
BinaryRow partition,
int bucket,
Map<Integer, String> levelCompressions,
String fileCompression) {
RowType recordType = KeyValue.schema(keyType, valueType);
return new KeyValueFileWriterFactory(
fileIO,
@@ -190,8 +170,7 @@ public KeyValueFileWriterFactory build(
fileFormat.createStatsExtractor(recordType).orElse(null),
pathFactory.createDataFilePathFactory(partition, bucket),
suggestedFileSize,
levelCompressions,
fileCompression);
levelCompressions);
}
}
}
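
The net effect in this factory: getCompression(level) now returns null whenever no per-level entry matches, and by the @Nullable contract in FormatWriterFactory the format-side writer factory is expected to substitute its own default codec. A hypothetical, self-contained illustration of that contract (class and method names, and the fallback mechanism, are assumptions rather than code from this patch):

    import javax.annotation.Nullable;
    import java.util.Map;

    class CompressionLookup {
        /** Same shape as KeyValueFileWriterFactory#getCompression after the revert. */
        @Nullable
        static String forLevel(@Nullable Map<Integer, String> levelCompressions, int level) {
            return levelCompressions == null ? null : levelCompressions.get(level);
        }

        /** What a FormatWriterFactory implementation is expected to do with a null request. */
        static String effectiveCodec(@Nullable String requested, String formatDefault) {
            return requested != null ? requested : formatDefault;
        }
    }
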
@@ -51,16 +51,8 @@ public RowDataFileWriter(
RowType writeSchema,
@Nullable FileStatsExtractor fileStatsExtractor,
long schemaId,
LongCounter seqNumCounter,
String fileCompression) {
super(
fileIO,
factory,
path,
Function.identity(),
writeSchema,
fileStatsExtractor,
fileCompression);
LongCounter seqNumCounter) {
super(fileIO, factory, path, Function.identity(), writeSchema, fileStatsExtractor, null);
this.schemaId = schemaId;
this.seqNumCounter = seqNumCounter;
this.statsArraySerializer = new FieldStatsArraySerializer(writeSchema);
@@ -35,8 +35,7 @@ public RowDataRollingFileWriter(
long targetFileSize,
RowType writeSchema,
DataFilePathFactory pathFactory,
LongCounter seqNumCounter,
String fileCompression) {
LongCounter seqNumCounter) {
super(
() ->
new RowDataFileWriter(
@@ -46,8 +45,7 @@ public RowDataRollingFileWriter(
writeSchema,
fileFormat.createStatsExtractor(writeSchema).orElse(null),
schemaId,
seqNumCounter,
fileCompression),
seqNumCounter),
targetFileSize);
}
}
@@ -18,7 +18,6 @@

package org.apache.paimon.manifest;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.annotation.VisibleForTesting;
import org.apache.paimon.format.FieldStatsCollector;
import org.apache.paimon.format.FileFormat;
@@ -83,11 +82,7 @@ public long suggestedFileSize() {
public List<ManifestFileMeta> write(List<ManifestEntry> entries) {
RollingFileWriter<ManifestEntry, ManifestFileMeta> writer =
new RollingFileWriter<>(
() ->
new ManifestEntryWriter(
writerFactory,
pathFactory.newPath(),
CoreOptions.FILE_COMPRESSION.defaultValue()),
() -> new ManifestEntryWriter(writerFactory, pathFactory.newPath()),
suggestedFileSize);
try {
writer.write(entries);
@@ -107,8 +102,8 @@ private class ManifestEntryWriter extends SingleFileWriter<ManifestEntry, Manife
private long numDeletedFiles = 0;
private long schemaId = Long.MIN_VALUE;

ManifestEntryWriter(FormatWriterFactory factory, Path path, String fileCompression) {
super(ManifestFile.this.fileIO, factory, path, serializer::toRow, fileCompression);
ManifestEntryWriter(FormatWriterFactory factory, Path path) {
super(ManifestFile.this.fileIO, factory, path, serializer::toRow, null);

this.partitionStatsCollector = new FieldStatsCollector(partitionType);
this.partitionStatsSerializer = new FieldStatsArraySerializer(partitionType);
@@ -18,7 +18,6 @@

package org.apache.paimon.manifest;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.format.FileFormat;
import org.apache.paimon.format.FormatReaderFactory;
import org.apache.paimon.format.FormatWriter;
@@ -65,8 +64,7 @@ public String write(List<ManifestFileMeta> metas) {
Path path = pathFactory.newPath();
try {
try (PositionOutputStream out = fileIO.newOutputStream(path, false)) {
FormatWriter writer =
writerFactory.create(out, CoreOptions.FILE_COMPRESSION.defaultValue());
FormatWriter writer = writerFactory.create(out);
try {
for (ManifestFileMeta record : metas) {
writer.addElement(serializer.toRow(record));
@@ -64,7 +64,6 @@ public class AppendOnlyFileStoreWrite extends AbstractFileStoreWrite<InternalRow
private final boolean commitForceCompact;
private final boolean skipCompaction;
private final boolean assertDisorder;
private final String fileCompression;

public AppendOnlyFileStoreWrite(
FileIO fileIO,
@@ -89,7 +88,6 @@ public AppendOnlyFileStoreWrite(
this.commitForceCompact = options.commitForceCompact();
this.skipCompaction = options.writeOnly();
this.assertDisorder = options.toConfiguration().get(APPEND_ONLY_ASSERT_DISORDER);
this.fileCompression = options.fileCompression();
}

@Override
@@ -114,7 +112,6 @@ protected RecordWriter<InternalRow> createWriter(
targetFileSize,
compactRewriter(partition, bucket),
assertDisorder);

return new AppendOnlyWriter(
fileIO,
schemaId,
@@ -125,8 +122,7 @@ protected RecordWriter<InternalRow> createWriter(
compactManager,
commitForceCompact,
factory,
restoreIncrement,
fileCompression);
restoreIncrement);
}

private AppendOnlyCompactManager.CompactRewriter compactRewriter(
Expand All @@ -143,8 +139,7 @@ private AppendOnlyCompactManager.CompactRewriter compactRewriter(
targetFileSize,
rowType,
pathFactory.createDataFilePathFactory(partition, bucket),
new LongCounter(toCompact.get(0).minSequenceNumber()),
fileCompression);
new LongCounter(toCompact.get(0).minSequenceNumber()));
rewriter.write(
new RecordReaderIterator<>(
read.createReader(
@@ -135,11 +135,7 @@ protected MergeTreeWriter createWriter(
}

KeyValueFileWriterFactory writerFactory =
writerFactoryBuilder.build(
partition,
bucket,
options.fileCompressionPerLevel(),
options.fileCompression());
writerFactoryBuilder.build(partition, bucket, options.fileCompressionPerLevel());
Comparator<InternalRow> keyComparator = keyComparatorSupplier.get();
Levels levels = new Levels(keyComparator, restoreFiles, options.numLevels());
UniversalCompaction universalCompaction =
@@ -198,11 +194,7 @@ private MergeTreeCompactRewriter createRewriter(
BinaryRow partition, int bucket, Comparator<InternalRow> keyComparator, Levels levels) {
KeyValueFileReaderFactory readerFactory = readerFactoryBuilder.build(partition, bucket);
KeyValueFileWriterFactory writerFactory =
writerFactoryBuilder.build(
partition,
bucket,
options.fileCompressionPerLevel(),
options.fileCompression());
writerFactoryBuilder.build(partition, bucket, options.fileCompressionPerLevel());
switch (options.changelogProducer()) {
case FULL_COMPACTION:
return new FullChangelogMergeTreeCompactRewriter(
@@ -59,9 +59,7 @@ public void testWriteRead(@TempDir java.nio.file.Path tempDir) throws IOExceptio
expected.add(GenericRow.of(2, 22));
expected.add(GenericRow.of(3, 33));
PositionOutputStream out = LocalFileIO.create().newOutputStream(path, false);
FormatWriter writer =
avro.createWriterFactory(rowType)
.create(out, CoreOptions.FILE_COMPRESSION.defaultValue());
FormatWriter writer = avro.createWriterFactory(rowType).create(out);
for (InternalRow row : expected) {
writer.addElement(row);
}
@@ -85,10 +83,7 @@ public void testUnsupportedOption(@TempDir java.nio.file.Path tempDir) {
Path path = new Path(tempDir.toUri().toString(), "1.avro");
Assertions.assertThrows(
RuntimeException.class,
() ->
writerFactory.create(
LocalFileIO.create().newOutputStream(path, false),
CoreOptions.FILE_COMPRESSION.defaultValue()),
() -> writerFactory.create(LocalFileIO.create().newOutputStream(path, false)),
"Unrecognized codec: _unsupported");
}

@@ -316,8 +316,7 @@ private Pair<AppendOnlyWriter, List<DataFileMeta>> createWriter(
compactManager,
forceCompact,
pathFactory,
null,
CoreOptions.FILE_COMPRESSION.defaultValue());
null);
return Pair.of(writer, compactManager.allFiles());
}

@@ -18,7 +18,6 @@

package org.apache.paimon.format;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.append.AppendOnlyCompactManager;
import org.apache.paimon.append.AppendOnlyWriter;
import org.apache.paimon.data.BinaryString;
@@ -73,8 +72,7 @@ public void testFileSuffix(@TempDir java.nio.file.Path tempDir) throws Exception
null, toCompact, 4, 10, 10, null, false), // not used
false,
dataFilePathFactory,
null,
CoreOptions.FILE_COMPRESSION.defaultValue());
null);
appendOnlyWriter.write(
GenericRow.of(1, BinaryString.fromString("aaa"), BinaryString.fromString("1")));
CommitIncrement increment = appendOnlyWriter.prepareCommit(true);
@@ -18,7 +18,6 @@

package org.apache.paimon.format;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.options.Options;
import org.apache.paimon.predicate.Predicate;
@@ -48,11 +47,7 @@ public FormatReaderFactory createReaderFactory(
@Override
public FormatWriterFactory createWriterFactory(RowType type) {
return (PositionOutputStream, level) -> {
FormatWriter wrapped =
format.createWriterFactory(type)
.create(
PositionOutputStream,
CoreOptions.FILE_COMPRESSION.defaultValue());
FormatWriter wrapped = format.createWriterFactory(type).create(PositionOutputStream);
return new FormatWriter() {
@Override
public void addElement(InternalRow rowData) throws IOException {
