[core] Introduce DeletionVectorIndexFileWriter (#3402)
YannByron authored May 30, 2024
1 parent 4aad10f commit cec1709
Showing 9 changed files with 291 additions and 118 deletions.
6 changes: 6 additions & 0 deletions docs/layouts/shortcodes/generated/core_configuration.html
@@ -194,6 +194,12 @@
<td>Boolean</td>
<td>Whether to enable deletion vectors mode. In this mode, index files containing deletion vectors are generated when data is written, which marks the data for deletion. During read operations, by applying these index files, merging can be avoided.</td>
</tr>
<tr>
<td><h5>deletion-vector.index-file.target-size</h5></td>
<td style="word-wrap: break-word;">2 mb</td>
<td>MemorySize</td>
<td>The target size of a deletion vector index file.</td>
</tr>
<tr>
<td><h5>dynamic-bucket.assigner-parallelism</h5></td>
<td style="word-wrap: break-word;">(none)</td>
10 changes: 10 additions & 0 deletions paimon-common/src/main/java/org/apache/paimon/CoreOptions.java
@@ -1113,6 +1113,12 @@ public class CoreOptions implements Serializable {
+ " vectors are generated when data is written, which marks the data for deletion."
+ " During read operations, by applying these index files, merging can be avoided.");

public static final ConfigOption<MemorySize> DELETION_VECTOR_INDEX_FILE_TARGET_SIZE =
key("deletion-vector.index-file.target-size")
.memoryType()
.defaultValue(MemorySize.ofMebiBytes(2))
.withDescription("The target size of a deletion vector index file.");

public static final ConfigOption<Boolean> DELETION_FORCE_PRODUCE_CHANGELOG =
key("delete.force-produce-changelog")
.booleanType()
@@ -1816,6 +1822,10 @@ public boolean deletionVectorsEnabled() {
return options.get(DELETION_VECTORS_ENABLED);
}

public MemorySize deletionVectorIndexFileTargetSize() {
return options.get(DELETION_VECTOR_INDEX_FILE_TARGET_SIZE);
}

public FileIndexOptions indexColumnsOptions() {
return new FileIndexOptions(this);
}
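A minimal usage sketch for the new option (the class name and the 4 mb override are made up for illustration; it assumes the option can be set through org.apache.paimon.options.Options and that CoreOptions can be constructed from an Options instance, as with other core options):

import org.apache.paimon.CoreOptions;
import org.apache.paimon.options.MemorySize;
import org.apache.paimon.options.Options;

public class DeletionVectorTargetSizeExample {
    public static void main(String[] args) {
        // Override the 2 mb default introduced above with a 4 mb target.
        Options options = new Options();
        options.set(CoreOptions.DELETION_VECTOR_INDEX_FILE_TARGET_SIZE, MemorySize.ofMebiBytes(4));

        CoreOptions coreOptions = new CoreOptions(options);
        // 4 * 1024 * 1024 = 4194304 bytes
        System.out.println(coreOptions.deletionVectorIndexFileTargetSize().getBytes());
    }
}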
@@ -39,6 +39,7 @@
import org.apache.paimon.service.ServiceManager;
import org.apache.paimon.stats.StatsFile;
import org.apache.paimon.stats.StatsFileHandler;
import org.apache.paimon.table.BucketMode;
import org.apache.paimon.table.CatalogEnvironment;
import org.apache.paimon.table.sink.CallbackUtils;
import org.apache.paimon.table.sink.TagCallback;
@@ -146,7 +147,12 @@ public IndexFileHandler newIndexFileHandler() {
pathFactory().indexFileFactory(),
indexManifestFileFactory().create(),
new HashIndexFile(fileIO, pathFactory().indexFileFactory()),
new DeletionVectorsIndexFile(fileIO, pathFactory().indexFileFactory()));
new DeletionVectorsIndexFile(
fileIO,
pathFactory().indexFileFactory(),
bucketMode() == BucketMode.BUCKET_UNAWARE
? options.deletionVectorIndexFileTargetSize()
: MemorySize.ofBytes(Long.MAX_VALUE)));
}

@Override
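The ternary above means only BUCKET_UNAWARE (append) tables get the configured limit; every other bucket mode receives an effectively unlimited target, so its deletion vectors stay in a single index file per write. A compilable sketch of that selection (helper and class names are made up, and HASH_FIXED is assumed to be one of the other BucketMode constants):

import org.apache.paimon.options.MemorySize;
import org.apache.paimon.table.BucketMode;

public class IndexFileTargetSizeSelection {
    // Mirrors the ternary in newIndexFileHandler() above.
    static MemorySize effectiveTargetSize(BucketMode mode, MemorySize configured) {
        return mode == BucketMode.BUCKET_UNAWARE
                ? configured
                : MemorySize.ofBytes(Long.MAX_VALUE);
    }

    public static void main(String[] args) {
        MemorySize configured = MemorySize.ofMebiBytes(2);
        System.out.println(effectiveTargetSize(BucketMode.BUCKET_UNAWARE, configured).getBytes()); // 2097152
        System.out.println(effectiveTargetSize(BucketMode.HASH_FIXED, configured).getBytes());     // 9223372036854775807
    }
}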
@@ -95,16 +95,19 @@ public List<IndexManifestEntry> writeUnchangedDeletionVector() {
Map<String, DeletionFile> dataFileToDeletionFiles =
indexFileToDeletionFiles.get(indexFile);
if (!dataFileToDeletionFiles.isEmpty()) {
IndexFileMeta newIndexFile =
List<IndexFileMeta> newIndexFiles =
indexFileHandler.writeDeletionVectorsIndex(
deletionVectorsIndexFile.readDeletionVector(
dataFileToDeletionFiles));
newIndexEntries.add(
new IndexManifestEntry(
FileKind.ADD,
oldEntry.partition(),
oldEntry.bucket(),
newIndexFile));
newIndexFiles.forEach(
newIndexFile -> {
newIndexEntries.add(
new IndexManifestEntry(
FileKind.ADD,
oldEntry.partition(),
oldEntry.bucket(),
newIndexFile));
});
}

// mark the touched index file as removed.
@@ -0,0 +1,141 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.deletionvectors;

import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.index.IndexFileMeta;
import org.apache.paimon.options.MemorySize;
import org.apache.paimon.utils.Pair;
import org.apache.paimon.utils.PathFactory;
import org.apache.paimon.utils.Preconditions;

import java.io.Closeable;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import static org.apache.paimon.deletionvectors.DeletionVectorsIndexFile.DELETION_VECTORS_INDEX;
import static org.apache.paimon.deletionvectors.DeletionVectorsIndexFile.VERSION_ID_V1;
import static org.apache.paimon.deletionvectors.DeletionVectorsIndexFile.calculateChecksum;

/** Writer for deletion vector index files. */
public class DeletionVectorIndexFileWriter {

private final PathFactory indexPathFactory;
private final FileIO fileIO;
private final long targetSizeInBytes;

public DeletionVectorIndexFileWriter(
FileIO fileIO, PathFactory pathFactory, MemorySize targetSizePerIndexFile) {
this.indexPathFactory = pathFactory;
this.fileIO = fileIO;
this.targetSizeInBytes = targetSizePerIndexFile.getBytes();
}

public List<IndexFileMeta> write(Map<String, DeletionVector> input) throws IOException {
if (input.isEmpty()) {
return emptyIndexFile();
}
List<IndexFileMeta> result = new ArrayList<>();
Iterator<Map.Entry<String, DeletionVector>> iterator = input.entrySet().iterator();
while (iterator.hasNext()) {
result.add(tryWriter(iterator));
}
return result;
}

private IndexFileMeta tryWriter(Iterator<Map.Entry<String, DeletionVector>> iterator)
throws IOException {
SingleIndexFileWriter writer = new SingleIndexFileWriter();
try {
while (iterator.hasNext()) {
Map.Entry<String, DeletionVector> entry = iterator.next();
long currentSize = writer.write(entry.getKey(), entry.getValue());

if (writer.writtenSizeInBytes() + currentSize > targetSizeInBytes) {
break;
}
}
} finally {
writer.close();
}
return writer.writtenIndexFile();
}

private List<IndexFileMeta> emptyIndexFile() throws IOException {
try (SingleIndexFileWriter writer = new SingleIndexFileWriter()) {
return Collections.singletonList(writer.writtenIndexFile());
}
}

private class SingleIndexFileWriter implements Closeable {

private final Path path;

private final DataOutputStream dataOutputStream;

private final LinkedHashMap<String, Pair<Integer, Integer>> dvRanges;

private long writtenSizeInBytes = 0L;

public SingleIndexFileWriter() throws IOException {
this.path = indexPathFactory.newPath();
this.dataOutputStream = new DataOutputStream(fileIO.newOutputStream(path, true));
dataOutputStream.writeByte(VERSION_ID_V1);
this.dvRanges = new LinkedHashMap<>();
}

public long writtenSizeInBytes() {
return this.writtenSizeInBytes;
}

public long write(String key, DeletionVector deletionVector) throws IOException {
Preconditions.checkNotNull(dataOutputStream);
byte[] data = deletionVector.serializeToBytes();
int size = data.length;

dvRanges.put(key, Pair.of(dataOutputStream.size(), size));
dataOutputStream.writeInt(size);
dataOutputStream.write(data);
dataOutputStream.writeInt(calculateChecksum(data));
writtenSizeInBytes += size;
return size;
}

public IndexFileMeta writtenIndexFile() throws IOException {
return new IndexFileMeta(
DELETION_VECTORS_INDEX,
path.getName(),
fileIO.getFileSize(path),
dvRanges.size(),
dvRanges);
}

@Override
public void close() throws IOException {
dataOutputStream.close();
}
}
}
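write() above keeps handing the shared iterator back to tryWriter(), so one logical write may roll over several index files once the accumulated bytes pass the target size. A self-contained sketch of that rolling pattern, with plain byte arrays standing in for serialized deletion vectors (all names here are made up, and it does not reproduce the exact byte accounting of SingleIndexFileWriter):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RollingIndexFileSketch {
    public static void main(String[] args) {
        Map<String, byte[]> input = new LinkedHashMap<>();
        input.put("data-file-1", new byte[600]);
        input.put("data-file-2", new byte[600]);
        input.put("data-file-3", new byte[600]);

        long targetSizeInBytes = 1000;
        List<List<String>> indexFiles = new ArrayList<>();

        Iterator<Map.Entry<String, byte[]>> iterator = input.entrySet().iterator();
        while (iterator.hasNext()) {
            // One "index file": append entries until the written size passes
            // the target, then close it and let the outer loop start the next.
            List<String> current = new ArrayList<>();
            long written = 0;
            while (iterator.hasNext()) {
                Map.Entry<String, byte[]> entry = iterator.next();
                current.add(entry.getKey());
                written += entry.getValue().length;
                if (written > targetSizeInBytes) {
                    break;
                }
            }
            indexFiles.add(current);
        }

        // Prints: [[data-file-1, data-file-2], [data-file-3]]
        System.out.println(indexFiles);
    }
}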
@@ -23,17 +23,18 @@
import org.apache.paimon.fs.SeekableInputStream;
import org.apache.paimon.index.IndexFile;
import org.apache.paimon.index.IndexFileMeta;
import org.apache.paimon.options.MemorySize;
import org.apache.paimon.table.source.DeletionFile;
import org.apache.paimon.utils.Pair;
import org.apache.paimon.utils.PathFactory;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.CRC32;

@@ -46,8 +47,12 @@ public class DeletionVectorsIndexFile extends IndexFile {
public static final String DELETION_VECTORS_INDEX = "DELETION_VECTORS";
public static final byte VERSION_ID_V1 = 1;

public DeletionVectorsIndexFile(FileIO fileIO, PathFactory pathFactory) {
private final MemorySize targetSizePerIndexFile;

public DeletionVectorsIndexFile(
FileIO fileIO, PathFactory pathFactory, MemorySize targetSizePerIndexFile) {
super(fileIO, pathFactory);
this.targetSizePerIndexFile = targetSizePerIndexFile;
}

/**
@@ -85,6 +90,12 @@ public Map<String, DeletionVector> readAllDeletionVectors(IndexFileMeta fileMeta
return deletionVectors;
}

public Map<String, DeletionVector> readAllDeletionVectors(List<IndexFileMeta> indexFiles) {
Map<String, DeletionVector> deletionVectors = new HashMap<>();
indexFiles.forEach(indexFile -> deletionVectors.putAll(readAllDeletionVectors(indexFile)));
return deletionVectors;
}

/** Reads deletion vectors from a list of DeletionFile which belong to the same index file. */
public Map<String, DeletionVector> readDeletionVector(
Map<String, DeletionFile> dataFileToDeletionFiles) {
@@ -110,33 +121,6 @@ public Map<String, DeletionVector> readDeletionVector(
return deletionVectors;
}

/**
* Reads a single deletion vector from the specified file.
*
* @param fileName The name of the file from which to read the deletion vector.
* @param deletionVectorRange A Pair specifying the range (start position and size) within the
* file where the deletion vector data is located.
* @return The DeletionVector object read from the specified range in the file.
* @throws UncheckedIOException If an I/O error occurs while reading from the file.
*/
public DeletionVector readDeletionVector(
String fileName, Pair<Integer, Integer> deletionVectorRange) {
Path filePath = pathFactory.toPath(fileName);
try (SeekableInputStream inputStream = fileIO.newInputStream(filePath)) {
checkVersion(inputStream);
inputStream.seek(deletionVectorRange.getLeft());
DataInputStream dataInputStream = new DataInputStream(inputStream);
return readDeletionVector(dataInputStream, deletionVectorRange.getRight());
} catch (Exception e) {
throw new RuntimeException(
"Unable to read deletion vector from file: "
+ filePath
+ ", deletionVectorRange: "
+ deletionVectorRange,
e);
}
}

/**
* Write deletion vectors to a new file; the format of this file can be referenced at: <a
* href="https://cwiki.apache.org/confluence/x/Tws4EQ">PIP-16</a>.
@@ -149,28 +133,15 @@ public DeletionVector readDeletionVector(
* data is located.
* @throws UncheckedIOException If an I/O error occurs while writing to the file.
*/
public Pair<String, LinkedHashMap<String, Pair<Integer, Integer>>> write(
Map<String, DeletionVector> input) {
int size = input.size();
LinkedHashMap<String, Pair<Integer, Integer>> deletionVectorRanges =
new LinkedHashMap<>(size);
Path path = pathFactory.newPath();
try (DataOutputStream dataOutputStream =
new DataOutputStream(fileIO.newOutputStream(path, true))) {
dataOutputStream.writeByte(VERSION_ID_V1);
for (Map.Entry<String, DeletionVector> entry : input.entrySet()) {
String key = entry.getKey();
byte[] valueBytes = entry.getValue().serializeToBytes();
deletionVectorRanges.put(key, Pair.of(dataOutputStream.size(), valueBytes.length));
dataOutputStream.writeInt(valueBytes.length);
dataOutputStream.write(valueBytes);
dataOutputStream.writeInt(calculateChecksum(valueBytes));
}
public List<IndexFileMeta> write(Map<String, DeletionVector> input) {
try {
DeletionVectorIndexFileWriter writer =
new DeletionVectorIndexFileWriter(
this.fileIO, this.pathFactory, this.targetSizePerIndexFile);
return writer.write(input);
} catch (IOException e) {
throw new RuntimeException(
"Unable to write deletion vectors to file: " + path.getName(), e);
throw new RuntimeException("Failed to write deletion vectors.", e);
}
return Pair.of(path.getName(), deletionVectorRanges);
}

private void checkVersion(InputStream in) throws IOException {
@@ -213,7 +184,7 @@ private DeletionVector readDeletionVector(DataInputStream inputStream, int size)
}
}

private int calculateChecksum(byte[] bytes) {
public static int calculateChecksum(byte[] bytes) {
CRC32 crc = new CRC32();
crc.update(bytes);
return (int) crc.getValue();
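SingleIndexFileWriter and the readers above appear to share one on-disk layout: a single version byte per file, then per deletion vector an int length, the serialized bytes, and a CRC32 checksum of those bytes (calculateChecksum). A self-contained sketch of that layout over an in-memory stream (class and variable names are made up):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;

public class DvIndexLayoutSketch {
    public static void main(String[] args) throws IOException {
        byte[] dvBytes = {1, 2, 3, 4}; // stands in for DeletionVector#serializeToBytes()

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        out.writeByte(1);          // VERSION_ID_V1, written once per file
        int start = out.size();    // offset recorded in dvRanges for this entry
        out.writeInt(dvBytes.length);
        out.write(dvBytes);
        out.writeInt(checksum(dvBytes));
        out.close();

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println("version  = " + in.readByte());
        int length = in.readInt();
        byte[] read = new byte[length];
        in.readFully(read);
        System.out.println("range    = (" + start + ", " + length + ")");
        System.out.println("checksum = " + (in.readInt() == checksum(read)));
    }

    private static int checksum(byte[] bytes) {
        CRC32 crc = new CRC32();
        crc.update(bytes);
        return (int) crc.getValue();
    }
}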
@@ -97,9 +97,8 @@ public void removeDeletionVectorOf(String fileName) {
*/
public List<IndexFileMeta> prepareCommit() {
if (modified) {
IndexFileMeta entry = indexFileHandler.writeDeletionVectorsIndex(deletionVectors);
modified = false;
return Collections.singletonList(entry);
return indexFileHandler.writeDeletionVectorsIndex(deletionVectors);
}
return Collections.emptyList();
}