Skip to content

Commit

Permalink
[core] support decouple the delta files lifecycle
Browse files Browse the repository at this point in the history
  • Loading branch information
Aitozi committed Apr 12, 2024
1 parent 5c78579 commit b6d6cd9
Show file tree
Hide file tree
Showing 52 changed files with 432 additions and 135 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,8 @@ private RowDataRollingFileWriter createRollingRowWriter() {
pathFactory,
seqNumCounter,
fileCompression,
statsCollectors);
statsCollectors,
false);
}

private void trySyncLatestCompaction(boolean blocking)
Expand Down
41 changes: 30 additions & 11 deletions paimon-core/src/main/java/org/apache/paimon/io/DataFileMeta.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.data.Timestamp;
import org.apache.paimon.fs.Path;
import org.apache.paimon.manifest.FileSource;
import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.stats.FieldStatsArraySerializer;
import org.apache.paimon.types.ArrayType;
Expand All @@ -31,6 +32,7 @@
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.types.IntType;
import org.apache.paimon.types.RowType;
import org.apache.paimon.types.TinyIntType;

import javax.annotation.Nullable;

Expand Down Expand Up @@ -82,6 +84,7 @@ public class DataFileMeta {
// Because in previous versions of DataFileMeta, we only keep rowCount.
// We have to keep the compatibility.
private final @Nullable Long deleteRowCount;
private final @Nullable FileSource fileSource;

public static DataFileMeta forAppend(
String fileName,
Expand All @@ -90,7 +93,8 @@ public static DataFileMeta forAppend(
BinaryTableStats rowStats,
long minSequenceNumber,
long maxSequenceNumber,
long schemaId) {
long schemaId,
FileSource fileSource) {
return new DataFileMeta(
fileName,
fileSize,
Expand All @@ -103,7 +107,8 @@ public static DataFileMeta forAppend(
maxSequenceNumber,
schemaId,
DUMMY_LEVEL,
0L);
0L,
fileSource);
}

public DataFileMeta(
Expand All @@ -118,7 +123,8 @@ public DataFileMeta(
long maxSequenceNumber,
long schemaId,
int level,
@Nullable Long deleteRowCount) {
@Nullable Long deleteRowCount,
FileSource fileSource) {
this(
fileName,
fileSize,
Expand All @@ -133,7 +139,8 @@ public DataFileMeta(
level,
Collections.emptyList(),
Timestamp.fromLocalDateTime(LocalDateTime.now()).toMillisTimestamp(),
deleteRowCount);
deleteRowCount,
fileSource);
}

public DataFileMeta(
Expand All @@ -150,7 +157,8 @@ public DataFileMeta(
int level,
List<String> extraFiles,
Timestamp creationTime,
@Nullable Long deleteRowCount) {
@Nullable Long deleteRowCount,
FileSource fileSource) {
this.fileName = fileName;
this.fileSize = fileSize;

Expand All @@ -169,6 +177,7 @@ public DataFileMeta(
this.creationTime = creationTime;

this.deleteRowCount = deleteRowCount;
this.fileSource = fileSource;
}

public String fileName() {
Expand All @@ -191,6 +200,10 @@ public Optional<Long> deleteRowCount() {
return Optional.ofNullable(deleteRowCount);
}

public Optional<FileSource> fileSource() {
return Optional.ofNullable(fileSource);
}

public BinaryRow minKey() {
return minKey;
}
Expand Down Expand Up @@ -276,7 +289,8 @@ public DataFileMeta upgrade(int newLevel) {
newLevel,
extraFiles,
creationTime,
deleteRowCount);
deleteRowCount,
fileSource);
}

public List<Path> collectFiles(DataFilePathFactory pathFactory) {
Expand All @@ -301,7 +315,8 @@ public DataFileMeta copy(List<String> newExtraFiles) {
level,
newExtraFiles,
creationTime,
deleteRowCount);
deleteRowCount,
fileSource);
}

@Override
Expand All @@ -326,7 +341,8 @@ public boolean equals(Object o) {
&& level == that.level
&& Objects.equals(extraFiles, that.extraFiles)
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(deleteRowCount, that.deleteRowCount);
&& Objects.equals(deleteRowCount, that.deleteRowCount)
&& Objects.equals(fileSource, that.fileSource);
}

@Override
Expand All @@ -345,7 +361,8 @@ public int hashCode() {
level,
extraFiles,
creationTime,
deleteRowCount);
deleteRowCount,
fileSource);
}

@Override
Expand All @@ -354,7 +371,7 @@ public String toString() {
"{fileName: %s, fileSize: %d, rowCount: %d, "
+ "minKey: %s, maxKey: %s, keyStats: %s, valueStats: %s, "
+ "minSequenceNumber: %d, maxSequenceNumber: %d, "
+ "schemaId: %d, level: %d, extraFiles: %s, creationTime: %s, deleteRowCount: %d}",
+ "schemaId: %d, level: %d, extraFiles: %s, creationTime: %s, deleteRowCount: %d, fileSource: %s}",
fileName,
fileSize,
rowCount,
Expand All @@ -368,7 +385,8 @@ public String toString() {
level,
extraFiles,
creationTime,
deleteRowCount);
deleteRowCount,
fileSource);
}

public static RowType schema() {
Expand All @@ -387,6 +405,7 @@ public static RowType schema() {
fields.add(new DataField(11, "_EXTRA_FILES", new ArrayType(false, newStringType(false))));
fields.add(new DataField(12, "_CREATION_TIME", DataTypes.TIMESTAMP_MILLIS()));
fields.add(new DataField(13, "_DELETE_ROW_COUNT", new BigIntType(true)));
fields.add(new DataField(14, "_FILE_SOURCE", new TinyIntType(true)));
return new RowType(fields);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.apache.paimon.data.BinaryString;
import org.apache.paimon.data.GenericRow;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.manifest.FileSource;
import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.utils.ObjectSerializer;

Expand Down Expand Up @@ -54,7 +55,8 @@ public InternalRow toRow(DataFileMeta meta) {
meta.level(),
toStringArrayData(meta.extraFiles()),
meta.creationTime(),
meta.deleteRowCount().orElse(null));
meta.deleteRowCount().orElse(null),
meta.fileSource().map(FileSource::toByteValue).orElse(null));
}

@Override
Expand All @@ -73,6 +75,7 @@ public DataFileMeta fromRow(InternalRow row) {
row.getInt(10),
fromStringArrayData(row.getArray(11)),
row.getTimestamp(12, 3),
row.isNullAt(13) ? null : row.getLong(13));
row.isNullAt(13) ? null : row.getLong(13),
row.isNullAt(14) ? null : FileSource.fromByteValue(row.getByte(14)));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.apache.paimon.format.TableStatsExtractor;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.manifest.FileSource;
import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.stats.FieldStatsArraySerializer;
import org.apache.paimon.types.RowType;
Expand Down Expand Up @@ -68,6 +69,7 @@ public class KeyValueDataFileWriter
private long minSeqNumber = Long.MAX_VALUE;
private long maxSeqNumber = Long.MIN_VALUE;
private long deleteRecordCount = 0;
private final boolean isCompact;

public KeyValueDataFileWriter(
FileIO fileIO,
Expand All @@ -80,7 +82,8 @@ public KeyValueDataFileWriter(
long schemaId,
int level,
String compression,
CoreOptions options) {
CoreOptions options,
boolean isCompact) {
super(
fileIO,
factory,
Expand All @@ -100,6 +103,7 @@ public KeyValueDataFileWriter(
this.keyStatsConverter = new FieldStatsArraySerializer(keyType);
this.valueStatsConverter = new FieldStatsArraySerializer(valueType);
this.keySerializer = new InternalRowSerializer(keyType);
this.isCompact = isCompact;
}

@Override
Expand Down Expand Up @@ -168,6 +172,7 @@ public DataFileMeta result() throws IOException {
maxSeqNumber,
schemaId,
level,
deleteRecordCount);
deleteRecordCount,
isCompact ? FileSource.COMPACT : FileSource.APPEND);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -81,21 +81,24 @@ public DataFilePathFactory pathFactory(int level) {
return formatContext.pathFactory(level);
}

public RollingFileWriter<KeyValue, DataFileMeta> createRollingMergeTreeFileWriter(int level) {
public RollingFileWriter<KeyValue, DataFileMeta> createRollingMergeTreeFileWriter(
int level, boolean isCompact) {
return new RollingFileWriter<>(
() -> createDataFileWriter(formatContext.pathFactory(level).newPath(), level),
() ->
createDataFileWriter(
formatContext.pathFactory(level).newPath(), level, isCompact),
suggestedFileSize);
}

public RollingFileWriter<KeyValue, DataFileMeta> createRollingChangelogFileWriter(int level) {
return new RollingFileWriter<>(
() ->
createDataFileWriter(
formatContext.pathFactory(level).newChangelogPath(), level),
formatContext.pathFactory(level).newChangelogPath(), level, false),
suggestedFileSize);
}

private KeyValueDataFileWriter createDataFileWriter(Path path, int level) {
private KeyValueDataFileWriter createDataFileWriter(Path path, int level, boolean isCompact) {
KeyValueSerializer kvSerializer = new KeyValueSerializer(keyType, valueType);
return new KeyValueDataFileWriter(
fileIO,
Expand All @@ -108,7 +111,8 @@ private KeyValueDataFileWriter createDataFileWriter(Path path, int level) {
schemaId,
level,
formatContext.compression(level),
options);
options,
isCompact);
}

public void deleteFile(String filename, int level) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.apache.paimon.format.TableStatsExtractor;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.manifest.FileSource;
import org.apache.paimon.statistics.FieldStatsCollector;
import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.stats.FieldStatsArraySerializer;
Expand All @@ -43,6 +44,7 @@ public class RowDataFileWriter extends StatsCollectingSingleFileWriter<InternalR
private final long schemaId;
private final LongCounter seqNumCounter;
private final FieldStatsArraySerializer statsArraySerializer;
private final boolean isCompact;

public RowDataFileWriter(
FileIO fileIO,
Expand All @@ -53,7 +55,8 @@ public RowDataFileWriter(
long schemaId,
LongCounter seqNumCounter,
String fileCompression,
FieldStatsCollector.Factory[] statsCollectors) {
FieldStatsCollector.Factory[] statsCollectors,
boolean isCompact) {
super(
fileIO,
factory,
Expand All @@ -66,6 +69,7 @@ public RowDataFileWriter(
this.schemaId = schemaId;
this.seqNumCounter = seqNumCounter;
this.statsArraySerializer = new FieldStatsArraySerializer(writeSchema);
this.isCompact = isCompact;
}

@Override
Expand All @@ -84,6 +88,7 @@ public DataFileMeta result() throws IOException {
stats,
seqNumCounter.getValue() - super.recordCount(),
seqNumCounter.getValue() - 1,
schemaId);
schemaId,
isCompact ? FileSource.COMPACT : FileSource.APPEND);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ public RowDataRollingFileWriter(
DataFilePathFactory pathFactory,
LongCounter seqNumCounter,
String fileCompression,
FieldStatsCollector.Factory[] statsCollectors) {
FieldStatsCollector.Factory[] statsCollectors,
boolean isCompact) {
super(
() ->
new RowDataFileWriter(
Expand All @@ -54,7 +55,8 @@ public RowDataRollingFileWriter(
schemaId,
seqNumCounter,
fileCompression,
statsCollectors),
statsCollectors,
isCompact),
targetFileSize);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.manifest;

/**
 * Source of a data file: whether it was produced by a normal write (append) or by a
 * compaction. Persisted as a single byte in the manifest (field {@code _FILE_SOURCE}),
 * so each constant carries an explicit, stable byte value — do not rely on ordinals.
 */
public enum FileSource {
    APPEND((byte) 0),

    // BUG FIX: was (byte) 0, colliding with APPEND. fromByteValue maps 1 -> COMPACT,
    // so a COMPACT file round-tripped through serialization came back as APPEND.
    COMPACT((byte) 1);

    private final byte value;

    FileSource(byte value) {
        this.value = value;
    }

    /** Returns the stable byte used to persist this source in the manifest. */
    public byte toByteValue() {
        return value;
    }

    /**
     * Decodes a persisted byte back into a {@link FileSource}.
     *
     * @throws UnsupportedOperationException if the byte does not match any known source
     *     (e.g. data written by a newer version)
     */
    public static FileSource fromByteValue(byte value) {
        switch (value) {
            case 0:
                return APPEND;
            case 1:
                return COMPACT;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported byte value '" + value + "' for file source.");
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ public ManifestEntry convertFrom(int version, InternalRow row) {
"The current version %s is not compatible with the version %s, please recreate the table.",
getVersion(), version));
}

throw new IllegalArgumentException("Unsupported version: " + version);
}
return new ManifestEntry(
Expand Down

0 comments on commit b6d6cd9

Please sign in to comment.