/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.common.table.timeline;

import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.avro.model.HoodieIndexCommitMetadata;
import org.apache.hudi.avro.model.HoodieIndexPlan;
import org.apache.hudi.avro.model.HoodieInstantInfo;
import org.apache.hudi.avro.model.HoodieReplaceCommitMetadata;
import org.apache.hudi.avro.model.HoodieRequestedReplaceMetadata;
import org.apache.hudi.avro.model.HoodieRestoreMetadata;
import org.apache.hudi.avro.model.HoodieRestorePlan;
import org.apache.hudi.avro.model.HoodieRollbackMetadata;
import org.apache.hudi.avro.model.HoodieRollbackPartitionMetadata;
import org.apache.hudi.avro.model.HoodieRollbackPlan;
import org.apache.hudi.avro.model.HoodieSavepointMetadata;
import org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata;
import org.apache.hudi.common.HoodieRollbackStat;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableByteArrayInput;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.hadoop.fs.FileStatus;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
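
/**
 * Helper methods to convert instant-action results into their Avro model records and to
 * serialize/deserialize those records to and from bytes in the Avro data-file (container)
 * format, which embeds the writer schema in each payload.
 *
 * <p>A typical round trip (sketch):
 * <pre>{@code
 *   Option<byte[]> bytes = TimelineMetadataUtils.serializeCleanMetadata(cleanMetadata);
 *   HoodieCleanMetadata restored = TimelineMetadataUtils.deserializeHoodieCleanMetadata(bytes.get());
 * }</pre>
 */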
public class TimelineMetadataUtils {
  private static final Integer DEFAULT_VERSION = 1;
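
  /**
   * Converts the result of a restore into a {@link HoodieRestoreMetadata} record: the instants
   * that were rolled back, the per-instant rollback metadata, and the total duration.
   */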
  public static HoodieRestoreMetadata convertRestoreMetadata(String startRestoreTime,
                                                             long durationInMs,
                                                             List<HoodieInstant> instants,
                                                             Map<String, List<HoodieRollbackMetadata>> instantToRollbackMetadata) {
    return new HoodieRestoreMetadata(startRestoreTime, durationInMs,
        instants.stream().map(HoodieInstant::getTimestamp).collect(Collectors.toList()),
        Collections.unmodifiableMap(instantToRollbackMetadata), DEFAULT_VERSION,
        instants.stream().map(instant -> new HoodieInstantInfo(instant.getTimestamp(), instant.getAction())).collect(Collectors.toList()));
  }
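
  /**
   * Converts per-partition rollback stats into a {@link HoodieRollbackMetadata} record,
   * tallying successfully deleted files and the sizes of any rollback log files written.
   */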
  public static HoodieRollbackMetadata convertRollbackMetadata(String startRollbackTime, Option<Long> durationInMs,
                                                               List<HoodieInstant> instants, List<HoodieRollbackStat> rollbackStats) {
    Map<String, HoodieRollbackPartitionMetadata> partitionMetadataBuilder = new HashMap<>();
    int totalDeleted = 0;
    for (HoodieRollbackStat stat : rollbackStats) {
      Map<String, Long> rollbackLogFiles = stat.getCommandBlocksCount().keySet().stream()
          .collect(Collectors.toMap(f -> f.getPath().toString(), FileStatus::getLen));
      HoodieRollbackPartitionMetadata metadata = new HoodieRollbackPartitionMetadata(stat.getPartitionPath(),
          stat.getSuccessDeleteFiles(), stat.getFailedDeleteFiles(), rollbackLogFiles);
      partitionMetadataBuilder.put(stat.getPartitionPath(), metadata);
      totalDeleted += stat.getSuccessDeleteFiles().size();
    }
    return new HoodieRollbackMetadata(startRollbackTime, durationInMs.orElseGet(() -> -1L), totalDeleted,
        instants.stream().map(HoodieInstant::getTimestamp).collect(Collectors.toList()),
        Collections.unmodifiableMap(partitionMetadataBuilder), DEFAULT_VERSION,
        instants.stream().map(instant -> new HoodieInstantInfo(instant.getTimestamp(), instant.getAction())).collect(Collectors.toList()));
  }
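
  /**
   * Converts the per-partition list of latest data files into a {@link HoodieSavepointMetadata}
   * record, stamped with the current wall-clock time.
   */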
  public static HoodieSavepointMetadata convertSavepointMetadata(String user, String comment,
                                                                 Map<String, List<String>> latestFiles) {
    Map<String, HoodieSavepointPartitionMetadata> partitionMetadataBuilder = new HashMap<>();
    for (Map.Entry<String, List<String>> stat : latestFiles.entrySet()) {
      HoodieSavepointPartitionMetadata metadata = new HoodieSavepointPartitionMetadata(stat.getKey(), stat.getValue());
      partitionMetadataBuilder.put(stat.getKey(), metadata);
    }
    return new HoodieSavepointMetadata(user, System.currentTimeMillis(), comment,
        Collections.unmodifiableMap(partitionMetadataBuilder), DEFAULT_VERSION);
  }
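
  // Typed serialization wrappers: one per timeline plan/metadata type, all delegating
  // to serializeAvroMetadata below.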
  public static Option<byte[]> serializeCompactionPlan(HoodieCompactionPlan compactionWorkload) throws IOException {
    return serializeAvroMetadata(compactionWorkload, HoodieCompactionPlan.class);
  }

  public static Option<byte[]> serializeCleanerPlan(HoodieCleanerPlan cleanPlan) throws IOException {
    return serializeAvroMetadata(cleanPlan, HoodieCleanerPlan.class);
  }

  public static Option<byte[]> serializeRollbackPlan(HoodieRollbackPlan rollbackPlan) throws IOException {
    return serializeAvroMetadata(rollbackPlan, HoodieRollbackPlan.class);
  }

  public static Option<byte[]> serializeRestorePlan(HoodieRestorePlan restorePlan) throws IOException {
    return serializeAvroMetadata(restorePlan, HoodieRestorePlan.class);
  }

  public static Option<byte[]> serializeCleanMetadata(HoodieCleanMetadata metadata) throws IOException {
    return serializeAvroMetadata(metadata, HoodieCleanMetadata.class);
  }

  public static Option<byte[]> serializeSavepointMetadata(HoodieSavepointMetadata metadata) throws IOException {
    return serializeAvroMetadata(metadata, HoodieSavepointMetadata.class);
  }

  public static Option<byte[]> serializeRollbackMetadata(HoodieRollbackMetadata rollbackMetadata) throws IOException {
    return serializeAvroMetadata(rollbackMetadata, HoodieRollbackMetadata.class);
  }

  public static Option<byte[]> serializeRestoreMetadata(HoodieRestoreMetadata restoreMetadata) throws IOException {
    return serializeAvroMetadata(restoreMetadata, HoodieRestoreMetadata.class);
  }

  public static Option<byte[]> serializeRequestedReplaceMetadata(HoodieRequestedReplaceMetadata clusteringPlan) throws IOException {
    return serializeAvroMetadata(clusteringPlan, HoodieRequestedReplaceMetadata.class);
  }

  public static Option<byte[]> serializeIndexPlan(HoodieIndexPlan indexPlan) throws IOException {
    return serializeAvroMetadata(indexPlan, HoodieIndexPlan.class);
  }

  public static Option<byte[]> serializeIndexCommitMetadata(HoodieIndexCommitMetadata indexCommitMetadata) throws IOException {
    return serializeAvroMetadata(indexCommitMetadata, HoodieIndexCommitMetadata.class);
  }
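
  /**
   * Serializes any Avro specific record to bytes in the Avro data-file format; the record's
   * schema is embedded in the output, so it can be read back without an external schema.
   */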
  public static <T extends SpecificRecordBase> Option<byte[]> serializeAvroMetadata(T metadata, Class<T> clazz)
      throws IOException {
    DatumWriter<T> datumWriter = new SpecificDatumWriter<>(clazz);
    DataFileWriter<T> fileWriter = new DataFileWriter<>(datumWriter);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    fileWriter.create(metadata.getSchema(), baos);
    fileWriter.append(metadata);
    // flush() syncs the current block, so the byte array below is a complete Avro data file
    fileWriter.flush();
    return Option.of(baos.toByteArray());
  }
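
  // Typed deserialization wrappers: one per timeline plan/metadata type, all delegating
  // to deserializeAvroMetadata below.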
  public static HoodieCleanerPlan deserializeCleanerPlan(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieCleanerPlan.class);
  }

  public static HoodieCompactionPlan deserializeCompactionPlan(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieCompactionPlan.class);
  }

  public static HoodieCleanMetadata deserializeHoodieCleanMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieCleanMetadata.class);
  }

  public static HoodieRollbackMetadata deserializeHoodieRollbackMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieRollbackMetadata.class);
  }

  public static HoodieRestoreMetadata deserializeHoodieRestoreMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieRestoreMetadata.class);
  }

  public static HoodieSavepointMetadata deserializeHoodieSavepointMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieSavepointMetadata.class);
  }

  public static HoodieRequestedReplaceMetadata deserializeRequestedReplaceMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieRequestedReplaceMetadata.class);
  }

  public static HoodieReplaceCommitMetadata deserializeHoodieReplaceMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieReplaceCommitMetadata.class);
  }

  public static HoodieIndexPlan deserializeIndexPlan(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieIndexPlan.class);
  }

  public static HoodieIndexCommitMetadata deserializeIndexCommitMetadata(byte[] bytes) throws IOException {
    return deserializeAvroMetadata(bytes, HoodieIndexCommitMetadata.class);
  }
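
  /**
   * Reads the first record of an Avro data-file payload back into the given specific-record type.
   */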
  public static <T extends SpecificRecordBase> T deserializeAvroMetadata(byte[] bytes, Class<T> clazz)
      throws IOException {
    DatumReader<T> reader = new SpecificDatumReader<>(clazz);
    FileReader<T> fileReader = DataFileReader.openReader(new SeekableByteArrayInput(bytes), reader);
    ValidationUtils.checkArgument(fileReader.hasNext(), "Could not deserialize metadata of type " + clazz);
    return fileReader.next();
  }
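
  /**
   * Deserializes a single Avro-encoded record (binary encoding, no data-file container) using
   * the supplied schema, deep-copying the decoded record into the matching specific-record type.
   */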
  public static <T extends SpecificRecordBase> T deserializeAvroRecordMetadata(byte[] bytes, Schema schema)
      throws IOException {
    return deserializeAvroRecordMetadata(HoodieAvroUtils.bytesToAvro(bytes, schema), schema);
  }

  @SuppressWarnings("unchecked")
  public static <T extends SpecificRecordBase> T deserializeAvroRecordMetadata(Object object, Schema schema) {
    return (T) SpecificData.get().deepCopy(schema, object);
  }
}