/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.state.gemini.engine.page;
import org.apache.flink.runtime.state.gemini.engine.exceptions.GeminiRuntimeException;
import org.apache.flink.runtime.state.gemini.engine.fs.FileWriter;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.ByteBufferUtils;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.GHashHeaderImpl;
import org.apache.flink.runtime.state.gemini.engine.page.compress.CompressorCodec;
import org.apache.flink.runtime.state.gemini.engine.page.compress.GCompressAlgorithm;
import org.apache.flink.runtime.state.gemini.engine.page.compress.GCompressHeaderHelper;
import org.apache.flink.runtime.state.gemini.engine.rm.GByteBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.zip.CRC32;
/**
 * DataPageUtil base implementation, responsible for operations on logical pages.
 */
public abstract class AbstractDataPageUtil implements DataPageUtil {

	// Register the logger under the declaring class, not the DataPageUtil
	// interface, so log output is attributed to the correct category.
	protected static final Logger LOG = LoggerFactory.getLogger(AbstractDataPageUtil.class);

	// Whether checksum verification is enabled for this util instance.
	protected final boolean enableChecksum;

	public AbstractDataPageUtil(boolean enableChecksum) {
		this.enableChecksum = enableChecksum;
	}

	/**
	 * Writes the page data held in {@code gByteBuffer} to the given file writer,
	 * compressing the payload (everything after the page header) when the
	 * configured algorithm enables compression and the page's data length
	 * exceeds {@code compressThreshold}.
	 *
	 * <p>On-disk layout when compressed:
	 * compress header | uncompressed page header | compressed payload.
	 * When not compressed, the raw bytes are written as-is.
	 *
	 * @param writer file writer to write the page to
	 * @param gByteBuffer buffer containing the full serialized page
	 * @param pageAddress page metadata (data length, stored checksum)
	 * @param gCompressAlgorithm compression algorithm (may be disabled)
	 * @param compressThreshold minimum data length for compression to apply
	 * @param checksumEnable whether to verify the page checksum before writing
	 * @return the number of bytes written to disk
	 * @throws IOException if the underlying writer fails
	 */
	@Override
	public int write(
		FileWriter writer,
		GByteBuffer gByteBuffer,
		PageAddress pageAddress,
		GCompressAlgorithm gCompressAlgorithm,
		int compressThreshold,
		boolean checksumEnable) throws IOException {
		byte[] data = getBytes(gByteBuffer, pageAddress, checksumEnable);
		int diskLength;
		boolean needCompress = gCompressAlgorithm.enableCompress() && pageAddress.getDataLen() > compressThreshold;
		if (needCompress) {
			CompressorCodec compressorCodec = gCompressAlgorithm.getCompressorCodec();
			// Only the payload after the page header is compressed; the header
			// itself stays readable without decompression.
			int compressLength = data.length - GHashHeaderImpl.HEADER_LENGTH;
			byte[] compressedData = compressorCodec.getCompressedBytePool(compressLength);
			int compressedDataLength = compressorCodec.compress(
				data,
				GHashHeaderImpl.HEADER_LENGTH,
				compressLength,
				compressedData,
				0);
			diskLength = GCompressHeaderHelper.LENGTH + GHashHeaderImpl.HEADER_LENGTH + compressedDataLength;
			ByteBuffer compressHeader = ByteBuffer.allocate(GCompressHeaderHelper.LENGTH);
			GCompressHeaderHelper.writeCompressedDataPageType(compressHeader);
			GCompressHeaderHelper.writeCompressVersion(compressHeader, GCompressHeaderHelper.CURRENT_VERSION);
			GCompressHeaderHelper.writeMagicNumber(compressHeader, GCompressHeaderHelper.MAGIC_NUMBER);
			GCompressHeaderHelper.writeAlgorithmCode(compressHeader, gCompressAlgorithm.getCode());
			GCompressHeaderHelper.writeDiskLength(compressHeader, compressedDataLength);
			// Write order matters: compress header, then the uncompressed page
			// header, then the compressed payload.
			writer.write(compressHeader.array(), 0, GCompressHeaderHelper.LENGTH);
			writer.write(data, 0, GHashHeaderImpl.HEADER_LENGTH);
			writer.write(compressedData, 0, compressedDataLength);
			if (LOG.isDebugEnabled()) {
				LOG.debug("Compress page mem {}/disk {} ratio {}",
					pageAddress.getDataLen(),
					diskLength,
					(pageAddress.getDataLen() * 1.0 / diskLength));
			}
		} else {
			diskLength = data.length;
			// Write page header & page data directly, uncompressed.
			writer.write(data, 0, data.length);
		}
		return diskLength;
	}

	/**
	 * Returns a byte array which contains the data in {@code gByteBuffer}. If
	 * {@code checkSum} is true, the CRC32 of the bytes is verified against the
	 * checksum stored in {@code page} before returning.
	 *
	 * @param gByteBuffer buffer holding the page bytes
	 * @param page page metadata carrying the expected checksum
	 * @param checkSum whether to verify the checksum
	 * @return the page bytes
	 * @throws GeminiRuntimeException if checksum verification fails
	 */
	public byte[] getBytes(GByteBuffer gByteBuffer, PageAddress page, boolean checkSum) {
		byte[] bytes;
		if (gByteBuffer.getByteBuffer().hasArray()) {
			// NOTE(review): assumes the backing array starts at offset 0 and has
			// the same length as the buffer — confirm GByteBuffer never wraps a
			// sliced/offset buffer, otherwise arrayOffset() must be honored here.
			bytes = gByteBuffer.getByteBuffer().array();
		} else {
			bytes = new byte[gByteBuffer.capacity()];
			ByteBufferUtils.copyFromBufferToArray(gByteBuffer.getByteBuffer(), bytes,
				0, 0, gByteBuffer.capacity());
		}
		if (checkSum) {
			CRC32 crc32 = new CRC32();
			crc32.update(bytes);
			int crc = (int) crc32.getValue();
			if (crc != page.getChecksum()) {
				throw new GeminiRuntimeException("checkSum changed! originCheckSum=" + page.getChecksum() + " ,now=" + crc);
			}
		}
		return bytes;
	}
}