/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.state.gemini.engine.page;
import org.apache.flink.runtime.state.gemini.engine.exceptions.GeminiRuntimeException;
import org.apache.flink.runtime.state.gemini.engine.fs.FileReader;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.GHashHeaderImpl;
import org.apache.flink.runtime.state.gemini.engine.page.compress.GCompressAlgorithm;
import org.apache.flink.runtime.state.gemini.engine.page.compress.GCompressHeaderHelper;
import org.apache.flink.runtime.state.gemini.engine.rm.GByteBuffer;
import org.apache.flink.runtime.state.gemini.engine.rm.GUnPooledByteBuffer;
import java.nio.ByteBuffer;
import java.util.zip.CRC32;
/**
 * DfsDataPageUtil is responsible for operations on logical data pages read from DFS files.
 */
public class DfsDataPageUtil extends AbstractDataPageUtil {
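/**
 * @param enableChecksum whether to verify the CRC32 checksum of pages read back from files
 */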
public DfsDataPageUtil(boolean enableChecksum) {
super(enableChecksum);
}
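/**
 * Reads the data page at the given offset from the file reader and returns its
 * uncompressed bytes. The compress header is probed first: if the page is
 * compressed, the page header is read as-is and the page body is decompressed
 * behind it; otherwise the raw page bytes are read directly. When checksum
 * verification is enabled, the CRC32 of the uncompressed bytes is validated
 * against the checksum recorded in the {@link PageAddress}.
 */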
@Override
public GByteBuffer getDataPageFromReader(FileReader reader, int offsetInFile, PageAddress pageAddress) {
try {
int firstReadLength = GCompressHeaderHelper.LENGTH;
byte[] header = new byte[firstReadLength];
// We always read 16 bytes here (the compress header's length). This can never hit an
// EOFException because the page header's length (64) is always larger than the
// compress header's length (16).
reader.read(offsetInFile, header, 0, firstReadLength);
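// pageAddress.getDataLen() is the length of the uncompressed page (page header + body),
// so pageData can hold the final result in both the compressed and uncompressed branches.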
byte[] pageData = new byte[pageAddress.getDataLen()];
if (GCompressHeaderHelper.isPageCompressed(header)) {
GCompressHeaderHelper.checkMagicNumber(header);
GCompressAlgorithm algorithm = GCompressHeaderHelper.readCompressAlgorithm(header);
// read the page header
int pageHeaderLength = GHashHeaderImpl.HEADER_LENGTH;
int pageHeaderOffset = offsetInFile + GCompressHeaderHelper.LENGTH;
reader.read(pageHeaderOffset, pageData, 0, pageHeaderLength);
// read the compressed page data and decompress it.
int diskLength = GCompressHeaderHelper.readDiskLength(header);
byte[] compressed = new byte[diskLength];
int rawPageDataOffset = pageHeaderOffset + GHashHeaderImpl.HEADER_LENGTH;
reader.read(rawPageDataOffset, compressed, 0, compressed.length);
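// Decompress directly into pageData just after the already-read page header, so pageData
// ends up holding the plain page header followed by the uncompressed page body.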
algorithm.getCompressorCodec().decompress(compressed, 0, compressed.length, pageData, pageHeaderLength);
} else {
// If the page was not compressed, first copy the 16 bytes already read, then read the remaining bytes from the file.
System.arraycopy(header, 0, pageData, 0, firstReadLength);
reader.read(offsetInFile + firstReadLength,
pageData,
firstReadLength,
pageAddress.getDataLen() - firstReadLength);
}
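// Verify the CRC32 of the uncompressed page bytes against the checksum stored in the PageAddress.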
if (enableChecksum) {
CRC32 crc32 = new CRC32();
crc32.update(pageData);
int crc = (int) crc32.getValue();
if (crc != pageAddress.getChecksum()) {
throw new GeminiRuntimeException("checkSum fail, " + pageAddress + " when reading from file=" + reader.getFileMeta() + " ,expected=" + pageAddress.getChecksum() + " ,now=" + crc);
}
}
return new GUnPooledByteBuffer(ByteBuffer.wrap(pageData));
} catch (Exception e) {
LOG.error("DfsUtil PageAddress:{}", pageAddress + " => " + e + " reader=>" + reader.getFileMeta());
throw new GeminiRuntimeException(e);
}
}
}