/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.runtime.state.gemini.engine.page;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.runtime.state.gemini.engine.GRegionContext;
import org.apache.flink.runtime.state.gemini.engine.exceptions.GeminiRuntimeException;
import org.apache.flink.runtime.state.gemini.engine.memstore.GSValue;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.BinaryKey;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.BinaryValue;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.ByteBufferDataInputView;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.GBinaryHashMap;
import org.apache.flink.runtime.state.gemini.engine.page.compress.GCompressAlgorithm;
import org.apache.flink.runtime.state.gemini.engine.rm.Allocator;
import org.apache.flink.runtime.state.gemini.engine.rm.GByteBuffer;
import org.apache.flink.runtime.state.gemini.engine.vm.HitRecord;
import org.apache.flink.util.MathUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.flink.runtime.state.gemini.engine.page.bmap.GBinaryHashMap.EMPTY_G_BINARY_HASHMAP;

/**
 * An implementation of {@link DataPage} that stores key-value pairs in a binary-encoded
 * hash map ({@link GBinaryHashMap}).
 */
public class DataPageKVImpl<K, V> implements DataPage<K, V> {
private static final Logger LOG = LoggerFactory.getLogger(DataPageKVImpl.class);
protected final TypeSerializer<V> valueTypeSerializer;
protected final GBinaryHashMap<K> gBinaryHashMap;
protected final HitRecord hitRecord = new HitRecord();
// chainIndex is mutable: it tracks this page's position in the logical page chain.
protected volatile short chainIndex = 0;
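/**
 * Creates a KV data page backed by the given binary hash map. When the map is backed by a
 * {@link GByteBuffer}, {@code setWaitSeqId()} is invoked so the buffer is associated with a
 * flush sequence id (inferred from the method name; the exact semantics live in GByteBuffer).
 */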
public DataPageKVImpl(
GBinaryHashMap<K> gBinaryHashMap, TypeSerializer<V> valueTypeSerializer) {
this.gBinaryHashMap = gBinaryHashMap;
this.valueTypeSerializer = valueTypeSerializer;
if (gBinaryHashMap.getGByteBuffer() != null) {
gBinaryHashMap.getGByteBuffer().setWaitSeqId();
}
}
@Override
public void retain() {
if (gBinaryHashMap != null) {
gBinaryHashMap.retain();
}
}
@Override
public void release() {
if (gBinaryHashMap != null) {
gBinaryHashMap.release();
}
}
@Override
public int refCnt() {
return gBinaryHashMap.refCnt();
}
@Override
public void close() {
release();
}
@Override
public int getCheckSum() {
return gBinaryHashMap == null ? 0 : gBinaryHashMap.getOriginChecksum();
}
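/**
 * Returns the raw binary value stored for the given key, without deserialization, or null if
 * the key is not present in this page.
 */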
@Override
public BinaryValue getBinaryValue(K key) {
try {
return this.gBinaryHashMap.get(key);
} catch (Exception e) {
throw new GeminiRuntimeException("get exception: " + e.getMessage(), e);
}
}
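/**
 * Looks up the key and deserializes the stored bytes into a typed value. A Delete entry is
 * returned as a tombstone {@link GSValue} with a null value, so callers can tell an explicit
 * delete apart from a missing key.
 */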
@Override
public GSValue<V> get(K key) {
try {
BinaryValue binaryValue = this.gBinaryHashMap.get(key);
if (binaryValue == null) {
return null;
}
if (binaryValue.getGValueType() == GValueType.Delete) {
return new GSValue<>(null, GValueType.Delete, binaryValue.getSeqID());
}
DataInputView byteBufferDataInputView = new ByteBufferDataInputView(binaryValue.getBb(),
binaryValue.getValueOffset(),
binaryValue.getValueLen());
V value = valueTypeSerializer.deserialize(byteBufferDataInputView);
return new GSValue<>(value, binaryValue.getGValueType(), binaryValue.getSeqID());
} catch (Exception e) {
throw new GeminiRuntimeException("get exception: " + e.getMessage(), e);
}
}
@Override
public boolean contains(K key) {
GSValue<V> result = get(key);
return result != null && result.getValue() != null;
}
@Override
public long getVersion() {
return this.gBinaryHashMap.getVersion();
}
@Override
public DataPageType getDataPageType() {
return DataPageType.KV;
}
@Override
public int getSize() {
return this.gBinaryHashMap.bytesSize();
}
@Override
public int getCount() {
return this.gBinaryHashMap.keyCount();
}
@Override
public Map<K, GSValue<V>> getPOJOMap() {
return this.gBinaryHashMap.toPOJOMap(this.valueTypeSerializer);
}
@Override
public Set<K> getPOJOSet() {
return this.gBinaryHashMap.toPOJOSet();
}
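/**
 * Splits this page in two when the region doubles its bucket count. Every entry is rehashed
 * against the doubled bucket count: entries that still map to {@code curIndex} stay on the
 * first page, the rest move to the page at {@code logicPageId + curBucketNum}. For example,
 * with curBucketNum = 4 and curIndex = 1, entries with (hash & 7) == 1 stay while entries
 * with (hash & 7) == 5 move.
 */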
@Override
public Tuple2<DataPage, DataPage> split(
PageIndexContext indexContext,
int curBucketNum,
int curIndex,
Allocator allocator,
GCompressAlgorithm gCompressAlgorithm,
GRegionContext gRegionContext) {
Map<BinaryKey, BinaryValue> binaryMap = this.gBinaryHashMap.getBinaryMap();
List<Tuple2<BinaryKey, BinaryValue>> list1 = new ArrayList<>();
List<Tuple2<BinaryKey, BinaryValue>> list2 = new ArrayList<>();
for (Map.Entry<BinaryKey, BinaryValue> entry : binaryMap.entrySet()) {
int hash = MathUtils.bitMix(entry.getKey().hashCode());
int checkBucketNum = curBucketNum << 1;
// After doubling the bucket count, an entry either stays in bucket curIndex or moves to
// bucket curIndex + curBucketNum. The parentheses matter: '&' binds weaker than '-' in Java.
int index = (hash & (checkBucketNum - 1)) - curIndex;
if (index == 0) {
list1.add(Tuple2.of(entry.getKey(), entry.getValue()));
} else {
list2.add(Tuple2.of(entry.getKey(), entry.getValue()));
}
}
long version = getVersion();
long compactionCount = getCompactionCount();
int logicPageId = this.gBinaryHashMap.getLogicPageId();
TypeSerializer<K> keySerializer = this.gBinaryHashMap.getKeyTypeSerializer();
// Set logicPageChainIndex and logicPageHashCode to -1 so the new pages are not promoted
// from the LRU cache to the main cache: splitting is a short-lived, transient state.
GBinaryHashMap<K> gBinaryHashMap1 = GBinaryHashMap.ofBinaryList(
-1,
-1,
getDataPageType(),
list1,
keySerializer,
version,
logicPageId,
allocator,
compactionCount,
gCompressAlgorithm,
gRegionContext);
GBinaryHashMap<K> gBinaryHashMap2 = GBinaryHashMap.ofBinaryList(
-1,
-1,
getDataPageType(),
list2,
keySerializer,
version,
logicPageId + curBucketNum,
allocator,
compactionCount,
gCompressAlgorithm,
gRegionContext);
return getSplitDataByGBinaryMap(gBinaryHashMap1, gBinaryHashMap2);
}
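/**
 * Wraps the two split halves into data pages, mapping an empty half to null rather than to
 * an empty page so callers can discard it.
 */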
public Tuple2<DataPage, DataPage> getSplitDataByGBinaryMap(
GBinaryHashMap<K> gBinaryHashMap1, GBinaryHashMap<K> gBinaryHashMap2) {
DataPageKVImpl<K, V> dataPage1 = gBinaryHashMap1 == EMPTY_G_BINARY_HASHMAP
? null
: new DataPageKVImpl<>(gBinaryHashMap1, valueTypeSerializer);
DataPageKVImpl<K, V> dataPage2 = gBinaryHashMap2 == EMPTY_G_BINARY_HASHMAP
? null
: new DataPageKVImpl<>(gBinaryHashMap2, valueTypeSerializer);
return Tuple2.of(dataPage1, dataPage2);
}
@Override
public long getCompactionCount() {
return this.gBinaryHashMap.getCompactionCount();
}
@Override
public GBinaryHashMap<K> getGBinaryHashMap() {
return this.gBinaryHashMap;
}
@Override
public void setChainIndex(int index) {
this.chainIndex = (short) index;
}
@Override
public int getChainIndex() {
return this.chainIndex;
}
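/**
 * Reconstructs a KV data page from a serialized buffer (for example, when a page is loaded
 * back from external storage), validating it against the given CRC.
 */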
public static <K, V> DataPageKVImpl<K, V> readKVPageFrom(
PageSerdeFlink<K, V> pageSerdeFlink,
GByteBuffer dataPage,
int crc) {
GBinaryHashMap<K> gBinaryHashMap = new GBinaryHashMap<>(dataPage, pageSerdeFlink.getKeySerde(), crc);
return new DataPageKVImpl<>(gBinaryHashMap, pageSerdeFlink.getValueSerde());
}
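// The methods below delegate to HitRecord, which tracks per-tick access counts. score() is
// presumably consumed by the cache's eviction policy, weighing request rate against page
// size and chain position.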
@Override
public void addRequestCount(long tickTime, int count) {
hitRecord.addRequestCount(tickTime, count);
}
@Override
public double score(long tickTime) {
return hitRecord.score(tickTime, getSize(), getChainIndex());
}
@Override
public long getRequestCount(long tickTime) {
return hitRecord.getRequestCount(tickTime);
}
}