Please wait. This can take some minutes ...
Many resources are needed to download a project. Please understand that we have to compensate our server costs. Thank you in advance.
Project price only 1 $
You can buy this project and download/modify it how often you want.
org.apache.flink.runtime.state.gemini.engine.page.PageStoreHashKMapImpl Maven / Gradle / Ivy
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.state.gemini.engine.page;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.runtime.state.gemini.engine.GRegion;
import org.apache.flink.runtime.state.gemini.engine.exceptions.GeminiRuntimeException;
import org.apache.flink.runtime.state.gemini.engine.memstore.GSValue;
import org.apache.flink.runtime.state.gemini.engine.page.DataPage.DataPageType;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.BinaryKey;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.BinaryValue;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.ByteBufferDataInputView;
import org.apache.flink.runtime.state.gemini.engine.page.bmap.GBinaryHashMap;
import org.apache.flink.runtime.state.gemini.engine.rm.ReferenceCount.ReleaseType;
import org.apache.flink.util.Preconditions;
import org.apache.flink.shaded.netty4.io.netty.util.concurrent.EventExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.flink.runtime.state.gemini.engine.page.bmap.GBinaryHashMap.EMPTY_G_BINARY_HASHMAP;
/**
 * PageStoreHashKMapImpl is a hash-based page store for map-typed state: each primary key
 * maps to a (map-key, value) collection that is serialized into {@code KHashMap} data pages.
 * Reads walk the logical page chain from newest to oldest and merge partial map updates.
 */
public class PageStoreHashKMapImpl extends AbstractHashPageStore>> implements PageStoreKMap {
private static final Logger LOG = LoggerFactory.getLogger(PageStoreHashKMapImpl.class);
protected final PageSerdeFlink2Key pageSerdeFlink2Key;
/**
 * Creates a page store for map-typed state with a fresh page index.
 *
 * @param gRegion the region this page store serves.
 * @param eventExecutor executor used by the parent store for page work.
 */
public PageStoreHashKMapImpl(
GRegion gRegion, EventExecutor eventExecutor) {
super(gRegion, eventExecutor);
// The region must be configured with the 2-key serde flavor (primary key + map key);
// the cast fails fast on misconfiguration.
this.pageSerdeFlink2Key = (PageSerdeFlink2KeyImpl) gRegionContext.getPageSerdeFlink();
}
/**
 * Creates a page store for map-typed state reusing an existing page index
 * (e.g. when restoring a region).
 *
 * @param gRegion the region this page store serves.
 * @param pageIndex the pre-built page index to use.
 * @param eventExecutor executor used by the parent store for page work.
 */
public PageStoreHashKMapImpl(
GRegion gRegion,
PageIndex pageIndex,
EventExecutor eventExecutor) {
super(gRegion, pageIndex, eventExecutor);
// Same 2-key serde requirement as the other constructor.
this.pageSerdeFlink2Key = (PageSerdeFlink2KeyImpl) gRegionContext.getPageSerdeFlink();
}
/**
 * Returns whether the given key has a non-empty map value.
 *
 * <p>GeminiDB represents an empty collection as null, so both a missing entry and an
 * empty map count as "not contained".
 *
 * @param key the primary key to look up.
 * @return true iff a non-empty map is stored for the key.
 */
@Override
public boolean contains(K key) {
//GeminiDB define empty collection as null.
Map> result = get(key);
// Reuse the already-fetched result. The previous code called get(key) a second time,
// which walked the whole page chain again and double-counted page request statistics.
return result != null && !result.isEmpty();
}
/**
 * Sums the request counters over all page addresses in the given data set.
 *
 * @param dataSet pairs whose f1 element carries a request counter.
 * @return total request count across the data set.
 */
@Override
long getRequestCount(List>>>> dataSet) {
// mapToLong(...).sum() accumulates in a primitive long. The previous
// reduce(0, (a, b) -> a + b) boxed every element and used an int identity,
// risking overflow of the running total for hot pages.
return dataSet.stream().mapToLong((value) -> value.f1.getRequestCount()).sum();
}
/**
 * Serializes the given write batch into a single KHashMap data page.
 *
 * @param version version stamp for the new page.
 * @param dataSet the page-address/data pairs to serialize.
 * @param logicPageId logical id of the page being created.
 * @return the new data page, or null when the resulting binary map is empty.
 */
@Override
DataPage doCreateDataPage(
long version, List>>>> dataSet, int logicPageId) {
GBinaryHashMap gBinaryHashMap = GBinaryHashMap.of(DataPageType.KHashMap,
dataSet,
this.pageSerdeFlink.getKeySerde(),
this.pageSerdeFlink2Key.getMapValueTypeSerializer(),
version,
logicPageId,
gContext.getSupervisor().getAllocator(),
1,
gRegionContext.getGContext().getInPageGCompressAlgorithm());
// EMPTY_G_BINARY_HASHMAP is a shared sentinel instance, so identity comparison is intentional.
return gBinaryHashMap == EMPTY_G_BINARY_HASHMAP
? null
: new DataPageKMapImpl<>(gBinaryHashMap,
this.pageSerdeFlink2Key.getKey2Serde(),
this.pageSerdeFlink2Key.getValueSerde(),
this.pageSerdeFlink2Key.getMapValueTypeSerializer());
}
/**
 * Reads the full map stored under the given key.
 *
 * <p>Walks the logical page chain from the newest page backwards, collecting partial
 * map values until a terminating entry is seen: a Delete tombstone (map removed) or a
 * PutMap (full overwrite, older fragments irrelevant). The collected fragments are then
 * merged newest-wins into a POJO map.
 *
 * @param key the primary key to look up.
 * @return the merged map, or null when the key is absent or deleted.
 * @throws GeminiRuntimeException if the DB leaves normal status during the read.
 */
@Override
public Map> get(K key) {
final PageIndexContext pageIndexContext = pageIndex.getPageIndexContext(key, false);
final LogicChainedPage logicPageID = pageIndexContext.getPageID();
if (isNullPage(logicPageID)) {
return null;
}
Map> finalResult = null;
int curIndex = logicPageID.getCurrentPageChainIndex();
// Fragments are appended newest-first; doCompactValueToPOJO relies on that order.
List binaryValueReversedOrderList = new ArrayList<>();
// Pages loaded during this read, reused by tryLaunchCompactionByRead below.
Map fetchedDataPageMap = new HashMap<>();
while (curIndex >= 0 && gContext.isDBNormal()) {
DataPage dataPage = getDataPageAutoLoadIfNeed(logicPageID, curIndex, fetchedDataPageMap);
Preconditions.checkArgument(dataPage instanceof DataPageKMap, "Interal BUG, error page");
//no need thread safe
logicPageID.getPageAddress(curIndex).addRequestCount(1);
gRegionContext.getPageStoreStats().addPageRequestCount(1);
BinaryValue binaryValue = dataPage.getBinaryValue(key);
// Release the reference taken by the page fetch; the BinaryValue stays usable here.
dataPage.delReferenceCount(ReleaseType.Normal);
if (binaryValue != null) {
if (binaryValue.getgValueType() == GValueType.Delete) {
//old value is useless
break;
} else {
binaryValueReversedOrderList.add(binaryValue);
if (binaryValue.getgValueType() == GValueType.PutMap) {
//old value is useless
break;
}
}
}
curIndex--;
}
if (!gContext.isDBNormal()) {
throw new GeminiRuntimeException("DB is in abnormal status.");
}
if (binaryValueReversedOrderList.size() == 0) {
finalResult = null;
} else {
finalResult = doCompactValueToPOJO(binaryValueReversedOrderList);
}
// Reading a long chain may trigger a compaction to shorten future reads.
tryLaunchCompactionByRead(pageIndexContext, logicPageID, fetchedDataPageMap);
// NOTE: state will be filtered by the upper level
return finalResult;
}
/**
 * Reads a single map entry (key, mapKey).
 *
 * <p>Walks the page chain from newest to oldest and stops at the first page containing
 * the map key: the newest value wins. A Delete tombstone or a state-filtered (expired)
 * entry terminates the search with a null result.
 *
 * @param key the primary key.
 * @param mapKey the map key under the primary key.
 * @return the value, or null when absent, deleted, or filtered out.
 * @throws GeminiRuntimeException if the DB leaves normal status during the read.
 */
@Override
public MV get(K key, MK mapKey) {
final PageIndexContext pageIndexContext = pageIndex.getPageIndexContext(key, false);
final LogicChainedPage logicPageID = pageIndexContext.getPageID();
if (isNullPage(logicPageID)) {
return null;
}
int curIndex = logicPageID.getCurrentPageChainIndex();
MV finalResult = null;
Map fetchedDataPageMap = new HashMap<>(curIndex);
while (curIndex >= 0 && gContext.isDBNormal()) {
DataPage dataPage = getDataPageAutoLoadIfNeed(logicPageID, curIndex, fetchedDataPageMap);
Preconditions.checkArgument(dataPage instanceof DataPageKMap, "Interal BUG, error page");
logicPageID.getPageAddress(curIndex).addRequestCount(1);
gRegionContext.getPageStoreStats().addPageRequestCount(1);
GSValue result = ((DataPageKMap) dataPage).get(key, mapKey);
// Release the reference taken by the page fetch.
dataPage.delReferenceCount(ReleaseType.Normal);
if (result != null) {
// First (newest) hit decides: tombstoned/expired means absent, otherwise return it.
if (result.getValueType() == GValueType.Delete || gRegionContext.filterState(result.getSeqID())) {
break;
}
finalResult = result.getValue();
break;
}
curIndex--;
}
if (!gContext.isDBNormal()) {
throw new GeminiRuntimeException("DB is in abnormal status.");
}
// Reading a long chain may trigger a compaction to shorten future reads.
tryLaunchCompactionByRead(pageIndexContext, logicPageID, fetchedDataPageMap);
return finalResult;
}
/**
 * Bulk-reads all entries into the given container.
 *
 * <p>NOTE(review): not implemented — this is currently a silent no-op and callers get
 * the container back unchanged. Confirm whether any caller relies on this before filling
 * in the implementation.
 */
@Override
public void getAll(Map>>> container) {
// TODO
}
/**
 * Returns whether a live value exists for the (key, mapKey) pair.
 *
 * @param key the primary key.
 * @param mapKey the map key under the primary key.
 * @return true iff {@link #get(Object, Object)} yields a non-null value.
 */
@Override
public boolean contains(K key, MK mapKey) {
//TODO to define the null.
final MV value = get(key, mapKey);
return value != null;
}
/**
 * Compacts a chain of data pages (given newest-first) into a single page.
 *
 * @param isMajor whether this is a major compaction.
 * @param canCompactPageListReversedOrder pages to compact, newest first; must be non-empty.
 * @param version version stamp for the compacted page.
 * @param logicPageId logical id of the page being compacted.
 * @return the compacted data page.
 * @throws GeminiRuntimeException if the page list is null or empty (caller bug).
 */
@Override
public DataPage doCompactPage(
boolean isMajor, List canCompactPageListReversedOrder, long version, int logicPageId) {
// isEmpty() instead of size() == 0; message typo fixed ("Interal" -> "Internal").
if (canCompactPageListReversedOrder == null || canCompactPageListReversedOrder.isEmpty()) {
throw new GeminiRuntimeException("Internal BUG");
}
return doCompactPageForStructureValue(isMajor, canCompactPageListReversedOrder, version, logicPageId);
}
/**
 * Merges a list of binary map fragments for one key into a single binary value,
 * delegating to the data-page implementation.
 *
 * @param binaryValueList fragments to merge.
 * @param isMajor whether this is a major compaction (tombstones can be dropped).
 * @param version version stamp for the merged value.
 * @param logicPageId logical id of the owning page.
 * @return the merged binary value.
 */
@Override
BinaryValue doCompactValue(
List binaryValueList, boolean isMajor, long version, int logicPageId) {
return DataPageKMapImpl.doCompactionMapValue(binaryValueList,
pageSerdeFlink2Key.getKey2Serde(),
pageSerdeFlink2Key.getValueSerde(),
isMajor,
version,
logicPageId,
//value use default Allocator.
gContext.getSupervisor().getDefaultAllocator(),
gContext.getStateFilter(),
gRegionContext);
}
/**
 * Merges binary map fragments (newest first) and deserializes the survivors into a
 * POJO map for the client.
 *
 * @param binaryValueReversedOrderList fragments in newest-first order.
 * @return the merged, deserialized map; deleted entries are omitted.
 */
Map> doCompactValueToPOJO(List binaryValueReversedOrderList) {
Map binaryValueMap = DataPageKMapImpl.doCompactValueToBinaryMap(
binaryValueReversedOrderList,
pageSerdeFlink2Key.getKey2Serde());
Map> result = new HashMap<>(binaryValueMap.size());
for (Map.Entry entry : binaryValueMap.entrySet()) {
// The POJO result goes to the client, so deleted entries are skipped rather than exposed.
if (entry.getValue() == null || entry.getValue().getgValueType() == GValueType.Delete) {
continue;
}
result.put(getMKeyFromBinary(entry.getKey()), getMValueFromBinary(entry.getValue()));
}
return result;
}
/**
 * Deserializes a binary map value into a GSValue POJO.
 *
 * @param binaryValue the serialized value; may be null.
 * @return null for null input; a Delete-typed GSValue with null payload for tombstones;
 *         otherwise the deserialized value with its original type and sequence id.
 * @throws GeminiRuntimeException if deserialization fails (cause preserved).
 */
protected GSValue getMValueFromBinary(BinaryValue binaryValue) {
if (binaryValue == null) {
return null;
}
// Tombstones carry no payload, so skip deserialization entirely.
if (binaryValue.getgValueType() == GValueType.Delete) {
return new GSValue<>(null, GValueType.Delete, binaryValue.getSeqID());
}
try {
DataInputView byteBufferDataInputView = new ByteBufferDataInputView(binaryValue.getBb(),
binaryValue.getValueOffset(),
binaryValue.getValueLen());
MV value = pageSerdeFlink2Key.getValueSerde().deserialize(byteBufferDataInputView);
return new GSValue<>(value, binaryValue.getgValueType(), binaryValue.getSeqID());
} catch (Exception e) {
throw new GeminiRuntimeException("Exception: " + e.getMessage(), e);
}
}
/**
 * Deserializes a binary map key into its POJO form.
 *
 * @param key the serialized map key; must not be null.
 * @return the deserialized map key.
 * @throws GeminiRuntimeException if the key is null or deserialization fails (cause preserved).
 */
protected MK getMKeyFromBinary(BinaryKey key) {
if (key == null) {
throw new GeminiRuntimeException("key can't be null");
}
try {
DataInputView byteBufferDataInputView = new ByteBufferDataInputView(key.getBb(),
key.getKeyOffset(),
key.getKeyLen());
return pageSerdeFlink2Key.getKey2Serde().deserialize(byteBufferDataInputView);
} catch (Exception e) {
throw new GeminiRuntimeException("Exception: " + e.getMessage(), e);
}
}
/**
 * Builds a compacted KHashMap data page from an already-merged binary map.
 *
 * @param isMajor whether this is a major compaction.
 * @param version version stamp for the new page.
 * @param logicPageId logical id of the page.
 * @param keySerde serializer for the primary key (parameter kept for the parent contract;
 *        note the body uses this.pageSerdeFlink.getKeySerde() — presumably equivalent, verify).
 * @param finalCompactedMap the merged binary entries to write.
 * @param compactionCount how many compactions this page has undergone.
 * @return the new data page, or null when the resulting binary map is empty.
 */
@Override
protected DataPage doBuildDataPageFromGBinaryMap(
boolean isMajor,
long version,
int logicPageId,
TypeSerializer keySerde,
Map finalCompactedMap,
long compactionCount) {
GBinaryHashMap gBinaryHashMap = GBinaryHashMap.ofBinaryList(DataPageType.KHashMap,
isMajor,
version,
logicPageId,
this.pageSerdeFlink.getKeySerde(),
gContext.getSupervisor().getAllocator(),
finalCompactedMap,
compactionCount,
gContext.getStateFilter(),
gRegionContext);
//TODO delReference finalCompactedMap.
// EMPTY_G_BINARY_HASHMAP is a shared sentinel instance, so identity comparison is intentional.
return gBinaryHashMap == EMPTY_G_BINARY_HASHMAP
? null
: new DataPageKMapImpl<>(gBinaryHashMap,
this.pageSerdeFlink2Key.getKey2Serde(),
this.pageSerdeFlink2Key.getValueSerde(),
this.pageSerdeFlink2Key.getMapValueTypeSerializer());
}
}