/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.state.gemini.engine.hashtable;
import org.apache.flink.api.common.typeutils.base.ByteSerializer;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.runtime.state.gemini.engine.GRegion;
import org.apache.flink.runtime.state.gemini.engine.GRegionIDImpl;
import org.apache.flink.runtime.state.gemini.engine.GTableDescription;
import org.apache.flink.runtime.state.gemini.engine.GeminiPKey2;
import org.apache.flink.runtime.state.gemini.engine.dbms.GContext;
import org.apache.flink.runtime.state.gemini.engine.page.PKey2Serializer;
import org.apache.flink.runtime.state.gemini.engine.page.PageSerdeFlink2KeyImpl;
import org.apache.flink.runtime.state.gemini.engine.page.PageSerdeFlinkListImpl;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import static org.apache.flink.runtime.state.gemini.engine.GRegionID.G_REGION_DATA;
import static org.apache.flink.runtime.state.gemini.engine.GRegionID.G_REGION_INDEX_1;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * {@code GTableSubKeyedListImpl} is a table whose value for each (key, namespace) pair is a list.
 * K is the key type.
 * N is the namespace type.
 * E is the type of an element in the list value.
 */
public class GTableSubKeyedListImpl implements GTableWithPKey2 {
private final GRegionKListImpl, E>[] regions;
private final KMapTableDescription indexDescription;
private GRegionKMapImpl[] keyIndexRegions;
// 1 should rebuild the index after restored? should we use other state here?
// 2 every K may have one or more N, and every N may corresponds to one or more K, should we use GRegionKMapImple here?
// we do not support find keys by namespace here.
private final int startGroup;
private final int numberGroups;
private final int endGroup;
private final int maxParallelism;
private final GTableDescription description;
private final GContext gContext;
public GTableSubKeyedListImpl(
GTableDescription description,
int startGroup,
int numberGroups,
int maxParallelism,
GContext context) {
checkArgument(startGroup >= 0);
checkArgument(numberGroups > 0);
checkArgument(maxParallelism > 0);
checkArgument(startGroup < maxParallelism && startGroup + numberGroups <= maxParallelism);
checkArgument(description != null && description.getPageSerde() instanceof PageSerdeFlinkListImpl);
checkNotNull(context);
this.startGroup = startGroup;
this.numberGroups = numberGroups;
this.endGroup = startGroup + numberGroups;
this.maxParallelism = maxParallelism;
this.description = description;
this.gContext = context;
this.regions = new GRegionKListImpl[numberGroups];
this.keyIndexRegions = new GRegionKMapImpl[numberGroups];
PageSerdeFlinkListImpl, E> pageSerde = (PageSerdeFlinkListImpl, E>) description.getPageSerde();
PKey2Serializer pKey2Serializer = (PKey2Serializer) pageSerde.getKeySerde();
PageSerdeFlink2KeyImpl indexPageSerde = new PageSerdeFlink2KeyImpl<>(
pKey2Serializer.getFirstSerializer(),
pKey2Serializer.getSecondSerializer(),
ByteSerializer.INSTANCE,
null,
context.getGConfiguration().isChecksumEnable());
this.indexDescription = new KMapTableDescription<>(description.getTableName(),
startGroup,
numberGroups,
maxParallelism,
indexPageSerde);
}
@Override
public GTableDescription getTableDescription() {
return description;
}
@Override
public Iterator getSecondaryKeyByFirstKey(K key) {
int group = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
int offset = group - startGroup;
GRegionKMapImpl region = keyIndexRegions[offset];
if (region == null) {
return Collections.emptyIterator();
}
GRegionKListImpl, E> dataRegion = regions[offset];
Map index = region.get(key);
if (index == null) {
return Collections.emptyIterator();
} else {
return region.get(key).keySet()
.stream()
.filter(x -> dataRegion.contains(getGeminiPKey2(key, x)))
.iterator();
}
}
@Override
public GeminiPKey2 getGeminiPKey2(K key1, N key2) {
return new GeminiPKey2<>(key1, key2);
}
@Override
public GRegionKListImpl, E> getRegion(GeminiPKey2 key) {
// we use the first key (K) as the input of assignToKeyGroup, so the group is same as the DataStream#KeyBy()
int group = KeyGroupRangeAssignment.assignToKeyGroup(key.getFirstKey(), maxParallelism);
int offset = group - startGroup;
GRegionKListImpl region = regions[offset];
if (region == null) {
region = (GRegionKListImpl) description.createRegion(gContext,
this,
new GRegionIDImpl(G_REGION_DATA, group));
regions[offset] = region;
}
return region;
}
public GRegionKMapImpl getIndexRegion(GeminiPKey2 key) {
int group = KeyGroupRangeAssignment.assignToKeyGroup(key.getFirstKey(), maxParallelism);
int offset = group - startGroup;
GRegionKMapImpl region = keyIndexRegions[offset];
if (region == null) {
region = (GRegionKMapImpl) indexDescription.createRegion(gContext,
this,
new GRegionIDImpl(G_REGION_INDEX_1, group));
keyIndexRegions[offset] = region;
}
return region;
}
@Override
public Iterator regionIterator() {
return new Iterator() {
private int currentIndex = 0;
private void advance() {
currentIndex++;
while (currentIndex < regions.length && regions[currentIndex] == null) {
currentIndex++;
}
}
@Override
public boolean hasNext() {
if (currentIndex < regions.length && regions[currentIndex] != null) {
return true;
}
advance();
return currentIndex < regions.length;
}
@Override
public GRegion next() {
return regions[currentIndex++];
}
};
}
@Override
public Iterator indexRegionIterator() {
return new Iterator() {
private int currentIndex = 0;
private void advance() {
currentIndex++;
while (currentIndex < keyIndexRegions.length && keyIndexRegions[currentIndex] == null) {
currentIndex++;
}
}
@Override
public boolean hasNext() {
if (currentIndex < keyIndexRegions.length && keyIndexRegions[currentIndex] != null) {
return true;
}
advance();
return currentIndex < keyIndexRegions.length;
}
@Override
public GRegion next() {
return keyIndexRegions[currentIndex++];
}
};
}
@Override
public void setRegion(int idx, GRegion region) {
checkArgument(idx >= startGroup && idx < endGroup);
regions[idx - startGroup] = (GRegionKListImpl) region;
}
@Override
public void setIndexRegion(int idx, GRegion indexRegion) {
checkArgument(idx >= startGroup && idx < endGroup);
keyIndexRegions[idx - startGroup] = (GRegionKMapImpl) indexRegion;
}
@Override
public GTableDescription getIndexDescription() {
return indexDescription;
}
}