/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
 */

package com.netflix.hollow.core.write;

import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.ByteArrayOrdinalMap;
import com.netflix.hollow.core.memory.SegmentedByteArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.encoding.ZigZag;
import com.netflix.hollow.core.schema.HollowObjectSchema;
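
/**
 * Hashes the primary key field values of records held in a {@link HollowWriteStateEngine}.
 * The field path for each {@link PrimaryKey} field is resolved once in the constructor; record
 * hashes are then computed by reading the key field values directly out of the serialized
 * write-state records.
 */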
class HollowWriteStateEnginePrimaryKeyHasher {
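
    /*
     * For each primary key field: the object type states visited along its field path, and the
     * field index used at each step of that path.
     */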
    private final HollowObjectTypeWriteState typeStates[][];
    private final int[][] fieldPathIndexes;

    public HollowWriteStateEnginePrimaryKeyHasher(PrimaryKey primaryKey, HollowWriteStateEngine writeEngine) {
        HollowWriteStateEngine stateEngine = writeEngine;
        HollowObjectTypeWriteState rootTypeWriteState = (HollowObjectTypeWriteState)writeEngine.getTypeState(primaryKey.getType());

        this.fieldPathIndexes = new int[primaryKey.numFields()][];
        this.typeStates = new HollowObjectTypeWriteState[primaryKey.numFields()][];
        for(int i=0;i<primaryKey.numFields();i++) {
            fieldPathIndexes[i] = primaryKey.getFieldPathIndex(writeEngine, i);
            typeStates[i] = new HollowObjectTypeWriteState[fieldPathIndexes[i].length];

            /* The first path element lives on the root type; each subsequent element follows the
             * REFERENCE field declared at the previous position of the path. */
            typeStates[i][0] = rootTypeWriteState;
            for(int j=1;j<fieldPathIndexes[i].length;j++) {
                String referencedType = typeStates[i][j-1].getSchema().getReferencedType(fieldPathIndexes[i][j-1]);
                typeStates[i][j] = (HollowObjectTypeWriteState) stateEngine.getTypeState(referencedType);
            }
        }
    }

    /*
     * NOTE: the helper names and ordinal-map access below are an inferred sketch of the hashing
     * path, based on the fields, imports and field-type cases in this class.
     */
    public int getRecordHash(int ordinal) {
        int hashCode = 0;

        for(int i=0;i<fieldPathIndexes.length;i++)
            hashCode = (hashCode * 31) ^ getFieldHash(ordinal, i);

        return hashCode;
    }

    private int getFieldHash(int ordinal, int fieldIdx) {
        int lastPathIdx = fieldPathIndexes[fieldIdx].length - 1;

        /* Follow the REFERENCE fields along the path until we reach the record which holds the key field. */
        for(int i=0;i<lastPathIdx;i++) {
            HollowObjectTypeWriteState refTypeState = typeStates[fieldIdx][i];
            SegmentedByteArray refData = refTypeState.ordinalMap.getByteData().getUnderlyingArray();
            long refOffset = fieldOffset(refTypeState, ordinal, fieldPathIndexes[fieldIdx][i], refData);
            ordinal = VarInt.readVInt(refData, refOffset);
        }

        HollowObjectTypeWriteState typeState = typeStates[fieldIdx][lastPathIdx];
        SegmentedByteArray data = typeState.ordinalMap.getByteData().getUnderlyingArray();
        long offset = fieldOffset(typeState, ordinal, fieldPathIndexes[fieldIdx][lastPathIdx], data);

        return hashFieldValue(data, offset, typeState.getSchema(), fieldPathIndexes[fieldIdx][lastPathIdx]);
    }

    /*
     * Locate the start of the requested field within the serialized record for the given ordinal
     * by skipping over the serialized representation of each preceding field.
     */
    private long fieldOffset(HollowObjectTypeWriteState typeState, int ordinal, int fieldIdx, SegmentedByteArray data) {
        HollowObjectSchema schema = typeState.getSchema();
        long offset = typeState.ordinalMap.getPointerForData(ordinal);

        for(int i=0;i<fieldIdx;i++) {
            switch(schema.getFieldType(i)) {
                case INT:
                case REFERENCE:
                    offset += VarInt.readVNull(data, offset) ? 1 : VarInt.sizeOfVInt(VarInt.readVInt(data, offset));
                    break;
                case LONG:
                    offset += VarInt.readVNull(data, offset) ? 1 : VarInt.sizeOfVLong(VarInt.readVLong(data, offset));
                    break;
                case BOOLEAN:
                    offset += 1;
                    break;
                case FLOAT:
                    offset += 4;
                    break;
                case DOUBLE:
                    offset += 8;
                    break;
                case STRING:
                case BYTES:
                    if(VarInt.readVNull(data, offset)) {
                        offset += 1;
                    } else {
                        int len = VarInt.readVInt(data, offset);
                        offset += VarInt.sizeOfVInt(len) + len;
                    }
                    break;
            }
        }

        return offset;
    }

    private int hashFieldValue(SegmentedByteArray data, long offset, HollowObjectSchema schema, int fieldIdx) {
        switch(schema.getFieldType(fieldIdx)) {
            case INT:
                /* INT and LONG values are ZigZag-encoded VarInts. */
                return ZigZag.decodeInt(VarInt.readVInt(data, offset));
            case LONG:
                long longVal = ZigZag.decodeLong(VarInt.readVLong(data, offset));
                return (int)(longVal ^ (longVal >>> 32));
            /* A REFERENCE hashes to the ordinal of the referenced record. */
            case REFERENCE:
                return VarInt.readVInt(data, offset);
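            /* BYTES and STRING values carry a VarInt byte-length prefix; skip past the prefix and
             * hash the content which follows it. */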
            case BYTES:
                int byteLen = VarInt.readVInt(data, offset);
                offset += VarInt.sizeOfVInt(byteLen);
                return HashCodes.hashCode(data, offset, byteLen);
            case STRING:
                int strByteLen = VarInt.readVInt(data, offset);
                offset += VarInt.sizeOfVInt(strByteLen);
                return getNaturalStringHashCode(data, offset, strByteLen);
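            /* Booleans hash to the values used by Boolean.hashCode() (1231 / 1237); null hashes to 0. */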
            case BOOLEAN:
                if(VarInt.readVNull(data, offset))
                    return 0;
                return data.get(offset) == 1 ? 1231 : 1237;
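            /* Doubles and floats hash their raw IEEE 754 bit patterns: doubles fold the long bits as
             * Double.hashCode() does, floats return the int bits directly. */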
            case DOUBLE:
                long longBits = data.readLongBits(offset);
                return (int)(longBits ^ (longBits >>> 32));
            case FLOAT:
                return data.readIntBits(offset);
            default:
                throw new IllegalArgumentException("Schema " + schema.getName() + " has unknown field type for field "
                        + schema.getFieldName(fieldIdx) + ": " + schema.getFieldType(fieldIdx));
        }
    }
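
    /*
     * Hashes the VarInt-encoded characters of a serialized String value using the same
     * (hash * 31 + ch) recurrence as String.hashCode().
     */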
    private int getNaturalStringHashCode(SegmentedByteArray data, long offset, int len) {
        int hashCode = 0;
        long endOffset = len + offset;

        while(offset < endOffset) {
            int ch = VarInt.readVInt(data, offset);
            hashCode = hashCode * 31 + ch;
            offset += VarInt.sizeOfVInt(ch);
        }

        return hashCode;
    }
}