/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.avro;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.Conversion;
import org.apache.avro.LogicalType;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.specific.SpecificData;
import org.apache.parquet.Preconditions;
import org.apache.parquet.io.InvalidRecordException;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.io.api.PrimitiveConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Type;
/**
* This {@link Converter} class materializes records as Avro
* {@link IndexedRecord} instances. This is replaced by
* {@link AvroRecordConverter}, but is included for backward-compatibility.
*
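 * <p>
 * For illustration only, a minimal sketch (hypothetical file path) of how
 * records typically reach this converter when the backward-compatible read
 * path is selected, via {@link AvroParquetReader}:
 * <pre>{@code
 * ParquetReader<IndexedRecord> reader = AvroParquetReader
 *     .<IndexedRecord>builder(new Path("data.parquet"))
 *     .withDataModel(SpecificData.get())
 *     .build();
 * IndexedRecord record = reader.read();
 * }</pre>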
 * @param <T> a subclass of Avro's IndexedRecord
*/
class AvroIndexedRecordConverter<T extends IndexedRecord> extends GroupConverter {
private final ParentValueContainer parent;
protected T currentRecord;
private final Converter[] converters;
private final Schema avroSchema;
  private final Class<? extends IndexedRecord> specificClass;
private final GenericData model;
  private final Map<Schema.Field, Object> recordDefaults = new HashMap<Schema.Field, Object>();
public AvroIndexedRecordConverter(MessageType parquetSchema, Schema avroSchema) {
this(parquetSchema, avroSchema, SpecificData.get());
}
public AvroIndexedRecordConverter(MessageType parquetSchema, Schema avroSchema,
GenericData baseModel) {
this(null, parquetSchema, avroSchema, baseModel);
}
public AvroIndexedRecordConverter(ParentValueContainer parent, GroupType
parquetSchema, Schema avroSchema) {
this(parent, parquetSchema, avroSchema, SpecificData.get());
}
public AvroIndexedRecordConverter(ParentValueContainer parent, GroupType
parquetSchema, Schema avroSchema, GenericData baseModel) {
this.parent = parent;
this.avroSchema = avroSchema;
int schemaSize = parquetSchema.getFieldCount();
this.converters = new Converter[schemaSize];
this.specificClass = getDatumClass(baseModel, avroSchema);
this.model = this.specificClass == null ? GenericData.get() : baseModel;
    Map<String, Integer> avroFieldIndexes = new HashMap<String, Integer>();
int avroFieldIndex = 0;
for (Schema.Field field: avroSchema.getFields()) {
avroFieldIndexes.put(field.name(), avroFieldIndex++);
}
int parquetFieldIndex = 0;
for (Type parquetField: parquetSchema.getFields()) {
Schema.Field avroField = getAvroField(parquetField.getName());
Schema nonNullSchema = AvroSchemaConverter.getNonNull(avroField.schema());
final int finalAvroIndex = avroFieldIndexes.remove(avroField.name());
converters[parquetFieldIndex++] = newConverter(nonNullSchema, parquetField, model, new ParentValueContainer() {
@Override
public void add(Object value) {
AvroIndexedRecordConverter.this.set(finalAvroIndex, value);
}
});
}
// store defaults for any new Avro fields from avroSchema that are not in the writer schema (parquetSchema)
for (String fieldName : avroFieldIndexes.keySet()) {
Schema.Field field = avroSchema.getField(fieldName);
if (field.schema().getType() == Schema.Type.NULL) {
continue; // skip null since Parquet does not write nulls
}
if (field.defaultVal() == null || model.getDefaultValue(field) == null) {
continue; // field has no default
}
recordDefaults.put(field, model.getDefaultValue(field));
}
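    // The loop above captures defaults for schema evolution. Illustration
    // (hypothetical schemas): if the file was written with
    //   record User { string name; }
    // and avroSchema is the evolved
    //   record User { string name; int age = 0; }
    // then "age" never appears in parquetSchema, so it survives in
    // avroFieldIndexes and its default (0) is stored here for end() to fill in.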
}
@SuppressWarnings("unchecked")
  private static <T> Class<T> getDatumClass(GenericData model, Schema schema) {
if (model.getConversionFor(schema.getLogicalType()) != null) {
// use generic classes to pass data to conversions
return null;
}
if (model instanceof SpecificData) {
      return (Class<T>) ((SpecificData) model).getClass(schema);
}
return null;
}
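  // A sketch of the conversion case above (assumes Avro 1.8+, whose stock
  // org.apache.avro.data.TimeConversions are on the classpath): registering a
  // conversion on the model makes getConversionFor() non-null, so
  // getDatumClass() deliberately returns null and generic data is
  // materialized for the conversion to transform.
  //   GenericData model = new GenericData();
  //   model.addLogicalTypeConversion(new TimeConversions.DateConversion());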
  private Schema.Field getAvroField(String parquetFieldName) {
    Schema.Field avroField = avroSchema.getField(parquetFieldName);
    if (avroField != null) {
      return avroField;
    }
    for (Schema.Field f : avroSchema.getFields()) {
      if (f.aliases().contains(parquetFieldName)) {
        return f;
      }
    }
    throw new InvalidRecordException(String.format(
        "Parquet/Avro schema mismatch. Avro field '%s' not found.",
        parquetFieldName));
  }
private static Converter newConverter(Schema schema, Type type,
GenericData model, ParentValueContainer setter) {
LogicalType logicalType = schema.getLogicalType();
// the expected type is always null because it is determined by the parent
// datum class, which never helps for generic. when logical types are added
// to specific, this should pass the expected type here.
    Conversion<?> conversion = model.getConversionFor(logicalType);
ParentValueContainer parent = ParentValueContainer
.getConversionContainer(setter, conversion, schema);
switch (schema.getType()) {
case ARRAY:
return new AvroArrayConverter(parent, type.asGroupType(), schema, model);
case BOOLEAN:
return new AvroConverters.FieldBooleanConverter(parent);
case BYTES:
return new AvroConverters.FieldByteBufferConverter(parent);
case DOUBLE:
return new AvroConverters.FieldDoubleConverter(parent);
case ENUM:
return new FieldEnumConverter(parent, schema, model);
case FIXED:
return new FieldFixedConverter(parent, schema, model);
case FLOAT:
return new AvroConverters.FieldFloatConverter(parent);
case INT:
return new AvroConverters.FieldIntegerConverter(parent);
case LONG:
return new AvroConverters.FieldLongConverter(parent);
case MAP:
return new MapConverter(parent, type.asGroupType(), schema, model);
case RECORD:
        return new AvroIndexedRecordConverter<IndexedRecord>(parent, type.asGroupType(), schema, model);
case STRING:
return new AvroConverters.FieldStringConverter(parent);
case UNION:
return new AvroUnionConverter(parent, type, schema, model);
case NULL: // fall through
default:
throw new UnsupportedOperationException(String.format("Cannot convert Avro type: %s" +
" (Parquet type: %s) ", schema, type));
}
}
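  // Example dispatch (sketch): a STRING schema paired with a Parquet binary
  // field yields AvroConverters.FieldStringConverter, while a RECORD schema
  // recurses into a nested AvroIndexedRecordConverter for the subgroup.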
private void set(int index, Object value) {
this.currentRecord.put(index, value);
}
@Override
public Converter getConverter(int fieldIndex) {
return converters[fieldIndex];
}
@Override
@SuppressWarnings("unchecked")
public void start() {
// Should do the right thing whether it is generic or specific
this.currentRecord = (T) ((this.specificClass == null) ?
new GenericData.Record(avroSchema) :
SpecificData.newInstance(specificClass, avroSchema));
}
@Override
public void end() {
fillInDefaults();
if (parent != null) {
parent.add(currentRecord);
}
}
private void fillInDefaults() {
    for (Map.Entry<Schema.Field, Object> entry : recordDefaults.entrySet()) {
Schema.Field f = entry.getKey();
// replace following with model.deepCopy once AVRO-1455 is being used
Object defaultValue = deepCopy(f.schema(), entry.getValue());
this.currentRecord.put(f.pos(), defaultValue);
}
}
private Object deepCopy(Schema schema, Object value) {
switch (schema.getType()) {
case BOOLEAN:
case INT:
case LONG:
case FLOAT:
case DOUBLE:
return value;
default:
return model.deepCopy(schema, value);
}
}
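  // The cases above return primitives as-is because their boxed forms
  // (Boolean, Integer, Long, Float, Double) are immutable; everything else,
  // e.g. BYTES as a mutable ByteBuffer, or nested arrays/maps/records, is
  // defensively copied through model.deepCopy so shared defaults stay intact.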
T getCurrentRecord() {
return currentRecord;
}
static final class FieldEnumConverter extends PrimitiveConverter {
private final ParentValueContainer parent;
    private final Class<? extends Enum> enumClass;
public FieldEnumConverter(ParentValueContainer parent, Schema enumSchema,
GenericData model) {
this.parent = parent;
this.enumClass = model instanceof SpecificData ?
((SpecificData) model).getClass(enumSchema) :
SpecificData.get().getClass(enumSchema);
}
@Override
final public void addBinary(Binary value) {
Object enumValue = value.toStringUsingUTF8();
if (enumClass != null) {
        enumValue = Enum.valueOf(enumClass, (String) enumValue);
}
parent.add(enumValue);
}
}
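  // Sketch (hypothetical generated enum "Color"): for the symbol "RED",
  // addBinary(Binary.fromString("RED")) yields Color.RED under specific data,
  // and the plain String "RED" when no generated enum class is available.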
static final class FieldFixedConverter extends PrimitiveConverter {
private final ParentValueContainer parent;
private final Schema avroSchema;
    private final Class<? extends GenericData.Fixed> fixedClass;
private final Constructor fixedClassCtor;
public FieldFixedConverter(ParentValueContainer parent, Schema avroSchema,
GenericData model) {
this.parent = parent;
this.avroSchema = avroSchema;
this.fixedClass = model instanceof SpecificData ?
((SpecificData) model).getClass(avroSchema) :
SpecificData.get().getClass(avroSchema);
if (fixedClass != null) {
try {
this.fixedClassCtor =
fixedClass.getConstructor(new Class[] { byte[].class });
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
this.fixedClassCtor = null;
}
}
@Override
final public void addBinary(Binary value) {
if (fixedClass == null) {
parent.add(new GenericData.Fixed(avroSchema, value.getBytes()));
} else {
if (fixedClassCtor == null) {
throw new IllegalArgumentException(
"fixedClass specified but fixedClassCtor is null.");
}
try {
Object fixed = fixedClassCtor.newInstance(value.getBytes());
parent.add(fixed);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
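  // Sketch (hypothetical 16-byte fixed type "MD5"): generic reads produce
  // new GenericData.Fixed(avroSchema, bytes), while specific reads invoke the
  // generated MD5(byte[]) constructor reflectively through fixedClassCtor.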
/**
* Converter for a list.
 *
 * <pre>
 *   optional group the_list (LIST) { <-- this layer
 *     repeated group array {
 *       optional (type) element;
 *     }
 *   }
 * </pre>
 *
* This class also implements LIST element backward-compatibility rules.
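 * <p>
 * For reference (a sketch; names hypothetical), the Avro side of this layout
 * is a plain array schema such as {@code {"type": "array", "items": "int"}},
 * optionally wrapped in a nullable union.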
*/
static final class AvroArrayConverter extends GroupConverter {
private final ParentValueContainer parent;
private final Schema avroSchema;
private final Converter converter;
    private GenericArray<Object> array;