/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.avro;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.io.Writable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class AvroDeserializer {
private static final Log LOG = LogFactory.getLog(AvroDeserializer.class);
/**
* When encountering a record with an older schema than the one we're trying
* to read, it is necessary to re-encode with a reader against the newer schema.
* Because Hive doesn't provide a way to pass extra information to the
* inputformat, we're unable to provide the newer schema when we have it and it
* would be most useful - when the inputformat is reading the file.
*
* This is a slow process, so we try to cache as many of the objects as possible.
*/
static class SchemaReEncoder {
private final ByteArrayOutputStream baos = new ByteArrayOutputStream();
private final GenericDatumWriter gdw = new GenericDatumWriter();
private BinaryDecoder binaryDecoder = null;
private InstanceCache> gdrCache
= new InstanceCache>() {
@Override
protected GenericDatumReader makeInstance(ReaderWriterSchemaPair hv) {
return new GenericDatumReader(hv.getWriter(), hv.getReader());
}
};
public GenericRecord reencode(GenericRecord r, Schema readerSchema)
throws AvroSerdeException {
baos.reset();
BinaryEncoder be = EncoderFactory.get().directBinaryEncoder(baos, null);
gdw.setSchema(r.getSchema());
try {
gdw.write(r, be);
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
binaryDecoder = DecoderFactory.defaultFactory().createBinaryDecoder(bais, binaryDecoder);
ReaderWriterSchemaPair pair = new ReaderWriterSchemaPair(r.getSchema(), readerSchema);
GenericDatumReader gdr = gdrCache.retrieve(pair);
return gdr.read(r, binaryDecoder);
} catch (IOException e) {
throw new AvroSerdeException("Exception trying to re-encode record to new schema", e);
}
}
}
private List