/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ================================================================================================
// !!! NOTICE !!!
//
// This interface has been directly copied from the Apache Hadoop project.
// It has been added to this project to allow compiling against the type "Writable"
// without adding the heavyweight Hadoop dependency. This keeps the project dependencies
// lightweight.
//
// At runtime, the JVM will load either this interface or the interface from a Hadoop jar,
// if one is present on the classpath. In both cases, dynamic class loading, linking, and
// method lookup allow the types to interoperate, as long as the package name, class name,
// and method signatures of this interface are kept strictly in sync with the version
// packaged with Hadoop.
//
// This is a core interface of the Hadoop project and has been stable across all releases.
//
// ================================================================================================
package org.apache.hadoop.io;
import java.io.DataOutput;
import java.io.DataInput;
import java.io.IOException;
/**
* A serializable object which implements a simple, efficient, serialization
* protocol, based on {@link DataInput} and {@link DataOutput}.
*
* Any key or value type in the Hadoop Map-Reduce framework implements this
* interface.
*
* Implementations typically implement a static read(DataInput)
* method which constructs a new instance, calls {@link #readFields(DataInput)}
* and returns the instance.
*
* Example:
* <pre>{@code
* public class MyWritable implements Writable {
*     // Some data
*     private int counter;
*     private long timestamp;
*
*     // Default constructor to allow (de)serialization
*     MyWritable() { }
*
*     public void write(DataOutput out) throws IOException {
*         out.writeInt(counter);
*         out.writeLong(timestamp);
*     }
*
*     public void readFields(DataInput in) throws IOException {
*         counter = in.readInt();
*         timestamp = in.readLong();
*     }
*
*     public static MyWritable read(DataInput in) throws IOException {
*         MyWritable w = new MyWritable();
*         w.readFields(in);
*         return w;
*     }
* }
* }</pre>
*/
public interface Writable {
/**
* Serialize the fields of this object to <code>out</code>.
*
* @param out <code>DataOutput</code> to serialize this object into.
* @throws IOException if an I/O error occurs while writing
*/
void write(DataOutput out) throws IOException;
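// A minimal usage sketch (not part of the original Hadoop interface): write() is typically
// driven through a java.io.DataOutputStream. The helper name "toBytes" below is purely
// illustrative.
//
//     public static byte[] toBytes(Writable w) throws IOException {
//         java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
//         java.io.DataOutputStream out = new java.io.DataOutputStream(buffer);
//         w.write(out);   // the object serializes its own fields in order
//         out.flush();
//         return buffer.toByteArray();
//     }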
/**
* Deserialize the fields of this object from <code>in</code>.
*
* For efficiency, implementations should attempt to re-use storage in the
* existing object where possible.
*
* @param in <code>DataInput</code> to deserialize this object from.
* @throws IOException if an I/O error occurs while reading
*/
void readFields(DataInput in) throws IOException;
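// A minimal deserialization sketch (not part of the original Hadoop interface), mirroring
// the static read(DataInput) convention described in the class Javadoc above. "fromBytes"
// and the MyWritable type are illustrative only.
//
//     public static MyWritable fromBytes(byte[] bytes) throws IOException {
//         java.io.DataInputStream in =
//                 new java.io.DataInputStream(new java.io.ByteArrayInputStream(bytes));
//         MyWritable w = new MyWritable();
//         w.readFields(in);   // fields are read back in the order they were written
//         return w;
//     }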
}