/*-
* -\-\-
* DBeam Core
* --
* Copyright (C) 2016 - 2019 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.dbeam.avro;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class JdbcAvroRecordConverter {
  private final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings;
private final int columnCount;
private final ResultSet resultSet;
private final EncoderFactory encoderFactory = EncoderFactory.get();
public JdbcAvroRecordConverter(
      final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings,
final int columnCount,
final ResultSet resultSet) {
this.mappings = mappings;
this.columnCount = columnCount;
this.resultSet = resultSet;
}
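
  /** Creates a converter for the given result set, computing column mappings from its metadata. */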
public static JdbcAvroRecordConverter create(final ResultSet resultSet) throws SQLException {
return new JdbcAvroRecordConverter(
computeAllMappings(resultSet), resultSet.getMetaData().getColumnCount(), resultSet);
}
@SuppressWarnings("unchecked")
  static JdbcAvroRecord.SqlFunction<ResultSet, Object>[] computeAllMappings(final ResultSet resultSet)
      throws SQLException {
final ResultSetMetaData meta = resultSet.getMetaData();
final int columnCount = meta.getColumnCount();
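    // JDBC columns are 1-indexed; allocate an extra slot so mappings[i] lines up with column i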
    final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings =
        new JdbcAvroRecord.SqlFunction[columnCount + 1];
for (int i = 1; i <= columnCount; i++) {
mappings[i] = JdbcAvroRecord.computeMapping(meta, i);
}
return mappings;
}
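
  // Kept as a field so the encoder instance can be reused across rows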
private BinaryEncoder binaryEncoder = null;
public static class MyByteArrayOutputStream extends ByteArrayOutputStream {
MyByteArrayOutputStream(int size) {
super(size);
}
    // Expose the internal buffer directly, avoiding the copy made by toByteArray()
    byte[] getBuffer() {
return buf;
}
}
/**
   * Reads a single row of the result set and encodes it into an Avro record as a byte array.
   * Reading and encoding directly avoids copying bytes between intermediate objects.
*
* @return a ByteBuffer with binary encoded Avro record
* @throws SQLException in case reading row from JDBC fails
* @throws IOException in case binary encoding fails
*/
public ByteBuffer convertResultSetIntoAvroBytes() throws SQLException, IOException {
final MyByteArrayOutputStream out = new MyByteArrayOutputStream(columnCount * 64);
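    // directBinaryEncoder writes straight to the stream; passing the previous
    // encoder lets Avro reuse that instance instead of allocating a new one per row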
binaryEncoder = encoderFactory.directBinaryEncoder(out, binaryEncoder);
for (int i = 1; i <= columnCount; i++) {
final Object value = mappings[i].apply(resultSet);
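      // Each nullable column is an Avro union ["null", type]: write the branch index, then the value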
if (value == null || resultSet.wasNull()) {
binaryEncoder.writeIndex(0);
binaryEncoder.writeNull();
} else {
binaryEncoder.writeIndex(1);
if (value instanceof String) {
binaryEncoder.writeString((String) value);
} else if (value instanceof Long) {
binaryEncoder.writeLong((Long) value);
} else if (value instanceof Integer) {
binaryEncoder.writeInt((Integer) value);
} else if (value instanceof Boolean) {
binaryEncoder.writeBoolean((Boolean) value);
} else if (value instanceof ByteBuffer) {
binaryEncoder.writeBytes((ByteBuffer) value);
} else if (value instanceof Double) {
binaryEncoder.writeDouble((Double) value);
        } else if (value instanceof Float) {
          binaryEncoder.writeFloat((Float) value);
        } else {
          // A union branch index was already written; fail fast instead of emitting a truncated record
          throw new IllegalStateException("Unsupported type for column value: " + value.getClass());
        }
}
}
binaryEncoder.flush();
    return ByteBuffer.wrap(out.getBuffer(), 0, out.size());
}
}
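
/*
 * A minimal usage sketch, not part of the original file: `connection`, `events`, and
 * `dataFileWriter` are hypothetical placeholders; only the converter API above comes from this
 * class. The encoded buffers can be appended to an Avro container file with
 * org.apache.avro.file.DataFileWriter#appendEncoded, which accepts pre-encoded datum bytes.
 *
 *   try (Statement statement = connection.createStatement();
 *       ResultSet resultSet = statement.executeQuery("SELECT * FROM events")) {
 *     final JdbcAvroRecordConverter converter = JdbcAvroRecordConverter.create(resultSet);
 *     while (resultSet.next()) {
 *       final ByteBuffer avroBytes = converter.convertResultSetIntoAvroBytes();
 *       dataFileWriter.appendEncoded(avroBytes); // assumed DataFileWriter<GenericRecord>
 *     }
 *   }
 */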