
com.aceql.jdbc.commons.AceQLBlob Maven / Gradle / Ivy


The AceQL Java Client JDBC Driver wraps the AceQL HTTP APIs and eliminates the tedious work of handling communication errors and parsing JSON results. Android and Java desktop application developers can access remote SQL databases and/or SQL databases in the cloud by simply including standard JDBC calls in their code, just as they would for a local database.
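For orientation, here is a minimal sketch of what such code can look like. The server URL, database name, and credentials are placeholders, and the driver class name used below is an assumption to verify against the AceQL Client JDBC Driver documentation for your version; the query reuses the orderlog table from the AceQLBlob Javadoc examples further down.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Properties;

public class AceQLQuickstartSketch {

    public static void main(String[] args) throws Exception {
        // Load the AceQL Driver class (name assumed; check the Driver documentation).
        Class.forName("com.aceql.jdbc.driver.free.AceQLDriver");

        // Placeholder URL of the remote AceQL HTTP server endpoint.
        String url = "jdbc:aceql:https://www.acme.com:9443/aceql";

        Properties info = new Properties();
        info.put("user", "myUsername");    // placeholder credentials
        info.put("password", "mySecret");
        info.put("database", "sampledb");  // placeholder remote database name

        // From here on, only standard JDBC calls are used, as with a local database.
        try (Connection connection = DriverManager.getConnection(url, info)) {
            String sql = "select customer_id, item_id from orderlog where customer_id = ?";
            try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
                preparedStatement.setInt(1, 1);
                try (ResultSet rs = preparedStatement.executeQuery()) {
                    while (rs.next()) {
                        System.out.println(rs.getInt("customer_id") + " / " + rs.getInt("item_id"));
                    }
                }
            }
        }
    }
}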

/*
 * This file is part of AceQL JDBC Driver.
 * AceQL JDBC Driver: Remote JDBC access over HTTP with AceQL HTTP.
 * Copyright (c) 2023,  KawanSoft SAS
 * (http://www.kawansoft.com). All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aceql.jdbc.commons;

import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.Blob;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.ArrayUtils;

import com.aceql.jdbc.commons.main.util.framework.FrameworkFileUtil;
import com.aceql.jdbc.commons.main.util.framework.Tag;
import com.aceql.jdbc.commons.main.util.framework.UniqueIDBuilder;

/**
 * Blob implementation. Allows using {@link java.sql.Blob} objects for Blob
 * uploads {@code (INSERT/UPDATE)} and downloads {@code (SELECT)}.
 * <p>
 * {@code INSERT} example:
 *
 * <pre>
 * File file = new File("/my/file/path");
 *
 * String sql = "insert into orderlog values (?, ?, ?, ?, ?, ?, ?, ?, ?)";
 * PreparedStatement preparedStatement = connection.prepareStatement(sql);
 *
 * int i = 1;
 * preparedStatement.setInt(i++, customerId);
 * // Etc.
 *
 * // Create the Blob instance using the current Connection:
 * Blob blob = connection.createBlob();
 *
 * // Syntax using a byte array:
 * byte[] bytes = Files.readAllBytes(file.toPath());
 * blob.setBytes(1, bytes);
 * preparedStatement.setBlob(i++, blob);
 *
 * // Syntax using a stream:
 * OutputStream out = blob.setBinaryStream(1);
 * Files.copy(file.toPath(), out);
 * preparedStatement.setBlob(i++, blob);
 *
 * // Etc.
 * preparedStatement.executeUpdate();
 * preparedStatement.close();
 * </pre>
 *
 * {@code SELECT} example using bytes:
 *
 * <pre>
 * String sql = "select jpeg_image from orderlog where customer_id = ? and item_id = ?";
 * PreparedStatement preparedStatement = connection.prepareStatement(sql);
 * preparedStatement.setInt(1, 1);
 * preparedStatement.setInt(2, 1);
 *
 * ResultSet rs = preparedStatement.executeQuery();
 *
 * if (rs.next()) {
 *     Blob blob = rs.getBlob(1);
 *
 *     // Get the Blob bytes in memory and store them into a file
 *     byte[] bytes = blob.getBytes(1, (int) blob.length());
 *
 *     File file = new File("/my/file/path");
 *     try (InputStream in = new ByteArrayInputStream(bytes);) {
 * 	Files.copy(in, file.toPath());
 *     }
 * }
 * preparedStatement.close();
 * rs.close();
 * </pre>
 *
 * {@code SELECT} example using streams:
 *
 * <pre>
 * String sql = "select jpeg_image from orderlog where customer_id = ? and item_id = ?";
 * PreparedStatement preparedStatement = connection.prepareStatement(sql);
 * preparedStatement.setInt(1, 1);
 * preparedStatement.setInt(2, 1);
 *
 * ResultSet rs = preparedStatement.executeQuery();
 *
 * if (rs.next()) {
 *     Blob blob = rs.getBlob(1);
 *
 *     File file = new File("/my/file/path");
 *     // Get the stream from the Blob and copy into a file
 *     try (InputStream in = blob.getBinaryStream()) {
 * 	Files.copy(in, file.toPath());
 *     }
 * }
 *
 * preparedStatement.close();
 * rs.close();
 * </pre>
 *
 * Note that Blobs can be updated or read without using a {@link java.sql.Blob}
 * instance, as the short sketches below illustrate.
 * <p>
 * {@code INSERT/UPDATE} can be done using:
 * <ul>
 * <li>{@link PreparedStatement#setBytes(int, byte[])}.</li>
 * <li>{@link PreparedStatement#setBinaryStream(int, InputStream)}.</li>
 * </ul>
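 *
 * For instance, an upload without a {@code Blob} can be written with bytes only
 * (an illustrative sketch reusing {@code file}, {@code preparedStatement} and
 * the parameter index {@code i} from the {@code INSERT} example above):
 *
 * <pre>
 * byte[] bytes = Files.readAllBytes(file.toPath());
 * preparedStatement.setBytes(i++, bytes);
 * </pre>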
 * <p>
 * {@code SELECT} can be done using:
 * <ul>
 * <li>{@link ResultSet#getBytes(int)} or {@link ResultSet#getBytes(String)} using bytes.</li>
 * <li>{@link ResultSet#getBinaryStream(int)} or {@link ResultSet#getBinaryStream(String)} using streams.</li>
 * </ul>
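 *
 * Likewise, a download without a {@code Blob} can be written with bytes only
 * (an illustrative sketch reusing the {@code jpeg_image} query and the target
 * {@code file} from the {@code SELECT} examples above):
 *
 * <pre>
 * if (rs.next()) {
 *     byte[] bytes = rs.getBytes("jpeg_image");
 *     Files.write(file.toPath(), bytes);
 * }
 * </pre>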
 *
 * @since 6.0
 * @author Nicolas de Pomereu
 */
public class AceQLBlob implements Blob {

    private byte[] bytes;
    private File file;

    private InputStream inputStream;
    private OutputStream outputStream;

    /**
     * Protected constructor to be used only for upload by
     * {@code Connection#createBlob()} ({@code AceQLConnection.createBlob()}).
     */
    AceQLBlob() {
        this.file = createBlobFile();
    }

    /**
     * To be used with ResultSet. Package protected constructor.
     *
     * @param inputStream the input stream to use to build the Blob
     */
    AceQLBlob(InputStream inputStream) {
        this.inputStream = inputStream;
        this.bytes = null;
    }

    /**
     * To be used with ResultSet (bytes usage).
     *
     * @param bytes the byte array to use to build the Blob
     */
    AceQLBlob(byte[] bytes) {
        this.bytes = bytes;
        this.inputStream = null;
    }

    @Override
    public long length() throws SQLException {
        // if (editionType.equals(EditionType.Professional)) {
        //     return -1;
        // } else {
        //     return bytes == null ? 0 : bytes.length;
        // }
        if (bytes != null) {
            return bytes.length;
        }
        return -1;
    }

    @Override
    public byte[] getBytes(long pos, int length) throws SQLException {
        return getBytesStatic(pos, length, bytes);
    }

    @Override
    public InputStream getBinaryStream() throws SQLException {
        // if (editionType.equals(EditionType.Professional)) {
        //     return inputStream;
        // } else {
        //     if (bytes == null) {
        //         return null;
        //     }
        //     ByteArrayInputStream arrayInputStream = new ByteArrayInputStream(bytes);
        //     return arrayInputStream;
        // }
        if (bytes != null) {
            ByteArrayInputStream arrayInputStream = new ByteArrayInputStream(bytes);
            return arrayInputStream;
        }
        return inputStream;
    }

    /**
     * This method is not yet implemented in the AceQL JDBC Driver.
     */
    @Override
    public long position(byte[] pattern, long start) throws SQLException {
        throw new UnsupportedOperationException(Tag.METHOD_NOT_YET_IMPLEMENTED);
    }

    /**
     * This method is not yet implemented in the AceQL JDBC Driver.
     */
    @Override
    public long position(Blob pattern, long start) throws SQLException {
        throw new UnsupportedOperationException(Tag.METHOD_NOT_YET_IMPLEMENTED);
    }

    @Override
    public int setBytes(long pos, byte[] bytes) throws SQLException {
        if (pos != 1) {
            throw new SQLException(Tag.PRODUCT + " \"pos\" value can be 1 only.");
        }

        this.bytes = bytes;
        return bytes == null ? 0 : bytes.length;
    }

    /**
     * This method is not yet implemented in the AceQL JDBC Driver.
     */
    @Override
    public int setBytes(long pos, byte[] bytes, int offset, int len) throws SQLException {
        throw new UnsupportedOperationException(Tag.METHOD_NOT_YET_IMPLEMENTED);
    }

    @Override
    public OutputStream setBinaryStream(long pos) throws SQLException {
        if (pos != 1) {
            throw new SQLException(Tag.PRODUCT + " \"pos\" value can be 1 only.");
        }

        if (file == null) {
            throw new SQLException(Tag.PRODUCT + " Can not call setBinaryStream() when reading a Blob file.");
        }

        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
            return outputStream;
        } catch (FileNotFoundException e) {
            throw new SQLException(e);
        }
    }

    /**
     * This method is not yet implemented in the AceQL JDBC Driver.
     */
    @Override
    public void truncate(long len) throws SQLException {
        throw new UnsupportedOperationException(Tag.METHOD_NOT_YET_IMPLEMENTED);
    }

    @Override
    public void free() throws SQLException {
        /*
        if (editionType.equals(EditionType.Professional) && inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException e) {
                throw new SQLException(e);
            }
        }
        */
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException e) {
                throw new SQLException(e);
            }
        }

        if (file != null) {
            file.delete();
        }
    }

    @Override
    public InputStream getBinaryStream(long pos, long length) throws SQLException {
        if (pos != 1) {
            throw new SQLException(Tag.PRODUCT + " \"pos\" value can be 1 only.");
        }
        if (length != 0) {
            throw new SQLException(
                    Tag.PRODUCT + " \"length\" value can be 0 only (meaning all bytes are returned).");
        }
        return getBinaryStream();
    }

    /**
     * @return the file associated with the {@code OutputStream} created by
     *         {@link AceQLBlob#setBinaryStream(long)}.
     */
    File getFile() {
        try {
            outputStream.close();
        } catch (Exception e) {
            System.err.println(
                    Tag.PRODUCT + " Warning: error when closing Blob OutputStream: " + e.getLocalizedMessage());
        }

        return file;
    }

    /**
     * getBytes wrapper, for easy external tests.
     *
     * @param pos    the 1-based start position
     * @param length the number of bytes to return
     * @param bytes  the backing byte array
     * @return the extracted bytes
     */
    static byte[] getBytesStatic(long pos, int length, byte[] bytes) {
        List<Byte> bytesList = new ArrayList<>();
        for (int i = (int) pos - 1; i < length; i++) {
            bytesList.add(bytes[i]);
        }

        Byte[] bytesToReturnArray = bytesList.toArray(new Byte[bytesList.size()]);
        byte[] bytesToReturn = ArrayUtils.toPrimitive(bytesToReturnArray);
        return bytesToReturn;
    }

    private static File createBlobFile() {
        File file = new File(FrameworkFileUtil.getKawansoftTempDir() + File.separator + "blob-file-for-server-"
                + UniqueIDBuilder.getUniqueId() + ".txt");
        return file;
    }

}
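
As a side note, the package-private getBytesStatic() helper above is documented as a wrapper "for easy external tests". A minimal sketch of such a check, using a hypothetical test class placed in the com.aceql.jdbc.commons package (not part of the Driver), could look like this:

package com.aceql.jdbc.commons;

import java.util.Arrays;

/** Hypothetical smoke check for AceQLBlob.getBytesStatic() with pos = 1. */
public class AceQLBlobGetBytesCheck {

    public static void main(String[] args) {
        byte[] source = { 10, 20, 30, 40, 50 };

        // With pos = 1, the first "length" bytes of the backing array are returned.
        byte[] extracted = AceQLBlob.getBytesStatic(1, 3, source);

        System.out.println(Arrays.equals(extracted, new byte[] { 10, 20, 30 })); // prints true
    }
}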



