/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.www;

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobConfiguration;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransConfiguration;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransMeta;
import org.w3c.dom.Document;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Map;
import java.util.UUID;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * This servlet allows you to transport an exported job or transformation over to the Carte server
 * as a zip file. The archive ends up in a temporary file.
 *
 * The servlet returns the name of the stored file.
 *
 * @author matt
 *
 */
// has been replaced by RegisterPackageServlet
@Deprecated
public class AddExportServlet extends BaseHttpServlet implements CartePluginInterface {
  public static final String PARAMETER_LOAD = "load";
  public static final String PARAMETER_TYPE = "type";

  public static final String TYPE_JOB = "job";
  public static final String TYPE_TRANS = "trans";

  private static final long serialVersionUID = -6850701762586992604L;
  public static final String CONTEXT_PATH = "/kettle/addExport";

  public AddExportServlet() {
  }

  public AddExportServlet( JobMap jobMap, TransformationMap transformationMap ) {
    super( transformationMap, jobMap );
  }

  /**

    /kettle/addExport

    POST

    Uploads and executes a previously exported job or transformation. The zip archive containing
    the job or transformation is transferred in the request body; the query parameters identify
    the entity inside the archive to execute. The response carries the file URL of the executed
    entity, or a message describing the error that occurred. To determine whether the call
    succeeded, check the result element of the response.

    Example Request:

      POST /kettle/addExport/?type=job&load=dummy_job.kjb

    The request body should contain a zip file prepared for Carte execution.

    Parameters:

      name   description                                                    type
      type   The type of the entity to be executed, either job or trans.   query
      load   The name of the entity within the archive to be executed.     query

    Response Body:

      element: (custom)
      media types: application/xml

    The response wraps the file URL of the entity that was executed, or the error stack trace if
    an error occurred. The result element is OK if there were no errors; otherwise it is ERROR.

    Example Response:

      <?xml version="1.0" encoding="UTF-8"?>
      <webresult>
        <result>OK</result>
        <message>zip:file:///temp/export_ee2a67de-6a72-11e4-82c0-4701a2bac6a5.zip!dummy_job.kjb</message>
        <id>74cf4219-c881-4633-a71a-2ed16b7db7b8</id>
      </webresult>

    Status Codes:

      code   description
      200    Request was processed and an XML response is returned.
      500    An internal server error occurred during request processing.

    (A hypothetical client invocation is sketched after the class definition below.)
  */
  public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
    IOException {
    if ( isJettyMode() && !request.getRequestURI().startsWith( CONTEXT_PATH ) ) {
      return;
    }

    if ( log.isDebug() ) {
      logDebug( "Addition of export requested" );
    }

    PrintWriter out = response.getWriter();
    InputStream in = request.getInputStream(); // read from the client
    if ( log.isDetailed() ) {
      logDetailed( "Encoding: " + request.getCharacterEncoding() );
    }

    boolean isJob = TYPE_JOB.equalsIgnoreCase( request.getParameter( PARAMETER_TYPE ) );
    String load = request.getParameter( PARAMETER_LOAD ); // the resource to load

    response.setContentType( "text/xml" );
    out.print( XMLHandler.getXMLHeader() );
    response.setStatus( HttpServletResponse.SC_OK );

    OutputStream outputStream = null;

    try {
      FileObject tempFile = KettleVFS.createTempFile( "export", ".zip", System.getProperty( "java.io.tmpdir" ) );
      outputStream = KettleVFS.getOutputStream( tempFile, false );

      // Pass the input directly to a temporary file
      //
      // int size = 0;
      int c;
      while ( ( c = in.read() ) != -1 ) {
        outputStream.write( c );
        // size++;
      }

      outputStream.flush();
      outputStream.close();
      outputStream = null; // don't close it twice

      String archiveUrl = tempFile.getName().toString();
      String fileUrl = null;

      String carteObjectId = null;
      SimpleLoggingObject servletLoggingObject =
        new SimpleLoggingObject( CONTEXT_PATH, LoggingObjectType.CARTE, null );

      // Now open the top level resource...
      //
      if ( !Utils.isEmpty( load ) ) {

        fileUrl = "zip:" + archiveUrl + "!" + load;

        if ( isJob ) {
          // Open the job from inside the ZIP archive
          //
          KettleVFS.getFileObject( fileUrl );

          JobMeta jobMeta = new JobMeta( fileUrl, null ); // never with a repository

          // Also read the execution configuration information
          //
          String configUrl = "zip:" + archiveUrl + "!" + Job.CONFIGURATION_IN_EXPORT_FILENAME;
          Document configDoc = XMLHandler.loadXMLFile( configUrl );
          JobExecutionConfiguration jobExecutionConfiguration =
            new JobExecutionConfiguration( XMLHandler.getSubNode( configDoc, JobExecutionConfiguration.XML_TAG ) );

          carteObjectId = UUID.randomUUID().toString();
          servletLoggingObject.setContainerObjectId( carteObjectId );
          servletLoggingObject.setLogLevel( jobExecutionConfiguration.getLogLevel() );

          Job job = new Job( null, jobMeta, servletLoggingObject );

          // Do we need to expand the job when it's running?
          // Note: the plugin (Job and Trans) job entries need to call the delegation listeners in the parent job.
          //
          if ( jobExecutionConfiguration.isExpandingRemoteJob() ) {
            job.addDelegationListener( new CarteDelegationHandler( getTransformationMap(), getJobMap() ) );
          }

          // store it all in the map...
          //
          getJobMap().addJob( job.getJobname(), carteObjectId, job,
            new JobConfiguration( jobMeta, jobExecutionConfiguration ) );

          // Apply the execution configuration...
          //
          log.setLogLevel( jobExecutionConfiguration.getLogLevel() );
          job.setArguments( jobExecutionConfiguration.getArgumentStrings() );
          jobMeta.injectVariables( jobExecutionConfiguration.getVariables() );

          // Also copy the parameters over...
          //
          Map<String, String> params = jobExecutionConfiguration.getParams();
          for ( String param : params.keySet() ) {
            String value = params.get( param );
            jobMeta.setParameterValue( param, value );
          }

        } else {
          // Open the transformation from inside the ZIP archive
          //
          TransMeta transMeta = new TransMeta( fileUrl );

          // Also read the execution configuration information
          //
          String configUrl = "zip:" + archiveUrl + "!" + Trans.CONFIGURATION_IN_EXPORT_FILENAME;
          Document configDoc = XMLHandler.loadXMLFile( configUrl );
          TransExecutionConfiguration executionConfiguration =
            new TransExecutionConfiguration( XMLHandler.getSubNode( configDoc, TransExecutionConfiguration.XML_TAG ) );

          carteObjectId = UUID.randomUUID().toString();
          servletLoggingObject.setContainerObjectId( carteObjectId );
          servletLoggingObject.setLogLevel( executionConfiguration.getLogLevel() );

          Trans trans = new Trans( transMeta, servletLoggingObject );

          // store it all in the map...
          //
          getTransformationMap().addTransformation( trans.getName(), carteObjectId, trans,
            new TransConfiguration( transMeta, executionConfiguration ) );
        }
      } else {
        fileUrl = archiveUrl;
      }

      out.println( new WebResult( WebResult.STRING_OK, fileUrl, carteObjectId ) );
    } catch ( Exception ex ) {
      out.println( new WebResult( WebResult.STRING_ERROR, Const.getStackTracker( ex ) ) );
    } finally {
      if ( outputStream != null ) {
        outputStream.close();
      }
    }
  }

  public String toString() {
    return "Add export";
  }

  public String getService() {
    return CONTEXT_PATH + " (" + toString() + ")";
  }

  public String getContextPath() {
    return CONTEXT_PATH;
  }
}
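For context, here is a minimal client-side sketch of calling this endpoint using only JDK classes. The host, port, credentials, and file paths are hypothetical placeholders, not values defined by the servlet above; the archive at /tmp/export.zip is assumed to already be prepared for Carte execution, i.e. it contains dummy_job.kjb plus the execution configuration XML named by Job.CONFIGURATION_IN_EXPORT_FILENAME (or Trans.CONFIGURATION_IN_EXPORT_FILENAME for a transformation).

// Hypothetical client for /kettle/addExport. Assumes a Carte server on localhost:8080
// secured with sample basic-auth credentials (cluster/cluster), and a prepared archive
// at /tmp/export.zip containing dummy_job.kjb -- all illustrative values, not part of
// the servlet above.
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Base64;

public class AddExportClientSketch {
  public static void main( String[] args ) throws Exception {
    URL url = new URL( "http://localhost:8080/kettle/addExport/?type=job&load=dummy_job.kjb" );
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod( "POST" );
    conn.setDoOutput( true ); // the zip archive is streamed as the raw request body
    conn.setRequestProperty( "Authorization", "Basic "
      + Base64.getEncoder().encodeToString( "cluster:cluster".getBytes( StandardCharsets.UTF_8 ) ) );

    // Stream the prepared archive to the servlet, which copies it to a temporary file.
    try ( OutputStream body = conn.getOutputStream() ) {
      Files.copy( Paths.get( "/tmp/export.zip" ), body );
    }

    // The servlet answers with a WebResult document, e.g.:
    // <webresult><result>OK</result><message>zip:...!dummy_job.kjb</message><id>...</id></webresult>
    try ( InputStream response = conn.getInputStream() ) {
      byte[] xml = response.readAllBytes(); // Java 9+
      System.out.println( new String( xml, StandardCharsets.UTF_8 ) );
    }
  }
}

Checking the result element for OK, rather than relying on the HTTP status alone, matches the contract described in the Javadoc above.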



