All Downloads are FREE. Search and download functionalities are using the official Maven repository.

liquibase.ezproxy.penn.DatafarmExtraction.groovy Maven / Gradle / Ivy

The newest version!
/**
 * Copyright 2010 Trustees of the University of Pennsylvania Licensed under the
 * Educational Community License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.osedu.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */


jobs{

    /**
     * Entry point: runs "setup" to bind data sources and endpoints,
     * then "dataFarming" to move the data.
     */
    job("loadDataFarmData") {
        runJobs "setup", "dataFarming"
    }

    /**
     * Binds the dataFarm (source) and repository (destination) JDBC data
     * sources and registers the paired source/destination SQL endpoints
     * consumed by the "dataFarming" job's routes.
     */
    job("setup") {

        loadProperties "datafarm", "metridoc"

        // Class.forName alone registers the JDBC driver with DriverManager;
        // the old ".newInstance()" idiom created a throwaway instance for
        // no benefit and was removed.
        Class.forName(dataFarm.driverClass)

        services.put "dataFarm",
            dataSource(user:dataFarm.user, password:dataFarm.password, driverClass:dataFarm.driverClass, jdbcUrl:dataFarm.jdbcUrl)

        services.put "repository",
            dataSource(user:repository.user, password:repository.password, driverClass:repository.driverClass, jdbcUrl:repository.jdbcUrl)

        // Every source query tags its rows with a one-letter literal id
        // ('r', 'p', 'j', 's') so the shared dataBinder closure in the
        // "dataFarming" job can tell which result shape it is handling.
        def rankSrc = "sqlplus:select 'r' as id, rank, rank_name from ered_rank?dataSource=dataFarm"
        def rankDest = "sqlplus:insert into rank_test(rank, rank_name) values(#rank, #rank_name)?dataSource=repository"

        def patronSrc = "sqlplus:select 'p' as id, pennkey, org_code, rank from ered_person where pennkey_active_code = 'A' and pennkey != '-' and org_code is not null and rank is not null?dataSource=dataFarm"
        def patronDest = "sqlplus:insert into patron(id, org, rank) values(#id, #org, #rank)?dataSource=repository"

        def journalSrc = "sqlplus:select 'j' as id, resid, title, datecreated, datemodified from resources where datemodified > current_date-2 order by resid?dataSource=dataFarm"
        def journalDest = "sqlplus:insert into journal_catalog(journal_catalog_id, title, created, modified) values(#resid, #title, #created, #modified)?dataSource=repository"

        def resSrc = "sqlplus:select 's' as id, resid, sessionid, timestamp, domain from ursmap_archive_2011 where timestamp > current_date-1?dataSource=dataFarm"
        def resDest = "sqlplus:insert into ezproxy_resource_session(res_id, proxy_session_id, session_access_time, domain) values(#resid, #sessionid, #sat, #domain)?dataSource=repository"

        services.put("rankSource", rankSrc)
        services.put("rankDestination", rankDest)
        services.put("patronSource", patronSrc)
        services.put("patronDestination", patronDest)
        services.put("journalSource", journalSrc)
        services.put("journalDestination", journalDest)
        services.put("resourceSource", resSrc)
        services.put("resourceDestination", resDest)
    }

    /**
     * Streams rows from each source endpoint through the shared dataBinder
     * and writes them to the matching destination endpoint.
     */
    job("dataFarming") {
        log.info "dataFarming..."

        // Converts a JDBC-style result set (it.in.body) into a list of maps
        // keyed by the column names the destination insert statements expect.
        // The row's one-letter "id" column (set by the source query) selects
        // the mapping.
        def dataBinder = {
            def rows = it.in.body
            def dataSet = []

            while (rows.next()) {
                // Scoped per iteration: previously "record" lived outside the
                // loop, so a row with an unrecognized id re-appended the
                // PREVIOUS row's record (stale-record bug). Now unmatched
                // rows are simply skipped.
                def record = null

                switch ( rows.getString("id") ) {
                    case "r":
                        record = [rank:rows.getString("rank"), rank_name:rows.getString("rank_name")]
                        break

                    case "p":
                        record = [id:rows.getString("pennkey"), org:rows.getString("org_code"), rank:rows.getString("rank")]
                        break

                    case "j":
                        record = [resid:rows.getString("resid"), title:rows.getString("title"), created:rows.getString("datecreated"), modified:rows.getString("datemodified")]
                        break

                    case "s":
                        record = [resid:rows.getString("resid"), sessionid:rows.getString("sessionid"), sat:rows.getString("timestamp"), domain:rows.getString("domain")]
                        break
                }

                if (record != null) {
                    dataSet.add(record)
                }
            }

            it.out.body = dataSet
            it.out.headers = it.in.headers
        }

        runRoute {
//            from(rankSource).process(dataBinder).to(rankDestination);		// static table only loaded once
            from(patronSource).process(dataBinder).to(patronDestination);
            from(journalSource).process(dataBinder).to(journalDestination);
            from(resourceSource).process(dataBinder).to(resourceDestination);
        }
    }
}





© 2015 - 2025 Weber Informatics LLC | Privacy Policy