com.marklogic.flux.impl.custom.CustomExportRowsCommand (flux-api)
Flux API for data movement with MarkLogic
/*
* Copyright © 2024 MarkLogic Corporation. All Rights Reserved.
*/
package com.marklogic.flux.impl.custom;

import com.marklogic.flux.api.CustomExportWriteOptions;
import com.marklogic.flux.api.CustomRowsExporter;
import com.marklogic.flux.api.ReadRowsOptions;
import com.marklogic.flux.impl.AbstractCommand;
import com.marklogic.flux.impl.export.ReadRowsParams;

import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import picocli.CommandLine;

import java.util.function.Consumer;

@CommandLine.Command(
    name = "custom-export-rows",
    description = "Read rows from MarkLogic and write them using a custom Spark connector or data source."
)
public class CustomExportRowsCommand extends AbstractCustomExportCommand implements CustomRowsExporter {

    @CommandLine.Mixin
    private ReadRowsParams readParams = new ReadRowsParams();

    // Reads rows from MarkLogic via the MarkLogic Spark connector, applying both the
    // connection options and the row-reading options (e.g. the Optic query) to the reader.
    @Override
    protected Dataset<Row> loadDataset(SparkSession session, DataFrameReader reader) {
        return reader.format(AbstractCommand.MARKLOGIC_CONNECTOR)
            .options(getConnectionParams().makeOptions())
            .options(readParams.makeOptions())
            .load();
    }

    @Override
    public CustomRowsExporter from(Consumer<ReadRowsOptions> consumer) {
        consumer.accept(readParams);
        return this;
    }

    @Override
    public CustomRowsExporter from(String opticQuery) {
        readParams.opticQuery(opticQuery);
        return this;
    }

    @Override
    public CustomRowsExporter to(Consumer<CustomExportWriteOptions> consumer) {
        consumer.accept(writeParams);
        return this;
    }
}
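
For context, a minimal usage sketch of the fluent CustomRowsExporter API that this command implements. Only from(String opticQuery), from(Consumer<ReadRowsOptions>), and to(Consumer<CustomExportWriteOptions>) appear in the file above; the Flux.customExportRows() factory method and the execute() call are assumptions about the wider Flux API, not something this file defines.

// Hypothetical usage sketch; Flux.customExportRows() and execute() are assumed
// from the broader Flux API surface and are not defined in this file.
CustomRowsExporter exporter = Flux.customExportRows()
    .from("op.fromView('example', 'employees')")   // Optic query, via from(String opticQuery)
    .to(writeOptions -> {
        // Configure the custom Spark data source to write to via CustomExportWriteOptions.
    });
exporter.execute();

The Consumer-based overloads let callers configure ReadRowsOptions and CustomExportWriteOptions programmatically, mirroring the picocli options (such as the ReadRowsParams mixin above) that the custom-export-rows CLI command exposes.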