package net.maizegenetics.pangenome.db_loading;

/**
 * 
 */

import com.google.common.collect.Range;
import com.google.common.collect.RangeMap;
import htsjdk.variant.variantcontext.VariantContext;
import net.maizegenetics.dna.map.Chromosome;
import net.maizegenetics.dna.map.GeneralPosition;
import net.maizegenetics.dna.map.GenomeSequence;
import net.maizegenetics.dna.map.GenomeSequenceBuilder;
import net.maizegenetics.dna.map.Position;
import net.maizegenetics.pangenome.processAssemblyGenomes.AssemblyProcessingUtils;
import net.maizegenetics.plugindef.AbstractPlugin;
import net.maizegenetics.plugindef.DataSet;
import net.maizegenetics.plugindef.GeneratePluginCode;
import net.maizegenetics.plugindef.PluginParameter;
import net.maizegenetics.util.Tuple;
import net.maizegenetics.util.Utils;
import org.apache.log4j.Logger;

import javax.swing.*;
import java.awt.*;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.sql.Connection;
import java.util.*;
import java.util.List;

/**
 *        
 * Before running this plugin, GetDBConnectionPlugin must be run.  The connection created by
 * GetDBConnectionPlugin is passed as a DataSet parameter to this plugin.
 * 
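 * A minimal usage sketch (hypothetical file paths; assumes the standard TASSEL Datum/DataSet
 * constructors and a java.sql.Connection already produced by GetDBConnectionPlugin; the Datum
 * name below is arbitrary since only the wrapped Connection object is used):
 * <pre>{@code
 * Connection conn = ...; // from GetDBConnectionPlugin
 * DataSet connectionDataSet = new DataSet(new Datum("DBConnection", conn, null), null);
 * new LoadGenomeIntervalsToPHGdbPlugin(null, false)
 *         .refGenome("/path/to/referenceGenome.fa")
 *         .anchors("/path/to/anchors.bed")
 *         .genomeData("/path/to/genomeData.txt")
 *         .outputDir("/path/to/outputDir/")
 *         .processData(connectionDataSet);
 * }</pre>
 * 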
 * This Plugin takes an anchorsFile indicating chrom, start, end, and a reference genome.  It
 * creates anchors from the ref genome fasta based on the start/end from the anchorsFile and
 * loads them to the specified PHG database.  The genome data file contains details to be added
 * to the genotypes, gametes and method tables.
 * 
 * When finished loading anchor regions, inter-anchor regions are identified and loaded.
 * 
 * INPUT:
 *   1. anchors file: tab delimited file, header lines begin with #: 
 *      data lines have columns for chr,startpos,endpos (other columns may be present but will be ignored)
 *   2. path to reference genome fasta file
 *   3. Tab delimited file containing genome data with columns:
 *      Genotype Hapnumber Dataline Ploidy Reference GenesPhased ChromsPhased Confidence Method MethodDetails RefVersion
 *      
 *      The RefVersion will be stored in the genome_interval_versions table.  The Method and MethodDetails will be stored in the
 *      methods table.  The MethodDetails will also be stored in the description field of the genome_interval_versions
 *      table.
 *      
 *      HapNumber starts with 0, and should be 0 for the reference line and all inbreds.
 *   
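 *   For illustration, the two input files described above might look like this (all values
 *   are hypothetical; the anchors file uses 0-based, end-exclusive BED coordinates, which
 *   the plugin converts to 1-based inclusive positions):
 *   
 *     anchors.bed:
 *       #chrom   start    end
 *       1        200      3500
 *       1        5000     8200
 *       2        0        1500
 *   
 *     genomeData.txt (tab-delimited header plus one data line):
 *       Genotype  Hapnumber  Dataline       Ploidy  Reference  GenesPhased  ChromsPhased  Confidence  Method     MethodDetails            RefVersion
 *       B73Ref    0          B73 reference  1       TRUE       TRUE         TRUE          1.0         anchor_v1  anchors from annotation  B73_v4
 *   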
 * OUTPUT:
 *   1. database (postgres or sqlite) is created and populated with anchor data.
 *   
 * This also creates entries for the anchor_methods table (which records how the anchors
 * were created).  In this case, the method is "anchor".
 * 
 * For the trimmed anchors, Zack's data will have
 *   anchorid, chrom, trimmed_start, trimmed_end, left_trim_count, right_trim_count.
 * That file must be converted to a file with the columns:
 *   chrom, startPos, endPos, geneStart, geneEnd
 *   
 * @author lcj34
 *
 */
@Deprecated
public class LoadGenomeIntervalsToPHGdbPlugin extends AbstractPlugin {
    private static final Logger myLogger = Logger.getLogger(LoadGenomeIntervalsToPHGdbPlugin.class);

    private PluginParameter<String> refGenome = new PluginParameter.Builder<String>("ref", null, String.class).guiName("Reference Genome File").required(true).inFile()
            .description("Reference Genome File for aligning against ").build();
    private PluginParameter<String> anchors = new PluginParameter.Builder<String>("anchors", null, String.class).guiName("Anchors File").required(true).inFile()
            .description("Tab-delimited file containing columns in this order: Chrom, StartPosition, EndPosition, Type, where Type is either RefRegion or RefInterRegion").build();
    private PluginParameter<String> genomeData = new PluginParameter.Builder<String>("genomeData", null, String.class).guiName("Genome Data File").required(true)
            .description("Path to tab-delimited file containing genome specific data with header line:\nGenotype Hapnumber Dataline Ploidy Reference GenesPhased ChromsPhased Confidence Method MethodDetails RefVersion")
            .build();
    private PluginParameter<String> outputDir = new PluginParameter.Builder<String>("outputDir", null, String.class).guiName("Output Directory").inDir().required(true)
            .description("Directory to write liquibase changeLogSync output ").build();

    // This data is populated from the genomeData file
    private String line; // refName to be stored as line_name in genotypes table, e.g. B73Ref
    private String line_data ;
    private int ploidy ;
    private int hapNumber;
    private boolean genesPhased;
    private boolean chromsPhased;
    private float conf;
    private String hapMethod;
    private String hapMethodDetails;

    static GenomeSequence myRefSequence = null;

    public LoadGenomeIntervalsToPHGdbPlugin() {
        super(null, false);
    }

    public LoadGenomeIntervalsToPHGdbPlugin(Frame parentFrame) {
        super(parentFrame, false);
    }

    public LoadGenomeIntervalsToPHGdbPlugin(Frame parentFrame, boolean isInteractive) {
        super(parentFrame, isInteractive);
    }

    @Override
    public void postProcessParameters() {
        // parse input file to find arguments
        myLogger.info("postProcessParameters: reading genomeDataFile: " + genomeData());
        BufferedReader br = Utils.getBufferedReader(genomeData());
        try {
            String headers = br.readLine(); // read the header line
            int lineIndex = -1;
            int lineDataIndex = -1;
            int ploidyIndex = -1;
            int hapNumberIndex = -1;
            int genesPhasedIndex = -1;
            int chromPhasedIndex = -1;
            int confIndex = -1;
            int methodIndex = -1;
            int methodDetailsIndex = -1;

            int idx = 0;
            myLogger.info("GenomeFile header line: " + headers);
            for (String header : headers.split("\\t")) {
                if (header.equalsIgnoreCase("Genotype")) {
                    lineIndex = idx;
                } else if (header.equalsIgnoreCase("Hapnumber")) {
                    hapNumberIndex = idx;
                } else if (header.equalsIgnoreCase("Dataline")) {
                    lineDataIndex = idx;
                } else if (header.equalsIgnoreCase("ploidy")) {
                    ploidyIndex = idx;
                } else if (header.equalsIgnoreCase("genesPhased")) {
                    genesPhasedIndex = idx;
                } else if (header.equalsIgnoreCase("chromsPhased")) {
                    chromPhasedIndex = idx;
                } else if (header.equalsIgnoreCase("confidence")) {
                    confIndex = idx;
                } else if (header.equalsIgnoreCase("Method")) {
                    methodIndex = idx;
                } else if (header.equalsIgnoreCase("MethodDetails")) {
                    methodDetailsIndex = idx;
                } 
                idx++;
            }
            if (lineIndex == -1 || lineDataIndex == -1 || ploidyIndex == -1 ||
                    hapNumberIndex == -1 || genesPhasedIndex == -1 ||
                    chromPhasedIndex == -1 || confIndex == -1 || methodIndex == -1 ||
                    methodDetailsIndex == -1 ) {
                myLogger.error("LoadGenomeIntervalsToPHGdbPlugin: ERROR - Genotype datafile does not contain the required 9 fields");
                myLogger.error("Please check your file for the tab delimited, case-insensistive headers: ");
                myLogger.error("  Genotype Hapnumber Dataline Ploidy GenesPhased ChromsPhased Confidence Method MethodDetails");
                throw new IllegalArgumentException("Wrong number of header columns in genome data file");

            }
            // All headers are present - now get the data
            String dataLine = br.readLine();
            String[] dataTokens = dataLine.split("\\t");
            if (dataTokens.length != 9) {
                throw new IllegalArgumentException("ERROR - wrong number of data items in genotype datafile, expecting 9, found " + dataTokens.length);
            }
            line = dataTokens[lineIndex];
            line_data = dataTokens[lineDataIndex];          
            ploidy = Integer.parseInt(dataTokens[ploidyIndex]);
            hapNumber = Integer.parseInt(dataTokens[hapNumberIndex]);
            genesPhased = Boolean.parseBoolean(dataTokens[genesPhasedIndex]);
            chromsPhased = Boolean.parseBoolean(dataTokens[chromPhasedIndex]);         
            conf = Float.parseFloat(dataTokens[confIndex]);
            hapMethod = dataTokens[methodIndex];
            hapMethodDetails = dataTokens[methodDetailsIndex];
        } catch (IOException ioe){
            myLogger.error("LoadGenomeIntervalsToPHGdbPlugin: error parsing ref genome data file");
            throw new IllegalArgumentException("Error parsing ref genome data file: " + ioe.getMessage());
        }       
    }


    @Override
    public DataSet processData(DataSet input) {

        // Verify anchor file
        Set<String> overlappingAnchors = DBLoadingUtils.verifyIntervalRanges(anchors());
        if (overlappingAnchors.size() > 0) {
            overlappingAnchors.stream().forEach(entry -> {
                myLogger.error("LoadGenomeIntervals: anchorOverlap entry: " + entry);
            });
            throw new IllegalArgumentException("LoadGenomeIntervalsToPHGdbPlugin: intervals file has overlapping positions. Please consolidate/remove overlaps");
        }
        long totalTime = System.nanoTime();
        long time=System.nanoTime();

        Connection dbConnect = (Connection)input.getData(0).getData();

        if (dbConnect == null) {
            throw new IllegalStateException("LoadGenomeIntervalsToPHSdbPlugin: no connection supplied!");
        }
        myLogger.info("LoadGenomeIntervaltoDBPlugin: have connection, create PHGdbAccess object");
        PHGDataWriter phg = new PHGdbAccess(dbConnect);

        myRefSequence = GenomeSequenceBuilder.instance(refGenome());

        myLogger.info("LoadGenomeIntervaltoDBPlugin:  finished GenomeSequenceBUilder for ref genome");

        Map<String, String> methodParams = pluginParameters();
        methodParams.put("notes",hapMethodDetails);
        // anchorRangeList to be used when creating/loading inter-anchors below
        Tuple<List<Range<Position>>,List<Integer>> anchorsAndRefIds =  createLoadRefAnchors( phg,  anchors() ,  refGenome(),  ploidy,  line,
                line_data,  hapMethod, methodParams,   hapNumber,  genesPhased,
                chromsPhased, conf);

        List<Range<Position>> anchorRangeList = anchorsAndRefIds.getX();
        List<Integer> refRegionRangeIds = anchorsAndRefIds.getY();

        if (anchorRangeList == null) {
            throw new IllegalStateException("LoadGenomeIntervalsToPHSdbPlugin: error processing anchor regions, no range list for inter-anchors returned. ");
        }

        time = System.nanoTime();
        // Ref anchors are loaded - now create and load the interanchors
        boolean success = createLoadRefInterAnchors(phg, anchorRangeList,  line, hapNumber, hapMethod,methodParams,refRegionRangeIds);
        myLogger.info("Time to load create and load inter-anchors: " + (System.nanoTime()-time)/1e9 + " seconds");
        
        // Reference intervals have been loaded.  With just 1 reference per DB, there should not yet be haplotypes.
        // Pre-load some allele data:
        
        int maxKmerLen = 5; // defaulting to 5, which gives us 3905 initial allele strings
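        // (the number of strings of length 1..5 over the alphabet {A,C,G,T,N} is 5 + 25 + 125 + 625 + 3125 = 3905)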
        
        List<String> initialAlleleList = DBLoadingUtils.createInitialAlleles(maxKmerLen);
        // Use LinkedHashSet so alleles are processed in the order in which they were inserted.
        // This allows for A,C,G,T,N followed by AA,CA,GA,TA,NA,AC,CC etc
        Set<String> initialAlleleSet = new LinkedHashSet<String>(initialAlleleList);
        phg.putAlleleData(initialAlleleSet);
        
        try {
            ((PHGdbAccess)phg).close();
        } catch (Exception exc) {
            myLogger.error("Error attempting to close PHG db");
        }

        // Write file for liquibase db version check.
        writeLiquibaseFile(outputDir());

        myLogger.info("\nFinished, TotalTime for LoadGenomeIntervalsToPHGdbPlugin was " + (System.nanoTime() - totalTime) / 1e9 + " seconds");

        return null;
    }

    private static Tuple<List<Range<Position>>,List<Integer>>  createLoadRefAnchors(PHGDataWriter phg, String anchors , String refGenome, int ploidy, String refLine,
            String line_data,  String hapMethod, Map<String,String> method_details,  int hapNumber, boolean genesPhased,
            boolean chromsPhased,float conf) {
        BufferedReader br = Utils.getBufferedReader(anchors);

        List<Range<Position>> anchorRangeList = new ArrayList<Range<Position>>(); // used for interAnchor creation
        List<AnchorDataPHG> anchorsToLoad = new ArrayList<AnchorDataPHG>();
        try {
            String chrom = "-1";
            String prevChrom = "-1";
            String line = null;
            Chromosome chr = null;

            int chromAnchors = 0;
            while ((line = br.readLine()) != null) {
                // this is based on bed file format of chr, startpos,endpos, (other ignored fields)
                // All header lines in our bed files will begin with #
                
                if (line.startsWith("#")) continue; // skip header lines
                String[] tokens = line.split("\\t");

                chrom = tokens[0];
                if (!chrom.equals(prevChrom)) {
                    myLogger.info("Total anchors for chrom " + prevChrom + ": " + chromAnchors);
                    myLogger.info("Starting chrom " + chrom);
                    chr =  Chromosome.instance(chrom);
                    prevChrom = chrom;
                    chromAnchors=0;
                }

                int anchorStart = Integer.parseInt(tokens[1]) + 1; // convert 0-based BED start to 1-based physical position
                int anchorEnd = Integer.parseInt(tokens[2]); // BED end is exclusive, so the value is already the 1-based inclusive end
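                // e.g. a BED data line "1    0    157" yields the 1-based inclusive interval 1-157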

                chromAnchors++;
                // get bytes from reference, convert to string, add data to list                          
                String anchorString = myRefSequence.genotypeAsString(chr, anchorStart, anchorEnd);
                Position intervalStart = new GeneralPosition.Builder( Chromosome.instance(chrom),anchorStart).build(); 
                Position intervalEnd = new GeneralPosition.Builder( Chromosome.instance(chrom),anchorEnd).build(); 
                Range<Position> intervalRange =  Range.closed(intervalStart, intervalEnd);
                anchorRangeList.add(intervalRange); // for inter-anchor creation

                // no longer storing genes but until we change the db table schema this code remains
                Position geneS = new GeneralPosition.Builder( Chromosome.instance(chrom),0).build(); 
                Position geneE = new GeneralPosition.Builder( Chromosome.instance(chrom),0).build(); 
                Range<Position> geneRange =  Range.closed(geneS, geneE);

                // Create VCList:
                List<VariantContext> rangeVCList = new ArrayList<>();
                // ref and asm are the same here for encodeVariantContextListToByteArray
                VariantContext vc = AssemblyProcessingUtils.createRefRangeVC(myRefSequence, refLine, intervalStart, intervalEnd, intervalStart, intervalEnd);
                rangeVCList.add(vc);
                byte[] variants =  DBLoadingUtils.encodeVariantContextListToByteArray(rangeVCList,true);
                // Use the refGenome path for the gvcf field since this range comes from the reference
                AnchorDataPHG adata = new AnchorDataPHG( intervalRange, geneRange,
                        refGenome, variants, anchorString);
                anchorsToLoad.add(adata);
            }
            myLogger.info("Total anchors for chrom " + prevChrom + ": " + chromAnchors);
            br.close();
        } catch (Exception exc) {
            throw new IllegalStateException("LoadGenomeIntervalsToPHGdbPlugin:createLoadRefAnchors error, possible issue reading the bed file.  Please ensure your anchors files is in bed file format: " + exc.getMessage()); 
        }
        myLogger.info("Anchor array created, load into DB ...");

        // References are added as haplotype - don't use "B73" as line.  Make it distinct, e.g. "B73REF"
        GenoHaploData ghd = new GenoHaploData(ploidy,true,refLine, line_data,genesPhased, chromsPhased, hapNumber,  conf);
        phg.putGenoAndHaploTypeData(ghd);

        // Put the method data - identifies for each haplotype how the sequences were created  
        // Also identifies the initial ref_range_group method.
        int hapMethodId = phg.putMethod(hapMethod, DBLoadingUtils.MethodType.ANCHOR_HAPLOTYPES,method_details);
        String refGrpMethodName = DBLoadingUtils.REGION_REFERENCE_RANGE_GROUP;

        String refGrpMethodDetails = "Group consists of all ranges included in user bed file for reference line name " + refLine;

        // Adding as "grpNotes" instead of "notes" as the calling method already added "notes" based on the user method_description from
        // the genome data file
        method_details.put("grpNotes",refGrpMethodDetails);
        int hapGrpMethodID = phg.putMethod(refGrpMethodName, DBLoadingUtils.MethodType.REF_RANGE_GROUP,method_details);
        
        // Load the gamete_groups and gamete_haplotypes table
        String nameWithHap = refLine + "_" + hapNumber; 
        List<String> gameteGroupList = new ArrayList<String>();
        gameteGroupList.add(nameWithHap);
        phg.putGameteGroupAndHaplotypes(gameteGroupList);

        // Put the reference anchor data
        myLogger.info("mainProcessData: line has been added, load anchor data ...");
        phg.putAllAnchors(anchorsToLoad,   hapGrpMethodID);

        // Get all refRangeIds for all chroms:
        // Only refRegion has been loaded so far, so all refRangeIds belong to group REGION_REFERENCE_RANGE_GROUP
        // The method name was loaded above
        RangeMap<Position,Integer> refRangeIdMap = phg.getIntervalRangesWithIDForChrom( "all");

        List<Integer> refRegionRangeIds = new ArrayList<>();
        refRangeIdMap.asMapOfRanges().keySet().stream().forEach( range -> {
            int rangeId = refRangeIdMap.asMapOfRanges().get(range);
            refRegionRangeIds.add(rangeId);
        });
        phg.putRefRangeRefRangeMethod(hapGrpMethodID,refRegionRangeIds);

        // Put ref anchor_sequences data,  it calls putHaplotypes to store sequence data. 
        // Currently the gvcf we're passing is just the refGenome.
        Set<String> methodNames = new HashSet<>();
        methodNames.add(refGrpMethodName);
        phg.putRefAnchorData(refLine, hapNumber, anchorsToLoad, hapMethodId, methodNames, refLine,null);

        return new Tuple<List<Range<Position>>,List<Integer>>(anchorRangeList,refRegionRangeIds);
    }

    // This method takes a list of genome position ranges, determines the inter-range positions,
    // grabs the sequence from the genome and loads the inter- ranges to the db
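    // For example (hypothetical coordinates): with anchors at 1-157 and 300-500 on a chromosome
    // of length 1000, the inter-anchor ranges created here would be 158-299 and 501-1000.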
    private static boolean createLoadRefInterAnchors(PHGDataWriter phg, List<Range<Position>> anchorRangeList,
             String line, int hapNumber, String hapMethod, Map<String,String> method_details, List<Integer> refRegionRangeIds) {

        int noInterAnchor = 0;
        int chromAnchors = 0;
        int interEnd = 0;
        int nextStart = 0;
        Chromosome curChrom = Chromosome.instance("-1");
        Chromosome prevChrom = Chromosome.instance("-1");

        List<AnchorDataPHG> anchorsToLoad = new ArrayList<AnchorDataPHG>();
        for (Range<Position> pos : anchorRangeList) {

            curChrom = pos.lowerEndpoint().getChromosome();
            
            if (!curChrom.equals(prevChrom)) {

                if (!prevChrom.getName().equals("-1")) {
                    // process the last interanchor of the previous chromosome
                    int chromSize = myRefSequence.chromosomeSize(prevChrom); 
                    if (nextStart <= chromSize) {
                        // if nextStart > chromSize, it indicates the anchor included
                        // the last bp of the chromosome.  There is no inter-anchor in this case
                        AnchorDataPHG adata = getAdata( nextStart,  chromSize,  prevChrom, line );
                        anchorsToLoad.add(adata);
                        chromAnchors++;
                    }                   
                    myLogger.info("Total interanchors for chrom " + prevChrom.getName() + ": " + chromAnchors);
                }

                interEnd = pos.lowerEndpoint().getPosition()-1;
                if (interEnd > 0) {
                    // This handles the case of the first anchor starting at position 1.
                    // Not likely with real data, but the SmallSeqTest has the first anchor
                    // created with positions 1-157.  In this case, no inter-anchor
                    // at the beginning.
                    AnchorDataPHG adata = getAdata( 1,  interEnd,  curChrom, line );                    
                    anchorsToLoad.add(adata);
                }
 
                nextStart = pos.upperEndpoint().getPosition()+1;
                prevChrom = curChrom;
                chromAnchors = 1;
                continue;
            }
            interEnd = pos.lowerEndpoint().getPosition()-1;
            if (nextStart <= interEnd ) {
                chromAnchors++;
                // valid inter-anchor - write it.  interEnd is less than nextStart when adjacent anchors abut, i.e. there is no inter-anchor
                AnchorDataPHG adata = getAdata( nextStart,  interEnd,  curChrom, line );
                anchorsToLoad.add(adata);
            } else {
                noInterAnchor++;
            }
            nextStart = pos.upperEndpoint().getPosition()+1;
        }

        // write last value for last chromosome
        int chromSize = myRefSequence.chromosomeSize(curChrom);
        // If the  last anchor region doesn't include the last bp on the chromosome, then
        // create the final inter-anchor region.
        if (nextStart <= chromSize) {           
            AnchorDataPHG adata = getAdata( nextStart,  chromSize,  curChrom, line );          
            anchorsToLoad.add(adata);
        }
        
        chromAnchors++;
        myLogger.info("Total interanchors for chrom " + curChrom.getName() + ": " + chromAnchors);

        myLogger.info("Number with no interanchors: " + noInterAnchor);
        myLogger.info("\nAnchor array created, load into DB ... size of anchorMap: " + anchorsToLoad.size());      

        // add method for non-focus anchors
               
        int hapMethodID = phg.putMethod(hapMethod, DBLoadingUtils.MethodType.ANCHOR_HAPLOTYPES,method_details);
        String refGrpMethodName = DBLoadingUtils.INTER_REGION_REFERENCE_RANGE_GROUP;
        String refGrpMethodDetails = "Group consists of all genomic ranges not included in user bed file for reference line name " + line;

        // This is added as "grpNotes" instead of "notes" as the calling method already added a "notes" pair
        // based on the user description from the genome data file.
        method_details.put("grpNotes",refGrpMethodDetails);
        int hapGrpMethodID = phg.putMethod(refGrpMethodName, DBLoadingUtils.MethodType.REF_RANGE_GROUP,method_details);

        // Put the reference inter-range intervals to reference_ranges table
        phg.putAllAnchors(anchorsToLoad, hapGrpMethodID);


        // Load the ref_range_ref_range_method table for the inter-anchors
        RangeMap<Position,Integer> refRangeIdMap = phg.getIntervalRangesWithIDForChrom( "all");

        List<Integer> refInterRegionIds = new ArrayList<>();
        refRangeIdMap.asMapOfRanges().keySet().stream().forEach( range -> {
            int rangeId = refRangeIdMap.asMapOfRanges().get(range);
            if (!refRegionRangeIds.contains(rangeId)) {
                // inter region ids are those that were not originally on the list,
                // these were added via the putAllAnchors call above
                refInterRegionIds.add(rangeId);
            }
        });

        phg.putRefRangeRefRangeMethod(hapGrpMethodID,refInterRegionIds);
        Set<String> methodNames = new HashSet<>();
        methodNames.add(refGrpMethodName);
        // Add data to the haplotypes table
        phg.putRefAnchorData(line, hapNumber, anchorsToLoad, hapMethodID, methodNames,line,null);
        return true;
    }

    private static AnchorDataPHG getAdata(int nextStart, int interEnd, Chromosome curChrom, String line ) {
        
        String anchorString = myRefSequence.genotypeAsString(curChrom,nextStart, interEnd);
        Position startPos = new GeneralPosition.Builder( curChrom,nextStart).build(); 
        Position endPos = new GeneralPosition.Builder( curChrom,interEnd).build(); 
        Range<Position> intervalRange =  Range.closed(startPos, endPos);

        Position geneS = new GeneralPosition.Builder( curChrom,nextStart).build(); // not used
        Position geneE = new GeneralPosition.Builder( curChrom,interEnd).build(); // not used
        Range<Position> geneRange = Range.closed(geneS, geneE);// not relevant for inter-regions
        
        // Create VCList:
        List<VariantContext> rangeVCList = new ArrayList<>();
        // ref and asm are the same here for encodeVariantContextListToByteArray
        VariantContext vc = AssemblyProcessingUtils.createRefRangeVC(myRefSequence, line, startPos, endPos, startPos, endPos);
        rangeVCList.add(vc);
        byte[] variants;
        try {
            variants =  DBLoadingUtils.encodeVariantContextListToByteArray(rangeVCList,true);
        } catch (Exception exc) {
            myLogger.debug(exc.getMessage(), exc);
            throw new IllegalStateException("LoadGEnomeIntervalsToPHGdbPlugin:getAdata:  error prcoessing variants for chrom " + curChrom + ", start: " + nextStart + ", end: " + interEnd);
        }
        // A null gvcf path is appropriate for reference inter-anchor regions
        AnchorDataPHG adata = new AnchorDataPHG(  intervalRange, geneRange,null,variants, anchorString);
        
        return adata;
    }

    private static void writeLiquibaseFile(String outputDir) {
        String runYes = outputDir + "/run_yes.txt";
        try (BufferedWriter bw = Utils.getBufferedWriter(runYes) ){
            bw.write("yes/n");
        } catch (Exception exc) {
            myLogger.error(exc.getMessage(),exc);
            throw new IllegalStateException("LoadGenomeIntervalsToHPGdbPlugin:writeLiquibaseFile - error writing file " + runYes);
        }
    }
    public static void main(String[] args) {
        GeneratePluginCode.generate(LoadGenomeIntervalsToPHGdbPlugin.class);
    }


    @Override
    public ImageIcon getIcon() {
        return null;
    }

    @Override
    public String getButtonName() {
        return ("Load intervals to reference_ranges table");
    }

    @Override
    public String getToolTipText() {
        return ("Load intervals to reference_ranges table");
    }

    /**
     * Reference Genome File for aligning against 
     *
     * @return Reference Genome File
     */
    public String refGenome() {
        return refGenome.value();
    }

    /**
     * Set Reference Genome File. Reference Genome File for
     * aligning against 
     *
     * @param value Reference Genome File
     *
     * @return this plugin
     */
    public LoadGenomeIntervalsToPHGdbPlugin refGenome(String value) {
        refGenome = new PluginParameter<>(refGenome, value);
        return this;
    }

    /**
     * Tab-delimited file containing chrom, start position,
     * end position and type, where type is either "RefRegion"
     * or "RefInterRegion"
     *
     * @return Anchors File
     */
    public String anchors() {
        return anchors.value();
    }

    /**
     * Set Anchors File. Tab-delimited file containing chrom,
     * start position, end position and type where type is either
     * "RefRegion" or "RefInterRegion"
     *
     * @param value Anchors File
     *
     * @return this plugin
     */
    public LoadGenomeIntervalsToPHGdbPlugin anchors(String value) {
        anchors = new PluginParameter<>(anchors, value);
        return this;
    }

    /**
     * Path to tab-delimited file containing genome specific
     * data with header line:
     * Genotype Hapnumber Dataline Ploidy Reference GenesPhased
     * ChromsPhased Confidence Method MethodDetails RefVersion
     *
     * @return Genome Data File
     */
    public String genomeData() {
        return genomeData.value();
    }

    /**
     * Set Genome Data File. Path to tab-delimited file containing
     * genome specific data with header line:
     * Genotype Hapnumber Dataline Ploidy Reference GenesPhased
     * ChromsPhased Confidence Method MethodDetails RefVersion
     *
     * @param value Genome Data File
     *
     * @return this plugin
     */
    public LoadGenomeIntervalsToPHGdbPlugin genomeData(String value) {
        genomeData = new PluginParameter<>(genomeData, value);
        return this;
    }

    /**
     * Directory to write liquibase changeLogSync output
     *
     * @return Output Directory
     */
    public String outputDir() {
        return outputDir.value();
    }

    /**
     * Set Output Directory. Directory to write liquibase
     * changeLogSync output
     *
     * @param value Output Directory
     *
     * @return this plugin
     */
    public LoadGenomeIntervalsToPHGdbPlugin outputDir(String value) {
        outputDir = new PluginParameter<>(outputDir, value);
        return this;
    }


}



