/*
 * Seeq REST API
 * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
 *
 * OpenAPI spec version: 60.1.3-v202304250417
 * 
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package com.seeq.model;

import java.util.Objects;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.seeq.model.ScalarPropertyV1;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.ArrayList;
import java.util.List;
/**
 * DatafileInputV1
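 *
 * <p>All setters return {@code this}, so an input can be built fluently. A minimal,
 * hypothetical sketch for importing a condition from an uploaded CSV file (the field
 * values are illustrative only, not taken from any real datafile):
 * <pre>{@code
 * DatafileInputV1 input = new DatafileInputV1()
 *     .name("Batch Runs")                  // required
 *     .uploadFilename("f81d4fae.csv")      // required; server-side name from the upload endpoint
 *     .itemType(DatafileInputV1.ItemTypeEnum.CONDITION)
 *     .conditionName("Batch")
 *     .maximumDuration("2d")
 *     .keyColumnName("Start")
 *     .endColumnName("End");
 * }</pre>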
 */
public class DatafileInputV1 {
  @JsonProperty("additionalProperties")
  private List<ScalarPropertyV1> additionalProperties = new ArrayList<ScalarPropertyV1>();

  @JsonProperty("append")
  private Boolean append = false;

  @JsonProperty("conditionName")
  private String conditionName = null;

  @JsonProperty("dataId")
  private String dataId = null;

  @JsonProperty("dayFirstDefault")
  private Boolean dayFirstDefault = false;

  @JsonProperty("description")
  private String description = null;

  @JsonProperty("descriptionRow")
  private Integer descriptionRow = null;

  @JsonProperty("endColumnIndex")
  private Integer endColumnIndex = null;

  @JsonProperty("endColumnName")
  private String endColumnName = null;

  /**
   * The character used as the CSV field delimiter. The possibilities are comma, semicolon, and tab. If not specified, defaults to comma.
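   *
   * <p>Jackson maps each constant to its display string via the {@code @JsonValue} and
   * {@code @JsonCreator} methods below; a hypothetical round trip:
   * <pre>{@code
   * FieldDelimiterEnum d = FieldDelimiterEnum.fromValue("Semicolon"); // SEMICOLON
   * String wire = d.getValue();                                       // "Semicolon"
   * FieldDelimiterEnum bad = FieldDelimiterEnum.fromValue(";");       // null (display strings only)
   * }</pre>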
   */
  public enum FieldDelimiterEnum {
    COMMA("Comma"),
    SEMICOLON("Semicolon"),
    TAB("Tab");

    private String value;

    FieldDelimiterEnum(String value) {
      this.value = value;
    }
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }
    @JsonCreator
    public static FieldDelimiterEnum fromValue(String input) {
      for (FieldDelimiterEnum b : FieldDelimiterEnum.values()) {
        if (b.value.equals(input)) {
          return b;
        }
      }
      return null;
    }

  }

  @JsonProperty("fieldDelimiter")
  private FieldDelimiterEnum fieldDelimiter = null;

  @JsonProperty("filename")
  private String filename = null;

  @JsonProperty("firstDataRow")
  private Integer firstDataRow = null;

  @JsonProperty("hostId")
  private String hostId = null;

  @JsonProperty("interpolationMethod")
  private String interpolationMethod = null;

  @JsonProperty("interpolationMethodRow")
  private Integer interpolationMethodRow = null;

  /**
   * The type of item to be imported from the CSV file. Supported types include signal and condition.
   */
  public enum ItemTypeEnum {
    SIGNAL("Signal"),
    CONDITION("Condition"),
    SUPPORTEDITEMTYPE("SupportedItemType");

    private String value;

    ItemTypeEnum(String value) {
      this.value = value;
    }
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }
    @JsonCreator
    public static ItemTypeEnum fromValue(String input) {
      for (ItemTypeEnum b : ItemTypeEnum.values()) {
        if (b.value.equals(input)) {
          return b;
        }
      }
      return null;
    }

  }

  @JsonProperty("itemType")
  private ItemTypeEnum itemType = null;

  @JsonProperty("keyColumnIndex")
  private Integer keyColumnIndex = null;

  @JsonProperty("keyColumnName")
  private String keyColumnName = null;

  /**
   * The format of the sample timestamps for signals or the format of the capsule start and end times for a condition. If not specified, defaults to ISO8601.
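   *
   * <p>For example, an ISO8601 key might look like {@code 2023-04-25T04:17:00Z}, while
   * UNIX_EPOCH_SECONDS expects numeric values such as {@code 1682396220} (the same instant).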
   */
  public enum KeyFormatEnum {
    ISO8601("ISO8601"),
    MONTH_DAY_YEAR_24HRCLOCK("MONTH_DAY_YEAR_24HRCLOCK"),
    MONTH_DAY_YEAR_12HRCLOCK("MONTH_DAY_YEAR_12HRCLOCK"),
    UNIX_EPOCH_SECONDS("UNIX_EPOCH_SECONDS"),
    KEYFORMATTYPE("KeyFormatType");

    private String value;

    KeyFormatEnum(String value) {
      this.value = value;
    }
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }
    @JsonCreator
    public static KeyFormatEnum fromValue(String input) {
      for (KeyFormatEnum b : KeyFormatEnum.values()) {
        if (b.value.equals(input)) {
          return b;
        }
      }
      return null;
    }

  }

  @JsonProperty("keyFormat")
  private KeyFormatEnum keyFormat = null;

  @JsonProperty("lenientDaylightSavings")
  private Boolean lenientDaylightSavings = false;

  @JsonProperty("maximumDuration")
  private String maximumDuration = null;

  @JsonProperty("maximumInterpolation")
  private String maximumInterpolation = null;

  @JsonProperty("maximumInterpolationRow")
  private Integer maximumInterpolationRow = null;

  @JsonProperty("name")
  private String name = null;

  @JsonProperty("namePrefix")
  private String namePrefix = null;

  @JsonProperty("nameRow")
  private Integer nameRow = null;

  @JsonProperty("nameSuffix")
  private String nameSuffix = null;

  @JsonProperty("properties")
  private List<ScalarPropertyV1> properties = new ArrayList<ScalarPropertyV1>();

  @JsonProperty("scopedTo")
  private String scopedTo = null;

  @JsonProperty("timeZone")
  private String timeZone = null;

  @JsonProperty("uploadFilename")
  private String uploadFilename = null;

  /**
   * The approach to use when CSV data cannot be parsed. Choices are Fail, Skip, Invalid. If Fail (default), then cells that cannot be parsed will cause the import to fail with error messages. If Skip, those cells will be skipped, meaning that no sample will be created for signals from that row of the file. For conditions, if it is the start or end cell, no capsule will be created from that row. If the cell is a capsule property, the capsule is still created but without that capsule property. If Invalid and the cell is a sample key or capsule start/end, no sample or capsule is created from that row of the file. If the cell is a sample value or capsule property, the sample or capsule property is created with the value INVALID.
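   *
   * <p>Concretely, for a hypothetical row whose value cell reads "N/A" and cannot be parsed:
   * Fail aborts the entire import, Skip creates no sample from that row, and Invalid creates
   * the sample with the value INVALID.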
   */
  public enum ValidationModeEnum {
    FAIL("Fail"),
    SKIP("Skip"),
    INVALID("Invalid");

    private String value;

    ValidationModeEnum(String value) {
      this.value = value;
    }
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }
    @JsonCreator
    public static ValidationModeEnum fromValue(String input) {
      for (ValidationModeEnum b : ValidationModeEnum.values()) {
        if (b.value.equals(input)) {
          return b;
        }
      }
      return null;
    }

  }

  @JsonProperty("validationMode")
  private ValidationModeEnum validationMode = null;

  @JsonProperty("valueColumnIndices")
  private String valueColumnIndices = null;

  @JsonProperty("valueColumnNames")
  private String valueColumnNames = null;

  @JsonProperty("valueUom")
  private String valueUom = null;

  @JsonProperty("valueUomRow")
  private Integer valueUomRow = null;

  public DatafileInputV1 additionalProperties(List<ScalarPropertyV1> additionalProperties) {
    this.additionalProperties = additionalProperties;
    return this;
  }

  public DatafileInputV1 addAdditionalPropertiesItem(ScalarPropertyV1 additionalPropertiesItem) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new ArrayList<ScalarPropertyV1>();
    }
    this.additionalProperties.add(additionalPropertiesItem);
    return this;
  }

   /**
   * Get additionalProperties
   * @return additionalProperties
  **/
  @Schema(description = "")
  public List<ScalarPropertyV1> getAdditionalProperties() {
    return additionalProperties;
  }

  public void setAdditionalProperties(List<ScalarPropertyV1> additionalProperties) {
    this.additionalProperties = additionalProperties;
  }

  public DatafileInputV1 append(Boolean append) {
    this.append = append;
    return this;
  }

   /**
   * If true, append the data in this CSV file to the signals or condition.
   * @return append
  **/
  @Schema(description = "If true, append the data in this CSV file to the signals or condition.")
  public Boolean getAppend() {
    return append;
  }

  public void setAppend(Boolean append) {
    this.append = append;
  }

  public DatafileInputV1 conditionName(String conditionName) {
    this.conditionName = conditionName;
    return this;
  }

   /**
   * The name to use for the condition being imported. If this datafile already has a condition by this name, the import will modify the already existing condition rather than creating another condition with the same name. This setting is ignored when importing signals.
   * @return conditionName
  **/
  @Schema(description = "The name to use for the condition being imported. If this datafile already has a condition by this name, the import will modify the already existing condition rather than creating another condition with the same name. This setting is ignored when importing signals.")
  public String getConditionName() {
    return conditionName;
  }

  public void setConditionName(String conditionName) {
    this.conditionName = conditionName;
  }

  public DatafileInputV1 dataId(String dataId) {
    this.dataId = dataId;
    return this;
  }

   /**
   * The data ID of this item. Note: This is not the Seeq ID, but the unique identifier that the remote datasource uses.
   * @return dataId
  **/
  @Schema(description = "The data ID of this item. Note: This is not the Seeq ID, but the unique identifier that the remote datasource uses.")
  public String getDataId() {
    return dataId;
  }

  public void setDataId(String dataId) {
    this.dataId = dataId;
  }

  public DatafileInputV1 dayFirstDefault(Boolean dayFirstDefault) {
    this.dayFirstDefault = dayFirstDefault;
    return this;
  }

   /**
   * If true, assume day first dates when ambiguous. If false (default), assume month first dates when ambiguous. For example, 07/01/16 is ambiguous and could be a day first or month first date. This setting is only used when there is not enough information in the column to distinguish month first from day first dates.
   * @return dayFirstDefault
  **/
  @Schema(description = "If true, assume day first dates when ambiguous. If false (default), assume month first dates when ambiguous. For example, 07/01/16 is ambiguous and could be a day first or month first date. This setting is only used when there is not enough information in the column to distinguish month first from day first dates.")
  public Boolean getDayFirstDefault() {
    return dayFirstDefault;
  }

  public void setDayFirstDefault(Boolean dayFirstDefault) {
    this.dayFirstDefault = dayFirstDefault;
  }

  public DatafileInputV1 description(String description) {
    this.description = description;
    return this;
  }

   /**
   * Clarifying information or other plain language description of this item. An input of just whitespace is equivalent to a null input.
   * @return description
  **/
  @Schema(description = "Clarifying information or other plain language description of this item. An input of just whitespace is equivalent to a null input.")
  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public DatafileInputV1 descriptionRow(Integer descriptionRow) {
    this.descriptionRow = descriptionRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the description for each signal. If there is no such row, set to 0. If not specified, defaults to 0. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return descriptionRow
  **/
  @Schema(description = "Integer that identifies the row containing the description for each signal. If there is no such row, set to 0. If not specified, defaults to 0. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Integer getDescriptionRow() {
    return descriptionRow;
  }

  public void setDescriptionRow(Integer descriptionRow) {
    this.descriptionRow = descriptionRow;
  }

  public DatafileInputV1 endColumnIndex(Integer endColumnIndex) {
    this.endColumnIndex = endColumnIndex;
    return this;
  }

   /**
   * Integer that identifies the column containing the capsule end key for the condition. If not specified, defaults to keyColumnIndex + 1. If endColumnName is specified, this setting will be ignored. If importing a signal, this setting is ignored.
   * @return endColumnIndex
  **/
  @Schema(description = "Integer that identifies the column containing the capsule end key for the condition. If not specified, defaults to keyColumnIndex + 1. If endColumnName is specified, this setting will be ignored. If importing a signal, this setting is ignored.")
  public Integer getEndColumnIndex() {
    return endColumnIndex;
  }

  public void setEndColumnIndex(Integer endColumnIndex) {
    this.endColumnIndex = endColumnIndex;
  }

  public DatafileInputV1 endColumnName(String endColumnName) {
    this.endColumnName = endColumnName;
    return this;
  }

   /**
   * The name of the column containing the capsule end key for the condition. If not specified or whitespace, the endColumnIndex will be used. If specified, the endColumnIndex will be ignored.
   * @return endColumnName
  **/
  @Schema(description = "The name of the column containing the capsule end key for the condition. If not specified or whitespace, the endColumnIndex will be used. If specified, the endColumnIndex will be ignored.")
  public String getEndColumnName() {
    return endColumnName;
  }

  public void setEndColumnName(String endColumnName) {
    this.endColumnName = endColumnName;
  }

  public DatafileInputV1 fieldDelimiter(FieldDelimiterEnum fieldDelimiter) {
    this.fieldDelimiter = fieldDelimiter;
    return this;
  }

   /**
   * The character used as the CSV field delimiter. The possibilities are comma, semicolon, and tab. If not specified, defaults to comma.
   * @return fieldDelimiter
  **/
  @Schema(description = "The character used as the CSV field delimiter. The possibilities are comma, semicolon, and tab. If not specified, defaults to comma.")
  public FieldDelimiterEnum getFieldDelimiter() {
    return fieldDelimiter;
  }

  public void setFieldDelimiter(FieldDelimiterEnum fieldDelimiter) {
    this.fieldDelimiter = fieldDelimiter;
  }

  public DatafileInputV1 filename(String filename) {
    this.filename = filename;
    return this;
  }

   /**
   * The name of the file that the client uploaded. Useful for keeping track of what file was used to create the Datafile.
   * @return filename
  **/
  @Schema(description = "The name of the file that the client uploaded. Useful for keeping track of what file was used to create the Datafile.")
  public String getFilename() {
    return filename;
  }

  public void setFilename(String filename) {
    this.filename = filename;
  }

  public DatafileInputV1 firstDataRow(Integer firstDataRow) {
    this.firstDataRow = firstDataRow;
    return this;
  }

   /**
   * Integer that identifies the row at which to start reading the data. If not specified, defaults to 2. (Row 1 is the first row of the file.)
   * @return firstDataRow
  **/
  @Schema(description = "Integer that identifies the row at which to start reading the data. If not specified, defaults to 2. (Row 1 is the first row of the file.)")
  public Integer getFirstDataRow() {
    return firstDataRow;
  }

  public void setFirstDataRow(Integer firstDataRow) {
    this.firstDataRow = firstDataRow;
  }

  public DatafileInputV1 hostId(String hostId) {
    this.hostId = hostId;
    return this;
  }

   /**
   * The ID of the datasource hosting this item. Note that this is a Seeq-generated ID, not the way that the datasource identifies itself.
   * @return hostId
  **/
  @Schema(description = "The ID of the datasource hosting this item. Note that this is a Seeq-generated ID, not the way that the datasource identifies itself.")
  public String getHostId() {
    return hostId;
  }

  public void setHostId(String hostId) {
    this.hostId = hostId;
  }

  public DatafileInputV1 interpolationMethod(String interpolationMethod) {
    this.interpolationMethod = interpolationMethod;
    return this;
  }

   /**
   * The interpolation method used to represent the values between samples in the signal. The possibilities are: Linear, PILinear, and Step. If not specified, defaults to Linear. If an interpolation method row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.
   * @return interpolationMethod
  **/
  @Schema(description = "The interpolation method used to represent the values between samples in the signal. The possibilities are: Linear, PILinear, and Step. If not specified, defaults to Linear. If an interpolation method row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.")
  public String getInterpolationMethod() {
    return interpolationMethod;
  }

  public void setInterpolationMethod(String interpolationMethod) {
    this.interpolationMethod = interpolationMethod;
  }

  public DatafileInputV1 interpolationMethodRow(Integer interpolationMethodRow) {
    this.interpolationMethodRow = interpolationMethodRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the interpolation method for each signal. If there is no such row, set to 0. If not specified, defaults to 0. If an interpolation method in the row is not recognized, the import will fail. If an interpolation method in the row is blank, the interpolationMethod setting will be used as the default. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return interpolationMethodRow
  **/
  @Schema(description = "Integer that identifies the row containing the interpolation method for each signal. If there is no such row, set to 0. If not specified, defaults to 0. If an interpolation method in the row is not recognized, the import will fail. If an interpolation method in the row is blank, the interpolationMethod setting will be used as the default. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Integer getInterpolationMethodRow() {
    return interpolationMethodRow;
  }

  public void setInterpolationMethodRow(Integer interpolationMethodRow) {
    this.interpolationMethodRow = interpolationMethodRow;
  }

  public DatafileInputV1 itemType(ItemTypeEnum itemType) {
    this.itemType = itemType;
    return this;
  }

   /**
   * The type of item to be imported from the CSV file. Supported types include signal and condition.
   * @return itemType
  **/
  @Schema(description = "The type of item to be imported from the CSV file. Supported types include signal and condition.")
  public ItemTypeEnum getItemType() {
    return itemType;
  }

  public void setItemType(ItemTypeEnum itemType) {
    this.itemType = itemType;
  }

  public DatafileInputV1 keyColumnIndex(Integer keyColumnIndex) {
    this.keyColumnIndex = keyColumnIndex;
    return this;
  }

   /**
   * Integer that identifies the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified, defaults to 1, the first column. If keyColumnName is specified, this setting will be ignored.
   * @return keyColumnIndex
  **/
  @Schema(description = "Integer that identifies the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified, defaults to 1, the first column. If keyColumnName is specified, this setting will be ignored.")
  public Integer getKeyColumnIndex() {
    return keyColumnIndex;
  }

  public void setKeyColumnIndex(Integer keyColumnIndex) {
    this.keyColumnIndex = keyColumnIndex;
  }

  public DatafileInputV1 keyColumnName(String keyColumnName) {
    this.keyColumnName = keyColumnName;
    return this;
  }

   /**
   * The name of the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified or whitespace, the keyColumnIndex will be used. If specified, the keyColumnIndex will be ignored.
   * @return keyColumnName
  **/
  @Schema(description = "The name of the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified or whitespace, the keyColumnIndex will be used. If specified, the keyColumnIndex will be ignored.")
  public String getKeyColumnName() {
    return keyColumnName;
  }

  public void setKeyColumnName(String keyColumnName) {
    this.keyColumnName = keyColumnName;
  }

  public DatafileInputV1 keyFormat(KeyFormatEnum keyFormat) {
    this.keyFormat = keyFormat;
    return this;
  }

   /**
   * The format of the sample timestamps for signals or the format of the capsule start and end times for a condition. If not specified, defaults to ISO8601.
   * @return keyFormat
  **/
  @Schema(description = "The format of the sample timestamps for signals or the format of the capsule start and end times for a condition. If not specified, defaults to ISO8601.")
  public KeyFormatEnum getKeyFormat() {
    return keyFormat;
  }

  public void setKeyFormat(KeyFormatEnum keyFormat) {
    this.keyFormat = keyFormat;
  }

  public DatafileInputV1 lenientDaylightSavings(Boolean lenientDaylightSavings) {
    this.lenientDaylightSavings = lenientDaylightSavings;
    return this;
  }

   /**
   * If true, hours that don't exist due to the spring-forward daylight savings transition are allowed and are interpreted as occurring in the following hour. The true setting should not be needed if the data was logged appropriately for its time zone. If false (default), data in hours that don't exist will cause the import to fail.
   * @return lenientDaylightSavings
  **/
  @Schema(description = "If true, hours that don't exist due to the spring-forward daylight savings transition are allowed and are interpreted as occurring in the following hour. The true setting should not be needed if the data was logged appropriately for its time zone. If false (default), data in hours that don't exist will cause the import to fail.")
  public Boolean getLenientDaylightSavings() {
    return lenientDaylightSavings;
  }

  public void setLenientDaylightSavings(Boolean lenientDaylightSavings) {
    this.lenientDaylightSavings = lenientDaylightSavings;
  }

  public DatafileInputV1 maximumDuration(String maximumDuration) {
    this.maximumDuration = maximumDuration;
    return this;
  }

   /**
   * The maximum duration of the capsules in the condition. Capsules greater than this duration will be imported but will not be returned when data from the condition is requested. If importing a signal, this setting is ignored.
   * @return maximumDuration
  **/
  @Schema(description = "The maximum duration of the capsules in the condition. Capsules greater than this duration will be imported but will not be returned when data from the condition is requested. If importing a signal, this setting is ignored.")
  public String getMaximumDuration() {
    return maximumDuration;
  }

  public void setMaximumDuration(String maximumDuration) {
    this.maximumDuration = maximumDuration;
  }

  public DatafileInputV1 maximumInterpolation(String maximumInterpolation) {
    this.maximumInterpolation = maximumInterpolation;
    return this;
  }

   /**
   * The maximum spacing between adjacent sample keys that can be interpolated across. If two samples are spaced by more than maximum interpolation, there will be a hole in the signal between them. If not specified, defaults to 40h. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.
   * @return maximumInterpolation
  **/
  @Schema(description = "The maximum spacing between adjacent sample keys that can be interpolated across. If two samples are spaced by more than maximum interpolation, there will be a hole in the signal between them. If not specified, defaults to 40h. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.")
  public String getMaximumInterpolation() {
    return maximumInterpolation;
  }

  public void setMaximumInterpolation(String maximumInterpolation) {
    this.maximumInterpolation = maximumInterpolation;
  }

  public DatafileInputV1 maximumInterpolationRow(Integer maximumInterpolationRow) {
    this.maximumInterpolationRow = maximumInterpolationRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the maximum interpolation for each signal. If there is no such row, set to 0. If not specified, defaults to 0. If a maximum interpolation in the row is not recognized, the import will fail. If a maximum interpolation in the row is blank, the maximumInterpolation setting will be used as the default. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return maximumInterpolationRow
  **/
  @Schema(description = "Integer that identifies the row containing the maximum interpolation for each signal. If there is no such row, set to 0. If not specified, defaults to 0. If a maximum interpolation in the row is not recognized, the import will fail. If a maximum interpolation in the row is blank, the maximumInterpolation setting will be used as the default. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Integer getMaximumInterpolationRow() {
    return maximumInterpolationRow;
  }

  public void setMaximumInterpolationRow(Integer maximumInterpolationRow) {
    this.maximumInterpolationRow = maximumInterpolationRow;
  }

  public DatafileInputV1 name(String name) {
    this.name = name;
    return this;
  }

   /**
   * Human readable name. Required during creation. An input of just whitespace is equivalent to a null input.
   * @return name
  **/
  @Schema(required = true, description = "Human readable name. Required during creation. An input of just whitespace is equivalent to a null input.")
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public DatafileInputV1 namePrefix(String namePrefix) {
    this.namePrefix = namePrefix;
    return this;
  }

   /**
   * Prefix prepended to the name of each signal when importing signal(s) and prepended to the name of each capsule property when importing a condition. Leading whitespace is ignored.
   * @return namePrefix
  **/
  @Schema(description = "Prefix prepended to the name of each signal when importing signal(s) and prepended to the name of each capsule property when importing a condition. Leading whitespace is ignored.")
  public String getNamePrefix() {
    return namePrefix;
  }

  public void setNamePrefix(String namePrefix) {
    this.namePrefix = namePrefix;
  }

  public DatafileInputV1 nameRow(Integer nameRow) {
    this.nameRow = nameRow;
    return this;
  }

   /**
   * Integer that identifies the header row used to name the signal(s) when importing signal(s) and used to name the capsule properties when importing a condition. If not specified, defaults to 1 (first row). The name row is required and must have unique non-whitespace entries.
   * @return nameRow
  **/
  @Schema(description = "Integer that identifies the header row used to name the signal(s) when importing signal(s) and used to name the capsule properties when importing a condition. If not specified, defaults to 1 (first row). The name row is required and must have unique non-whitespace entries.")
  public Integer getNameRow() {
    return nameRow;
  }

  public void setNameRow(Integer nameRow) {
    this.nameRow = nameRow;
  }

  public DatafileInputV1 nameSuffix(String nameSuffix) {
    this.nameSuffix = nameSuffix;
    return this;
  }

   /**
   * Suffix appended to the name of each signal when importing signal(s) and appended to the name of each capsule property when importing a condition. Trailing whitespace is ignored.
   * @return nameSuffix
  **/
  @Schema(description = "Suffix appended to the name of each signal when importing signal(s) and appended to the name of each capsule property when importing a condition. Trailing whitespace is ignored.")
  public String getNameSuffix() {
    return nameSuffix;
  }

  public void setNameSuffix(String nameSuffix) {
    this.nameSuffix = nameSuffix;
  }

  public DatafileInputV1 properties(List<ScalarPropertyV1> properties) {
    this.properties = properties;
    return this;
  }

  public DatafileInputV1 addPropertiesItem(ScalarPropertyV1 propertiesItem) {
    if (this.properties == null) {
      this.properties = new ArrayList<ScalarPropertyV1>();
    }
    this.properties.add(propertiesItem);
    return this;
  }

   /**
   * Get properties
   * @return properties
  **/
  @Schema(description = "")
  public List<ScalarPropertyV1> getProperties() {
    return properties;
  }

  public void setProperties(List<ScalarPropertyV1> properties) {
    this.properties = properties;
  }

  public DatafileInputV1 scopedTo(String scopedTo) {
    this.scopedTo = scopedTo;
    return this;
  }

   /**
   * The ID of the workbook to which this item will be scoped.
   * @return scopedTo
  **/
  @Schema(description = "The ID of the workbook to which this item will be scoped.")
  public String getScopedTo() {
    return scopedTo;
  }

  public void setScopedTo(String scopedTo) {
    this.scopedTo = scopedTo;
  }

  public DatafileInputV1 timeZone(String timeZone) {
    this.timeZone = timeZone;
    return this;
  }

   /**
   * If the timestamps (key for signals, start/end for a condition) contain no time zone information, they will be interpreted as being in this time zone. If not specified and the timestamps contain no time zone information, the time zone of the Seeq server is used. If the timestamps contain time zone information, this setting is ignored.
   * @return timeZone
  **/
  @Schema(description = "If the timestamps (key for signals, start/end for a condition) contain no time zone information, they will be interpreted as being in this time zone. If not specified and the timestamps contain no time zone information, the time zone of the Seeq server is used. If the timestamps contain time zone information, this setting is ignored.")
  public String getTimeZone() {
    return timeZone;
  }

  public void setTimeZone(String timeZone) {
    this.timeZone = timeZone;
  }

  public DatafileInputV1 uploadFilename(String uploadFilename) {
    this.uploadFilename = uploadFilename;
    return this;
  }

   /**
   * The server-side name of the CSV file that is returned from the upload endpoint.
   * @return uploadFilename
  **/
  @Schema(required = true, description = "The server-side name of the CSV file that is returned from the upload endpoint.")
  public String getUploadFilename() {
    return uploadFilename;
  }

  public void setUploadFilename(String uploadFilename) {
    this.uploadFilename = uploadFilename;
  }

  public DatafileInputV1 validationMode(ValidationModeEnum validationMode) {
    this.validationMode = validationMode;
    return this;
  }

   /**
   * The approach to use when CSV data cannot be parsed. Choices are Fail, Skip, Invalid. If Fail (default), then cells that cannot be parsed will cause the import to fail with error messages. If Skip, those cells will be skipped, meaning that no sample will be created for signals from that row of the file. For conditions, if it is the start or end cell, no capsule will be created from that row. If the cell is a capsule property, the capsule is still created but without that capsule property. If Invalid and the cell is a sample key or capsule start/end, no sample or capsule is created from that row of the file. If the cell is a sample value or capsule property, the sample or capsule property is created with the value INVALID.
   * @return validationMode
  **/
  @Schema(description = "The approach to use when CSV data cannot be parsed. Choices are Fail, Skip, Invalid. If Fail (default), then cells that cannot be parsed will cause the import to fail with error messages. If Skip, those cells will be skipped, meaning that no sample will be created for signals from that row of the file. For conditions, if it is the start or end cell, no capsule will be created from that row. If the cell is a capsule property, the capsule is still created but without that capsule property. If Invalid and the cell is a sample key or capsule start/end, no sample or capsule is created from that row of the file. If the cell is a sample value or capsule property, the sample or capsule property is created with the value INVALID.")
  public ValidationModeEnum getValidationMode() {
    return validationMode;
  }

  public void setValidationMode(ValidationModeEnum validationMode) {
    this.validationMode = validationMode;
  }

  public DatafileInputV1 valueColumnIndices(String valueColumnIndices) {
    this.valueColumnIndices = valueColumnIndices;
    return this;
  }

   /**
   * List of integers identifying the columns to import. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. Valid formats are a comma separated list of 'N' or 'N-M' where N and M are integers greater than zero and M >= N. Example: '2, 5-7, 10, 12-14'. The first column of the file is column 1. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If neither valueColumnNames nor valueColumnIndices is specified, all columns other than the key/start/end column will result in signals when importing signals and will result in capsule properties when importing a condition. An entry of 0 alone indicates that no columns should be imported as capsule properties. If a column in this list cannot be found in the file, the import will fail. Any column is only imported once no matter how many times it is listed.
   * @return valueColumnIndices
  **/
  @Schema(description = "List of integers identifying the columns to import. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. Valid formats are a comma separated list of 'N' or 'N-M' where N and M are integers greater than zero and M >= N. Example: '2, 5-7, 10, 12-14'. The first column of the file is column 1. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If neither valueColumnNames nor valueColumnIndices is specified, all columns other than the key/start/end column will result in signals when importing signals and will result in capsule properties when importing a condition. An entry of 0 alone indicates that no columns should be imported as capsule properties. If a column in this list cannot be found in the file, the import will fail. Any column is only imported once no matter how many times it is listed.")
  public String getValueColumnIndices() {
    return valueColumnIndices;
  }

  public void setValueColumnIndices(String valueColumnIndices) {
    this.valueColumnIndices = valueColumnIndices;
  }

  public DatafileInputV1 valueColumnNames(String valueColumnNames) {
    this.valueColumnNames = valueColumnNames;
    return this;
  }

   /**
   * List of comma separated case sensitive names of the columns to import. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If not specified, valueColumnIndices will be used. If specified, valueColumnIndices will be ignored. If a column name in this list cannot be found in the file, the import will fail. Any column is only imported once no matter how many times it is listed.
   * @return valueColumnNames
  **/
  @Schema(description = "List of comma separated case sensitive names of the columns to import. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If not specified, valueColumnIndices will be used. If specified, valueColumnIndices will be ignored. If a column name in this list cannot be found in the file, the import will fail. Any column is only imported once no matter how many times it is listed.")
  public String getValueColumnNames() {
    return valueColumnNames;
  }

  public void setValueColumnNames(String valueColumnNames) {
    this.valueColumnNames = valueColumnNames;
  }

  public DatafileInputV1 valueUom(String valueUom) {
    this.valueUom = valueUom;
    return this;
  }

   /**
   * The unit of measure to be used for every signal when importing signals and for every capsule property when importing a condition. If not specified, defaults to unitless. If a unit of measure row is specified, the information in that row overrides this setting. If this unit of measure is not recognized, the import will fail.
   * @return valueUom
  **/
  @Schema(description = "The unit of measure to be used for every signal when importing signals and for every capsule property when importing a condition. If not specified, defaults to unitless. If a unit of measure row is specified, the information in that row overrides this setting. If this unit of measure is not recognized, the import will fail.")
  public String getValueUom() {
    return valueUom;
  }

  public void setValueUom(String valueUom) {
    this.valueUom = valueUom;
  }

  public DatafileInputV1 valueUomRow(Integer valueUomRow) {
    this.valueUomRow = valueUomRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the unit of measure for each signal when importing signal(s) or for each capsule property when importing a condition. If there is no such row, set to 0. If not specified, defaults to 0. If a unit of measure in the row is not recognized, unitless will be used instead. (Row 1 is the first row of the file.)
   * @return valueUomRow
  **/
  @Schema(description = "Integer that identifies the row containing the unit of measure for each signal when importing signal(s) or for each capsule property when importing a condition. If there is no such row, set to 0. If not specified, defaults to 0. If a unit of measure in the row is not recognized, unitless will be used instead. (Row 1 is the first row of the file.)")
  public Integer getValueUomRow() {
    return valueUomRow;
  }

  public void setValueUomRow(Integer valueUomRow) {
    this.valueUomRow = valueUomRow;
  }


  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    DatafileInputV1 datafileInputV1 = (DatafileInputV1) o;
    return Objects.equals(this.additionalProperties, datafileInputV1.additionalProperties) &&
        Objects.equals(this.append, datafileInputV1.append) &&
        Objects.equals(this.conditionName, datafileInputV1.conditionName) &&
        Objects.equals(this.dataId, datafileInputV1.dataId) &&
        Objects.equals(this.dayFirstDefault, datafileInputV1.dayFirstDefault) &&
        Objects.equals(this.description, datafileInputV1.description) &&
        Objects.equals(this.descriptionRow, datafileInputV1.descriptionRow) &&
        Objects.equals(this.endColumnIndex, datafileInputV1.endColumnIndex) &&
        Objects.equals(this.endColumnName, datafileInputV1.endColumnName) &&
        Objects.equals(this.fieldDelimiter, datafileInputV1.fieldDelimiter) &&
        Objects.equals(this.filename, datafileInputV1.filename) &&
        Objects.equals(this.firstDataRow, datafileInputV1.firstDataRow) &&
        Objects.equals(this.hostId, datafileInputV1.hostId) &&
        Objects.equals(this.interpolationMethod, datafileInputV1.interpolationMethod) &&
        Objects.equals(this.interpolationMethodRow, datafileInputV1.interpolationMethodRow) &&
        Objects.equals(this.itemType, datafileInputV1.itemType) &&
        Objects.equals(this.keyColumnIndex, datafileInputV1.keyColumnIndex) &&
        Objects.equals(this.keyColumnName, datafileInputV1.keyColumnName) &&
        Objects.equals(this.keyFormat, datafileInputV1.keyFormat) &&
        Objects.equals(this.lenientDaylightSavings, datafileInputV1.lenientDaylightSavings) &&
        Objects.equals(this.maximumDuration, datafileInputV1.maximumDuration) &&
        Objects.equals(this.maximumInterpolation, datafileInputV1.maximumInterpolation) &&
        Objects.equals(this.maximumInterpolationRow, datafileInputV1.maximumInterpolationRow) &&
        Objects.equals(this.name, datafileInputV1.name) &&
        Objects.equals(this.namePrefix, datafileInputV1.namePrefix) &&
        Objects.equals(this.nameRow, datafileInputV1.nameRow) &&
        Objects.equals(this.nameSuffix, datafileInputV1.nameSuffix) &&
        Objects.equals(this.properties, datafileInputV1.properties) &&
        Objects.equals(this.scopedTo, datafileInputV1.scopedTo) &&
        Objects.equals(this.timeZone, datafileInputV1.timeZone) &&
        Objects.equals(this.uploadFilename, datafileInputV1.uploadFilename) &&
        Objects.equals(this.validationMode, datafileInputV1.validationMode) &&
        Objects.equals(this.valueColumnIndices, datafileInputV1.valueColumnIndices) &&
        Objects.equals(this.valueColumnNames, datafileInputV1.valueColumnNames) &&
        Objects.equals(this.valueUom, datafileInputV1.valueUom) &&
        Objects.equals(this.valueUomRow, datafileInputV1.valueUomRow);
  }

  @Override
  public int hashCode() {
    return Objects.hash(additionalProperties, append, conditionName, dataId, dayFirstDefault, description, descriptionRow, endColumnIndex, endColumnName, fieldDelimiter, filename, firstDataRow, hostId, interpolationMethod, interpolationMethodRow, itemType, keyColumnIndex, keyColumnName, keyFormat, lenientDaylightSavings, maximumDuration, maximumInterpolation, maximumInterpolationRow, name, namePrefix, nameRow, nameSuffix, properties, scopedTo, timeZone, uploadFilename, validationMode, valueColumnIndices, valueColumnNames, valueUom, valueUomRow);
  }


  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class DatafileInputV1 {\n");
    
    sb.append("    additionalProperties: ").append(toIndentedString(additionalProperties)).append("\n");
    sb.append("    append: ").append(toIndentedString(append)).append("\n");
    sb.append("    conditionName: ").append(toIndentedString(conditionName)).append("\n");
    sb.append("    dataId: ").append(toIndentedString(dataId)).append("\n");
    sb.append("    dayFirstDefault: ").append(toIndentedString(dayFirstDefault)).append("\n");
    sb.append("    description: ").append(toIndentedString(description)).append("\n");
    sb.append("    descriptionRow: ").append(toIndentedString(descriptionRow)).append("\n");
    sb.append("    endColumnIndex: ").append(toIndentedString(endColumnIndex)).append("\n");
    sb.append("    endColumnName: ").append(toIndentedString(endColumnName)).append("\n");
    sb.append("    fieldDelimiter: ").append(toIndentedString(fieldDelimiter)).append("\n");
    sb.append("    filename: ").append(toIndentedString(filename)).append("\n");
    sb.append("    firstDataRow: ").append(toIndentedString(firstDataRow)).append("\n");
    sb.append("    hostId: ").append(toIndentedString(hostId)).append("\n");
    sb.append("    interpolationMethod: ").append(toIndentedString(interpolationMethod)).append("\n");
    sb.append("    interpolationMethodRow: ").append(toIndentedString(interpolationMethodRow)).append("\n");
    sb.append("    itemType: ").append(toIndentedString(itemType)).append("\n");
    sb.append("    keyColumnIndex: ").append(toIndentedString(keyColumnIndex)).append("\n");
    sb.append("    keyColumnName: ").append(toIndentedString(keyColumnName)).append("\n");
    sb.append("    keyFormat: ").append(toIndentedString(keyFormat)).append("\n");
    sb.append("    lenientDaylightSavings: ").append(toIndentedString(lenientDaylightSavings)).append("\n");
    sb.append("    maximumDuration: ").append(toIndentedString(maximumDuration)).append("\n");
    sb.append("    maximumInterpolation: ").append(toIndentedString(maximumInterpolation)).append("\n");
    sb.append("    maximumInterpolationRow: ").append(toIndentedString(maximumInterpolationRow)).append("\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    namePrefix: ").append(toIndentedString(namePrefix)).append("\n");
    sb.append("    nameRow: ").append(toIndentedString(nameRow)).append("\n");
    sb.append("    nameSuffix: ").append(toIndentedString(nameSuffix)).append("\n");
    sb.append("    properties: ").append(toIndentedString(properties)).append("\n");
    sb.append("    scopedTo: ").append(toIndentedString(scopedTo)).append("\n");
    sb.append("    timeZone: ").append(toIndentedString(timeZone)).append("\n");
    sb.append("    uploadFilename: ").append(toIndentedString(uploadFilename)).append("\n");
    sb.append("    validationMode: ").append(toIndentedString(validationMode)).append("\n");
    sb.append("    valueColumnIndices: ").append(toIndentedString(valueColumnIndices)).append("\n");
    sb.append("    valueColumnNames: ").append(toIndentedString(valueColumnNames)).append("\n");
    sb.append("    valueUom: ").append(toIndentedString(valueUom)).append("\n");
    sb.append("    valueUomRow: ").append(toIndentedString(valueUomRow)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
  
}
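
For reference, a minimal sketch of round-tripping this model through Jackson. The ObjectMapper usage below is illustrative and not part of the generated SDK; the field values are hypothetical. Note that the enum fields serialize as their display strings ("Signal", "Skip", ...) because of the @JsonValue accessors above.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.seeq.model.DatafileInputV1;

public class DatafileInputV1JsonExample {
  public static void main(String[] args) throws Exception {
    // Build an input for a signal import using the fluent setters.
    DatafileInputV1 input = new DatafileInputV1()
        .name("Reactor Temperatures")
        .uploadFilename("a1b2c3.csv")
        .itemType(DatafileInputV1.ItemTypeEnum.SIGNAL)
        .keyFormat(DatafileInputV1.KeyFormatEnum.ISO8601)
        .firstDataRow(2)
        .validationMode(DatafileInputV1.ValidationModeEnum.SKIP);

    ObjectMapper mapper = new ObjectMapper();
    // Serializes e.g. "itemType":"Signal" and "validationMode":"Skip".
    String json = mapper.writeValueAsString(input);
    // The @JsonCreator fromValue methods restore the enum constants on the way back in.
    DatafileInputV1 parsed = mapper.readValue(json, DatafileInputV1.class);
    System.out.println(parsed.equals(input)); // true: equals() compares every field
  }
}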



