// NOTE(review): the lines below are artifacts scraped from the Maven repository
// web page, not part of the Java source; commented out so the file compiles.
// All Downloads are FREE. Search and download functionalities are using the official Maven repository.
//
// com.seeq.model.DatafileOutputV1 Maven / Gradle / Ivy
//
// There is a newer version: 66.0.0-v202407310200
// Show newest version
/*
 * Seeq REST API
 * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
 *
 * OpenAPI spec version: 64.3.0-v202405012032
 * 
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package com.seeq.model;

import java.util.Objects;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.seeq.model.PermissionsV1;
import com.seeq.model.ScalarPropertyV1;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.ArrayList;
import java.util.List;
/**
 * DatafileOutputV1
 */
public class DatafileOutputV1 {
  /** Additional properties of the item; elements are {@link ScalarPropertyV1}. */
  @JsonProperty("additionalProperties")
  private List<ScalarPropertyV1> additionalProperties = new ArrayList<ScalarPropertyV1>();

  @JsonProperty("append")
  private Boolean append = false;

  @JsonProperty("conditionName")
  private String conditionName = null;

  @JsonProperty("createdAt")
  private String createdAt = null;

  @JsonProperty("dataId")
  private String dataId = null;

  @JsonProperty("datasourceClass")
  private String datasourceClass = null;

  @JsonProperty("datasourceId")
  private String datasourceId = null;

  @JsonProperty("dayFirstDefault")
  private Boolean dayFirstDefault = false;

  @JsonProperty("description")
  private String description = null;

  @JsonProperty("descriptionRow")
  private Long descriptionRow = null;

  @JsonProperty("effectivePermissions")
  private PermissionsV1 effectivePermissions = null;

  @JsonProperty("endColumnIndex")
  private Long endColumnIndex = null;

  @JsonProperty("endColumnName")
  private String endColumnName = null;
  /**
   * The character used as the CSV field delimiter.
   */
  public enum FieldDelimiterEnum {
    COMMA("Comma"),
    SEMICOLON("Semicolon"),
    TAB("Tab");

    /** Serialized wire value; immutable once constructed. */
    private final String value;

    FieldDelimiterEnum(String value) {
      this.value = value;
    }

    /** @return the value used in JSON serialization */
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }

    /**
     * Maps a serialized string onto the matching constant.
     *
     * @param input the serialized value, e.g. "Comma"
     * @return the matching constant, or null when nothing matches (lenient deserialization)
     */
    @JsonCreator
    public static FieldDelimiterEnum fromValue(String input) {
      for (FieldDelimiterEnum b : FieldDelimiterEnum.values()) {
        if (b.value.equals(input)) {
          return b;
        }
      }
      return null;
    }
  }

  @JsonProperty("fieldDelimiter")
  private FieldDelimiterEnum fieldDelimiter = null;

  // CSV source location, parse-start and interpolation configuration (JSON-mapped via Jackson).
  @JsonProperty("filename")
  private String filename = null;

  @JsonProperty("firstDataRow")
  private Long firstDataRow = null;

  @JsonProperty("id")
  private String id = null;

  @JsonProperty("interpolationMethod")
  private String interpolationMethod = null;

  @JsonProperty("interpolationMethodRow")
  private Long interpolationMethodRow = null;

  @JsonProperty("isArchived")
  private Boolean isArchived = false;

  @JsonProperty("isRedacted")
  private Boolean isRedacted = false;

  /**
   * The type of item to be imported from the CSV file. Supported types include signal and condition.
   */
  public enum ItemTypeEnum {
    SIGNAL("Signal"),
    CONDITION("Condition"),
    SUPPORTEDITEMTYPE("SupportedItemType");

    private String value;

    ItemTypeEnum(String value) {
      this.value = value;
    }

    /** The serialized representation used on the wire. */
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }

    /** Resolves {@code input} to a constant; yields null when nothing matches. */
    @JsonCreator
    public static ItemTypeEnum fromValue(String input) {
      for (ItemTypeEnum candidate : values()) {
        if (candidate.value.equals(input)) {
          return candidate;
        }
      }
      return null;
    }
  }

  @JsonProperty("itemType")
  private ItemTypeEnum itemType = null;

  // Key (timestamp / capsule-start) column selection; a non-blank name takes precedence over the index.
  @JsonProperty("keyColumnIndex")
  private Long keyColumnIndex = null;

  @JsonProperty("keyColumnName")
  private String keyColumnName = null;

  /**
   * The format of the sample timestamps for signals or the format of the capsule start and end times for a condition.
   */
  public enum KeyFormatEnum {
    ISO8601("ISO8601"),
    MONTH_DAY_YEAR_24HRCLOCK("MONTH_DAY_YEAR_24HRCLOCK"),
    MONTH_DAY_YEAR_12HRCLOCK("MONTH_DAY_YEAR_12HRCLOCK"),
    UNIX_EPOCH_SECONDS("UNIX_EPOCH_SECONDS"),
    KEYFORMATTYPE("KeyFormatType");

    private String value;

    KeyFormatEnum(String value) {
      this.value = value;
    }

    /** The serialized representation used on the wire. */
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }

    /** Resolves {@code input} to a constant; yields null when nothing matches. */
    @JsonCreator
    public static KeyFormatEnum fromValue(String input) {
      for (KeyFormatEnum candidate : values()) {
        if (candidate.value.equals(input)) {
          return candidate;
        }
      }
      return null;
    }
  }

  @JsonProperty("keyFormat")
  private KeyFormatEnum keyFormat = null;

  // Timestamp handling, naming, scope and audit metadata fields (JSON-mapped via Jackson).
  @JsonProperty("lenientDaylightSavings")
  private Boolean lenientDaylightSavings = false;

  @JsonProperty("maximumDuration")
  private String maximumDuration = null;

  @JsonProperty("maximumInterpolation")
  private String maximumInterpolation = null;

  @JsonProperty("maximumInterpolationRow")
  private Long maximumInterpolationRow = null;

  @JsonProperty("name")
  private String name = null;

  @JsonProperty("namePrefix")
  private String namePrefix = null;

  @JsonProperty("nameRow")
  private Long nameRow = null;

  @JsonProperty("nameSuffix")
  private String nameSuffix = null;

  @JsonProperty("scopedTo")
  private String scopedTo = null;

  @JsonProperty("statusMessage")
  private String statusMessage = null;

  @JsonProperty("timeZone")
  private String timeZone = null;

  @JsonProperty("translationKey")
  private String translationKey = null;

  @JsonProperty("type")
  private String type = null;

  @JsonProperty("updatedAt")
  private String updatedAt = null;

  /**
   * The approach used when CSV data cannot be parsed: Fail (default) aborts the
   * import with error messages; Skip drops the affected sample/capsule (or, for a
   * capsule-property cell, just that property); Invalid records the value INVALID
   * for sample values and capsule properties, and drops the row for key cells.
   */
  public enum ValidationModeEnum {
    FAIL("Fail"),
    SKIP("Skip"),
    INVALID("Invalid");

    private String value;

    ValidationModeEnum(String value) {
      this.value = value;
    }

    /** The serialized representation used on the wire. */
    @JsonValue
    public String getValue() {
      return value;
    }

    @Override
    public String toString() {
      return String.valueOf(value);
    }

    /** Resolves {@code input} to a constant; yields null when nothing matches. */
    @JsonCreator
    public static ValidationModeEnum fromValue(String input) {
      for (ValidationModeEnum candidate : values()) {
        if (candidate.value.equals(input)) {
          return candidate;
        }
      }
      return null;
    }
  }

  @JsonProperty("validationMode")
  private ValidationModeEnum validationMode = null;

  // Value-column selection and unit-of-measure configuration.
  @JsonProperty("valueColumnIndices")
  private String valueColumnIndices = null;

  @JsonProperty("valueColumnNames")
  private String valueColumnNames = null;

  @JsonProperty("valueUom")
  private String valueUom = null;

  @JsonProperty("valueUomRow")
  private Long valueUomRow = null;

  /**
   * Fluent setter for the additional-properties list; returns this for call chaining.
   *
   * @param additionalProperties additional properties of the item
   * @return this
   */
  public DatafileOutputV1 additionalProperties(List<ScalarPropertyV1> additionalProperties) {
    this.additionalProperties = additionalProperties;
    return this;
  }

  /**
   * Appends one property, lazily creating the backing list if it is null.
   *
   * @param additionalPropertiesItem the property to append
   * @return this
   */
  public DatafileOutputV1 addAdditionalPropertiesItem(ScalarPropertyV1 additionalPropertiesItem) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new ArrayList<ScalarPropertyV1>();
    }
    this.additionalProperties.add(additionalPropertiesItem);
    return this;
  }

   /**
   * Additional properties of the item
   * @return additionalProperties
  **/
  @Schema(description = "Additional properties of the item")
  public List<ScalarPropertyV1> getAdditionalProperties() {
    return additionalProperties;
  }

  /** @param additionalProperties additional properties of the item */
  public void setAdditionalProperties(List<ScalarPropertyV1> additionalProperties) {
    this.additionalProperties = additionalProperties;
  }

  /** Fluent setter for {@code append}; returns this for call chaining. */
  public DatafileOutputV1 append(Boolean append) {
    this.append = append;
    return this;
  }

   /**
   * If true, append the data in this CSV file to the signals or condition.
   * @return append
  **/
  @Schema(description = "If true, append the data in this CSV file to the signals or condition.")
  public Boolean getAppend() {
    return append;
  }

  /** @param append see {@link #getAppend()} */
  public void setAppend(Boolean append) {
    this.append = append;
  }

  /** Fluent setter for {@code conditionName}; returns this for call chaining. */
  public DatafileOutputV1 conditionName(String conditionName) {
    this.conditionName = conditionName;
    return this;
  }

   /**
   * The name to use for the condition being imported. If this datafile already has a condition by this name, the import will modify the already existing condition rather than creating another condition with the same name. This setting is ignored when importing signals.
   * @return conditionName
  **/
  @Schema(description = "The name to use for the condition being imported. If this datafile already has a condition by this name, the import will modify the already existing condition rather than creating another condition with the same name. This setting is ignored when importing signals.")
  public String getConditionName() {
    return conditionName;
  }

  /** @param conditionName see {@link #getConditionName()} */
  public void setConditionName(String conditionName) {
    this.conditionName = conditionName;
  }

  /** Fluent setter for {@code createdAt}; returns this for call chaining. */
  public DatafileOutputV1 createdAt(String createdAt) {
    this.createdAt = createdAt;
    return this;
  }

   /**
   * The ISO 8601 date of when the datafile was created (YYYY-MM-DDThh:mm:ss.sssssssss±hh:mm)
   * @return createdAt
  **/
  @Schema(description = "The ISO 8601 date of when the datafile was created (YYYY-MM-DDThh:mm:ss.sssssssss±hh:mm)")
  public String getCreatedAt() {
    return createdAt;
  }

  /** @param createdAt see {@link #getCreatedAt()} */
  public void setCreatedAt(String createdAt) {
    this.createdAt = createdAt;
  }

  /** Fluent setter for {@code dataId}; returns this for call chaining. */
  public DatafileOutputV1 dataId(String dataId) {
    this.dataId = dataId;
    return this;
  }

   /**
   * The data ID of this asset. Note: This is not the Seeq ID, but the unique identifier that the remote datasource uses.
   * @return dataId
  **/
  @Schema(description = "The data ID of this asset. Note: This is not the Seeq ID, but the unique identifier that the remote datasource uses.")
  public String getDataId() {
    return dataId;
  }

  /** @param dataId see {@link #getDataId()} */
  public void setDataId(String dataId) {
    this.dataId = dataId;
  }

  /** Fluent setter for {@code datasourceClass}; returns this for call chaining. */
  public DatafileOutputV1 datasourceClass(String datasourceClass) {
    this.datasourceClass = datasourceClass;
    return this;
  }

   /**
   * The datasource class, which is the type of system holding the item, such as OSIsoft PI
   * @return datasourceClass
  **/
  @Schema(description = "The datasource class, which is the type of system holding the item, such as OSIsoft PI")
  public String getDatasourceClass() {
    return datasourceClass;
  }

  /** @param datasourceClass see {@link #getDatasourceClass()} */
  public void setDatasourceClass(String datasourceClass) {
    this.datasourceClass = datasourceClass;
  }

  /** Fluent setter for {@code datasourceId}; returns this for call chaining. */
  public DatafileOutputV1 datasourceId(String datasourceId) {
    this.datasourceId = datasourceId;
    return this;
  }

   /**
   * The datasource identifier, which is how the datasource holding this item identifies itself
   * @return datasourceId
  **/
  @Schema(description = "The datasource identifier, which is how the datasource holding this item identifies itself")
  public String getDatasourceId() {
    return datasourceId;
  }

  /** @param datasourceId see {@link #getDatasourceId()} */
  public void setDatasourceId(String datasourceId) {
    this.datasourceId = datasourceId;
  }

  /** Fluent setter for {@code dayFirstDefault}; returns this for call chaining. */
  public DatafileOutputV1 dayFirstDefault(Boolean dayFirstDefault) {
    this.dayFirstDefault = dayFirstDefault;
    return this;
  }

   /**
   * If true, assume day first dates when ambiguous. If false (default), assume month first dates when ambiguous. For example, 07/01/16 is ambiguous and could be a day first or month first date. This setting is only used when there is not enough information in the column to distinguish month first from day first dates.
   * @return dayFirstDefault
  **/
  @Schema(description = "If true, assume day first dates when ambiguous. If false (default), assume month first dates when ambiguous. For example, 07/01/16 is ambiguous and could be a day first or month first date. This setting is only used when there is not enough information in the column to distinguish month first from day first dates.")
  public Boolean getDayFirstDefault() {
    return dayFirstDefault;
  }

  /** @param dayFirstDefault see {@link #getDayFirstDefault()} */
  public void setDayFirstDefault(Boolean dayFirstDefault) {
    this.dayFirstDefault = dayFirstDefault;
  }

  /** Fluent setter for {@code description}; returns this for call chaining. */
  public DatafileOutputV1 description(String description) {
    this.description = description;
    return this;
  }

   /**
   * Clarifying information or other plain language description of this item
   * @return description
  **/
  @Schema(description = "Clarifying information or other plain language description of this item")
  public String getDescription() {
    return description;
  }

  /** @param description see {@link #getDescription()} */
  public void setDescription(String description) {
    this.description = description;
  }

  /** Fluent setter for {@code descriptionRow}; returns this for call chaining. */
  public DatafileOutputV1 descriptionRow(Long descriptionRow) {
    this.descriptionRow = descriptionRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the description for each signal. A setting of 0 indicates that there is no description header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return descriptionRow
  **/
  @Schema(description = "Integer that identifies the row containing the description for each signal. A setting of 0 indicates that there is no description header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Long getDescriptionRow() {
    return descriptionRow;
  }

  /** @param descriptionRow see {@link #getDescriptionRow()} */
  public void setDescriptionRow(Long descriptionRow) {
    this.descriptionRow = descriptionRow;
  }

  /** Fluent setter for {@code effectivePermissions}; returns this for call chaining. */
  public DatafileOutputV1 effectivePermissions(PermissionsV1 effectivePermissions) {
    this.effectivePermissions = effectivePermissions;
    return this;
  }

   /**
   * Get effectivePermissions
   * @return effectivePermissions
  **/
  @Schema(description = "")
  public PermissionsV1 getEffectivePermissions() {
    return effectivePermissions;
  }

  /** @param effectivePermissions see {@link #getEffectivePermissions()} */
  public void setEffectivePermissions(PermissionsV1 effectivePermissions) {
    this.effectivePermissions = effectivePermissions;
  }

  /** Fluent setter for {@code endColumnIndex}; returns this for call chaining. */
  public DatafileOutputV1 endColumnIndex(Long endColumnIndex) {
    this.endColumnIndex = endColumnIndex;
    return this;
  }

   /**
   * Integer that identifies the column containing the capsule end key for the condition. If importing a signal, this setting is ignored.
   * @return endColumnIndex
  **/
  @Schema(description = "Integer that identifies the column containing the capsule end key for the condition. If importing a signal, this setting is ignored.")
  public Long getEndColumnIndex() {
    return endColumnIndex;
  }

  /** @param endColumnIndex see {@link #getEndColumnIndex()} */
  public void setEndColumnIndex(Long endColumnIndex) {
    this.endColumnIndex = endColumnIndex;
  }

  /** Fluent setter for {@code endColumnName}; returns this for call chaining. */
  public DatafileOutputV1 endColumnName(String endColumnName) {
    this.endColumnName = endColumnName;
    return this;
  }

   /**
   * The name of the column containing the capsule end key for the condition. If not specified or whitespace, the endColumnIndex will be used.
   * @return endColumnName
  **/
  @Schema(description = "The name of the column containing the capsule end key for the condition. If not specified or whitespace, the endColumnIndex will be used.")
  public String getEndColumnName() {
    return endColumnName;
  }

  /** @param endColumnName see {@link #getEndColumnName()} */
  public void setEndColumnName(String endColumnName) {
    this.endColumnName = endColumnName;
  }

  /** Fluent setter for {@code fieldDelimiter}; returns this for call chaining. */
  public DatafileOutputV1 fieldDelimiter(FieldDelimiterEnum fieldDelimiter) {
    this.fieldDelimiter = fieldDelimiter;
    return this;
  }

   /**
   * The character used as the CSV field delimiter.
   * @return fieldDelimiter
  **/
  @Schema(description = "The character used as the CSV field delimiter.")
  public FieldDelimiterEnum getFieldDelimiter() {
    return fieldDelimiter;
  }

  /** @param fieldDelimiter see {@link #getFieldDelimiter()} */
  public void setFieldDelimiter(FieldDelimiterEnum fieldDelimiter) {
    this.fieldDelimiter = fieldDelimiter;
  }

  /** Fluent setter for {@code filename}; returns this for call chaining. */
  public DatafileOutputV1 filename(String filename) {
    this.filename = filename;
    return this;
  }

   /**
   * The name and path of the CSV file used to generate the content
   * @return filename
  **/
  @Schema(description = "The name and path of the CSV file used to generate the content")
  public String getFilename() {
    return filename;
  }

  /** @param filename see {@link #getFilename()} */
  public void setFilename(String filename) {
    this.filename = filename;
  }

  /** Fluent setter for {@code firstDataRow}; returns this for call chaining. */
  public DatafileOutputV1 firstDataRow(Long firstDataRow) {
    this.firstDataRow = firstDataRow;
    return this;
  }

   /**
   * Integer that identifies the row at which to start reading the data. (Row 1 is the first row of the file.)
   * @return firstDataRow
  **/
  @Schema(description = "Integer that identifies the row at which to start reading the data. (Row 1 is the first row of the file.)")
  public Long getFirstDataRow() {
    return firstDataRow;
  }

  /** @param firstDataRow see {@link #getFirstDataRow()} */
  public void setFirstDataRow(Long firstDataRow) {
    this.firstDataRow = firstDataRow;
  }

  /** Fluent setter for {@code id}; returns this for call chaining. */
  public DatafileOutputV1 id(String id) {
    this.id = id;
    return this;
  }

   /**
   * The ID that can be used to interact with the item
   * @return id
  **/
  @Schema(required = true, description = "The ID that can be used to interact with the item")
  public String getId() {
    return id;
  }

  /** @param id see {@link #getId()} */
  public void setId(String id) {
    this.id = id;
  }

  /** Fluent setter for {@code interpolationMethod}; returns this for call chaining. */
  public DatafileOutputV1 interpolationMethod(String interpolationMethod) {
    this.interpolationMethod = interpolationMethod;
    return this;
  }

   /**
   * The interpolation method used to represent the values between samples in the signal. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.
   * @return interpolationMethod
  **/
  @Schema(description = "The interpolation method used to represent the values between samples in the signal. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.")
  public String getInterpolationMethod() {
    return interpolationMethod;
  }

  /** @param interpolationMethod see {@link #getInterpolationMethod()} */
  public void setInterpolationMethod(String interpolationMethod) {
    this.interpolationMethod = interpolationMethod;
  }

  /** Fluent setter for {@code interpolationMethodRow}; returns this for call chaining. */
  public DatafileOutputV1 interpolationMethodRow(Long interpolationMethodRow) {
    this.interpolationMethodRow = interpolationMethodRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the interpolation method for each signal. A setting of 0 indicates that there is no interpolation method header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return interpolationMethodRow
  **/
  @Schema(description = "Integer that identifies the row containing the interpolation method for each signal. A setting of 0 indicates that there is no interpolation method header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Long getInterpolationMethodRow() {
    return interpolationMethodRow;
  }

  /** @param interpolationMethodRow see {@link #getInterpolationMethodRow()} */
  public void setInterpolationMethodRow(Long interpolationMethodRow) {
    this.interpolationMethodRow = interpolationMethodRow;
  }

  /** Fluent setter for {@code isArchived}; returns this for call chaining. */
  public DatafileOutputV1 isArchived(Boolean isArchived) {
    this.isArchived = isArchived;
    return this;
  }

   /**
   * Whether item is archived
   * @return isArchived
  **/
  @Schema(description = "Whether item is archived")
  public Boolean getIsArchived() {
    return isArchived;
  }

  /** @param isArchived see {@link #getIsArchived()} */
  public void setIsArchived(Boolean isArchived) {
    this.isArchived = isArchived;
  }

  /** Fluent setter for {@code isRedacted}; returns this for call chaining. */
  public DatafileOutputV1 isRedacted(Boolean isRedacted) {
    this.isRedacted = isRedacted;
    return this;
  }

   /**
   * Whether item is redacted
   * @return isRedacted
  **/
  @Schema(description = "Whether item is redacted")
  public Boolean getIsRedacted() {
    return isRedacted;
  }

  /** @param isRedacted see {@link #getIsRedacted()} */
  public void setIsRedacted(Boolean isRedacted) {
    this.isRedacted = isRedacted;
  }

  /** Fluent setter for {@code itemType}; returns this for call chaining. */
  public DatafileOutputV1 itemType(ItemTypeEnum itemType) {
    this.itemType = itemType;
    return this;
  }

   /**
   * The type of item to be imported from the CSV file. Supported types include signal and condition.
   * @return itemType
  **/
  @Schema(description = "The type of item to be imported from the CSV file. Supported types include signal and condition.")
  public ItemTypeEnum getItemType() {
    return itemType;
  }

  /** @param itemType see {@link #getItemType()} */
  public void setItemType(ItemTypeEnum itemType) {
    this.itemType = itemType;
  }

  /** Fluent setter for {@code keyColumnIndex}; returns this for call chaining. */
  public DatafileOutputV1 keyColumnIndex(Long keyColumnIndex) {
    this.keyColumnIndex = keyColumnIndex;
    return this;
  }

   /**
   * Integer that identifies the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. Column 1 is the first column of the file.
   * @return keyColumnIndex
  **/
  @Schema(description = "Integer that identifies the column containing the sample timestamps for the signal(s) or the column containing the capsule start key for the condition. Column 1 is the first column of the file.")
  public Long getKeyColumnIndex() {
    return keyColumnIndex;
  }

  /** @param keyColumnIndex see {@link #getKeyColumnIndex()} */
  public void setKeyColumnIndex(Long keyColumnIndex) {
    this.keyColumnIndex = keyColumnIndex;
  }

  /** Fluent setter for {@code keyColumnName}; returns this for call chaining. */
  public DatafileOutputV1 keyColumnName(String keyColumnName) {
    this.keyColumnName = keyColumnName;
    return this;
  }

   /**
   * The name of the column containing the signal timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified or whitespace, the keyColumnIndex will be used.
   * @return keyColumnName
  **/
  @Schema(description = "The name of the column containing the signal timestamps for the signal(s) or the column containing the capsule start key for the condition. If not specified or whitespace, the keyColumnIndex will be used.")
  public String getKeyColumnName() {
    return keyColumnName;
  }

  /** @param keyColumnName see {@link #getKeyColumnName()} */
  public void setKeyColumnName(String keyColumnName) {
    this.keyColumnName = keyColumnName;
  }

  /** Fluent setter for {@code keyFormat}; returns this for call chaining. */
  public DatafileOutputV1 keyFormat(KeyFormatEnum keyFormat) {
    this.keyFormat = keyFormat;
    return this;
  }

   /**
   * The format of the sample timestamps for signals or the format of the capsule start and end times for a condition.
   * @return keyFormat
  **/
  @Schema(description = "The format of the sample timestamps for signals or the format of the capsule start and end times for a condition.")
  public KeyFormatEnum getKeyFormat() {
    return keyFormat;
  }

  /** @param keyFormat see {@link #getKeyFormat()} */
  public void setKeyFormat(KeyFormatEnum keyFormat) {
    this.keyFormat = keyFormat;
  }

  /** Fluent setter for {@code lenientDaylightSavings}; returns this for call chaining. */
  public DatafileOutputV1 lenientDaylightSavings(Boolean lenientDaylightSavings) {
    this.lenientDaylightSavings = lenientDaylightSavings;
    return this;
  }

   /**
   * If true, hours are allowed that don't exist due to the spring forward daylight savings transition. They are interpreted as occurring in the following hour. The true setting should not be needed if the data was logged appropriately for its time zone. If false (default), data in hours that don't exist will cause the import to fail.
   * @return lenientDaylightSavings
  **/
  @Schema(description = "If true, hours are allowed that don't exist due to the spring forward daylight savings transition. They are interpreted as occurring in the following hour. The true setting should not be needed if the data was logged appropriately for its time zone. If false (default), data in hours that don't exist will cause the import to fail.")
  public Boolean getLenientDaylightSavings() {
    return lenientDaylightSavings;
  }

  /** @param lenientDaylightSavings see {@link #getLenientDaylightSavings()} */
  public void setLenientDaylightSavings(Boolean lenientDaylightSavings) {
    this.lenientDaylightSavings = lenientDaylightSavings;
  }

  /** Fluent setter for {@code maximumDuration}; returns this for call chaining. */
  public DatafileOutputV1 maximumDuration(String maximumDuration) {
    this.maximumDuration = maximumDuration;
    return this;
  }

   /**
   * The maximum duration of the capsules in the condition. Capsules greater than this duration will be imported but will not be returned when data from the condition is requested. If importing a signal, this setting is ignored.
   * @return maximumDuration
  **/
  @Schema(description = "The maximum duration of the capsules in the condition. Capsules greater than this duration will be imported but will not be returned when data from the condition is requested. If importing a signal, this setting is ignored.")
  public String getMaximumDuration() {
    return maximumDuration;
  }

  /** @param maximumDuration see {@link #getMaximumDuration()} */
  public void setMaximumDuration(String maximumDuration) {
    this.maximumDuration = maximumDuration;
  }

  /** Fluent setter for {@code maximumInterpolation}; returns this for call chaining. */
  public DatafileOutputV1 maximumInterpolation(String maximumInterpolation) {
    this.maximumInterpolation = maximumInterpolation;
    return this;
  }

   /**
   * The maximum spacing between adjacent sample keys that can be interpolated across. If two samples are spaced by more than maximum interpolation, there will be a hole in the signal between them. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.
   * @return maximumInterpolation
  **/
  @Schema(description = "The maximum spacing between adjacent sample keys that can be interpolated across. If two samples are spaced by more than maximum interpolation, there will be a hole in the signal between them. If a maximum interpolation row is specified, the information in that row overrides this setting. If importing a condition, this setting is ignored.")
  public String getMaximumInterpolation() {
    return maximumInterpolation;
  }

  /** @param maximumInterpolation see {@link #getMaximumInterpolation()} */
  public void setMaximumInterpolation(String maximumInterpolation) {
    this.maximumInterpolation = maximumInterpolation;
  }

  /** Fluent setter for {@code maximumInterpolationRow}; returns this for call chaining. */
  public DatafileOutputV1 maximumInterpolationRow(Long maximumInterpolationRow) {
    this.maximumInterpolationRow = maximumInterpolationRow;
    return this;
  }

   /**
   * Integer that identifies the row containing the maximum interpolation for each signal. A setting of 0 indicates that there is no maximum interpolation header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.
   * @return maximumInterpolationRow
  **/
  @Schema(description = "Integer that identifies the row containing the maximum interpolation for each signal. A setting of 0 indicates that there is no maximum interpolation header row. (Row 1 is the first row of the file.) If importing a condition, the content of this row is ignored.")
  public Long getMaximumInterpolationRow() {
    return maximumInterpolationRow;
  }

  /** @param maximumInterpolationRow see {@link #getMaximumInterpolationRow()} */
  public void setMaximumInterpolationRow(Long maximumInterpolationRow) {
    this.maximumInterpolationRow = maximumInterpolationRow;
  }

  public DatafileOutputV1 name(String name) {
    this.name = name;
    return this;
  }

   /**
   * The human readable name
   * @return name
  **/
  @Schema(required = true, description = "The human readable name")
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public DatafileOutputV1 namePrefix(String namePrefix) {
    this.namePrefix = namePrefix;
    return this;
  }

   /**
   * Prefix prepended to the name of each signal when importing signal(s) and prepended to the name of each capsule property when importing a condition.
   * @return namePrefix
  **/
  @Schema(description = "Prefix prepended to the name of each signal when importing signal(s) and prepended to the name of each capsule property when importing a condition.")
  public String getNamePrefix() {
    return namePrefix;
  }

  public void setNamePrefix(String namePrefix) {
    this.namePrefix = namePrefix;
  }

  public DatafileOutputV1 nameRow(Long nameRow) {
    this.nameRow = nameRow;
    return this;
  }

   /**
   * Integer that identifies the header row used to name the signal(s) when importing signal(s) and used to name the capsule properties when importing a condition. A setting of 0 indicates that there is no name header row. (Row 1 is the first row of the file.)
   * @return nameRow
  **/
  @Schema(description = "Integer that identifies the header row used to name the signal(s) when importing signal(s) and used to name the capsule properties when importing a condition. A setting of 0 indicates that there is no name header row. (Row 1 is the first row of the file.)")
  public Long getNameRow() {
    return nameRow;
  }

  public void setNameRow(Long nameRow) {
    this.nameRow = nameRow;
  }

  public DatafileOutputV1 nameSuffix(String nameSuffix) {
    this.nameSuffix = nameSuffix;
    return this;
  }

   /**
   * Suffix appended to the name of each signal when importing signal(s) and appended to the name of each capsule property when importing a condition.
   * @return nameSuffix
  **/
  @Schema(description = "Suffix appended to the name of each signal when importing signal(s) and appended to the name of each capsule property when importing a condition.")
  public String getNameSuffix() {
    return nameSuffix;
  }

  public void setNameSuffix(String nameSuffix) {
    this.nameSuffix = nameSuffix;
  }

  public DatafileOutputV1 scopedTo(String scopedTo) {
    this.scopedTo = scopedTo;
    return this;
  }

   /**
   * The ID of the workbook to which this item is scoped or null if it is in the global scope.
   * @return scopedTo
  **/
  @Schema(description = "The ID of the workbook to which this item is scoped or null if it is in the global scope.")
  public String getScopedTo() {
    return scopedTo;
  }

  public void setScopedTo(String scopedTo) {
    this.scopedTo = scopedTo;
  }

  public DatafileOutputV1 statusMessage(String statusMessage) {
    this.statusMessage = statusMessage;
    return this;
  }

   /**
   * A plain language status message with information about any issues that may have been encountered during an operation
   * @return statusMessage
  **/
  @Schema(description = "A plain language status message with information about any issues that may have been encountered during an operation")
  public String getStatusMessage() {
    return statusMessage;
  }

  public void setStatusMessage(String statusMessage) {
    this.statusMessage = statusMessage;
  }

  public DatafileOutputV1 timeZone(String timeZone) {
    this.timeZone = timeZone;
    return this;
  }

   /**
   * If the timestamps (key for signals, start/end for a condition) contain no time zone information, they will be interpreted as being in this time zone. 
   * @return timeZone
  **/
  @Schema(description = "If the timestamps (key for signals, start/end for a condition) contain no time zone information, they will be interpreted as being in this time zone. ")
  public String getTimeZone() {
    return timeZone;
  }

  public void setTimeZone(String timeZone) {
    this.timeZone = timeZone;
  }

  public DatafileOutputV1 translationKey(String translationKey) {
    this.translationKey = translationKey;
    return this;
  }

   /**
   * The item's translation key, if any
   * @return translationKey
  **/
  @Schema(description = "The item's translation key, if any")
  public String getTranslationKey() {
    return translationKey;
  }

  public void setTranslationKey(String translationKey) {
    this.translationKey = translationKey;
  }

  /** Fluent setter for {@code type}; returns this instance for chaining. */
  public DatafileOutputV1 type(String type) {
    setType(type);
    return this;
  }

  /**
   * The type of the item. Marked required in the API schema.
   *
   * @return the item type
   */
  @Schema(required = true, description = "The type of the item")
  public String getType() {
    return this.type;
  }

  public void setType(String type) {
    this.type = type;
  }

  /** Fluent setter for {@code updatedAt}; returns this instance for chaining. */
  public DatafileOutputV1 updatedAt(String updatedAt) {
    setUpdatedAt(updatedAt);
    return this;
  }

  /**
   * The ISO 8601 date of when the datafile was updated
   * (YYYY-MM-DDThh:mm:ss.sssssssss&#177;hh:mm). Stored as a string, not parsed.
   *
   * @return the update timestamp, or {@code null} if none was set
   */
  @Schema(description = "The ISO 8601 date of when the datafile was updated (YYYY-MM-DDThh:mm:ss.sssssssss±hh:mm)")
  public String getUpdatedAt() {
    return this.updatedAt;
  }

  public void setUpdatedAt(String updatedAt) {
    this.updatedAt = updatedAt;
  }

  /** Fluent setter for {@code validationMode}; returns this instance for chaining. */
  public DatafileOutputV1 validationMode(ValidationModeEnum validationMode) {
    setValidationMode(validationMode);
    return this;
  }

  /**
   * The approach used when CSV data cannot be parsed: Fail (default) aborts the import
   * with error messages, Skip drops the affected sample/capsule/property, and Invalid
   * records the value INVALID where possible. See the schema description for full details.
   *
   * @return the validation mode, or {@code null} if none was set
   */
  @Schema(description = "The approach to use when CSV data cannot be parsed. If Fail (default), then cells that cannot be parsed will cause the import to fail with error messages.If Skip, those cells will be skipped meaning that no sample will be created for signals from that row of the file. For conditions, if it is the start or end cell, no capsule will be created from that row. If the cell is a capsule property, the capsule is still created but without that capsule property. If Invalid and the cell is a sample key or capsule start/end, no sample or capsule is created from that row of the file. If the cell is a sample value or capsule property, the sample or capsule property is created with the value INVALID.")
  public ValidationModeEnum getValidationMode() {
    return this.validationMode;
  }

  public void setValidationMode(ValidationModeEnum validationMode) {
    this.validationMode = validationMode;
  }

  /** Fluent setter for {@code valueColumnIndices}; returns this instance for chaining. */
  public DatafileOutputV1 valueColumnIndices(String valueColumnIndices) {
    setValueColumnIndices(valueColumnIndices);
    return this;
  }

  /**
   * Comma-separated list of 1-based column indices/ranges (e.g. {@code "2, 5-7, 10"})
   * selecting the value columns to import; {@code "0"} alone means no capsule-property
   * columns. See the schema description for the full selection rules.
   *
   * @return the column index specification, or {@code null} if none was set
   */
  @Schema(description = "List of integers identifying columns. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. Valid formats are a comma separated list of 'N' or 'N-M' where N and M are integers greater than zero and M >= N. Example: '2, 5-7, 10, 12-14'. The first column of the file is column 1. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If neither valueColumnNames nor valueColumnIndices are specified, all columns other than the key/start/end column will result in signals when importing signals and will result in capsule properties when importing a condition. An entry of 0 alone indicates that no columns should be imported as capsule properties. Any column is only imported once no matter how many times it is listed.")
  public String getValueColumnIndices() {
    return this.valueColumnIndices;
  }

  public void setValueColumnIndices(String valueColumnIndices) {
    this.valueColumnIndices = valueColumnIndices;
  }

  /** Fluent setter for {@code valueColumnNames}; returns this instance for chaining. */
  public DatafileOutputV1 valueColumnNames(String valueColumnNames) {
    setValueColumnNames(valueColumnNames);
    return this;
  }

  /**
   * Comma-separated, case-sensitive column names selecting the value columns to import.
   * When specified, this takes precedence over {@code valueColumnIndices}.
   *
   * @return the column name specification, or {@code null} if none was set
   */
  @Schema(description = "List of comma separated case sensitive names of the columns. When importing signals, these columns will be combined with the key column to create signals. When importing a condition, these columns will become the capsule properties. If the column(s) representing a signal key or condition start/end is included in the list, it will be ignored. If not specified, valueColumnIndices will be used. If specified, valueColumnIndices will be ignored. Any column is only imported once no matter how many times it is listed.")
  public String getValueColumnNames() {
    return this.valueColumnNames;
  }

  public void setValueColumnNames(String valueColumnNames) {
    this.valueColumnNames = valueColumnNames;
  }

  /** Fluent setter for {@code valueUom}; returns this instance for chaining. */
  public DatafileOutputV1 valueUom(String valueUom) {
    setValueUom(valueUom);
    return this;
  }

  /**
   * Default unit of measure applied to every imported signal / capsule property.
   * A unit-of-measure header row, if present, overrides this value.
   *
   * @return the unit of measure, or {@code null} if none was set (treated as unitless)
   */
  @Schema(description = "The unit of measure to be used for every signal when importing signals and for every capsule property when importing a condition. If not specified, defaults to unitless. If a unit of measure row is specified, the information in that row overrides this setting.")
  public String getValueUom() {
    return this.valueUom;
  }

  public void setValueUom(String valueUom) {
    this.valueUom = valueUom;
  }

  /** Fluent setter for {@code valueUomRow}; returns this instance for chaining. */
  public DatafileOutputV1 valueUomRow(Long valueUomRow) {
    setValueUomRow(valueUomRow);
    return this;
  }

  /**
   * 1-based index of the row holding the unit of measure for each column;
   * 0 means there is no unit-of-measure header row.
   *
   * @return the unit-of-measure row index, or {@code null} if none was set
   */
  @Schema(description = "Integer that identifies the row containing the unit of measure for each signal when importing signal(s) or for each capsule property when importing a condition. A setting of 0 indicates that there is no unit of measure header row. (Row 1 is the first row of the file.)")
  public Long getValueUomRow() {
    return this.valueUomRow;
  }

  public void setValueUomRow(Long valueUomRow) {
    this.valueUomRow = valueUomRow;
  }


  @Override
  public boolean equals(java.lang.Object o) {
    // Fast path: same reference.
    if (this == o) {
      return true;
    }
    // Strict runtime-class comparison (not instanceof) — subclasses never compare equal.
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    DatafileOutputV1 that = (DatafileOutputV1) o;
    // Two instances are equal iff every model property matches (null-safe).
    return Objects.equals(additionalProperties, that.additionalProperties)
        && Objects.equals(append, that.append)
        && Objects.equals(conditionName, that.conditionName)
        && Objects.equals(createdAt, that.createdAt)
        && Objects.equals(dataId, that.dataId)
        && Objects.equals(datasourceClass, that.datasourceClass)
        && Objects.equals(datasourceId, that.datasourceId)
        && Objects.equals(dayFirstDefault, that.dayFirstDefault)
        && Objects.equals(description, that.description)
        && Objects.equals(descriptionRow, that.descriptionRow)
        && Objects.equals(effectivePermissions, that.effectivePermissions)
        && Objects.equals(endColumnIndex, that.endColumnIndex)
        && Objects.equals(endColumnName, that.endColumnName)
        && Objects.equals(fieldDelimiter, that.fieldDelimiter)
        && Objects.equals(filename, that.filename)
        && Objects.equals(firstDataRow, that.firstDataRow)
        && Objects.equals(id, that.id)
        && Objects.equals(interpolationMethod, that.interpolationMethod)
        && Objects.equals(interpolationMethodRow, that.interpolationMethodRow)
        && Objects.equals(isArchived, that.isArchived)
        && Objects.equals(isRedacted, that.isRedacted)
        && Objects.equals(itemType, that.itemType)
        && Objects.equals(keyColumnIndex, that.keyColumnIndex)
        && Objects.equals(keyColumnName, that.keyColumnName)
        && Objects.equals(keyFormat, that.keyFormat)
        && Objects.equals(lenientDaylightSavings, that.lenientDaylightSavings)
        && Objects.equals(maximumDuration, that.maximumDuration)
        && Objects.equals(maximumInterpolation, that.maximumInterpolation)
        && Objects.equals(maximumInterpolationRow, that.maximumInterpolationRow)
        && Objects.equals(name, that.name)
        && Objects.equals(namePrefix, that.namePrefix)
        && Objects.equals(nameRow, that.nameRow)
        && Objects.equals(nameSuffix, that.nameSuffix)
        && Objects.equals(scopedTo, that.scopedTo)
        && Objects.equals(statusMessage, that.statusMessage)
        && Objects.equals(timeZone, that.timeZone)
        && Objects.equals(translationKey, that.translationKey)
        && Objects.equals(type, that.type)
        && Objects.equals(updatedAt, that.updatedAt)
        && Objects.equals(validationMode, that.validationMode)
        && Objects.equals(valueColumnIndices, that.valueColumnIndices)
        && Objects.equals(valueColumnNames, that.valueColumnNames)
        && Objects.equals(valueUom, that.valueUom)
        && Objects.equals(valueUomRow, that.valueUomRow);
  }

  @Override
  public int hashCode() {
    // Arrays.hashCode over the same field order as equals(); identical result to
    // Objects.hash(...), which is itself defined as Arrays.hashCode of its varargs.
    return Arrays.hashCode(new java.lang.Object[] {
        additionalProperties, append, conditionName, createdAt, dataId,
        datasourceClass, datasourceId, dayFirstDefault, description, descriptionRow,
        effectivePermissions, endColumnIndex, endColumnName, fieldDelimiter, filename,
        firstDataRow, id, interpolationMethod, interpolationMethodRow, isArchived,
        isRedacted, itemType, keyColumnIndex, keyColumnName, keyFormat,
        lenientDaylightSavings, maximumDuration, maximumInterpolation,
        maximumInterpolationRow, name, namePrefix, nameRow, nameSuffix, scopedTo,
        statusMessage, timeZone, translationKey, type, updatedAt, validationMode,
        valueColumnIndices, valueColumnNames, valueUom, valueUomRow
    });
  }


  @Override
  public String toString() {
    // One "    name: value" line per model property, in declaration order.
    StringBuilder sb = new StringBuilder("class DatafileOutputV1 {\n");
    appendLine(sb, "additionalProperties", additionalProperties);
    appendLine(sb, "append", append);
    appendLine(sb, "conditionName", conditionName);
    appendLine(sb, "createdAt", createdAt);
    appendLine(sb, "dataId", dataId);
    appendLine(sb, "datasourceClass", datasourceClass);
    appendLine(sb, "datasourceId", datasourceId);
    appendLine(sb, "dayFirstDefault", dayFirstDefault);
    appendLine(sb, "description", description);
    appendLine(sb, "descriptionRow", descriptionRow);
    appendLine(sb, "effectivePermissions", effectivePermissions);
    appendLine(sb, "endColumnIndex", endColumnIndex);
    appendLine(sb, "endColumnName", endColumnName);
    appendLine(sb, "fieldDelimiter", fieldDelimiter);
    appendLine(sb, "filename", filename);
    appendLine(sb, "firstDataRow", firstDataRow);
    appendLine(sb, "id", id);
    appendLine(sb, "interpolationMethod", interpolationMethod);
    appendLine(sb, "interpolationMethodRow", interpolationMethodRow);
    appendLine(sb, "isArchived", isArchived);
    appendLine(sb, "isRedacted", isRedacted);
    appendLine(sb, "itemType", itemType);
    appendLine(sb, "keyColumnIndex", keyColumnIndex);
    appendLine(sb, "keyColumnName", keyColumnName);
    appendLine(sb, "keyFormat", keyFormat);
    appendLine(sb, "lenientDaylightSavings", lenientDaylightSavings);
    appendLine(sb, "maximumDuration", maximumDuration);
    appendLine(sb, "maximumInterpolation", maximumInterpolation);
    appendLine(sb, "maximumInterpolationRow", maximumInterpolationRow);
    appendLine(sb, "name", name);
    appendLine(sb, "namePrefix", namePrefix);
    appendLine(sb, "nameRow", nameRow);
    appendLine(sb, "nameSuffix", nameSuffix);
    appendLine(sb, "scopedTo", scopedTo);
    appendLine(sb, "statusMessage", statusMessage);
    appendLine(sb, "timeZone", timeZone);
    appendLine(sb, "translationKey", translationKey);
    appendLine(sb, "type", type);
    appendLine(sb, "updatedAt", updatedAt);
    appendLine(sb, "validationMode", validationMode);
    appendLine(sb, "valueColumnIndices", valueColumnIndices);
    appendLine(sb, "valueColumnNames", valueColumnNames);
    appendLine(sb, "valueUom", valueUom);
    appendLine(sb, "valueUomRow", valueUomRow);
    return sb.append("}").toString();
  }

  /**
   * Appends one {@code "    label: value\n"} line, indenting multi-line values
   * via {@link #toIndentedString(java.lang.Object)}.
   */
  private void appendLine(StringBuilder sb, String label, java.lang.Object value) {
    sb.append("    ").append(label).append(": ").append(toIndentedString(value)).append("\n");
  }

  /**
   * Renders {@code o} as a string with every line after the first indented by
   * four spaces; {@code null} renders as the literal string "null".
   */
  private String toIndentedString(java.lang.Object o) {
    return (o == null) ? "null" : o.toString().replace("\n", "\n    ");
  }
  
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy