# azure.reference.conf


datasets = "/tmp/datasets"
datasets = ${?COMET_DATASETS}
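# Most settings below follow the same pattern: a default value, then an optional
# override via HOCON's ${?ENV_VAR} substitution, which only takes effect when the
# environment variable is set. For example, exporting
#   COMET_DATASETS=hdfs://namenode:9000/data/datasets    (illustrative value)
# before launching replaces the /tmp/datasets default.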

metadata = "/tmp/metadata"
metadata = ${?COMET_METADATA}

tmpdir = "/tmp/comet_tmp"
tmpdir = ${?COMET_TMPDIR}

archive = true
archive = ${?COMET_ARCHIVE}

#launcher = airflow
launcher = simple
launcher = ${?COMET_LAUNCHER}
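# Note: with launcher = "airflow", ingestion is presumably triggered through the
# Airflow endpoint configured in the "airflow" block below (see the curl example there);
# with "simple", jobs are launched directly by the application.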

hive = false
hive = ${?COMET_HIVE}

grouped = false
grouped = ${?COMET_GROUPED}

analyze = true
analyze = ${?COMET_ANALYZE}

# deprecated
write-format = parquet
write-format = ${?COMET_WRITE_FORMAT}

merge-force-distinct = false
merge-force-distinct = ${?COMET_MERGE_FORCE_DISTINCT}

#file-system = "gs://nps-datastore"
#file-system = "hdfs://localhost:9000"
file-system = "abfs://[email protected]"
file-system = ${?COMET_FS}

chewer-prefix = "comet.chewer"
chewer-prefix = ${?COMET_CHEWER_PREFIX}

lock {
  path = "/tmp/locks"
  path = ${?COMET_LOCK_PATH}

  ingestion-timeout = -1
  ingestion-timeout = ${?COMET_LOCK_INGESTION_TIMEOUT}

  metrics-timeout = -1
  metrics-timeout = ${?COMET_LOCK_METRICS_TIMEOUT}
}
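# A timeout of -1 presumably means "wait for the lock indefinitely". The lock location
# can be relocated via the environment, for example:
#   export COMET_LOCK_PATH=/var/run/comet/locks    (illustrative path)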

# Extra Hadoop configuration properties; presumably merged into the Hadoop configuration
# used by the ingestion jobs (empty by default).
hadoop {
}

audit {

  active = true
  active = ${?COMET_AUDIT_ACTIVE}

  #  path = "/tmp/metrics/{domain}/{schema}"
  path = "/tmp/audit"
  path = ${?COMET_AUDIT_PATH}

  audit-timeout = -1
  audit-timeout = ${?COMET_LOCK_AUDIT_TIMEOUT}
}

metrics {
  active = false
  active = ${?COMET_METRICS_ACTIVE}

  #  path = "/tmp/metrics/{domain}/{schema}"
  path = "/tmp/metrics/{domain}"
  path = ${?COMET_METRICS_PATH}

  discrete-max-cardinality = 10
  discrete-max-cardinality = ${?COMET_METRICS_DISCRETE_MAX_CARDINALITY}
}
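# The {domain} (and, in the commented variants, {schema}) tokens in the audit/metrics
# paths are presumably substituted at runtime with the domain and schema of the dataset
# being processed.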

area {
  pending = "pending"
  pending = ${?COMET_AREA_PENDING}
  unresolved = "unresolved"
  unresolved = ${?COMET_AREA_UNRESOLVED}
  archive = "archive"
  archive = ${?COMET_AREA_ARCHIVE}
  ingesting = "ingesting"
  ingesting = ${?COMET_AREA_INGESTING}
  accepted = "accepted"
  accepted = ${?COMET_AREA_ACCEPTED}
  rejected = "rejected"
  rejected = ${?COMET_AREA_REJECTED}
  business = "business"
  business = ${?COMET_AREA_BUSINESS}
}
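# These area names presumably map to sub-directories under the datasets root, e.g.
# /tmp/datasets/pending/<domain> for files awaiting ingestion (illustrative layout).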

privacy {
  options = {
    "none": "com.ebiznext.comet.utils.No",
    "hide": "com.ebiznext.comet.utils.Hide",
    "md5": "com.ebiznext.comet.utils.Md5",
    "sha1": "com.ebiznext.comet.utils.Sha1",
    "sha256": "com.ebiznext.comet.utils.Sha256",
    "sha512": "com.ebiznext.comet.utils.Sha512",
  }
}
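# Each entry maps a privacy strategy name (referenced from schema attribute definitions)
# to the class implementing it. A custom strategy could presumably be plugged in by
# adding an entry, e.g.:
#   "initials": "com.mycompany.comet.privacy.Initials"    (hypothetical class)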


elasticsearch {
  active = false
  active = ${?COMET_ES_ACTIVE}
  options = {
    "es.nodes": "localhost",
    "es.port": "9200",

    #  "es.net.http.auth.user": "",
    #  "es.net.http.auth.pass": "",

    "es.net.ssl": "false",
    "es.net.ssl.cert.allow.self.signed": "false",

    "es.batch.size.entries": "1000",
    "es.batch.size.bytes": "1mb",
    "es.batch.write.retry.count": "3",
    "es.batch.write.retry.wait": "10s"
  }
}
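# The es.* keys above are standard elasticsearch-hadoop connector settings and are
# presumably passed through as-is; basic auth, for instance, would use
# "es.net.http.auth.user" / "es.net.http.auth.pass" as sketched in the commented lines above.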

atlas {
  uri = "http://127.0.0.1:21000"
  uri = ${?COMET_ATLAS_URI}
  user = "admin"
  user = ${?COMET_ATLAS_USER}
  password = "admin"
  password = ${?COMET_ATLAS_PASSWORD}
  owner = "system"
  owner = ${?COMET_ATLAS_OWNER}
}

spark {
  #  sql.hive.convertMetastoreParquet = false
  #extraListeners = com.hortonworks.spark.atlas.SparkAtlasEventTracker
  #sql.queryExecutionListeners = com.hortonworks.spark.atlas.SparkAtlasEventTracker
  #sql.streaming.streamingQueryListeners=com.hortonworks.spark.atlas.SparkAtlasStreamingQueryEventTracker
  #  yarn.principal = "invalid"
  #  yarn.keytab = "invalid"
  #  yarn.principal = ${?SPARK_YARN_PRINCIPAL}
  #  yarn.keytab = ${?SPARK_YARN_KEYTAB}
  master = "local[*]"
  #  sql.catalogImplementation="hive"
  #  sql.catalogImplementation="in-memory"
}
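# Keys under this block are presumably applied to the Spark configuration with the
# "spark." prefix, e.g. master = "local[*]" becomes spark.master, and uncommenting
# sql.catalogImplementation = "hive" would set spark.sql.catalogImplementation.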


// curl -v -H 'Cache-Control: no-cache'  -H 'Content-Type: application/json'  -XPOST localhost:8080/api/experimental/dags/comet_validator/dag_runs -d '{"conf":"{\"key\":\"value\"}"}'

airflow {
  ingest = "comet_ingest"
  ingest = ${?AIRFLOW_INGEST}
  endpoint = "http://127.0.0.1:8080/api/experimental"
  endpoint = ${?AIRFLOW_ENDPOINT}
}
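# Example override (illustrative host):
#   export AIRFLOW_ENDPOINT=http://airflow-webserver:8080/api/experimental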




