/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.params

import hex.tree.isofor.IsolationForestModel.IsolationForestParameters
import ai.h2o.sparkling.H2OFrame
import hex.Model.Parameters.CategoricalEncodingScheme
import hex.ScoreKeeper.StoppingMetric
trait H2OIsolationForestParams
  extends H2OAlgoParamsBase
  with HasIgnoredCols
  with HasCalibrationDataFrame
  with HasValidationLabelCol {

  protected def paramTag = reflect.classTag[IsolationForestParameters]

  //
  // Parameter definitions
  //
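  // Each Param below mirrors a field of H2O's IsolationForestParameters and is exposed
  // through Spark ML's Param machinery (configured via the setters, read via the getters).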

  protected val sampleSize = longParam(
    name = "sampleSize",
    doc = """Number of randomly sampled observations used to train each Isolation Forest tree. Only one of the parameters sample_size and sample_rate should be defined. If sample_rate is defined, sample_size will be ignored.""")

  protected val sampleRate = doubleParam(
    name = "sampleRate",
    doc = """Rate of randomly sampled observations used to train each Isolation Forest tree. Must be in the range from 0.0 to 1.0. If set to -1, sample_rate is disabled and sample_size will be used instead.""")

  protected val mtries = intParam(
    name = "mtries",
    doc = """Number of variables randomly sampled as candidates at each split. If set to -1, defaults to (number of predictors)/3.""")

  protected val contamination = doubleParam(
    name = "contamination",
    doc = """Contamination ratio - the proportion of anomalies in the input dataset. If undefined (-1), the predict function will not mark observations as anomalies and only the anomaly score will be returned. Defaults to -1 (undefined).""")

  protected val ntrees = intParam(
    name = "ntrees",
    doc = """Number of trees.""")

  protected val maxDepth = intParam(
    name = "maxDepth",
    doc = """Maximum tree depth (0 for unlimited).""")

  protected val minRows = doubleParam(
    name = "minRows",
    doc = """Fewest allowed (weighted) observations in a leaf.""")

  protected val seed = longParam(
    name = "seed",
    doc = """Seed for pseudo random number generator (if applicable).""")

  protected val buildTreeOneNode = booleanParam(
    name = "buildTreeOneNode",
    doc = """Run on one node only; no network overhead, but fewer CPUs are used. Suitable for small datasets.""")

  protected val colSampleRatePerTree = doubleParam(
    name = "colSampleRatePerTree",
    doc = """Column sample rate per tree (from 0.0 to 1.0).""")

  protected val colSampleRateChangePerLevel = doubleParam(
    name = "colSampleRateChangePerLevel",
    doc = """Relative change of the column sampling rate for every level (must be > 0.0 and <= 2.0).""")

  protected val scoreTreeInterval = intParam(
    name = "scoreTreeInterval",
    doc = """Score the model after every so many trees. Disabled if set to 0.""")

  protected val modelId = nullableStringParam(
    name = "modelId",
    doc = """Destination id for this model; auto-generated if not specified.""")

  protected val categoricalEncoding = stringParam(
    name = "categoricalEncoding",
    doc = """Encoding scheme for categorical features. Possible values are ``"AUTO"``, ``"OneHotInternal"``, ``"OneHotExplicit"``, ``"Enum"``, ``"Binary"``, ``"Eigen"``, ``"LabelEncoder"``, ``"SortByResponse"``, ``"EnumLimited"``.""")

  protected val ignoreConstCols = booleanParam(
    name = "ignoreConstCols",
    doc = """Ignore constant columns.""")

  protected val scoreEachIteration = booleanParam(
    name = "scoreEachIteration",
    doc = """Whether to score during each iteration of model training.""")

  protected val stoppingRounds = intParam(
    name = "stoppingRounds",
    doc = """Early stopping based on convergence of stopping_metric. Stop if simple moving average of length k of the stopping_metric does not improve for k:=stopping_rounds scoring events (0 to disable).""")

  protected val maxRuntimeSecs = doubleParam(
    name = "maxRuntimeSecs",
    doc = """Maximum allowed runtime in seconds for model training. Use 0 to disable.""")

  protected val stoppingMetric = stringParam(
    name = "stoppingMetric",
    doc = """Metric to use for early stopping (AUTO: logloss for classification, deviance for regression and anomaly_score for Isolation Forest). Note that custom and custom_increasing can only be used in GBM and DRF with the Python client. Possible values are ``"AUTO"``, ``"deviance"``, ``"logloss"``, ``"MSE"``, ``"RMSE"``, ``"MAE"``, ``"RMSLE"``, ``"AUC"``, ``"AUCPR"``, ``"lift_top_group"``, ``"misclassification"``, ``"mean_per_class_error"``, ``"anomaly_score"``, ``"AUUC"``, ``"ATE"``, ``"ATT"``, ``"ATC"``, ``"qini"``, ``"custom"``, ``"custom_increasing"``.""")

  protected val stoppingTolerance = doubleParam(
    name = "stoppingTolerance",
    doc = """Relative tolerance for metric-based stopping criterion (stop if relative improvement is not at least this much).""")

  protected val exportCheckpointsDir = nullableStringParam(
    name = "exportCheckpointsDir",
    doc = """Automatically export generated models to this directory.""")

  //
  // Default values
  //
  setDefault(
    sampleSize -> 256L,
    sampleRate -> -1.0,
    mtries -> -1,
    contamination -> -1.0,
    ntrees -> 50,
    maxDepth -> 8,
    minRows -> 1.0,
    seed -> -1L,
    buildTreeOneNode -> false,
    colSampleRatePerTree -> 1.0,
    colSampleRateChangePerLevel -> 1.0,
    scoreTreeInterval -> 0,
    modelId -> null,
    categoricalEncoding -> CategoricalEncodingScheme.AUTO.name(),
    ignoreConstCols -> true,
    scoreEachIteration -> false,
    stoppingRounds -> 0,
    maxRuntimeSecs -> 0.0,
    stoppingMetric -> StoppingMetric.AUTO.name(),
    stoppingTolerance -> 0.01,
    exportCheckpointsDir -> null)
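
  // Note: the defaults above are intended to mirror the native defaults of H2O's
  // IsolationForestParameters (e.g. sample_size = 256, ntrees = 50).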

  //
  // Getters
  //
  def getSampleSize(): Long = $(sampleSize)
  def getSampleRate(): Double = $(sampleRate)
  def getMtries(): Int = $(mtries)
  def getContamination(): Double = $(contamination)
  def getNtrees(): Int = $(ntrees)
  def getMaxDepth(): Int = $(maxDepth)
  def getMinRows(): Double = $(minRows)
  def getSeed(): Long = $(seed)
  def getBuildTreeOneNode(): Boolean = $(buildTreeOneNode)
  def getColSampleRatePerTree(): Double = $(colSampleRatePerTree)
  def getColSampleRateChangePerLevel(): Double = $(colSampleRateChangePerLevel)
  def getScoreTreeInterval(): Int = $(scoreTreeInterval)
  def getModelId(): String = $(modelId)
  def getCategoricalEncoding(): String = $(categoricalEncoding)
  def getIgnoreConstCols(): Boolean = $(ignoreConstCols)
  def getScoreEachIteration(): Boolean = $(scoreEachIteration)
  def getStoppingRounds(): Int = $(stoppingRounds)
  def getMaxRuntimeSecs(): Double = $(maxRuntimeSecs)
  def getStoppingMetric(): String = $(stoppingMetric)
  def getStoppingTolerance(): Double = $(stoppingTolerance)
  def getExportCheckpointsDir(): String = $(exportCheckpointsDir)

  //
  // Setters
  //
  def setSampleSize(value: Long): this.type = {
    set(sampleSize, value)
  }

  def setSampleRate(value: Double): this.type = {
    set(sampleRate, value)
  }

  def setMtries(value: Int): this.type = {
    set(mtries, value)
  }

  def setContamination(value: Double): this.type = {
    set(contamination, value)
  }

  def setNtrees(value: Int): this.type = {
    set(ntrees, value)
  }

  def setMaxDepth(value: Int): this.type = {
    set(maxDepth, value)
  }

  def setMinRows(value: Double): this.type = {
    set(minRows, value)
  }

  def setSeed(value: Long): this.type = {
    set(seed, value)
  }

  def setBuildTreeOneNode(value: Boolean): this.type = {
    set(buildTreeOneNode, value)
  }

  def setColSampleRatePerTree(value: Double): this.type = {
    set(colSampleRatePerTree, value)
  }

  def setColSampleRateChangePerLevel(value: Double): this.type = {
    set(colSampleRateChangePerLevel, value)
  }

  def setScoreTreeInterval(value: Int): this.type = {
    set(scoreTreeInterval, value)
  }

  def setModelId(value: String): this.type = {
    set(modelId, value)
  }

  def setCategoricalEncoding(value: String): this.type = {
    val validated = EnumParamValidator.getValidatedEnumValue[CategoricalEncodingScheme](value)
    set(categoricalEncoding, validated)
  }

  def setIgnoreConstCols(value: Boolean): this.type = {
    set(ignoreConstCols, value)
  }

  def setScoreEachIteration(value: Boolean): this.type = {
    set(scoreEachIteration, value)
  }

  def setStoppingRounds(value: Int): this.type = {
    set(stoppingRounds, value)
  }

  def setMaxRuntimeSecs(value: Double): this.type = {
    set(maxRuntimeSecs, value)
  }

  def setStoppingMetric(value: String): this.type = {
    val validated = EnumParamValidator.getValidatedEnumValue[StoppingMetric](value)
    set(stoppingMetric, validated)
  }

  def setStoppingTolerance(value: Double): this.type = {
    set(stoppingTolerance, value)
  }

  def setExportCheckpointsDir(value: String): this.type = {
    set(exportCheckpointsDir, value)
  }
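
  // Usage sketch (illustrative only, not part of the generated trait): in Sparkling Water
  // these setters are typically called on ai.h2o.sparkling.ml.algos.H2OIsolationForest,
  // the estimator that exposes the params defined here. `trainingDF` is a hypothetical
  // placeholder for a Spark DataFrame.
  //
  //   import ai.h2o.sparkling.ml.algos.H2OIsolationForest
  //   val algo = new H2OIsolationForest()
  //     .setNtrees(100)
  //     .setSampleSize(256L)
  //     .setContamination(0.05)
  //     .setSeed(42L)
  //   val model = algo.fit(trainingDF)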

  override private[sparkling] def getH2OAlgorithmParams(trainingFrame: H2OFrame): Map[String, Any] = {
    super.getH2OAlgorithmParams(trainingFrame) ++ getH2OIsolationForestParams(trainingFrame)
  }

  private[sparkling] def getH2OIsolationForestParams(trainingFrame: H2OFrame): Map[String, Any] = {
    Map(
      "sample_size" -> getSampleSize(),
      "sample_rate" -> getSampleRate(),
      "mtries" -> getMtries(),
      "contamination" -> getContamination(),
      "ntrees" -> getNtrees(),
      "max_depth" -> getMaxDepth(),
      "min_rows" -> getMinRows(),
      "seed" -> getSeed(),
      "build_tree_one_node" -> getBuildTreeOneNode(),
      "col_sample_rate_per_tree" -> getColSampleRatePerTree(),
      "col_sample_rate_change_per_level" -> getColSampleRateChangePerLevel(),
      "score_tree_interval" -> getScoreTreeInterval(),
      "model_id" -> getModelId(),
      "categorical_encoding" -> getCategoricalEncoding(),
      "ignore_const_cols" -> getIgnoreConstCols(),
      "score_each_iteration" -> getScoreEachIteration(),
      "stopping_rounds" -> getStoppingRounds(),
      "max_runtime_secs" -> getMaxRuntimeSecs(),
      "stopping_metric" -> getStoppingMetric(),
      "stopping_tolerance" -> getStoppingTolerance(),
      "export_checkpoints_dir" -> getExportCheckpointsDir()) +++
      getIgnoredColsParam(trainingFrame) +++
      getCalibrationDataFrameParam(trainingFrame) +++
      getValidationLabelColParam(trainingFrame)
  }
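
  // The `+++` calls above merge in the parameter maps contributed by the mixed-in traits
  // (HasIgnoredCols, HasCalibrationDataFrame, HasValidationLabelCol), producing a single
  // map of H2O parameters for this algorithm.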

  override private[sparkling] def getSWtoH2OParamNameMap(): Map[String, String] = {
    super.getSWtoH2OParamNameMap() ++
      Map(
        "sampleSize" -> "sample_size",
        "sampleRate" -> "sample_rate",
        "mtries" -> "mtries",
        "contamination" -> "contamination",
        "ntrees" -> "ntrees",
        "maxDepth" -> "max_depth",
        "minRows" -> "min_rows",
        "seed" -> "seed",
        "buildTreeOneNode" -> "build_tree_one_node",
        "colSampleRatePerTree" -> "col_sample_rate_per_tree",
        "colSampleRateChangePerLevel" -> "col_sample_rate_change_per_level",
        "scoreTreeInterval" -> "score_tree_interval",
        "modelId" -> "model_id",
        "categoricalEncoding" -> "categorical_encoding",
        "ignoreConstCols" -> "ignore_const_cols",
        "scoreEachIteration" -> "score_each_iteration",
        "stoppingRounds" -> "stopping_rounds",
        "maxRuntimeSecs" -> "max_runtime_secs",
        "stoppingMetric" -> "stopping_metric",
        "stoppingTolerance" -> "stopping_tolerance",
        "exportCheckpointsDir" -> "export_checkpoints_dir")
  }
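
  // The map above translates the camelCase Spark param names used in this trait to the
  // snake_case names of the corresponding H2O parameters.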
}