
package googleapis.bigquery
import io.circe._
import io.circe.syntax._
final case class AggregateClassificationMetrics(
  /** Precision is the fraction of actual positive predictions that had positive actual labels. For multiclass this is a macro-averaged metric treating each class as a binary classifier.
    */
  precision: Option[Double] = None,
  /** Area Under a ROC Curve. For multiclass this is a macro-averaged metric.
    */
  rocAuc: Option[Double] = None,
  /** Logarithmic Loss. For multiclass this is a macro-averaged metric.
    */
  logLoss: Option[Double] = None,
  /** The F1 score is an average of recall and precision. For multiclass this is a macro-averaged metric.
    */
  f1Score: Option[Double] = None,
  /** Accuracy is the fraction of predictions given the correct label. For multiclass this is a micro-averaged metric.
    */
  accuracy: Option[Double] = None,
  /** Threshold at which the metrics are computed. For binary classification models this is the positive class threshold. For multi-class classification models this is the confidence threshold.
    */
  threshold: Option[Double] = None,
  /** Recall is the fraction of actual positive labels that were given a positive prediction. For multiclass this is a macro-averaged metric.
    */
  recall: Option[Double] = None,
)
object AggregateClassificationMetrics {
  implicit val encoder: Encoder[AggregateClassificationMetrics] =
    Encoder.instance { x =>
      Json.obj(
        "precision" := x.precision,
        "rocAuc" := x.rocAuc,
        "logLoss" := x.logLoss,
        "f1Score" := x.f1Score,
        "accuracy" := x.accuracy,
        "threshold" := x.threshold,
        "recall" := x.recall,
      )
    }
  implicit val decoder: Decoder[AggregateClassificationMetrics] =
    Decoder.instance { c =>
      for {
        v0 <- c.get[Option[Double]]("precision")
        v1 <- c.get[Option[Double]]("rocAuc")
        v2 <- c.get[Option[Double]]("logLoss")
        v3 <- c.get[Option[Double]]("f1Score")
        v4 <- c.get[Option[Double]]("accuracy")
        v5 <- c.get[Option[Double]]("threshold")
        v6 <- c.get[Option[Double]]("recall")
      } yield AggregateClassificationMetrics(v0, v1, v2, v3, v4, v5, v6)
    }
}
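
// A minimal usage sketch, not part of the generated codec above: it round-trips
// AggregateClassificationMetrics through JSON using the implicit Encoder/Decoder
// defined in this file. The object name is illustrative only, and it assumes the
// circe-parser module is on the classpath for io.circe.parser.decode.
object AggregateClassificationMetricsExample {
  import io.circe.parser.decode

  def main(args: Array[String]): Unit = {
    val metrics = AggregateClassificationMetrics(
      precision = Some(0.92),
      recall = Some(0.88),
      threshold = Some(0.5)
    )
    // Encode to JSON; fields left as None are emitted as nulls by the encoder above.
    val json: Json = metrics.asJson
    println(json.noSpaces)
    // Decode back; missing or null fields become None.
    val decoded = decode[AggregateClassificationMetrics](json.noSpaces)
    println(decoded)
  }
}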