package googleapis.bigquery
import io.circe._
import io.circe.syntax._
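
/** Statistics for a load job. All fields are output only and populated by the service.
  */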
final case class JobStatistics3(
  /** Output only. Number of bytes of source data in a load job.
    */
  inputFileBytes: Option[Long] = None,
  /** Output only. Size of the loaded data in bytes. Note that while a load job is in the running state, this value may change.
    */
  outputBytes: Option[Long] = None,
  /** Output only. Number of source files in a load job.
    */
  inputFiles: Option[Long] = None,
  /** Output only. Describes a timeline of job execution.
    */
  timeline: Option[List[QueryTimelineSample]] = None,
  /** Output only. Number of rows imported in a load job. Note that while an import job is in the running state, this value may change.
    */
  outputRows: Option[Long] = None,
  /** Output only. The number of bad records encountered. Note that if the job has failed because of more bad records encountered than the maximum allowed in the load job configuration, then this number can be less than the total number of bad records present in the input data.
    */
  badRecords: Option[Long] = None,
)
object JobStatistics3 {
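  // Encodes every field explicitly; None values are rendered as JSON null by Json.obj.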
  implicit val encoder: Encoder[JobStatistics3] = Encoder.instance { x =>
    Json.obj(
      "inputFileBytes" := x.inputFileBytes,
      "outputBytes" := x.outputBytes,
      "inputFiles" := x.inputFiles,
      "timeline" := x.timeline,
      "outputRows" := x.outputRows,
      "badRecords" := x.badRecords,
    )
  }
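  // Decodes leniently: absent or null fields yield None rather than a failure.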
  implicit val decoder: Decoder[JobStatistics3] = Decoder.instance { c =>
    for {
      v0 <- c.get[Option[Long]]("inputFileBytes")
      v1 <- c.get[Option[Long]]("outputBytes")
      v2 <- c.get[Option[Long]]("inputFiles")
      v3 <- c.get[Option[List[QueryTimelineSample]]]("timeline")
      v4 <- c.get[Option[Long]]("outputRows")
      v5 <- c.get[Option[Long]]("badRecords")
    } yield JobStatistics3(v0, v1, v2, v3, v4, v5)
  }
}
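
// Usage sketch (illustrative only, not part of the generated sources): the implicit
// codecs in the companion object enable `asJson` / `as[JobStatistics3]` round-tripping
// via the circe syntax imported at the top of the file. The object name
// `JobStatistics3Example` and the sample values are hypothetical.
object JobStatistics3Example {

  // Build a value; unspecified fields default to None.
  val stats: JobStatistics3 = JobStatistics3(
    inputFileBytes = Some(4096L),
    inputFiles = Some(3L),
    outputRows = Some(1200L),
  )

  // Encode: the encoder above emits every key, with None rendered as JSON null.
  val encoded: Json = stats.asJson

  // Decode: absent or null fields come back as None.
  val decoded: Decoder.Result[JobStatistics3] = encoded.as[JobStatistics3]
}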