
icu.wuhufly.dws.accumulate_standby12.scala

package icu.wuhufly.dws

import icu.wuhufly.utils.{CreateUtils, WriteUtils}
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SparkSession}

object accumulate_standby12 {
  def main(args: Array[String]): Unit = {
    // Obtain the shared SparkSession (configured in the project's CreateUtils helper)
    val spark: SparkSession = CreateUtils.getSpark()
    val sc: SparkContext = spark.sparkContext

    spark.sql("use dwd")
    val df: DataFrame = spark.sql(
      """
        |select distinct start_date, start_hour,
        |  sum(duration) over(partition by start_date, start_hour) as hour_add_standby,
        |  sum(duration) over(partition by start_date order by start_hour) as day_agg_standby
        |from (
        |select date_format(ChangeStartTime, 'yyyy-MM-dd') as start_date,
        |  date_format(ChangeStartTime, 'HH') as start_hour,
        |  cast(ChangeEndTime as long) - cast(ChangeStartTime as long) as duration
        |from (
        |select to_timestamp(ChangeEndTime, 'yyyy-MM-dd HH:mm:ss') as ChangeEndTime,
        |    to_timestamp(ChangeStartTime, 'yyyy-MM-dd HH:mm:ss') as ChangeStartTime
        |  from fact_change_record
        |  where ChangeRecordState = '待机'
        |    and to_timestamp(ChangeStartTime, 'yyyy-MM-dd HH:mm:ss') >= cast('2021-10-12' as date)
        |) t1
        |) t1
        |""".stripMargin)

    // Persist the hourly and daily standby aggregates to the accumulate_standby table in MySQL
    WriteUtils.writeToMysql(
      "accumulate_standby", df
    )

    sc.stop()
  }
}
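
The listing depends on two project-local helpers, CreateUtils.getSpark() and WriteUtils.writeToMysql, which are not shown on this page. Below is a minimal sketch of what they might look like; the Hive-support setting, JDBC URL, database name, credentials, and save mode are assumptions for illustration, not the project's actual configuration.

package icu.wuhufly.utils

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object CreateUtils {
  // Build (or reuse) a SparkSession with Hive support so the dwd database is visible.
  def getSpark(): SparkSession = {
    SparkSession.builder()
      .appName("accumulate_standby12")
      .enableHiveSupport()
      .getOrCreate()
  }
}

object WriteUtils {
  // Append the DataFrame to a MySQL table over JDBC.
  // The URL, database name, and credentials below are placeholders.
  def writeToMysql(table: String, df: DataFrame): Unit = {
    val props = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123456")
    props.setProperty("driver", "com.mysql.cj.jdbc.Driver")
    df.write
      .mode(SaveMode.Append)
      .jdbc("jdbc:mysql://localhost:3306/shtd_industry?useSSL=false", table, props)
  }
}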