org.apache.spark.examples.HdfsTest.scala
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples

import java.util.concurrent.TimeUnit

import org.apache.spark.sql.SparkSession

object HdfsTest {

  /** Usage: HdfsTest [file] */
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      System.err.println("Usage: HdfsTest <file>")
      System.exit(1)
    }
    val spark = SparkSession
      .builder
      .appName("HdfsTest")
      .getOrCreate()
    // Read the input as a DataFrame of lines, then drop to the underlying RDD[Row].
    val file = spark.read.text(args(0)).rdd
    // s is a Row, so s.length is its field count. The map only forces a pass over
    // the data, and cache() keeps the result in memory for the timed loop below.
    val mapped = file.map(s => s.length).cache()
    // Time ten full passes over the cached RDD; the first pass materializes the
    // cache, and later passes measure how fast the cached data can be scanned.
    for (iter <- 1 to 10) {
      val startTimeNs = System.nanoTime()
      for (x <- mapped) { x + 2 }
      val durationMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs)
      println(s"Iteration $iter took $durationMs ms")
    }
println(s"File contents: ${file.map(_.toString).take(1).mkString(",").slice(0, 10)}")
println(s"Returned length(s) of: ${file.map(_.length).sum().toString}")
spark.stop()
}
}
// scalastyle:on println
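
For comparison, below is a minimal sketch, not part of the upstream file, of the same timing loop written against the typed Dataset API: spark.read.textFile returns Dataset[String], so length here is each line's character count rather than a Row's field count. The object name HdfsTextFileTest is hypothetical. The example above is typically launched through Spark's run-example script, e.g. ./bin/run-example HdfsTest <path-to-file>.

// A minimal sketch assuming the standard Spark SQL APIs; illustration only.
import java.util.concurrent.TimeUnit

import org.apache.spark.sql.SparkSession

object HdfsTextFileTest { // hypothetical name, for illustration only
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("HdfsTextFileTest").getOrCreate()
    // textFile yields Dataset[String]: one element per line of the input file.
    val lines = spark.read.textFile(args(0)).rdd
    // Here length is the line's character count, not a Row field count.
    val lengths = lines.map(_.length).cache()
    for (iter <- 1 to 10) {
      val startNs = System.nanoTime()
      lengths.foreach(len => len + 2) // trivial per-element work, as in the example above
      val ms = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNs)
      println(s"Iteration $iter took $ms ms")
    }
    spark.stop()
  }
}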