// Source file: com.tencent.angel.spark.examples.util.PSExamples.scala
/*
* Tencent is pleased to support the open source community by making Angel available.
*
* Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
*
* Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* https://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.tencent.angel.spark.examples.util
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
object PSExamples {

  /** Number of data points generated/consumed by the example jobs. */
  var N = 2000

  /** Number of feature dimensions. */
  var DIM = 1000

  /** Number of training iterations. */
  var ITERATIONS = 5

  /** Number of RDD partitions (slices) for the input data. */
  var numSlices = 2

  /**
   * Parses command-line arguments of the form `<N> <DIM> <numSlices>`.
   *
   * The defaults are silently kept when the argument count is not exactly 3,
   * matching the original best-effort behavior.
   *
   * @param args expected: data-point count, dimension count, slice count
   * @throws NumberFormatException if an argument is present but not numeric
   *                               (propagated unchanged from `toInt`)
   */
  def parseArgs(args: Array[String]): Unit = {
    if (args.length == 3) {
      N = args(0).toInt
      DIM = args(1).toInt
      numSlices = args(2).toInt
    }
  }

  /**
   * Builds (or reuses) a [[SparkSession]], runs `body` against its
   * [[SparkContext]], and always stops the context afterwards.
   *
   * When no `spark.master` is configured, or the configured master starts
   * with `local`, the session is set up for Angel's local parameter-server
   * mode with a single PS instance.
   *
   * @param name application name shown in the Spark UI
   * @param body the job to run with the created SparkContext
   */
  def runWithSparkContext(name: String)(body: SparkContext => Unit): Unit = {
    val conf = new SparkConf
    val master = conf.getOption("spark.master")
    // A missing master is treated the same as a "local*" master: both mean a
    // local test run. `forall` yields true for None, covering the empty case.
    val isLocalTest = master.forall(_.toLowerCase.startsWith("local"))

    val sparkBuilder = SparkSession.builder().appName(name)
    if (isLocalTest) {
      // Builder methods mutate and return the same builder instance, so the
      // chained result can be safely discarded here.
      sparkBuilder.master("local")
        .config("spark.ps.mode", "LOCAL")
        .config("spark.ps.jars", "")
        .config("spark.ps.instances", "1")
        .config("spark.ps.cores", "1")
    }

    val sc = sparkBuilder.getOrCreate().sparkContext
    try {
      body(sc)
      // Optionally keep a local run alive so the Spark UI can be inspected;
      // enabled via -Dspark.local.wait=true.
      val wait = sys.props.get("spark.local.wait").exists(_.toBoolean)
      if (isLocalTest && wait) {
        println("press Enter to exit!")
        Console.in.read()
      }
    } finally {
      // Fix: stop the context even when `body` throws, instead of leaking it.
      sc.stop()
    }
  }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy