Skip to content

Commit

Permalink
Fix tools to be consistent with scalastyle
Browse files Browse the repository at this point in the history
  • Loading branch information
jtescher committed Mar 7, 2015
1 parent 2d30354 commit d823bcf
Show file tree
Hide file tree
Showing 13 changed files with 234 additions and 93 deletions.
17 changes: 9 additions & 8 deletions tools/src/main/scala/RegisterEngine.scala
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,11 @@ object RegisterEngine extends Logging {
// Configure local FS or HDFS
val conf = new Configuration
val fss =
if (copyLocal)
if (copyLocal) {
Seq(FileSystem.get(conf), FileSystem.getLocal(conf))
else
} else {
Seq(FileSystem.get(conf))
}
val enginesdir = sys.env.get("PIO_FS_ENGINESDIR") match {
case Some(s) => s
case None =>
Expand All @@ -85,14 +86,14 @@ object RegisterEngine extends Logging {
val destFilePath =
new Path(destDir.:+(f.getName).mkString(Path.SEPARATOR_CHAR + ""))
val destPathString = fs.makeQualified(destFilePath).toString
//if (fs.exists(destFilePath) &&
// f.length == fs.getFileStatus(destFilePath).getLen)
// info(s"Skip copying ${f.toURI} because ${destPathString} exists " +
// "and their file sizes are equal")
//else {
// if (fs.exists(destFilePath) &&
// f.length == fs.getFileStatus(destFilePath).getLen)
// info(s"Skip copying ${f.toURI} because ${destPathString} exists " +
// "and their file sizes are equal")
// else {
info(s"Copying ${f.toURI} to ${destPathString}")
fs.copyFromLocalFile(new Path(f.toURI), destPath)
//}
// }
destPathString
}
}
Expand Down
43 changes: 26 additions & 17 deletions tools/src/main/scala/RunServer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -43,26 +43,29 @@ object RunServer extends Logging {
val driverClassPathIndex =
ca.common.sparkPassThrough.indexOf("--driver-class-path")
val driverClassPathPrefix =
if (driverClassPathIndex != -1)
if (driverClassPathIndex != -1) {
Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
else
} else {
Seq()
}
val extraClasspaths =
driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths

val deployModeIndex =
ca.common.sparkPassThrough.indexOf("--deploy-mode")
val deployMode = if (deployModeIndex != -1)
val deployMode = if (deployModeIndex != -1) {
ca.common.sparkPassThrough(deployModeIndex + 1)
else
} else {
"client"
}

val mainJar =
if (ca.build.uberJar) {
if (deployMode == "cluster")
if (deployMode == "cluster") {
em.files.filter(_.startsWith("hdfs")).head
else
} else {
em.files.filterNot(_.startsWith("hdfs")).head
}
} else {
if (deployMode == "cluster") {
em.files.filter(_.contains("pio-assembly")).head
Expand All @@ -82,20 +85,23 @@ object RunServer extends Logging {
(if (!ca.build.uberJar) {
Seq("--jars", em.files.mkString(","))
} else Seq()) ++
(if (extraFiles.size > 0)
(if (extraFiles.size > 0) {
Seq("--files", extraFiles.mkString(","))
else
Seq()) ++
(if (extraClasspaths.size > 0)
} else {
Seq()
}) ++
(if (extraClasspaths.size > 0) {
Seq("--driver-class-path", extraClasspaths.mkString(":"))
else
Seq()) ++
(if (ca.common.sparkKryo)
} else {
Seq()
}) ++
(if (ca.common.sparkKryo) {
Seq(
"--conf",
"spark.serializer=org.apache.spark.serializer.KryoSerializer")
else
Seq()) ++
} else {
Seq()
}) ++
Seq(
mainJar,
"--engineInstanceId",
Expand All @@ -108,8 +114,11 @@ object RunServer extends Logging {
ca.eventServer.ip,
"--event-server-port",
ca.eventServer.port.toString) ++
(if (ca.accessKey.accessKey != "")
Seq("--accesskey", ca.accessKey.accessKey) else Seq()) ++
(if (ca.accessKey.accessKey != "") {
Seq("--accesskey", ca.accessKey.accessKey)
} else {
Seq()
}) ++
(if (ca.eventServer.enabled) Seq("--feedback") else Seq()) ++
(if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
(if (ca.common.verbose) Seq("--verbose") else Seq()) ++
Expand Down
50 changes: 30 additions & 20 deletions tools/src/main/scala/RunWorkflow.scala
Original file line number Diff line number Diff line change
Expand Up @@ -51,28 +51,31 @@ object RunWorkflow extends Logging {
val driverClassPathIndex =
ca.common.sparkPassThrough.indexOf("--driver-class-path")
val driverClassPathPrefix =
if (driverClassPathIndex != -1)
if (driverClassPathIndex != -1) {
Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
else
} else {
Seq()
}
val extraClasspaths =
driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths

val deployModeIndex =
ca.common.sparkPassThrough.indexOf("--deploy-mode")
val deployMode = if (deployModeIndex != -1)
val deployMode = if (deployModeIndex != -1) {
ca.common.sparkPassThrough(deployModeIndex + 1)
else
} else {
"client"
}

val extraFiles = WorkflowUtils.thirdPartyConfFiles

val mainJar =
if (ca.build.uberJar) {
if (deployMode == "cluster")
if (deployMode == "cluster") {
em.files.filter(_.startsWith("hdfs")).head
else
} else {
em.files.filterNot(_.startsWith("hdfs")).head
}
} else {
if (deployMode == "cluster") {
em.files.filter(_.contains("pio-assembly")).head
Expand Down Expand Up @@ -108,20 +111,23 @@ object RunWorkflow extends Logging {
(if (!ca.build.uberJar) {
Seq("--jars", em.files.mkString(","))
} else Seq()) ++
(if (extraFiles.size > 0)
(if (extraFiles.size > 0) {
Seq("--files", extraFiles.mkString(","))
else
Seq()) ++
(if (extraClasspaths.size > 0)
} else {
Seq()
}) ++
(if (extraClasspaths.size > 0) {
Seq("--driver-class-path", extraClasspaths.mkString(":"))
else
Seq()) ++
(if (ca.common.sparkKryo)
} else {
Seq()
}) ++
(if (ca.common.sparkKryo) {
Seq(
"--conf",
"spark.serializer=org.apache.spark.serializer.KryoSerializer")
else
Seq()) ++
} else {
Seq()
}) ++
Seq(
mainJar,
"--env",
Expand All @@ -131,12 +137,13 @@ object RunWorkflow extends Logging {
"--engine-version",
em.version,
"--engine-variant",
(if (deployMode == "cluster")
(if (deployMode == "cluster") {
hdfs.makeQualified(new Path(
(engineLocation :+ variantJson.getName).mkString(Path.SEPARATOR))).
toString
else
variantJson.getCanonicalPath),
} else {
variantJson.getCanonicalPath
}),
"--verbosity",
ca.common.verbosity.toString) ++
ca.common.engineFactory.map(
Expand All @@ -148,8 +155,11 @@ object RunWorkflow extends Logging {
(if (ca.common.verbose) Seq("--verbose") else Seq()) ++
(if (ca.common.skipSanityCheck) Seq("--skip-sanity-check") else Seq()) ++
(if (ca.common.stopAfterRead) Seq("--stop-after-read") else Seq()) ++
(if (ca.common.stopAfterPrepare)
Seq("--stop-after-prepare") else Seq()) ++
(if (ca.common.stopAfterPrepare) {
Seq("--stop-after-prepare")
} else {
Seq()
}) ++
ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
getOrElse(Seq()) ++
ca.common.engineParamsGenerator.map(x => Seq("--engine-params-generator-class", x)).
Expand Down
27 changes: 16 additions & 11 deletions tools/src/main/scala/Runner.scala
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,11 @@ object Runner extends Logging {
val driverClassPathIndex =
ca.common.sparkPassThrough.indexOf("--driver-class-path")
val driverClassPathPrefix =
if (driverClassPathIndex != -1)
if (driverClassPathIndex != -1) {
Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
else
} else {
Seq()
}
val extraClasspaths =
driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths

Expand All @@ -63,23 +64,27 @@ object Runner extends Logging {
val sparkSubmitCommand =
Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator))

val sparkSubmitFiles = if (extraFiles.size > 0)
val sparkSubmitFiles = if (extraFiles.size > 0) {
Seq("--files", extraFiles.mkString(","))
else
} else {
Seq("")
}

val sparkSubmitExtraClasspaths = if (extraClasspaths.size > 0)
val sparkSubmitExtraClasspaths = if (extraClasspaths.size > 0) {
Seq("--driver-class-path", extraClasspaths.mkString(":"))
else
} else {
Seq("")
}

val sparkSubmitKryo = if (ca.common.sparkKryo) Seq(
"--conf",
"spark.serializer=org.apache.spark.serializer.KryoSerializer")
else
val sparkSubmitKryo = if (ca.common.sparkKryo) {
Seq(
"--conf",
"spark.serializer=org.apache.spark.serializer.KryoSerializer")
} else {
Seq("")
}

val verbose = if (ca.common.verbose) Seq("--verbose") else Seq()
val verbose = if (ca.common.verbose) { Seq("--verbose") } else { Seq() }

val sparkSubmit = Seq(
sparkSubmitCommand,
Expand Down
19 changes: 17 additions & 2 deletions tools/src/main/scala/console/AccessKey.scala
Original file line number Diff line number Diff line change
@@ -1,3 +1,18 @@
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
 *  http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.prediction.tools.console

import io.prediction.data.storage.{AccessKey => StorageAccessKey}
Expand Down Expand Up @@ -33,9 +48,9 @@ object AccessKey extends Logging {

def list(ca: ConsoleArgs): Int = {
val keys =
if (ca.app.name == "")
if (ca.app.name == "") {
Storage.getMetaDataAccessKeys.getAll
else {
} else {
val apps = Storage.getMetaDataApps
apps.getByName(ca.app.name) map { app =>
Storage.getMetaDataAccessKeys.getByAppid(app.id)
Expand Down
15 changes: 15 additions & 0 deletions tools/src/main/scala/console/App.scala
Original file line number Diff line number Diff line change
@@ -1,3 +1,18 @@
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
 *  http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.prediction.tools.console

import io.prediction.data.storage.{AccessKey => StorageAccessKey}
Expand Down
Loading

0 comments on commit d823bcf

Please sign in to comment.