organization := "generated"

name := "simple-spark"

version := "0.0.1-SNAPSHOT"

// append the Scala version to artifact name(s)
crossPaths := true

scalaVersion := "2.10.6"

maintainer := "" // used by the Docker packaging; left empty by the generator

resolvers ++= Seq(
  // local Maven repository, resolved from the current user's home
  // (instead of a hardcoded user-specific path)
  "Maven2 Local" at ("file://" + Path.userHome.absolutePath + "/.m2/repository/"),
  "public" at "https://repo1.maven.org/maven2/",
  "spark-packages" at "https://dl.bintray.com/spark-packages/maven/",
  new sbt.URLRepository("typesafe-ivy-releases", new sbt.Patterns(
    List("https://repo.typesafe.com/typesafe/ivy-releases/[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"),
    List("https://repo.typesafe.com/typesafe/ivy-releases/[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"),
    false
  )),
  "jcenter" at "https://jcenter.bintray.com/",
  "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases"
)
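
// resolvers are tried in the order listed, so locally published artifacts
// in ~/.m2 take precedence over the remote repositories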

net.virtualvoid.sbt.graph.Plugin.graphSettings

enablePlugins(UniversalPlugin)

enablePlugins(DockerPlugin)

enablePlugins(JavaAppPackaging)
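
// UniversalPlugin/DockerPlugin/JavaAppPackaging come from sbt-native-packager,
// the assembly settings further below from sbt-assembly, and graphSettings above
// from sbt-dependency-graph. A minimal project/plugins.sbt would look roughly
// like this (the versions are illustrative, not prescribed by this build):
//   addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.1.4")
//   addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")
//   addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")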

import com.typesafe.sbt.SbtNativePackager.autoImport.NativePackagerHelper._

import com.typesafe.sbt.packager.docker._

dockerBaseImage := "data-fellas-docker-public.bintray.io/base-adst:0.0.1"

dockerExposedPorts := Seq(9000, 9443)

daemonUser in Docker := "root"

packageName in Docker := "generated.simplespark"

// ship the spark-lib directory inside both the Docker image and the universal package
mappings in Docker ++= directory("spark-lib")

mappings in Universal ++= directory("spark-lib")
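
// with these settings, `sbt docker:publishLocal` builds the image locally and
// `sbt universal:packageBin` produces a distributable zip (standard
// sbt-native-packager tasks)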

resolvers += Resolver.mavenLocal

resolvers += Resolver.typesafeRepo("releases")

resolvers += "cloudera" at "https://repository.cloudera.com/artifactory/cloudera-repos"

credentials += Credentials(Path.userHome / ".bintray" / ".credentials")

resolvers += Resolver.url("bintray-data-fellas-maven", url("https://dl.bintray.com/data-fellas/maven"))(Resolver.ivyStylePatterns)

// emit `ENV SPARK_HOME ""` in the Dockerfile, clearing any inherited SPARK_HOME
dockerCommands ++= Seq(Cmd("ENV", "SPARK_HOME \"\""))

dockerRepository := Some("") // target Docker registry; left empty by the generator

enablePlugins(DebianPlugin)

name in Debian := "simple-spark"

maintainer in Debian := "Data Fellas"

packageSummary in Debian := "Data Fellas Generated Job"

packageDescription := "Generated Job by Spark-notebook"

debianPackageDependencies in Debian += "java8-runtime-headless"

serverLoading in Debian := com.typesafe.sbt.packager.archetypes.ServerLoader.Upstart

daemonUser in Linux := "root"

daemonGroup in Linux := "root"

bashScriptExtraDefines += "export SPARK_HOME=\"\""
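
// packaging a .deb should then be a matter of `sbt debian:packageBin`
// (native packaging needs dpkg tooling available on the build machine)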

val sparkVersion = sys.env.get("SPARK_VERSION").orElse(sys.props.get("spark.version")).getOrElse("2.0.1")

val hadoopVersion = sys.env.get("HADOOP_VERSION").orElse(sys.props.get("hadoop.version")).getOrElse("2.2.0")
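
// both can be overridden without editing this file, e.g.:
//   SPARK_VERSION=2.0.2 sbt assembly
//   sbt -Dspark.version=2.0.2 -Dhadoop.version=2.6.0 assembly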

// TODO: needed only if you use some of the spark-notebook code
// (most likely you don't; otherwise you'd need to publishLocal the SN libs)
// libraryDependencies += "io.kensu" %% "common" % (sparkVersion + "_0.8.0-SNAPSHOT") excludeAll(
//   ExclusionRule("org.apache.hadoop"),
//   ExclusionRule("org.apache.spark")
// )

libraryDependencies += "com.typesafe" % "config" % "1.3.1"

// you might not need all of the Spark modules below
libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion excludeAll(
  ExclusionRule("org.apache.hadoop"),
  ExclusionRule("org.apache.ivy", "ivy")
)

libraryDependencies += "org.apache.spark" %% "spark-mllib" % sparkVersion excludeAll(
  ExclusionRule("org.apache.hadoop"),
  ExclusionRule("org.apache.ivy", "ivy")
)

libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion excludeAll(
  ExclusionRule("org.apache.hadoop")
)

libraryDependencies += "org.apache.spark" %% "spark-yarn" % sparkVersion excludeAll(
  ExclusionRule("org.apache.hadoop"),
  ExclusionRule("org.apache.ivy", "ivy")
)

libraryDependencies += "org.apache.spark" %% "spark-hive" % sparkVersion excludeAll(
  ExclusionRule("org.apache.hadoop"),
  ExclusionRule("org.apache.ivy", "ivy"),
  ExclusionRule("javax.servlet", "servlet-api"),
  ExclusionRule("org.mortbay.jetty", "servlet-api")
)

// Hadoop is excluded from the Spark artifacts above and pulled in explicitly
// here, so hadoopVersion controls the Hadoop client everywhere
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(
  ExclusionRule("org.apache.commons", "commons-exec"),
  ExclusionRule("commons-codec", "commons-codec"),
  ExclusionRule("com.google.guava", "guava"),
  ExclusionRule("javax.servlet")
)

libraryDependencies += "org.apache.hadoop" % "hadoop-yarn-server-web-proxy" % hadoopVersion excludeAll(
  ExclusionRule("org.apache.commons", "commons-exec"),
  ExclusionRule("commons-codec", "commons-codec"),
  ExclusionRule("com.google.guava", "guava"),
  ExclusionRule("javax.servlet")
)

// force these exact versions during Ivy conflict resolution
libraryDependencies += ("net.java.dev.jets3t" % "jets3t" % "0.9.0").force()

libraryDependencies += ("com.google.guava" % "guava" % "16.0.1").force()
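
// `force()` makes Ivy keep exactly these versions when resolving conflicts.
// A similar (though not identical) effect can be had with dependencyOverrides, e.g.:
//   dependencyOverrides += "com.google.guava" % "guava" % "16.0.1"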

// assembly
// skip tests during assembly
test in assembly := {}

// main class of the fat jar
mainClass in assembly := Some("generated.Main")

// tag the assembly artifact with an "assembly" classifier
artifact in (Compile, assembly) ~= { art =>
  art.copy(classifier = Some("assembly"))
}
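
// to publish the fat jar alongside the regular artifacts, sbt-assembly's
// documented recipe adds:
//   addArtifact(artifact in (Compile, assembly), assembly)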

// merging files... especially application.conf!
assemblyMergeStrategy in assembly := {
  case PathList("javax", "servlet", xs @ _*) => MergeStrategy.first
  case PathList("org", "apache", xs @ _*) => MergeStrategy.first
  case PathList("org", "fusesource", xs @ _*) => MergeStrategy.first
  case PathList("org", "slf4j", xs @ _*) => MergeStrategy.first
  case PathList("com", "google", xs @ _*) => MergeStrategy.first
  case PathList("play", "core", xs @ _*) => MergeStrategy.first
  case PathList("javax", "xml", xs @ _*) => MergeStrategy.first
  case PathList("com", "esotericsoftware", xs @ _*) => MergeStrategy.first
  case PathList("xsbt", xs @ _*) => MergeStrategy.first
  case PathList("META-INF", "MANIFEST.MF") => MergeStrategy.discard
  case PathList("META-INF", xs @ _*) => MergeStrategy.first
  case "application.conf" => MergeStrategy.concat
  case "module.properties" => MergeStrategy.first
  case PathList(ps @ _*) if ps.last endsWith ".html" => MergeStrategy.discard
  case PathList(ps @ _*) if ps.last endsWith ".thrift" => MergeStrategy.first
  case PathList(ps @ _*) if ps.last endsWith ".xml" => MergeStrategy.first
  case x =>
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(x)
}
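
// after `sbt assembly`, the fat jar (named <name>-assembly-<version>.jar by
// sbt-assembly's default) can be submitted roughly like this:
//   spark-submit --class generated.Main \
//     target/scala-2.10/simple-spark-assembly-0.0.1-SNAPSHOT.jar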

// don't propagate `update` to aggregated subprojects
aggregate in update := false

// cache dependency resolution results to speed up repeated updates
updateOptions := updateOptions.value.withCachedResolution(true)