Skip to content

Commit

Permalink
Added spark-2.0.2, spark-2.1.0 and flink-1.1.4 configurations.
Browse files Browse the repository at this point in the history
  • Loading branch information
aalexandrov committed Jan 10, 2017
1 parent 59499a5 commit ca6d7ac
Show file tree
Hide file tree
Showing 5 changed files with 86 additions and 0 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ Peel offers the following features for your experiments.
| Flink | 1.1.1 | `flink-1.1.1` |
| Flink | 1.1.2 | `flink-1.1.2` |
| Flink | 1.1.3 | `flink-1.1.3` |
| Flink | 1.1.4 | `flink-1.1.4` |
| MapReduce | 1.2.1 | `mapred-1.2.1` |
| MapReduce | 2.4.1 | `mapred-2.4.1` |
| Spark | 1.3.1 | `spark-1.3.1` |
Expand All @@ -56,6 +57,8 @@ Peel offers the following features for your experiments.
| Spark | 1.6.2 | `spark-1.6.2` |
| Spark | 2.0.0 | `spark-2.0.0` |
| Spark | 2.0.1 | `spark-2.0.1` |
| Spark | 2.0.2 | `spark-2.0.2` |
| Spark | 2.1.0 | `spark-2.1.0` |
| Zookeeper | 3.4.5 | `zookeeper-3.4.5` |
| Dstat | 0.7.2 | `dstat-0.7.2` |

Expand Down
19 changes: 19 additions & 0 deletions peel-extensions/src/main/resources/reference.flink-1.1.4.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# include common flink configuration
include "reference.flink.conf"

system {
flink {
path {
# release tarball on the Apache archive mirror (Hadoop 2 / Scala 2.10 build)
archive.url = "http://archive.apache.org/dist/flink/flink-1.1.4/flink-1.1.4-bin-hadoop2-scala_2.10.tgz"
# checksum used to validate the downloaded archive
archive.md5 = "e270a65b0bbaefe5214ac7154b938dc0"
# local path of the downloaded tarball (under the app downloads folder)
archive.src = ${app.path.downloads}"/flink-1.1.4-bin-hadoop2-scala_2.10.tgz"
# extracted installation directory (archive.dst comes from reference.flink.conf)
home = ${system.flink.path.archive.dst}"/flink-1.1.4"
}
config {
# flink.yaml entries
yaml {
# directory where process IDs are stored
env.pid.dir = "/tmp/flink-1.1.4-pid"
}
}
}
}
20 changes: 20 additions & 0 deletions peel-extensions/src/main/resources/reference.spark-2.0.2.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# include common spark configuration
include "reference.spark.conf"

system {
spark {
path {
# release tarball on the Apache archive mirror (Hadoop 2.4 build)
archive.url = "http://archive.apache.org/dist/spark/spark-2.0.2/spark-2.0.2-bin-hadoop2.4.tgz"
# checksum used to validate the downloaded archive
archive.md5 = "394853fd3e19239b3fc11e40abba6744"
# local path of the downloaded tarball (under the app downloads folder)
archive.src = ${app.path.downloads}"/spark-2.0.2-bin-hadoop2.4.tgz"
# extracted installation directory (archive.dst comes from reference.spark.conf)
home = ${system.spark.path.archive.dst}"/spark-2.0.2-bin-hadoop2.4"
}
config {
# spark-env.sh entries
env {
# directory where process IDs are stored
SPARK_PID_DIR = "/tmp/spark-2.0.2-pid"
}
}
}
}
20 changes: 20 additions & 0 deletions peel-extensions/src/main/resources/reference.spark-2.1.0.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# include common spark configuration
include "reference.spark.conf"

system {
spark {
path {
# release tarball on the Apache archive mirror (Hadoop 2.4 build)
archive.url = "http://archive.apache.org/dist/spark/spark-2.1.0/spark-2.1.0-bin-hadoop2.4.tgz"
# checksum used to validate the downloaded archive
archive.md5 = "eebfc937e0c3c76174617465d1a8b408"
# local path of the downloaded tarball (under the app downloads folder)
archive.src = ${app.path.downloads}"/spark-2.1.0-bin-hadoop2.4.tgz"
# extracted installation directory (archive.dst comes from reference.spark.conf)
home = ${system.spark.path.archive.dst}"/spark-2.1.0-bin-hadoop2.4"
}
config {
# spark-env.sh entries
env {
# directory where process IDs are stored
SPARK_PID_DIR = "/tmp/spark-2.1.0-pid"
}
}
}
}
24 changes: 24 additions & 0 deletions peel-extensions/src/main/scala/org/peelframework/extensions.scala
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,14 @@ class extensions extends ApplicationContextAware {
mc = ctx.getBean(classOf[Mustache.Compiler])
)

@Bean(name = Array("flink-1.1.4"))
def `flink-1.1.4`: Flink = {
  // Spring bean wiring the Flink 1.1.4 system; lives for the span of a single experiment.
  val mustacheCompiler = ctx.getBean(classOf[Mustache.Compiler])
  new Flink(
    version = "1.1.4",
    configKey = "flink",
    lifespan = Lifespan.EXPERIMENT,
    mc = mustacheCompiler
  )
}

// Spark

@Bean(name = Array("spark-1.3.1"))
Expand Down Expand Up @@ -273,6 +281,22 @@ class extensions extends ApplicationContextAware {
mc = ctx.getBean(classOf[Mustache.Compiler])
)

@Bean(name = Array("spark-2.0.2"))
def `spark-2.0.2`: Spark = {
  // Spring bean wiring the Spark 2.0.2 system; lives for the span of a single experiment.
  val mustacheCompiler = ctx.getBean(classOf[Mustache.Compiler])
  new Spark(
    version = "2.0.2",
    configKey = "spark",
    lifespan = Lifespan.EXPERIMENT,
    mc = mustacheCompiler
  )
}

@Bean(name = Array("spark-2.1.0"))
def `spark-2.1.0`: Spark = {
  // Spring bean wiring the Spark 2.1.0 system; lives for the span of a single experiment.
  val mustacheCompiler = ctx.getBean(classOf[Mustache.Compiler])
  new Spark(
    version = "2.1.0",
    configKey = "spark",
    lifespan = Lifespan.EXPERIMENT,
    mc = mustacheCompiler
  )
}

// DStat

@Bean(name = Array("dstat-0.7.2"))
Expand Down

0 comments on commit ca6d7ac

Please sign in to comment.