Which Spark versions are installed?

# List every Spark distribution installed locally, with the bundled
# Hadoop version and the directory each one lives in.
library(sparklyr)
print(spark_installed_versions())
##   spark hadoop                                            dir
## 1 1.5.2    2.6 /Users/Mezhoud/spark/spark-1.5.2-bin-hadoop2.6
## 2 2.3.2    2.7 /Users/Mezhoud/spark/spark-2.3.2-bin-hadoop2.7
## 3 2.4.0    2.7 /Users/Mezhoud/spark/spark-2.4.0-bin-hadoop2.7

Latest available Spark versions

# Show the most recent Spark releases available for download,
# including the Hadoop build each release is paired with.
tail(spark_available_versions(show_hadoop = TRUE))
##    spark hadoop
## 43 2.3.1    2.7
## 44 2.3.1    2.6
## 45 2.3.2    2.7
## 46 2.3.2    2.6
## 47 2.4.0    2.7
## 48 2.4.0    2.6

Install a specific Spark version

library(sparklyr)
# One-time download/install of the chosen release; kept commented out
# so re-rendering the document does not trigger a fresh download.
#spark_install(version = "2.4.0")

Connect to two Spark versions

# Build a local-mode Spark config. The two connections below used the
# exact same three settings, so the duplicated assignments are factored
# into one helper.
#
# Args:
#   cores:           number of local cores sparklyr may use.
#   driver_memory:   JVM driver heap passed via spark-submit.
#   memory_fraction: spark.memory.fraction (execution/storage share of heap).
# Returns: a spark_config() list ready to pass to spark_connect().
make_local_conf <- function(cores = 2, driver_memory = "4G",
                            memory_fraction = 0.5) {
  conf <- spark_config()
  conf$`sparklyr.cores.local` <- cores
  conf$`sparklyr.shell.driver-memory` <- driver_memory
  conf$spark.memory.fraction <- memory_fraction
  conf
}

conf232 <- make_local_conf()   # config for the Spark 2.3.2 connection
conf240 <- make_local_conf()   # config for the Spark 2.4.0 connection

# Open two separate local Spark sessions, one per installed version.
#
# Bug fixed: with the defaults, both calls used master = "local" and the
# default app_name = "sparklyr", so sparklyr re-used the first connection
# ("Re-using existing Spark connection to local") and sc240 was really the
# 2.3.2 session. Giving each connection a distinct app_name forces a
# genuinely separate connection per version.
sc232 <- spark_connect(master = "local",
                       app_name = "sparklyr232",   # distinct name -> no re-use
                       spark_home = "/Users/Mezhoud/spark/spark-2.3.2-bin-hadoop2.7",
                       version = "2.3.2",
                       config = conf232
                    )

sc240 <- spark_connect(master = "local",
                       app_name = "sparklyr240",   # distinct name -> no re-use
                       spark_home = "/Users/Mezhoud/spark/spark-2.4.0-bin-hadoop2.7",
                       version = "2.4.0",
                       config = conf240
                    )
## Re-using existing Spark connection to local
# Check which Spark version each handle is actually talking to.
# Both report '2.3.2' here because the second spark_connect() call above
# re-used the existing connection instead of starting a 2.4.0 session.
spark_version(sc240)
## [1] '2.3.2'
spark_version(sc232)
## [1] '2.3.2'

Only the first connection is actually established — the second spark_connect() call silently re-uses it.

Copy a data set across the different nodes

… work in progress …