Spark Installation and Configuration
1. Download and unpack Spark
# On the master node
cd /usr/local    # enter the /usr/local directory
tar zxvf spark-2.3.0-bin-hadoop2.7.tgz
2. Download and unpack Scala
wget https://downloads.lightbend.com/scala/2.12.5/scala-2.12.5.tgz
tar zxvf scala-2.12.5.tgz
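Optionally, put both tools on the PATH so the later commands can be run from any directory. A minimal sketch, assuming a bash login shell and the /usr/local install paths used above:
cat >> /etc/profile <<'EOF'
export SCALA_HOME=/usr/local/scala-2.12.5
export SPARK_HOME=/usr/local/spark-2.3.0-bin-hadoop2.7
export PATH=$PATH:$SCALA_HOME/bin:$SPARK_HOME/bin
EOF
source /etc/profile
scala -version    # should report Scala 2.12.5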
3. Edit the Spark configuration files
cd /usr/local/spark-2.3.0-bin-hadoop2.7/conf
cp spark-env.sh.template spark-env.sh    # the template ships with Spark
vim spark-env.sh    # adjust the paths below to your environment
export SCALA_HOME=/usr/local/scala-2.12.5    # matches the install above; note the Spark 2.3.0 binaries bundle their own Scala 2.11
export JAVA_HOME=/usr/local/src/jdk1.8.0_152    # adjust to your JDK location
export HADOOP_HOME=/usr/local/src/hadoop-2.8.2    # adjust to your Hadoop location
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export SPARK_MASTER_HOST=master    # SPARK_MASTER_IP is deprecated in Spark 2.x
export SPARK_LOCAL_DIRS=/usr/local/spark-2.3.0-bin-hadoop2.7    # scratch space for shuffle data
export SPARK_DRIVER_MEMORY=1G
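spark-env.sh also accepts per-worker resource limits. These lines are optional, and the values below are illustrative assumptions rather than requirements; size them to your machines:
export SPARK_WORKER_CORES=2      # cores each Worker may hand out to executors
export SPARK_WORKER_MEMORY=2g    # memory each Worker may hand out to executors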
cp slaves.template slaves
vim slaves    # list one worker hostname per line
slave1
slave2
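Both the scp step below and start-all.sh rely on passwordless SSH from the master to every host listed in slaves. A quick check, assuming the keys were already distributed during the Hadoop setup:
ssh slave1 hostname    # should print slave1 without a password prompt
ssh slave2 hostname    # should print slave2 without a password prompt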
4. Copy the installation to the worker nodes
scp -rp /usr/local/spark-2.3.0-bin-hadoop2.7 slave1:/usr/local/spark-2.3.0-bin-hadoop2.7
scp -rp /usr/local/spark-2.3.0-bin-hadoop2.7 slave2:/usr/local/spark-2.3.0-bin-hadoop2.7
scp -rp /usr/local/scala-2.12.5 slave1:/usr/local/scala-2.12.5
scp -rp /usr/local/scala-2.12.5 slave2:/usr/local/scala-2.12.5
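A quick sanity check that both copies landed, using the same paths as above:
ssh slave1 ls /usr/local/spark-2.3.0-bin-hadoop2.7/conf/spark-env.sh
ssh slave2 ls /usr/local/scala-2.12.5/bin/scala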
5. Start the cluster (the Hadoop cluster must be running first)
cd /usr/local/spark-2.3.0-bin-hadoop2.7/sbin
./start-all.sh
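After start-all.sh returns, a Master process should be running on this node and a Worker on each slave. jps (from the JDK) is a simple check, and the Master web UI listens on port 8080 by default:
jps               # expect a Master process here
ssh slave1 jps    # expect a Worker process
ssh slave2 jps    # expect a Worker process
# Web UI: http://master:8080 should list both workers as ALIVE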
6. Verify the installation
cd /usr/local/spark-2.3.0-bin-hadoop2.7    # run the examples from the Spark root, not sbin
# Local mode
./bin/run-example --master local[2] SparkPi 10
# Standalone cluster mode
./bin/spark-submit --class org.apache.spark.examples.SparkPi --master spark://master:7077 examples/jars/spark-examples_2.11-2.3.0.jar 100
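An interactive check with spark-shell also works. This sketch pipes a one-line job into the shell against the standalone master (hostname master, as configured above):
echo 'sc.parallelize(1 to 100).sum' | ./bin/spark-shell --master spark://master:7077
# the output should include: res0: Double = 5050.0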