Do you have to install Scala to install Spark?

Install Spark
tar -zxvf spark-1.3.0-bin-hadoop2.3.tgz
mkdir /usr/local/spark
mv spark-1.3.0-bin-hadoop2.3 /usr/local/spark

vim /etc/bashrc
# SCALA_HOME is referenced in PATH below; the path matches the one used in spark-env.sh
export SCALA_HOME=/usr/lib/scala/scala-2.10.5
export SPARK_HOME=/usr/local/spark/spark-1.3.0-bin-hadoop2.3
export PATH=$SCALA_HOME/bin:$SPARK_HOME/bin:$PATH

source /etc/bashrc
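
A quick sanity check after sourcing, to confirm the variables are visible in the current shell (the paths assume the layout above):

echo $SPARK_HOME
ls $SPARK_HOME/bin/spark-submit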

cd /usr/local/spark/spark-1.3.0-bin-hadoop2.3/conf/
cp spark-env.sh.template spark-env.sh

vim spark-env.sh

export JAVA_HOME=/java
export SCALA_HOME=/usr/lib/scala/scala-2.10.5
export SPARK_HOME=/usr/local/spark/spark-1.3.0-bin-hadoop2.3
export SPARK_MASTER_IP=192.168.137.101
export SPARK_WORKER_MEMORY=1g
export HADOOP_CONF_DIR=/home/hadoop/hadoop/etc/hadoop
export SPARK_LIBRARY_PATH=$SPARK_HOME/lib
export SCALA_LIBRARY_PATH=$SPARK_LIBRARY_PATH
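
On the question in the title: the pre-built spark-1.3.0-bin-hadoop2.3 package bundles the Scala 2.10 runtime inside its assembly jar, so a standalone Scala installation is not strictly required just to run Spark; SCALA_HOME above points at a local Scala 2.10.5 install, which matches the Scala version this Spark build targets. A quick way to confirm the bundled runtime (a sketch, assuming the pre-built package layout):

ls $SPARK_HOME/lib/spark-assembly-*.jar
unzip -l $SPARK_HOME/lib/spark-assembly-*.jar | grep 'scala/Predef.class'
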
cp slaves.template slaves

vim slaves

hd1
hd2
hd3
hd4
hd5
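
start-all.sh launches a Worker on every host listed in slaves over SSH, so each hostname must resolve from hd1 and passwordless SSH must already be in place (as it typically is for the Hadoop cluster). A quick check, assuming that setup:

for h in hd1 hd2 hd3 hd4 hd5; do ssh $h hostname; done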

7. Distribute to the other nodes
scp /etc/bashrc hd2:/etc
scp /etc/bashrc hd3:/etc
scp /etc/bashrc hd4:/etc
scp /etc/bashrc hd5:/etc

scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd2:/usr/local/spark/
scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd3:/usr/local/spark/
scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd4:/usr/local/spark/
scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd5:/usr/local/spark/
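
The repeated scp commands above can also be written as a loop; a minimal sketch, assuming the same hosts, passwordless SSH, and that /usr/local/spark may not yet exist on the other nodes:

for h in hd2 hd3 hd4 hd5; do
  scp /etc/bashrc $h:/etc
  ssh $h mkdir -p /usr/local/spark
  scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 $h:/usr/local/spark/
done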

8. Start
On hd1, start the cluster:
cd $SPARK_HOME/sbin
./start-all.sh
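
After start-all.sh returns, a few checks are worth running: jps on hd1 should show a Master (plus a Worker, since hd1 is also listed in slaves), jps on hd2-hd5 should show a Worker, and the master web UI listens on port 8080 of the SPARK_MASTER_IP set above. A short spark-shell session against the cluster also confirms that the bundled Scala runtime works without a standalone scala command (a sketch, assuming the master address above):

jps
spark-shell --master spark://192.168.137.101:7077
scala> sc.parallelize(1 to 100).sum()

The last expression should return 5050.0, and http://192.168.137.101:8080 should list the five workers.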