$ sudo cp /home/share/.bashrc.sh ~/
$ source ~/.bashrc.sh
$ sudo tar -zxvf /home/share/spark-2.4.0-bin-without-hadoop.tgz -C /usr/local/
$ cd /usr/local
$ sudo chown -R hadoop:hadoop ./spark-2.4.0-bin-without-hadoop
# hadoop 是当前登录 Linux 系统的用户名
$ cd /usr/local/spark-2.4.0-bin-without-hadoop
$ cp ./conf/spark-env.sh.template ./conf/spark-env.sh
$ sudo vi ./conf/spark-env.sh
在 spark-env.sh 文件中添加以下内容：
export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath)
添加完成后保存并退出 spark-env.sh
到此完成!