# 使用shell脚本一键搭建spark集群
# 时间: 2023-10-27 16:46:17 浏览: 36
#!/bin/bash
# One-click setup for a Spark 2.4.7 standalone cluster node.
# Installs OpenJDK 8, downloads and unpacks Spark, writes spark-env.sh,
# and starts the master plus a local worker.
# Requires: sudo privileges, network access, Debian/Ubuntu (apt-get).
set -euo pipefail

# --- Configuration -----------------------------------------------------------
readonly SPARK_VERSION=2.4.7
readonly SPARK_PKG="spark-${SPARK_VERSION}-bin-hadoop2.7"
readonly SPARK_HOME=/opt/spark
readonly SPARK_MASTER_HOST=master
readonly SPARK_MASTER_PORT=7077
readonly SPARK_MASTER_URL="spark://${SPARK_MASTER_HOST}:${SPARK_MASTER_PORT}"
readonly SPARK_WORKER_CORES=2
readonly SPARK_WORKER_MEMORY=4g
readonly SPARK_WORKER_INSTANCES=2

# --- Install Java (Spark 2.4.x runs on Java 8) -------------------------------
sudo apt-get update
sudo apt-get install -y openjdk-8-jdk

# --- Download and unpack Spark (idempotent: skip if already installed) -------
if [[ ! -d "$SPARK_HOME" ]]; then
  wget "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_PKG}.tgz"
  tar -xf "${SPARK_PKG}.tgz"
  sudo mv "$SPARK_PKG" "$SPARK_HOME"
fi

# --- Environment variables ---------------------------------------------------
# NOTE: the original script wrote the *expanded* $PATH into ~/.bashrc (baking
# the current PATH in permanently) and re-appended on every run. Escape the
# variables so .bashrc stays dynamic, and only append once.
if ! grep -q "export SPARK_HOME=$SPARK_HOME" ~/.bashrc; then
  {
    echo "export SPARK_HOME=$SPARK_HOME"
    echo 'export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin'
  } >> ~/.bashrc
fi
# 'source ~/.bashrc' cannot affect the user's interactive shell from here;
# export directly so the remainder of THIS script sees the tools.
export SPARK_HOME
export PATH="$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin"

# --- Spark configuration -----------------------------------------------------
cp "$SPARK_HOME/conf/spark-env.sh.template" "$SPARK_HOME/conf/spark-env.sh"
{
  echo "export SPARK_MASTER_HOST=$SPARK_MASTER_HOST"
  echo "export SPARK_MASTER_PORT=$SPARK_MASTER_PORT"
  echo "export SPARK_WORKER_CORES=$SPARK_WORKER_CORES"
  echo "export SPARK_WORKER_MEMORY=$SPARK_WORKER_MEMORY"
  echo "export SPARK_WORKER_INSTANCES=$SPARK_WORKER_INSTANCES"
} >> "$SPARK_HOME/conf/spark-env.sh"

# --- Start the cluster -------------------------------------------------------
# BUG FIX: the original script ran stop-slaves.sh / stop-master.sh right after
# starting, shutting the cluster down again before printing "done".
"$SPARK_HOME/sbin/start-master.sh"
"$SPARK_HOME/sbin/start-slave.sh" "$SPARK_MASTER_URL"

echo "Spark集群已搭建完成!"
# Master web UI for checking cluster status:
echo "Master Web UI: http://${SPARK_MASTER_HOST}:8080"