# NOTE(review): the three lines below are Gitee web-page text accidentally
# captured into this file; they are not shell code. Commented out so the file
# parses — delete them so the shebang on the next line is the first line.
# 代码拉取完成,页面将自动刷新
# 同步操作将从 杨赟/HadoopDeployment 强制同步,此操作会覆盖自 Fork 仓库以来所做的任何修改,且无法恢复!!!
# 确定后同步将在后台操作,完成时将刷新页面,请耐心等待。
#!/usr/bin/env bash
# Deploy a Hadoop/HBase/Spark cluster onto a set of hosts over ssh.
#
# Usage:
#   1. Enable root ssh login on every target host.
#   2. Edit the parameters below.
#   3. Run this script from the control machine.
#
# -u: abort on use of unset variables. -e is deliberately NOT set: several
# remote steps (e.g. 'reboot' over ssh) legitimately return non-zero.
set -u

# Cluster layout: one master plus any number of slaves.
readonly MASTER="192.168.1.74"
readonly SLAVES=("192.168.1.61" "192.168.1.73")
# HTTP server hosting the software tarballs (jdk-8u162-linux-x64.tar.gz etc.).
readonly SOFT_URL="http://192.168.1.179/soft"
# Set up passwordless ssh from this machine to every cluster node.
ssh_copy_id_to_all() {
  echo "配置ssh免密登录..."
  # Generates a key pair for the local user (interactive prompts expected).
  ssh-keygen -t rsa
  local ip
  # Quote "${SLAVES[@]}" so each IP stays one word (no splitting/globbing).
  for ip in "${MASTER}" "${SLAVES[@]}"; do
    ssh-copy-id "root@${ip}"
  done
}
# Install wget on every node; the install/*.sh scripts use it to fetch
# tarballs from SOFT_URL.
install_wget() {
  echo "下载安装wget..."
  local host
  # Master first, then the slaves — same order as the original duplicated code.
  for host in "${MASTER}" "${SLAVES[@]}"; do
    ssh "root@${host}" apt install -y wget
  done
}
# Download, install and configure Java on every node by shipping
# install/java.sh to the host and running it there.
install_java() {
  echo "为所有服务器下载安装和配置JAVA..."
  local host
  for host in "${MASTER}" "${SLAVES[@]}"; do
    scp install/java.sh "root@${host}:/root"
    # SOFT_URL is expanded locally (double quotes), so the remote shell sees
    # the literal URL. java.sh is run with sh, matching the original.
    ssh "root@${host}" "
    export SOFT_URL=$SOFT_URL
    sh /root/java.sh
    rm /root/java.sh
    "
  done
}
# Rename every node: the master becomes "master", the slaves become
# "slave0", "slave1", ... Each host is rebooted so the name takes effect.
set_hostname_for_all() {
  echo "修改服务器的主机名..."
  # '>' truncates /etc/hostname, so the prior 'rm -f' + '>>' dance is not
  # needed. 'reboot' drops the ssh session, so a non-zero ssh exit here is
  # expected and harmless.
  ssh "root@${MASTER}" "
  echo 'master' > /etc/hostname
  reboot
  "
  echo "${MASTER}主机设置为master"
  echo "为使其生效,需要重启master..."
  local i name
  for i in "${!SLAVES[@]}"; do
    name="slave${i}"
    # ${name} is expanded locally before the remote script runs.
    ssh "root@${SLAVES[$i]}" "
    echo ${name} > /etc/hostname
    reboot
    "
    echo "修改${SLAVES[$i]}主机设置为${name}"
    echo "为使其生效,需要重启${name}..."
  done
  echo "请稍等,正在重启服务器..."
  # Give all hosts time to come back up before the next step sshs in again.
  sleep 1m
}
# Give the master passwordless ssh access to itself and to every slave
# (the Hadoop/Spark start scripts need this to reach the workers).
# NOTE(review): "form" in the name is a typo for "from", kept because it is
# the function's public name and is called at the bottom of the script.
ssh_copy_id_form_master_to_all() {
  echo "配置master免密登录localhost和slaves..."
  # Generate the master's key pair and authorize it for localhost logins.
  ssh "root@${MASTER}" "
  ssh-keygen -t rsa
  cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys
  "
  # Relay the master's public key to each slave via this control machine.
  scp "root@${MASTER}:/root/.ssh/id_rsa.pub" ./
  local ip
  for ip in "${SLAVES[@]}"; do
    scp ./id_rsa.pub "root@${ip}:/root"
    ssh "root@${ip}" "
    cat /root/id_rsa.pub >> /root/.ssh/authorized_keys
    rm /root/id_rsa.pub
    "
  done
  rm ./id_rsa.pub
}
# Download, install and configure Hadoop on every node. A local 'slaves'
# file listing the slave IPs is generated and shipped alongside
# install/hadoop.sh, which reads it via the FILENAME env var.
install_hadoop() {
  echo "下载安装和配置hadoop..."
  rm -f slaves
  local ip host
  for ip in "${SLAVES[@]}"; do
    echo "${ip}" >> slaves
  done
  # Master and slaves get identical treatment — one loop instead of the
  # original duplicated master/slave bodies.
  for host in "${MASTER}" "${SLAVES[@]}"; do
    scp install/hadoop.sh "root@${host}:/root"
    scp slaves "root@${host}:/root"
    # SOFT_URL and MASTER are expanded locally; the remote shell exports
    # the literal values for hadoop.sh to consume.
    ssh "root@${host}" "
    export SOFT_URL=$SOFT_URL
    export MASTER=$MASTER
    export FILENAME='/root/slaves'
    bash /root/hadoop.sh
    rm /root/hadoop.sh
    rm /root/slaves
    "
  done
  rm slaves
}
# Download, install and configure HBase on every node. Mirrors
# install_hadoop: ship install/hbase.sh plus a generated 'slaves' file and
# run it with SOFT_URL/MASTER/FILENAME exported.
install_hbase() {
  echo "下载安装和配置HBase..."
  rm -f slaves
  local ip host
  for ip in "${SLAVES[@]}"; do
    echo "${ip}" >> slaves
  done
  # One loop over all hosts replaces the duplicated master/slave bodies.
  for host in "${MASTER}" "${SLAVES[@]}"; do
    scp install/hbase.sh "root@${host}:/root"
    scp slaves "root@${host}:/root"
    ssh "root@${host}" "
    export SOFT_URL=$SOFT_URL
    export MASTER=$MASTER
    export FILENAME='/root/slaves'
    bash /root/hbase.sh
    rm /root/hbase.sh
    rm /root/slaves
    "
  done
  rm slaves
}
# Download, install and configure Spark on every node. Mirrors
# install_hadoop: ship install/spark.sh plus a generated 'slaves' file and
# run it with SOFT_URL/MASTER/FILENAME exported.
install_spark() {
  echo "下载安装和配置spark..."
  rm -f slaves
  local ip host
  for ip in "${SLAVES[@]}"; do
    echo "${ip}" >> slaves
  done
  # One loop over all hosts replaces the duplicated master/slave bodies.
  for host in "${MASTER}" "${SLAVES[@]}"; do
    scp install/spark.sh "root@${host}:/root"
    scp slaves "root@${host}:/root"
    ssh "root@${host}" "
    export SOFT_URL=$SOFT_URL
    export MASTER=$MASTER
    export FILENAME='/root/slaves'
    bash /root/spark.sh
    rm /root/spark.sh
    rm /root/slaves
    "
  done
  rm slaves
}
# Print the manual steps for formatting HDFS and starting Hadoop on the
# master node. Reads the global MASTER.
hadoop_success() {
  printf '%s\n' \
    "恭喜您!成功安装hadoop。" \
    "启动hadoop的方法如下:" \
    "1.登录master节点: ssh root@${MASTER}" \
    "2.格式化hadoop: /usr/local/hadoop/bin/hdfs namenode -format" \
    "3.启动hadoop: /usr/local/hadoop/sbin/start-all.sh"
}
# Print the manual steps for starting HBase on the master node.
# Reads the global MASTER.
hbase_success() {
  printf '%s\n' \
    "恭喜您!成功安装hbase。" \
    "启动hbase的方法如下:" \
    "1.登录master节点: ssh root@${MASTER}" \
    "2.启动hbase: /usr/local/hbase/bin/start-hbase.sh"
}
# Print the manual steps for starting Spark on the master node.
# Reads the global MASTER.
spark_success() {
  printf '%s\n' \
    "恭喜您!成功安装spark。" \
    "启动spark的方法如下:" \
    "1.登录master节点: ssh root@${MASTER}" \
    "2.启动spark: /usr/local/spark/sbin/start-all.sh"
}
# --- Deployment sequence -------------------------------------------------
# Order matters: ssh access must exist before anything is installed,
# hostnames must be set (and hosts rebooted) before Hadoop is configured,
# and the master needs key access to the slaves before the cluster starts.
ssh_copy_id_to_all
install_wget
install_java
set_hostname_for_all
ssh_copy_id_form_master_to_all
install_hadoop
install_hbase
install_spark
# Finally, print the manual start-up instructions for each component.
hadoop_success
hbase_success
spark_success
# NOTE(review): the two lines below are Gitee web-page boilerplate accidentally
# captured into this file; commented out so the script parses — delete them.
# 此处可能存在不合适展示的内容,页面不予展示。您可通过相关编辑功能自查并修改。
# 如您确认内容无涉及 不当用语 / 纯广告导流 / 暴力 / 低俗色情 / 侵权 / 盗版 / 虚假 / 无价值内容或违法国家有关法律法规的内容,可点击提交进行申诉,我们将尽快为您处理。