[server@hadoop102 bin]$ pwd
/home/server/bin
[server@hadoop102 bin]$ vim jpsall
#!/bin/bash
# Run jps on every node of the cluster
for host in hadoop102 hadoop103 hadoop104
do
    echo =================$host===================
    ssh $host jps
done
[server@hadoop102 bin]$ chmod 777 jpsall
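jpsall can later be invoked by name from any directory (as it is further down) because ~/bin, i.e. /home/server/bin, is normally added to the user's PATH by the default shell profile. A quick sanity check, assuming that default setup:

echo $PATH | tr ':' '\n' | grep bin    # /home/server/bin should appear in the list
which jpsall                           # should resolve to /home/server/bin/jpsall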
Distribute it to the other nodes with the distribution script:
[server@hadoop102 bin]$ xsync /home/server/bin
======= hadoop102 ======
sending incremental file list
sent 186 bytes received 17 bytes 135.33 bytes/sec
total size is 3,396 speedup is 16.73
======= hadoop103 ======
sending incremental file list
bin/
sent 189 bytes received 20 bytes 139.33 bytes/sec
total size is 3,396 speedup is 16.25
======= hadoop104 ======
sending incremental file list
bin/
sent 189 bytes received 20 bytes 139.33 bytes/sec
total size is 3,396 speedup is 16.25
Usage: for example, after Hadoop has been started:
[server@hadoop102 bin]$ jpsall
=================hadoop102===================
8256 Jps
7475 NameNode
7589 DataNode
7958 NodeManager
8138 JobHistoryServer
=================hadoop103===================
7988 Jps
7230 DataNode
7422 ResourceManager
7566 NodeManager
=================hadoop104===================
7506 NodeManager
7285 DataNode
7719 Jps
7400 SecondaryNameNode
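Note that jps also lists its own process on every host (the Jps lines above). If that line is just noise, the ssh line in jpsall can filter it out on the remote side; a small hedged variant:

ssh $host "jps | grep -v Jps"    # same as before, but drop the Jps process itself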
Appendix: the xsync script (the distribution script)
[server@hadoop104 bin]$ cat xsync
#!/bin/bash
#1. Check the number of arguments
if [ $# -lt 1 ]
then
    echo "Not Enough Arguments!"
    exit
fi
#2. Iterate over every machine in the cluster
for host in hadoop102 hadoop103 hadoop104
do
    echo ======= $host ======
    #3. Iterate over all files/directories given and send them one by one
    for file in "$@"
    do
        #4. Check whether the file exists
        if [ -e "$file" ]
        then
            #5. Get the absolute parent directory (resolving symlinks)
            pdir=$(cd -P "$(dirname "$file")"; pwd)
            #6. Get the file name
            fname=$(basename "$file")
            ssh $host "mkdir -p $pdir"
            rsync -av "$pdir/$fname" $host:"$pdir"
        else
            echo "$file does not exist!"
        fi
    done
done
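The key step in xsync is resolving each argument's absolute parent directory, so the file lands at the same absolute path on every host even when a relative path or a symlink is passed. A quick illustration with a hypothetical relative argument:

# hypothetical argument: xsync bin/jpsall, run from /home/server
file=bin/jpsall
pdir=$(cd -P "$(dirname "$file")"; pwd)    # -> /home/server/bin (-P resolves symlinks)
fname=$(basename "$file")                  # -> jpsall
# rsync then copies /home/server/bin/jpsall to the same path on each host:
# rsync -av "$pdir/$fname" $host:"$pdir"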