1. Basic setup:
Three servers, each with the JDK installed.
2. Set each server's hostname:
# set the hostname
hostnamectl set-hostname VM-0-109-centos
# start a new shell so the hostname takes effect
bash
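Each machine gets its own name; for the three nodes used throughout these notes, that would be:
# run the matching command on each machine
hostnamectl set-hostname VM-0-109-centos   # node 1
hostnamectl set-hostname VM-0-132-centos   # node 2
hostnamectl set-hostname VM-0-41-centos    # node 3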
3. Configure environment variables on each server:
Add the following to /etc/profile (vim /etc/profile):
export HADOOP_HOME=/opt/bigdata/hadoop-3.2.4
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
# reload so the variables take effect
source /etc/profile
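A quick check that the variables took effect (expected output assumes the paths above):
echo $HADOOP_HOME   # /opt/bigdata/hadoop-3.2.4
hadoop version      # should report Hadoop 3.2.4 if PATH is correct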
4. Configure passwordless SSH between the cluster servers
# generate a key pair (empty passphrase)
ssh-keygen -t rsa -P ""
# allow passwordless login to the local machine itself
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys
If chmod is denied, the file may have the immutable attribute set: check with lsattr authorized_keys and clear it with chattr -i authorized_keys (if chattr itself is not executable, chmod a+x it first).
# copy the public key to every node (SSH listens on port 15312 here)
ssh-copy-id -i /root/.ssh/id_rsa.pub -p 15312 root@VM-0-132-centos
ssh-copy-id -i /root/.ssh/id_rsa.pub -p 15312 root@VM-0-41-centos
ssh-copy-id -i /root/.ssh/id_rsa.pub -p 15312 root@VM-0-109-centos
# test the connection
ssh -p 15312 VM-0-109-centos
# repeat this whole step on each of the three servers
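A small loop to confirm passwordless login from the current node to all three (hostnames and port from the steps above):
for h in VM-0-109-centos VM-0-132-centos VM-0-41-centos; do
  ssh -p 15312 -o BatchMode=yes root@"$h" hostname
done
# BatchMode=yes makes ssh fail instead of prompting, so any host still
# asking for a password surfaces as an error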
5. Edit the hosts file
vi /etc/hosts and add entries mapping all three hostnames, then copy the file to the other nodes:
scp -P 15312 /etc/hosts VM-0-132-centos:/etc/
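For reference, the entries look like this; the 10.0.0.x addresses are placeholders for each server's actual internal IP:
10.0.0.109  VM-0-109-centos
10.0.0.132  VM-0-132-centos
10.0.0.41   VM-0-41-centos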
6. Extract the tarball to the target directory
tar -zxvf hadoop-3.2.4.tar.gz -C /opt/bigdata/
In etc/hadoop under the Hadoop install directory, edit hadoop-env.sh:
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_162
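A quick sanity check that the JDK path set above actually exists and runs (version assumed from the path):
/usr/lib/jvm/jdk1.8.0_162/bin/java -version   # should report 1.8.0_162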
7. In etc/hadoop under the Hadoop install directory, edit core-site.xml, mainly to point fs.defaultFS at the NameNode:
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://VM-0-109-centos:8020</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/bigdata/hadoop-3.2.4/data</value>
  </property>
  <property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
  </property>
  <property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
  </property>
</configuration>
8. In etc/hadoop under the Hadoop install directory, edit hdfs-site.xml, mainly to place the SecondaryNameNode:
<configuration>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>VM-0-41-centos:50090</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
</configuration>
9. In etc/hadoop under the Hadoop install directory, edit yarn-site.xml, mainly to place the ResourceManager:
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>VM-0-41-centos</value>
  </property>
</configuration>
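One property these notes leave unset, which MapReduce jobs on YARN normally require explicitly, is the shuffle auxiliary service; a minimal addition to the same file:
<property>
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>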
10. Edit the workers file
Write the three hostnames into etc/hadoop/workers (Hadoop 2.x called this file slaves).
# after editing, sync the configured install to the other servers with scp
scp -P 15312 -r hadoop-3.2.4/ VM-0-132-centos:/opt/bigdata
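The third node needs the same copy, and if /etc/profile was only edited on one machine it has to be distributed too; a sketch using the same port:
scp -P 15312 -r hadoop-3.2.4/ VM-0-41-centos:/opt/bigdata
scp -P 15312 /etc/profile VM-0-41-centos:/etc/    # then run source /etc/profile on that node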
11. Format the NameNode (run once on the NameNode host, before the first start)
bin/hdfs namenode -format
12. Start Hadoop
Everything at once: sbin/start-all.sh
Or per subsystem: start-dfs.sh and start-yarn.sh (Hadoop 3 has no separate MapReduce start script; the JobHistory server, if needed, starts with mapred --daemon start historyserver)
Or per daemon:
hadoop-daemon.sh start namenode
hadoop-daemon.sh start datanode
hadoop-daemon.sh start secondarynamenode
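These hadoop-daemon.sh scripts still work in 3.2.4 but print a deprecation warning; the 3.x-native equivalents are:
hdfs --daemon start namenode
hdfs --daemon start datanode
hdfs --daemon start secondarynamenode
yarn --daemon start resourcemanager
yarn --daemon start nodemanager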
After startup, the NameNode web UI is at [http://ip:9870/] (port 50070 in Hadoop 2.x moved to 9870 in 3.x).
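To verify that the daemons landed where intended, run jps on each node; with the role assignments above (NameNode on VM-0-109-centos, SecondaryNameNode and ResourceManager on VM-0-41-centos, all three hosts listed in workers) the expected output is roughly:
jps
# VM-0-109-centos: NameNode, DataNode, NodeManager, Jps
# VM-0-41-centos:  SecondaryNameNode, ResourceManager, DataNode, NodeManager, Jps
# VM-0-132-centos: DataNode, NodeManager, Jps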
13. Stop Hadoop
sbin/stop-all.sh