欢迎光临
Hadoop3的安装
   

Hadoop3的安装

一、修改配置文件

1.hadoop-env.sh

# Point every Hadoop daemon at the JDK installation.
export JAVA_HOME=/export/server/jdk1.8.0_65
# Append at the end of the file: allow the HDFS/YARN daemons to run as root
# (without these, Hadoop 3 refuses to start daemons under the root user).
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root

2.core-site.xml



<property>
    <name>fs.defaultFS</name>
    <value>hdfs://node1:8020</value>
</property>

<property>
    <name>hadoop.tmp.dir</name>
    <value>/export/data/hadoop-3.3.0</value>
</property>

<property>
    <name>hadoop.http.staticuser.user</name>
    <value>root</value>
</property>

<property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
</property>

<property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
</property>
3.hdfs-site.xml




<property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>node2:9868</value>
</property>
4.mapred-site.xml



<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>

<property>
    <name>mapreduce.jobhistory.address</name>
    <value>node1:10020</value>
</property>

<property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>node1:19888</value>
</property>

<property>
    <name>yarn.app.mapreduce.am.env</name>
    <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>

<property>
    <name>mapreduce.map.env</name>
    <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>

<property>
    <name>mapreduce.reduce.env</name>
    <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
5.yarn-site.xml



<property>
    <name>yarn.resourcemanager.hostname</name>
    <value>node1</value>
</property>

<property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
</property>

<property>
    <name>yarn.nodemanager.pmem-check-enabled</name>
    <value>false</value>
</property>

<property>
    <name>yarn.nodemanager.vmem-check-enabled</name>
    <value>false</value>
</property>

<property>
    <name>yarn.log-aggregation-enable</name>
    <value>true</value>
</property>

<property>
    <name>yarn.log.server.url</name>
    <value>http://node1:19888/jobhistory/logs</value>
</property>

<property>
    <name>yarn.log-aggregation.retain-seconds</name>
    <value>604800</value>
</property>
6.workers

# Edit the workers file: one hostname per line; each listed host
# runs a DataNode (and NodeManager) when the cluster starts.
vi workers
node1
node2
node3

二、分发安装包并启动

# Distribute the Hadoop installation to the other cluster nodes.
cd /export/server
scp -r hadoop-3.3.0 root@node2:$PWD
scp -r hadoop-3.3.0 root@node3:$PWD
# Add environment variables (fixed typo: was /etc/proflie)
vim /etc/profile
# Hadoop3
export HADOOP_HOME=/export/server/hadoop-3.3.0
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native:$HADOOP_COMMON_LIB_NATIVE_DIR"
export YARN_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
# NOTE(review): the author could not resolve the warning below; Hadoop 3
# deprecated YARN_CONF_DIR in favor of HADOOP_CONF_DIR, so leaving the
# export commented out is the correct workaround.
# WARNING: YARN_CONF_DIR has been replaced by HADOOP_CONF_DIR. Using value of YARN_CONF_DIR.Stopping nodemanagers
#export YARN_CONF_DIR=$YARN_HOME/etc/hadoop
export HADOOP_LOG_DIR=$HADOOP_HOME/logs
export HDFS_CONF_DIR=$HADOOP_HOME/etc/hadoop
export PATH=.:$JAVA_HOME/lib:$HADOOP_HOME/sbin:$HADOOP_HOME/bin:$PATH

source /etc/profile
# First start ONLY: format the NameNode (re-running this wipes HDFS metadata).
hdfs namenode -format
# One-command start of HDFS and YARN (uses the workers file for remote nodes).
start-dfs.sh
start-yarn.sh

Web UI页面

打赏
版权声明:本文采用知识共享 署名4.0国际许可协议 [BY-NC-SA] 进行授权
文章名称:《Hadoop3的安装》
文章链接:https://goodmancom.com/wl/175916.html