HDFS on CentOS: Integration and Configuration Guide
1. Environment Preparation and Planning

This guide assumes a three-node CentOS cluster (hadoop01, hadoop02, hadoop03) running Hadoop 3.3.4 under a dedicated hadoop user, with hadoop01 serving as the NameNode and hadoop02 as the ResourceManager.
2. Installation and Basic Configuration
# Install OpenJDK 8 and confirm the installation.
sudo yum install -y java-1.8.0-openjdk-devel
java -version

# Download Hadoop 3.3.4 and unpack it into /usr/local/hadoop.
wget https://archive.apache.org/dist/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz
tar -xzvf hadoop-3.3.4.tar.gz -C /usr/local/ && mv /usr/local/hadoop-3.3.4 /usr/local/hadoop

# Persist the Hadoop environment variables, then load them into the current shell.
sudo tee /etc/profile.d/hadoop.sh > /dev/null <<'EOF'
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
EOF
source /etc/profile.d/hadoop.sh

# Set up passwordless SSH from the main node to every node (required by the start scripts).
ssh-keygen -t rsa
ssh-copy-id hadoop@hadoop01
ssh-copy-id hadoop@hadoop02
ssh-copy-id hadoop@hadoop03

# Create the NameNode and DataNode storage directories.
mkdir -p /usr/local/hadoop/data/{namenode,datanode}
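Before moving on, it is worth confirming that passwordless SSH actually works to every node; a quick loop such as the following (hostnames and user as planned above) should print each hostname without a password prompt:

for h in hadoop01 hadoop02 hadoop03; do
  ssh hadoop@$h hostname   # no password prompt expected
done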
3. Core HDFS Configuration

In $HADOOP_HOME/etc/hadoop/hadoop-env.sh, point Hadoop at the JDK:

export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk

In $HADOOP_HOME/etc/hadoop/core-site.xml, declare the default filesystem:

<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop01:9000</value>
</property>
</configuration>
In $HADOOP_HOME/etc/hadoop/hdfs-site.xml, set the replication factor and storage directories:

<configuration>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/usr/local/hadoop/data/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/usr/local/hadoop/data/datanode</value>
</property>
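<!-- Disabling HDFS permission checks simplifies testing on a trusted cluster; leave the default (true) in production. -->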
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
</property>
</configuration>
In $HADOOP_HOME/etc/hadoop/mapred-site.xml, run MapReduce on YARN:

<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
In $HADOOP_HOME/etc/hadoop/yarn-site.xml, enable the MapReduce shuffle service and name the ResourceManager host:

<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>hadoop02</value>
</property>
</configuration>
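Hadoop expects the same configuration on every node. Assuming identical install paths on all three machines, one way to distribute the edited files is:

# Copy the configuration to the other nodes (paths assumed identical everywhere).
for h in hadoop02 hadoop03; do
  scp $HADOOP_HOME/etc/hadoop/*.xml $HADOOP_HOME/etc/hadoop/hadoop-env.sh hadoop@$h:$HADOOP_HOME/etc/hadoop/
done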
4. Startup and Verification
# Format the NameNode once (this erases any existing HDFS metadata), then start HDFS and YARN.
hdfs namenode -format
start-dfs.sh
start-yarn.sh

# jps should list NameNode/DataNode/SecondaryNameNode/ResourceManager/NodeManager, etc.
jps

# Check cluster health and list the filesystem root.
hdfs dfsadmin -report
hdfs dfs -ls /
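For an end-to-end check, a small smoke test (the file and directory names here are arbitrary) confirms that writes and reads reach the DataNodes:

# Write a local file into HDFS, read it back, then clean up.
echo "hello hdfs" > /tmp/hello.txt
hdfs dfs -mkdir -p /smoke-test
hdfs dfs -put /tmp/hello.txt /smoke-test/
hdfs dfs -cat /smoke-test/hello.txt    # should print: hello hdfs
hdfs dfs -rm -r /smoke-test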
5. Network, Firewall, and Client Integration

Open the ports HDFS uses. Note that Hadoop 3.x moved the old 500xx ports used by Hadoop 2.x; the values below are the 3.x defaults:

firewall-cmd --permanent --zone=public --add-port=9000/tcp   # NameNode RPC (matches fs.defaultFS)
firewall-cmd --permanent --zone=public --add-port=9870/tcp   # NameNode web UI
firewall-cmd --permanent --zone=public --add-port=9864/tcp   # DataNode web UI
firewall-cmd --permanent --zone=public --add-port=9866/tcp   # DataNode data transfer
firewall-cmd --permanent --zone=public --add-port=9867/tcp   # DataNode IPC
firewall-cmd --permanent --zone=public --add-port=9868/tcp   # Secondary NameNode web UI
firewall-cmd --reload
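The firewall rules are needed on every node, not just the one where they were first entered. Assuming the passwordless SSH set up earlier and sudo rights on each host, a loop can apply them cluster-wide:

# Apply the same firewall rules on the remaining nodes.
for h in hadoop02 hadoop03; do
  for p in 9000 9870 9864 9866 9867 9868; do
    ssh hadoop@$h "sudo firewall-cmd --permanent --zone=public --add-port=$p/tcp"
  done
  ssh hadoop@$h "sudo firewall-cmd --reload"
done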
Basic client commands for moving data between the local filesystem and HDFS:

hdfs dfs -put /local/file /hdfs/path
hdfs dfs -get /hdfs/file /local/path

For Java applications, add the hadoop-client dependency to the project's pom.xml:

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.3.4</version>
</dependency>
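To verify that a client machine can reach the cluster at all, the generic -fs option lets any hdfs command target the NameNode directly, without a local core-site.xml (this assumes hadoop01 resolves from the client and port 9000 is open):

# List the HDFS root from a client machine against the NameNode.
hdfs dfs -fs hdfs://hadoop01:9000 -ls /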