1 設定ip位址
點選(此處)折疊或打開
[root@test1 ~]# vi /etc/sysconfig/network-scripts/ifcfg-eth0
# Intel Corporation 82545EM Gigabit Ethernet Controller (Copper)
DEVICE=eth0
BOOTPROTO=none
ONBOOT=yes
HWADDR=00:0c:29:51:cc:37
TYPE=Ethernet
NETMASK=255.255.255.0
IPADDR=192.168.23.131
GATEWAY=192.168.23.1
USERCTL=no
IPV6INIT=no
PEERDNS=yes
執行指令 service network restart
驗證: ifconfig
2 關閉防火牆
執行指令 service iptables stop
驗證: service iptables status
3 關閉防火牆的自動運作
執行指令 chkconfig iptables off
驗證: chkconfig --list | grep iptables
4 設定主機名
執行指令
(1)hostname hadoop1
(2)vi /etc/sysconfig/network
NETWORKING=yes
NETWORKING_IPV6=yes
HOSTNAME=hadoop1
5 ip與hostname綁定
執行指令 (1)vi /etc/hosts
192.168.23.131 hadoop1.localdomain hadoop1
驗證: ping hadoop1
6 設定ssh免密碼登錄
(1)ssh-keygen -t rsa
(2)cp ~/.ssh/id_rsa.pub ~/.ssh/authorized_keys
驗證:
[root@test1 ~]# ssh hadoop1
The authenticity of host 'hadoop1 (192.168.23.131)' can't be established.
RSA key fingerprint is e9:9f:f2:ea:f2:aa:47:58:5f:12:ea:3c:50:3f:0d:1b.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'hadoop1,192.168.23.131' (RSA) to the list of known hosts.
Last login: Thu Feb 11 20:54:11 2016 from 192.168.23.1
[root@hadoop1 ~]# ssh hadoop1
Last login: Thu Feb 11 20:57:56 2016 from hadoop1.localdomain
7 安裝jdk
(1)執行指令
[root@hadoop1 java]# cd /usr/share/java
[root@hadoop1 java]# cd
[root@hadoop1 ~]# cd /usr/share/java
[root@hadoop1 java]# cp /tmp/jdk-7u79-linux-x64.gz ./
[root@hadoop1 java]# tar -xzvf jdk-7u79-linux-x64.gz
(2)vi /etc/profile 增加內容如下:
export JAVA_HOME=/usr/share/java/jdk1.7.0_79
export PATH=.:$JAVA_HOME/bin:$PATH
(3)source /etc/profile
驗證: java -version
8 安裝hadoop
(1)執行指令
[root@hadoop1 ~]# cd /usr/local/
[root@hadoop1 local]# cp /tmp/hadoop-2.6.0.tar.gz ./
[root@hadoop1 local]# tar -zxvf hadoop-2.6.0.tar.gz
[root@hadoop1 local]# mv hadoop-2.6.0 hadoop
(2)vi /etc/profile 增加內容如下:
export HADOOP_HOME=/usr/local/hadoop
export PATH=.:$HADOOP_HOME/bin:$JAVA_HOME/bin:$PATH
(3)source /etc/profile
(4)修改/usr/local/hadoop/etc/hadoop目錄下的配置檔案hadoop-env.sh、core-site.xml、hdfs-site.xml、mapred-site.xml
[root@hadoop1 hadoop]# vi hadoop-env.sh
export JAVA_HOME=/usr/share/java/jdk1.7.0_79
[root@hadoop1 hadoop]# vi core-site.xml
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://hadoop1:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/usr/local/hadoop/tmp</value>
</property>
</configuration>
[root@hadoop1 hadoop]# vi hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
</property>
</configuration>
[root@hadoop1 hadoop]# cp mapred-site.xml.template mapred-site.xml
[root@hadoop1 hadoop]# vi mapred-site.xml
<configuration>
<property>
<name>mapred.job.tracker</name>
<value>hadoop1:9001</value>
</property>
</configuration>
(5)hadoop namenode -format
(6)start-all.sh
[root@hadoop1 hadoop]# cd sbin
[root@hadoop1 sbin]# start-all.sh
This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh
16/02/11 21:40:54 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Starting namenodes on [hadoop1]
hadoop1: starting namenode, logging to /usr/local/hadoop/logs/hadoop-root-namenode-hadoop1.out
The authenticity of host 'localhost (127.0.0.1)' can't be established.
localhost: Warning: Permanently added 'localhost' (RSA) to the list of known hosts.
localhost: starting datanode, logging to /usr/local/hadoop/logs/hadoop-root-datanode-hadoop1.out
Starting secondary namenodes [0.0.0.0]
The authenticity of host '0.0.0.0 (0.0.0.0)' can't be established.
0.0.0.0: Warning: Permanently added '0.0.0.0' (RSA) to the list of known hosts.
0.0.0.0: starting secondarynamenode, logging to /usr/local/hadoop/logs/hadoop-root-secondarynamenode-hadoop1.out
16/02/11 21:41:27 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
starting yarn daemons
starting resourcemanager, logging to /usr/local/hadoop/logs/yarn-root-resourcemanager-hadoop1.out
localhost: starting nodemanager, logging to /usr/local/hadoop/logs/yarn-root-nodemanager-hadoop1.out
[root@hadoop1 sbin]# jps
7192 SecondaryNameNode
7432 NodeManager
7468 Jps
6913 NameNode
7333 ResourceManager
7036 DataNode
驗證: (1)執行指令jps 如果看到5個新的java程序,分別是NameNode、SecondaryNameNode、DataNode、ResourceManager、NodeManager
(2)在瀏覽器檢視
hadoop web控制台頁面的端口整理:
50070:hdfs檔案管理 http://192.168.23.131:50070
8088:ResourceManager http://192.168.23.131:8088
8042:NodeManager http://192.168.23.131:8042
9 啟動時沒有NameNode的可能原因:
(1)沒有格式化
(2)環境變量設定錯誤
(3)ip與hostname綁定失敗
參考: