# Generate a key pair (on node1, node2, node3 and node4)
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cd ~/.ssh/
ls
# Notes:
## id_dsa      the private key
## id_dsa.pub  the public key
# Append the public key to the local authorized_keys file (on node1, node2, node3 and node4)
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
ssh 192.168.2.136
# Last login: Sun Jul 9 14:14:35 2017 from 192.168.2.136, which means the login succeeded and no password was requested
exit
# logout, leaves the SSH session
Copy node1's public key to node2, node3 and node4:
scp ./id_dsa.pub root@192.168.2.137:/opt/
scp ./id_dsa.pub root@192.168.2.138:/opt/
scp ./id_dsa.pub root@192.168.2.139:/opt/
Append /opt/id_dsa.pub to the authorized_keys file on node2, node3 and node4:
cat /opt/id_dsa.pub >> ~/.ssh/authorized_keys
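Before moving on, a quick check is to SSH from node1 to each of the other nodes and run a trivial command; with the keys in place none of these should prompt for a password (user and IPs as used above):
ssh root@192.168.2.137 hostname
ssh root@192.168.2.138 hostname
ssh root@192.168.2.139 hostname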
Upload the hadoop tarball to the /root directory on node1, then unpack and move it:
tar -zxvf hadoop-2.5.1_x64.tar.gz
mv hadoop-2.5.1 /home/
cd /home/
ls
cd hadoop-2.5.1
ls
cd etc/hadoop
# Config 1: edit hadoop-env.sh
vi hadoop-env.sh
# add the JDK installation directory to this file
export JAVA_HOME=/usr/java/jdk1.7.0_79
# Config 2: edit core-site.xml
vi core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.2.136:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/hadoop-2.5</value>
  </property>
</configuration>
# Config 3: edit hdfs-site.xml
vi hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>192.168.2.137:50090</value>
  </property>
  <property>
    <name>dfs.namenode.secondary.https-address</name>
    <value>192.168.2.137:50091</value>
  </property>
</configuration>
# Config 4: edit slaves (one DataNode host per line)
vi slaves
192.168.2.137
192.168.2.138
192.168.2.139
# Config 5: edit masters (the SecondaryNameNode host)
vi masters
192.168.2.137
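As a sanity check that the XML above parses and is being read, hdfs getconf can echo a key back; the absolute path is used here only because HADOOP_HOME is not added to PATH until a later step:
/home/hadoop-2.5.1/bin/hdfs getconf -confKey fs.defaultFS
# should print hdfs://192.168.2.136:9000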
Copy the configured hadoop-2.5.1 directory from node1 to the other nodes:
scp -r hadoop-2.5.1/ root@192.168.2.137:/home/
scp -r hadoop-2.5.1/ root@192.168.2.138:/home/
scp -r hadoop-2.5.1/ root@192.168.2.139:/home/
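Optionally, confirm the copy landed on each node, for example by listing the remote config directory (user and paths as above):
ssh root@192.168.2.137 ls /home/hadoop-2.5.1/etc/hadoop
ssh root@192.168.2.138 ls /home/hadoop-2.5.1/etc/hadoop
ssh root@192.168.2.139 ls /home/hadoop-2.5.1/etc/hadoop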
Add the hadoop environment variables on node1:
vi ~/.bash_profile
export HADOOP_HOME=/home/hadoop-2.5.1
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
Copy the updated profile to node2, node3 and node4:
scp ~/.bash_profile root@192.168.2.137:/root/
scp ~/.bash_profile root@192.168.2.138:/root/
scp ~/.bash_profile root@192.168.2.139:/root/
source ~/.bash_profile
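A quick way to verify the PATH change took effect (run on node1; a fresh login on the other nodes picks up the copied profile automatically):
which hdfs
hadoop version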
hdfs namenode -format
# The format step writes the initial fsimage under the hadoop.tmp.dir configured above (/opt/hadoop-2.5); check that it exists
cd /opt/hadoop-2.5/dfs/name/current
ls -l
start-dfs.sh   # start-all.sh also works, but in Hadoop 2.x it is deprecated and simply calls start-dfs.sh and start-yarn.sh
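One way to confirm the daemons actually started is jps, the JDK's process lister; given the config above, node1 should show a NameNode, 192.168.2.137 a DataNode plus SecondaryNameNode, and the remaining nodes a DataNode each:
jps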
service iptables stop
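Note that service iptables stop only lasts until the next reboot; on a CentOS 6 style system (which the service/iptables commands suggest) the firewall can also be disabled permanently:
chkconfig iptables off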
http://192.168.2.136:50070/dfshealth.html#tab-overview
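Besides the web UI, the cluster can be smoke-tested from the command line: dfsadmin -report should list the three DataNodes, and a trivial directory round-trip confirms writes work (/test here is just an arbitrary example path):
hdfs dfsadmin -report
hdfs dfs -mkdir /test
hdfs dfs -ls /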