@Great-Chinese
2017-05-25T07:52:26.000000Z
字数 2513
阅读 825
hadoop2.x部署(SingleCluster)
cd /usr/local/src
wget http://archive.cloudera.com/cdh5/cdh/5/hadoop-2.5.0-cdh5.3.6.tar.gz
# 解压文件
tar -zxvf hadoop-2.5.0-cdh5.3.6.tar.gz
# /usr/local/下创建目录(注意:当前位于/usr/local/src,需使用绝对路径)
mkdir -p /usr/local/cdh
# 移动解压文件到/usr/local/cdh/hadoop/目录下
mv hadoop-2.5.0-cdh5.3.6 /usr/local/cdh/
# 进入安装目录,删除cmd文件、doc文件(注意:mv之后当前目录仍为/usr/local/src,需使用绝对路径)
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/share # 可选项,删除doc文档
rm -rf doc/
# 可选项,删除cmd文件
cd ../sbin
rm -rf ./*.cmd
cd ../libexec
rm -rf ./*.cmd
cd ../etc/hadoop
rm -rf ./*.cmd
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/etc/hadoop
# 配置hadoop的JAVA_HOME
vim hadoop-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
# 配置yarn的JAVA_HOME
vim yarn-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
# 配置mapreduce的JAVA_HOME
vim mapred-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/etc/hadoop
vim core-site.xml #增加内容如下
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://melody0113:8020</value>
</property>
</configuration>
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/etc/hadoop
vim slaves # 添加使用机的主机名
melody0113
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/etc/hadoop
vim hdfs-site.xml # 增加内容如下
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
# 以下命令均在hadoop安装根目录下执行
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6
bin/hdfs # 查看命令
bin/hdfs namenode -format # 格式化HDFS
# 启动namenode
sbin/hadoop-daemon.sh start namenode
# 启动datanode
sbin/hadoop-daemon.sh start datanode
# 查看启动
jps
# 启动出错请查看日志
http://melody0113:50070/
# 创建目录
bin/hdfs dfs -mkdir -p /user/melody/tmp
# 上传文件
bin/hdfs dfs -put etc/hadoop/core-site.xml /user/melody/tmp
# 读取文件
bin/hdfs dfs -cat /user/melody/tmp/core-site.xml
# 下载文件
bin/hdfs dfs -get /user/melody/tmp/core-site.xml /tmp
# 删除文件
bin/hdfs dfs -rm -f /user/melody/tmp/*
# linux下修改主机名
vim /etc/hosts
# windows下修改主机名
C:\Windows\System32\drivers\etc\hosts
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6/etc/hadoop
vim yarn-site.xml # 增加内容如下(注意:一个配置文件只能有一个configuration根节点,两个property写在同一个configuration内)
<!--配置服务框架(mapreduce shuffle,NodeManager)与ResourceManager-->
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>melody0113</value>
</property>
</configuration>
# 启动ResourceManager
sbin/yarn-daemon.sh start resourcemanager
# 启动NodeManager
sbin/yarn-daemon.sh start nodemanager
# 启动出错请查看日志:/usr/local/cdh/hadoop-2.5.0-cdh5.3.6/logs
WEB地址:http://melody0113:8088/
# 上传要wordcount的文件到hdfs(注意:上传目录需与下面作业的输入目录input一致)
bin/hdfs dfs -put /tmp/xxx.txt /user/local/tmp/input
# 运行wordcount(注意:CDH发行版的示例jar包名带cdh版本后缀;output目录必须事先不存在)
bin/yarn jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.5.0-cdh5.3.6.jar wordcount /user/local/tmp/input /user/local/tmp/output/
bin/hdfs dfsadmin -safemode leave