@Great-Chinese
2017-05-25T07:52:26.000000Z
字数 2513
阅读 848
hadoop2.x部署(SingleCluster)
# Download the CDH Hadoop tarball.
cd /usr/local/src
wget http://archive.cloudera.com/cdh5/cdh/5/hadoop-2.5.0-cdh5.3.6.tar.gz
# Extract the archive.
tar -zxvf hadoop-2.5.0-cdh5.3.6.tar.gz
# Create the target directory under /usr/local/.
# NOTE(review): the original ran `mkdir -p cdh/hadoop` from /usr/local/src,
# which creates the directory in the wrong place — use an absolute path.
mkdir -p /usr/local/cdh/hadoop
# Move the extracted tree to /usr/local/cdh/.
mv hadoop-2.5.0-cdh5.3.6 /usr/local/cdh/
# Optional cleanup: remove the bundled docs and the Windows-only .cmd scripts.
# NOTE(review): the original's repeated relative `cd hadoop/...` steps do not
# chain correctly once inside a subdirectory; run everything from the install
# root instead.
cd /usr/local/cdh/hadoop-2.5.0-cdh5.3.6
rm -rf share/doc/        # optional: drop documentation
rm -f sbin/*.cmd         # optional: drop Windows batch scripts
rm -f libexec/*.cmd
rm -f etc/hadoop/*.cmd
# Point Hadoop, YARN and MapReduce at the installed JDK.
cd hadoop/etc/hadoop
# Configure JAVA_HOME for hadoop: add the export line below inside hadoop-env.sh.
vim hadoop-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
# Configure JAVA_HOME for yarn: add the export line below inside yarn-env.sh.
vim yarn-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
# Configure JAVA_HOME for mapreduce: add the export line below inside mapred-env.sh.
vim mapred-env.sh
export JAVA_HOME=/usr/local/jdk1.8.0_131
cd hadoop/etc/hadoopvim core-site.xml #增加内容如下<configuration><property><name>fs.defaultFS</name><value>hdfs://melody0113:8020</value></property></configuration>
cd hadoop/etc/hadoop
# slaves lists the hostnames of the worker (DataNode) machines, one per line.
vim slaves   # add this machine's hostname
melody0113
cd /etc/hadoopvim hdfs-site.xml # 增加内容如下<configuration><property><name>dfs.replication</name><value>1</value></property></configuration>
# Show the hdfs sub-command usage.
bin/hdfs
# Format the HDFS filesystem.
# WARNING: only do this on first setup — reformatting destroys existing data.
bin/hdfs namenode -format
# Start the NameNode daemon.
sbin/hadoop-daemon.sh start namenode
# Start the DataNode daemon.
sbin/hadoop-daemon.sh start datanode
# Verify both daemons are running.
jps
# If startup fails, check the log files.
NameNode web UI: http://melody0113:50070/
# Create a directory in HDFS.
bin/hdfs dfs -mkdir -p /user/melody/tmp
# Upload a file.
bin/hdfs dfs -put etc/hadoop/core-site.xml /user/melody/tmp
# Read a file.
bin/hdfs dfs -cat /user/melody/tmp/core-site.xml
# Download a file to the local filesystem.
bin/hdfs dfs -get /user/melody/tmp/core-site.xml /tmp
# Delete files.
bin/hdfs dfs -rm -f /user/melody/tmp/*
# Linux: add the hostname-to-IP mapping to the hosts file.
# (This maps the name used in the configs above; it does not rename the host.)
vim /etc/hosts
# Windows: edit C:\Windows\System32\drivers\etc\hosts for the same mapping.
cd /etc/hadoopvim yarn-site.xml # 增加内容如下<!--配置服务框架(以下为mapreduce框架的配置,NodeManager)--><configuration><property><name>yarn.nodemanager.aux-services</name><value>mapreduce_shuffle</value></property></configuration><!--配置ResourceManager--><configuration><property><name>yarn.resourcemanager.hostname</name><value>melody0113</value></property></configuration>
# NOTE(review): the original used XML <!-- --> comments around shell commands;
# shell comments use '#'.
# Start the ResourceManager.
sbin/yarn-daemon.sh start resourcemanager
# Start the NodeManager.
sbin/yarn-daemon.sh start nodemanager
# If startup fails, check the logs: /opt/modules/hadoop-2.5.0/logs
ResourceManager web UI: http://melody0113:8088/
# Upload the file to be word-counted into the job's HDFS input directory.
# NOTE(review): the original uploaded to /user/local/tmp but ran the job on
# /user/local/tmp/input — upload into the directory the job actually reads.
bin/hdfs dfs -put /tmp/xxx.txt /user/local/tmp/input
# Run the wordcount example.
# NOTE(review): with CDH 5.3.6 the examples jar is versioned
# hadoop-mapreduce-examples-2.5.0-cdh5.3.6.jar, not ...-2.5.0.jar.
bin/yarn jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.5.0-cdh5.3.6.jar wordcount /user/local/tmp/input /user/local/tmp/output/
# Force the NameNode out of safe mode (useful when HDFS stays read-only,
# e.g. after an unclean shutdown or before the block-report threshold is met).
bin/hdfs dfsadmin -safemode leave