Installing Hadoop 2.7.7 on Ubuntu
1. Install open-vm-tools
sudo apt-get install open-vm-tools
2. Install OpenJDK
sudo apt-get install openjdk-8-jdk
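To confirm the JDK installed correctly and to locate its directory (needed for JAVA_HOME in step 7), you can run:
java -version
update-alternatives --list java    # on amd64 this typically points under /usr/lib/jvm/java-8-openjdk-amd64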
3. Install and configure SSH
sudo apt-get install openssh-server
4. After the first SSH login there will be a .ssh folder in the current user's home directory; enter it: cd ~/.ssh
ssh-keygen -t rsa
Press Enter at every prompt to accept the defaults.
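Generating the key pair is not enough on its own: the Hadoop start scripts rely on passwordless SSH to localhost, so the public key still has to be appended to authorized_keys. A minimal sketch:
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
ssh localhost    # should now log in without asking for a password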
5. Download Hadoop 2.7.7, extract it, rename the extracted directory to hadoop, and place it under /usr/local (mind the permissions).
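One way to download and unpack it into your home directory first (the URL below is the Apache archive location for 2.7.7; any mirror carrying that release works just as well):
wget https://archive.apache.org/dist/hadoop/common/hadoop-2.7.7/hadoop-2.7.7.tar.gz -P ~
tar -xzf ~/hadoop-2.7.7.tar.gz -C ~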
sudo mv ~/hadoop-2.7.7 /usr/local/hadoop
6. Change the owner of the hadoop folder under /usr/local to your own user:
sudo chown -R <your-username> /usr/local/hadoop
7. Set environment variables
(1) Edit ~/.bashrc: sudo gedit ~/.bashrc
#~/.bashrc
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib"
export JAVA_LIBRARY_PATH=$HADOOP_HOME/lib/native:$JAVA_LIBRARY_PATH
# Environment variables needed when packaging Hadoop programs
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath):$CLASSPATH
# Make the changes take effect
source ~/.bashrc
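To confirm the new variables are picked up, a quick check in a fresh terminal (the expected output assumes the paths above):
echo $HADOOP_HOME    # should print /usr/local/hadoop
hadoop version       # should report Hadoop 2.7.7
java -version        # should report version 1.8.0_xxx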
(2) Edit /usr/local/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
(3) Edit /usr/local/hadoop/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
(4) Edit /usr/local/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/hadoop/hadoop_data/hdfs/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/hadoop/hadoop_data/hdfs/datanode</value>
</property>
</configuration>
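The namenode and datanode directories referenced above do not exist in a fresh extract, so create them before formatting HDFS in step 8 (assuming your user already owns /usr/local/hadoop from step 6, no sudo is needed):
mkdir -p /usr/local/hadoop/hadoop_data/hdfs/namenode
mkdir -p /usr/local/hadoop/hadoop_data/hdfs/datanode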
(5) Edit /usr/local/hadoop/etc/hadoop/mapred-site.xml (created by renaming mapred-site.xml.template)
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
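If mapred-site.xml does not exist yet, create it from the bundled template first, for example:
cp /usr/local/hadoop/etc/hadoop/mapred-site.xml.template /usr/local/hadoop/etc/hadoop/mapred-site.xml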
(6) Edit /usr/local/hadoop/etc/hadoop/yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
</configuration>
8. Format the HDFS filesystem
hdfs namenode -format
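If formatting succeeded, the namenode directory configured in hdfs-site.xml should now contain a current/ subdirectory with a VERSION file and an initial fsimage; a quick way to check:
ls /usr/local/hadoop/hadoop_data/hdfs/namenode/current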
9. Start Hadoop
start-all.sh (or, equivalently, start-dfs.sh followed by start-yarn.sh)
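To verify that all daemons started, list the Java processes with jps; on a single-node setup you would typically expect something like this (process IDs will differ):
jps
# expected entries: NameNode, DataNode, SecondaryNameNode, ResourceManager, NodeManager, Jps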
10. Check the web interfaces in a browser: the HDFS NameNode UI is at http://localhost:50070 and the YARN ResourceManager UI is at http://localhost:8088.