确保hadoop-env.sh和yarn-env.sh中的JAVA_HOME是正确的。
3) 修改/etc/profile(略,详见配置文件;蓝色部分用于编译hadoop)
export JAVA_HOME=/usr/jdk1.7.0_67
# Environment variables appended to /etc/profile for the Hadoop/Hive cluster
# (one export per line; the original text had several statements fused per line).
export HADOOP_HOME=/home/ha/hadoop-2.7.1
export HIVE_HOME=/home/ha/hive-1.2.1
export MAVEN_HOME=/home/ha/maven-3.3.3
export ANT_HOME=/home/ha/ant-1.9.6
export FINDBUGS_HOME=/home/ha/findbugs-3.0.1
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export JAVA_LIBRARY_PATH=$HADOOP_HOME/lib/native
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# NOTE(review): the original line was truncated ("export HADOOP_OPTS=\");
# restored to the conventional native-library setting — confirm against the source config.
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
# Merged the two CLASSPATH assignments; dropped the stray leading ':' after '='.
export CLASSPATH=$CLASSPATH:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HADOOP_HOME/lib
# Merged the PATH additions for JDK, Hadoop (bin+sbin), Hive and build tools.
export PATH=$PATH:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin
export PATH=$PATH:$FINDBUGS_HOME/bin:$ANT_HOME/bin:$MAVEN_HOME/bin
4) 格式化文件系统
[ha@hadoop0 ~]$ cd /home/ha/hadoop-2.7.1/bin
[ha@hadoop0 ~]$ hdfs namenode -format
5) 启动hadoop
[ha@hadoop0 ~]$ cd /home/ha/hadoop-2.7.1/sbin
[ha@hadoop0 ~]$ ./start-all.sh
6) 查看进程,成功状态如下: Hadoop0的进程
[ha@hadoop0 ~]$ jps
2931 SecondaryNameNode 3086 ResourceManager 6317 Jps
2738 NameNode
Hadoop1的进程
[ha@hadoop1 ~]$ jps 2475 NodeManager 3625 Jps
2361 DataNode
Hadoop2的进程
[ha@hadoop2 ~]$ jps
2475 NodeManager
3625 Jps
2361 DataNode
7) 停止hadoop
[ha@hadoop0 ~]$ cd /home/ha/hadoop-2.7.1/sbin
[ha@hadoop0 ~]$ ./stop-all.sh
8) 浏览器中查看
主机
http://192.168.1.151:50070 http://192.168.1.151:8088 辅机
http://192.168.1.152:19888
8. hive的安装
1) 安装mysql
[root@hadoop0 ~]# yum -y install mysql-server
2) 设置开机启动
[root@hadoop0 ~]# chkconfig mysqld on
3) 启动mysql
[root@hadoop0 ~]# service mysqld start
4) 设置root密码
[root@hadoop0 ~]# mysql -u root
mysql>set password for 'root'@'localhost'=password('111111'); mysql>set password for 'root'@'hadoop0'=password('111111');
5) 创建用户
[ha@hadoop0 ~]$ mysql -u root -p Enter password:
mysql> create user 'hadoop'@'hadoop0' identified by '111111';
mysql> grant all privileges on *.* to 'hadoop'@'hadoop0' with grant option; 6) 创建数据库
[root@hadoop0 ~]# su - ha
[ha@hadoop0 ~]$ mysql -h hadoop0 -u hadoop -p mysql>create database hive;
7) 下载解压hive
apache-hive-1.2.1-bin.tar.gz
[ha@hadoop0 ~]$ tar -zxvf apache-hive-1.2.1-bin.tar.gz
8) 配置/etc/profile(root用户)
[root@hadoop0 ~]# vi /etc/profile
export HIVE_HOME=/home/ha/hive-1.2.1
export PATH=$PATH:$HIVE_HOME/bin
授权
[root@hadoop0 ~]#chmod 777 /home/ha/hive-1.2.1/bin/*
9) 创建/user/hive/warehouse
[ha@hadoop0 ~]$ hdfs dfs -mkdir -p /user/hive
[ha@hadoop0 ~]$ hdfs dfs -mkdir -p /user/hive/warehouse
10) 配置Hive,编辑/home/ha/hive-1.2.1/conf/hive-site.xml
11) 将mysql-connector-java-5.1.13-bin.jar复制到/home/ha/hive-1.2.1/lib目录下(注:原文步骤11的文字缺失,此处按常规做法补充,请核对)
12) 启动hive
[ha@hadoop0 ~]$ hive 查看表
hive> show tables;
创建表test
hive>create table test(id int,name string); 查看test表字段 hive>desc test;