Hive HA Installation

 

 

0. Preparation: Hadoop servers

 

10.156.50.35 yanfabu2-35.base.app.dev.yf zk1  hadoop1 master1 master
10.156.50.36 yanfabu2-36.base.app.dev.yf zk2  hadoop2 master2
10.156.50.37 yanfabu2-37.base.app.dev.yf zk3  hadoop3 slaver1

 

 

1. Install MySQL

 

rpm -qa | grep postfix
rpm -qa | grep mariadb
rpm -ev postfix-2.10.1-6.el7.x86_64
rpm -ev  mariadb-libs-5.5.56-2.el7.x86_64

rpm -ivh mysql-community-common-5.7.17-1.el6.x86_64.rpm
rpm -ivh mysql-community-libs-5.7.17-1.el6.x86_64.rpm 
rpm -ivh mysql-community-client-5.7.17-1.el6.x86_64.rpm
rpm -ivh mysql-community-server-5.7.17-1.el6.x86_64.rpm 
rpm -ivh mysql-community-devel-5.7.17-1.el6.x86_64.rpm


mysqld --user=mysql --initialize   # prints a temporary root password
grep 'temporary password' /var/log/mysqld.log   # shows that temporary password
service mysqld start  
mysql -u root -p  
  
set global validate_password_policy=0;
SET PASSWORD = PASSWORD('root');  
use mysql  
update user set host='%' where user='root' and host='localhost';  
flush privileges;   
exit  


CREATE DATABASE metastore;  
USE metastore;  
SOURCE /home/zkkafka/hive/scripts/metastore/upgrade/mysql/hive-schema-2.3.0.mysql.sql;  
CREATE USER 'hive'@'localhost' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-35.base.app.dev.yf' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-36.base.app.dev.yf' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-37.base.app.dev.yf' IDENTIFIED BY 'hive';  
FLUSH PRIVILEGES; 
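
As a quick sanity check (not part of the original steps), log in with the new hive account and confirm the schema was loaded; the VERSION table and its SCHEMA_VERSION column come from the hive-schema-2.3.0.mysql.sql script sourced above:

# verify the metastore schema is in place using the newly created hive account
mysql -u hive -phive -e "USE metastore; SHOW TABLES LIKE 'VERSION'; SELECT SCHEMA_VERSION FROM VERSION;"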

 

 

2. Upload mysql-connector-java

 

 

mv mysql-connector-java-5.1.47.jar hive/lib/
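
A quick check that the JDBC driver really landed on Hive's classpath (assuming HIVE_HOME is set as in section 3):

ls -l $HIVE_HOME/lib/mysql-connector-java-*.jar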

 

 

3. Edit the configuration files

 

 

vim ~/.bash_profile 

export HIVE_HOME=/home/zkkafka/hive
export PATH=$HIVE_HOME/bin:$PATH

source  ~/.bash_profile 
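
To confirm the environment change took effect (assuming the Hive distribution is already unpacked at /home/zkkafka/hive):

echo $HIVE_HOME        # should print /home/zkkafka/hive
which hive             # should resolve to /home/zkkafka/hive/bin/hive
hive --version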

 

 


 

The complete ~/.bash_profile for reference:
==========================================================
export PATH
export LANG="zh_CN.utf8"

#java
export   JAVA_HOME=/home/zkkafka/jdk1.8.0_151
export   CLASSPATH=$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:
export   PATH=$JAVA_HOME/bin:$PATH

#zookeeper
export   ZOOKEEPER_HOME=/home/zkkafka/zookeeper-3.4.6
export   PATH=$PATH:$ZOOKEEPER_HOME/bin:$ZOOKEEPER_HOME/conf

#kafka
export KAFKA_HOME=/home/zkkafka/kafka_2.11-2.1.1
export PATH=$KAFKA_HOME/bin:$PATH


#hbase
export HBASE_HOME=/home/zkkafka/hbase
export PATH=$HBASE_HOME/bin:$PATH

#hive
export HIVE_HOME=/home/zkkafka/hive
export PATH=$HIVE_HOME/bin:$PATH
export HIVE_CONF_DIR=$HIVE_HOME/conf

#hadoop
export HADOOP_HOME=/home/zkkafka/hadoop
export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$PATH
==========================================================

 

 

 

4. Install Hive

 

4.0 Preparation

 

mkdir -p /home/zkkafka/hive/tmp/HiveJobsLog
mkdir -p /home/zkkafka/hive/tmp/ResourcesLog
mkdir -p /home/zkkafka/hive/tmp/HiveRunLog
mkdir -p /home/zkkafka/hive/tmp/OpertitionLog

Copy tools.jar into hive/lib:
cp  $JAVA_HOME/lib/tools.jar  ${HIVE_HOME}/lib
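
Hive also expects writable scratch and warehouse directories on HDFS. The commands below are a typical extra step, not in the original notes; the warehouse path matches the hive.metastore.warehouse.dir value configured in hive-site.xml later on:

# create Hive's HDFS scratch and warehouse directories and make them group-writable
hdfs dfs -mkdir -p /tmp/hive
hdfs dfs -chmod g+w /tmp/hive
hdfs dfs -mkdir -p /home/zkkafka/hive/warehouse
hdfs dfs -chmod g+w /home/zkkafka/hive/warehouse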

 

 

4.1 Edit hive-env.sh

 

cp hive-env.sh.template    hive-env.sh


export JAVA_HOME=/home/zkkafka/jdk1.8.0_151
export HIVE_HOME=/home/zkkafka/hive
export HADOOP_HOME=/home/zkkafka/hadoop

 

 

4.2 Add settings to hive-config.sh

 

export JAVA_HOME=/home/zkkafka/jdk1.8.0_151
export HIVE_HOME=/home/zkkafka/hive
export HADOOP_HOME=/home/zkkafka/hadoop
## also set the following line
HIVE_CONF_DIR=$HIVE_HOME/conf
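
hive-config.sh normally lives under $HIVE_HOME/bin; a minimal sketch for appending these settings, assuming that location:

cat >> /home/zkkafka/hive/bin/hive-config.sh <<'EOF'
export JAVA_HOME=/home/zkkafka/jdk1.8.0_151
export HIVE_HOME=/home/zkkafka/hive
export HADOOP_HOME=/home/zkkafka/hadoop
HIVE_CONF_DIR=$HIVE_HOME/conf
EOF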

 

 

4.3 Edit hive-site.xml

 

cp hive-default.xml.template hive-site.xml

<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>

 <property>  
    <name>javax.jdo.option.ConnectionURL</name>  
    <value>jdbc:mysql://master2:3306/metastore?useSSL=false&amp;useUnicode=true&amp;characterEncoding=UTF-8</value>  
    </property>  
    <property>  
    <name>javax.jdo.option.ConnectionDriverName</name>  
    <value>com.mysql.jdbc.Driver</value>  
    </property>  
  
    <property>  
    <name>javax.jdo.option.ConnectionUserName</name>  
    <value>hive</value>  
    </property>  
    <property>  
    <name>javax.jdo.option.ConnectionPassword</name>  
    <value>hive</value>  
    </property>  
    <property>  
    <name>datanucleus.autoCreateSchema</name>  
    <value>false</value>  
    </property>  
  
    <property>  
    <name>mapreduce.framework.name</name>  
    <value>yarn</value>  
    </property>  
    <property>  
    <name>yarn.resourcemanager.resource-tracker.address</name>  
    <value>master1:8031</value>  
    </property>  
  
    <property>  
    <name>hive.files.umask.value</name>  
    <value>0002</value>  
    </property>  
    <property>  
    <name>hive.exec.reducers.max</name>  
    <value>999</value>  
    </property>  
    <property>  
    <name>hive.auto.convert.join</name>  
    <value>true</value>  
    </property>  
  
    <property>  
    <name>hive.metastore.schema.verification</name>  
    <value>true</value>  
    </property>  
    <property>  
    <name>hive.metastore.warehouse.dir</name>  
    <!-- <value>hdfs://master/hive/warehouse</value>   -->
	<value>/home/zkkafka/hive/warehouse</value>
    </property>  
    <property>  
    <name>hive.warehouse.subdir.inherit.perms</name>  
    <value>true</value>  
    </property>  
    <property>
      <name>hive.metastore.port</name>
      <value>9083</value>
      <description>Hive metastore listener port</description>
    </property>
    <property>
    <name>hive.metastore.uris</name>
    <value>thrift://master1:9083</value>
    </property>
    <property>  
    <name>hive.metastore.server.min.threads</name>  
    <value>200</value>  
    </property>  
    <property>  
    <name>hive.metastore.server.max.threads</name>  
    <value>100000</value>  
    </property>  
    <property>  
    <name>hive.metastore.client.socket.timeout</name>  
    <value>3600</value>  
    </property>  
  
    <property>  
    <name>hive.support.concurrency</name>  
    <value>true</value>  
    </property>  
    <property>  
    <name>hive.zookeeper.quorum</name>  
    <value>master1,master2,slaver1</value>  
    </property>  
    <!-- minimum number of worker threads; default is 5 -->  
    <property>  
    <name>hive.server2.thrift.min.worker.threads</name>  
    <value>5</value>  
    </property>  
    <!-- maximum number of worker threads; default is 500 -->  
    <property>  
    <name>hive.server2.thrift.max.worker.threads</name>  
    <value>500</value>  
    </property>  
    <!-- TCP listen port; default is 10000 -->  
    <property>  
    <name>hive.server2.thrift.port</name>  
    <value>10000</value>  
    </property>  
    <!-- host to bind the TCP port to; default is localhost -->  
    <property>  
    <name>hive.server2.thrift.bind.host</name>  
    <value>master</value>  
    </property> 
    <!-- 
    <property>
      <name>hive.server2.authentication</name>
      <value>NONE</value>
    </property>
     -->  
    <property>  
    <name>hive.server2.enable.impersonation</name>  
    <value>true</value>  
    </property>  


	<!-- change the log locations -->
	<property>
	    <name>hive.exec.local.scratchdir</name>
	    <value>/home/zkkafka/hive/tmp/HiveJobsLog</value>
	    <description>Local scratch space for Hive jobs</description>
	</property>
	<property>
	    <name>hive.downloaded.resources.dir</name>
	    <value>/home/zkkafka/hive/tmp/ResourcesLog</value>
	    <description>Temporary local directory for added resources in the remote file system.</description>
	</property>
	<property>
	    <name>hive.querylog.location</name>
	    <value>/home/zkkafka/hive/tmp/HiveRunLog</value>
	    <description>Location of Hive run time structured log file</description>
	</property>
	<property>
	    <name>hive.server2.logging.operation.log.location</name>
	    <value>/home/zkkafka/hive/tmp/OpertitionLog</value>
	    <description>Top level directory where operation tmp are stored if logging functionality is enabled</description>
	</property>
	<!-- HWI (Hive Web Interface) configuration -->
	<!--
	<property>  
	    <name>hive.hwi.war.file</name>  
	    <value>/home/zkkafka/hive/lib/hive-hwi-2.1.1.jar</value>  
	    <description>HWI is not installed here (no jar available). This sets the path to the HWI war file, relative to ${HIVE_HOME}.</description>  
	</property>  
	<property>  
	    <name>hive.hwi.listen.host</name>  
	    <value>master1</value>  
	    <description>This is the host address the Hive Web Interface will listen on</description>  
	</property>  
	<property>  
	    <name>hive.hwi.listen.port</name>  
	    <value>9999</value>  
	    <description>This is the port the Hive Web Interface will listen on</description>  
	</property> 
         -->

        <!-- works around concurrent-read failures (HIVE-4762) -->
	 <property>
	   <name>datanucleus.autoStartMechanism</name>
	   <value>SchemaTable</value>
	</property>
	<!-- the install failed without this setting; adding it resolved the error -->
	<property>                                                      
	    <name>datanucleus.schema.autoCreateAll</name>  
	    <value>true</value>   
	    <description>creates necessary schema on a startup if one doesn't exist. set this to false, after creating it once</description>  
	 </property> 

</configuration>
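
Hand-editing a file this large is error-prone (mis-nested <property> elements are easy to miss), so it is worth confirming the result is well-formed XML; xmllint ships with libxml2 on most Linux systems:

xmllint --noout /home/zkkafka/hive/conf/hive-site.xml && echo "hive-site.xml is well-formed"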

 

 

 

4.4 Edit core-site.xml

This is a Hadoop configuration file, so Hadoop must be restarted after the change.

 

The error "User: hadoop is not allowed to impersonate anonymous" means the user hadoop is not allowed to impersonate anonymous (Hive's default user, visible in the default configuration); the proxyuser properties below allow impersonation for the zkkafka user.
vim  core-site.xml  
    <property>  
    <name>hadoop.proxyuser.zkkafka.hosts</name>  
    <value>*</value>  
    </property>  
    <property>  
    <name>hadoop.proxyuser.zkkafka.groups</name>  
      <value>*</value>  
    </property>  

 

 

scp core-site.xml   zkkafka@10.156.50.36:/home/zkkafka/hadoop/etc/hadoop/
scp core-site.xml   zkkafka@10.156.50.37:/home/zkkafka/hadoop/etc/hadoop/
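
If a full cluster restart is inconvenient, recent Hadoop versions can usually reload the proxyuser settings at runtime; this is an optional alternative, not part of the original steps:

# refresh proxyuser/superuser group mappings on the NameNode and ResourceManager
hdfs dfsadmin -refreshSuperUserGroupsConfiguration
yarn rmadmin -refreshSuperUserGroupsConfiguration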

 

 

4.5 Initialize the metastore database

schematool -dbType mysql -initSchema 

This is equivalent to the manual steps from section 1:
CREATE DATABASE metastore;  
USE metastore;  
SOURCE /home/zkkafka/hive/scripts/metastore/upgrade/mysql/hive-schema-2.3.0.mysql.sql;  
CREATE USER 'hive'@'localhost' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-35.base.app.dev.yf' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-36.base.app.dev.yf' IDENTIFIED BY 'hive';  
GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'yanfabu2-37.base.app.dev.yf' IDENTIFIED BY 'hive';  
FLUSH PRIVILEGES; 
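
After initialization, schematool can report the schema version it finds in MySQL; a failure here usually means hive-site.xml points at the wrong database or credentials:

schematool -dbType mysql -info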

 

4.6 Start the services

hive --service metastore
hive --service metastore >> /dev/null 2>&1 &
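
To confirm the metastore (9083) and, once started, HiveServer2 (10000) are actually listening (port numbers taken from hive-site.xml above):

ss -lnt | grep -E '9083|10000'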

 

hive
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/zkkafka/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]

Logging initialized using configuration in file:/home/zkkafka/hive/conf/hive-log4j2.properties Async: true
Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
hive> quit;

 

hive --service hiveserver2

 

[zkkafka@yanfabu2-35 conf]$ hive --service hiveserver2
2019-05-20 15:35:54: Starting HiveServer2
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/zkkafka/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]




STARTUP_MSG:   build = git://HW13934/Users/gates/git/hive -r 76595628ae13b95162e77bba365fe4d2c60b3f29; compiled by 'gates' on Tue May 7 15:45:09 PDT 2019
************************************************************/
2019-05-20T17:12:44,082  INFO [main] metastore.HiveMetaStore: Starting hive metastore on port 9083
2019-05-20T17:12:44,391  WARN [main] conf.HiveConf: HiveConf of name hive.files.umask.value does not exist
2019-05-20T17:12:44,392  WARN [main] conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
2019-05-20T17:12:44,393  INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2019-05-20T17:12:44,643  INFO [main] metastore.ObjectStore: ObjectStore, initialize called
2019-05-20T17:12:47,333  WARN [main] conf.HiveConf: HiveConf of name hive.files.umask.value does not exist
2019-05-20T17:12:47,334  WARN [main] conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
2019-05-20T17:12:47,336  INFO [main] metastore.ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
2019-05-20T17:12:55,474  INFO [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is MYSQL
2019-05-20T17:12:55,481  INFO [main] metastore.ObjectStore: Initialized ObjectStore
2019-05-20T17:12:56,553  INFO [main] metastore.HiveMetaStore: Added admin role in metastore
2019-05-20T17:12:56,572  INFO [main] metastore.HiveMetaStore: Added public role in metastore
2019-05-20T17:12:56,625  INFO [main] metastore.HiveMetaStore: No user is added in admin role, since config is empty
2019-05-20T17:12:57,288  INFO [main] metastore.HiveMetaStore: Starting DB backed MetaStore Server with SetUGI enabled
2019-05-20T17:12:57,312  INFO [main] metastore.HiveMetaStore: Started the new metaserver on port [9083]...
2019-05-20T17:12:57,312  INFO [main] metastore.HiveMetaStore: Options.minWorkerThreads = 200
2019-05-20T17:12:57,312  INFO [main] metastore.HiveMetaStore: Options.maxWorkerThreads = 100000
2019-05-20T17:12:57,312  INFO [main] metastore.HiveMetaStore: TCP keepalive = true
2019-05-20T17:13:12,928  INFO [main] conf.HiveConf: Found configuration file file:/home/zkkafka/hive/conf/hive-site.xml
2019-05-20T17:13:14,876  WARN [main] conf.HiveConf: HiveConf of name hive.files.umask.value does not exist
2019-05-20T17:13:14,877  WARN [main] conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
2019-05-20T17:13:15,800  WARN [main] conf.HiveConf: HiveConf of name hive.files.umask.value does not exist
2019-05-20T17:13:15,800  WARN [main] conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
2019-05-20T17:13:16,130  INFO [main] SessionState: 
Logging initialized using configuration in file:/home/zkkafka/hive/conf/hive-log4j2.properties Async: true
2019-05-20T17:13:20,011  INFO [main] session.SessionState: Created HDFS directory: /tmp/hive/zkkafka/d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b
2019-05-20T17:13:20,062  INFO [main] session.SessionState: Created local directory: /home/zkkafka/hive/tmp/HiveJobsLog/d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b
2019-05-20T17:13:20,089  INFO [main] session.SessionState: Created HDFS directory: /tmp/hive/zkkafka/d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b/_tmp_space.db
2019-05-20T17:13:20,092  INFO [main] conf.HiveConf: Using the default value passed in for log id: d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b
2019-05-20T17:13:20,094  INFO [main] session.SessionState: Updating thread name to d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main
2019-05-20T17:13:20,096  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] CliDriver: Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
2019-05-20T17:13:37,499  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] conf.HiveConf: Using the default value passed in for log id: d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b
2019-05-20T17:13:37,591  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Compiling command(queryId=zkkafka_20190520171337_b42df449-6aec-4100-a1b8-eedb6d51cbeb): show databases
2019-05-20T17:13:38,571  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Trying to connect to metastore with URI thrift://master1:9083
2019-05-20T17:13:38,630  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Opened a connection to metastore, current connections: 1
2019-05-20T17:13:38,791  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Connected to metastore.
2019-05-20T17:13:39,473  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Semantic Analysis Completed
2019-05-20T17:13:39,620  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:database_name, type:string, comment:from deserializer)], properties:null)
2019-05-20T17:13:39,869  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] exec.ListSinkOperator: Initializing operator LIST_SINK[0]
2019-05-20T17:13:39,898  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Completed compiling command(queryId=zkkafka_20190520171337_b42df449-6aec-4100-a1b8-eedb6d51cbeb); Time taken: 2.357 seconds
2019-05-20T17:13:39,903  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] lockmgr.DummyTxnManager: Creating lock manager of type org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager
2019-05-20T17:13:40,039  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] imps.CuratorFrameworkImpl: Starting
2019-05-20T17:13:40,127  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
2019-05-20T17:13:40,127  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:host.name=yanfabu2-35.base.app.dev.yf
2019-05-20T17:13:40,127  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.version=1.8.0_151
2019-05-20T17:13:40,128  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
2019-05-20T17:13:40,128  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.home=/home/zkkafka/jdk1.8.0_151/jre
2019-05-20T17:13:40,128  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.class.path=/home/zkkafka/hive/conf:/home/zkkafka/hive/lib/accumulo-core-1.6.0.jar:/home/zkkafka/hive/lib/accumulo-fate-1.6.0.jar:/home/zkkafka/hive/lib/accumulo-start-1.6.0.jar:/home/zkkafka/hive/lib/accumulo-trace-1.6.0.jar:/home/zkkafka/hive/lib/activation-1.1.jar:/home/zkkafka/hive/lib/aether-api-0.9.0.M2.jar:/home/zkkafka/hive/lib/aether-connector-file-0.9.0.M2.jar:/home/zkkafka/hive/lib/aether-connector-okhttp-0.0.9.jar:/home/zkkafka/hive/lib/aether-impl-0.9.0.M2.jar:/home/zkkafka/hive/lib/aether-spi-0.9.0.M2.jar:/home/zkkafka/hive/lib/aether-util-0.9.0.M2.jar:/home/zkkafka/hive/lib/aircompressor-0.8.jar:/home/zkkafka/hive/lib/airline-0.7.jar:/home/zkkafka/hive/lib/ant-1.6.5.jar:/home/zkkafka/hive/lib/ant-1.9.1.jar:/home/zkkafka/hive/lib/ant-launcher-1.9.1.jar:/home/zkkafka/hive/lib/antlr4-runtime-4.5.jar:/home/zkkafka/hive/lib/antlr-runtime-3.5.2.jar:/home/zkkafka/hive/lib/asm-3.1.jar:/home/zkkafka/hive/lib/asm-commons-3.1.jar:/home/zkkafka/hive/lib/asm-tree-3.1.jar:/home/zkkafka/hive/lib/avatica-1.8.0.jar:/home/zkkafka/hive/lib/avatica-metrics-1.8.0.jar:/home/zkkafka/hive/lib/avro-1.7.7.jar:/home/zkkafka/hive/lib/bonecp-0.8.0.RELEASE.jar:/home/zkkafka/hive/lib/bytebuffer-collections-0.2.5.jar:/home/zkkafka/hive/lib/calcite-core-1.10.0.jar:/home/zkkafka/hive/lib/calcite-druid-1.10.0.jar:/home/zkkafka/hive/lib/calcite-linq4j-1.10.0.jar:/home/zkkafka/hive/lib/classmate-1.0.0.jar:/home/zkkafka/hive/lib/commons-cli-1.2.jar:/home/zkkafka/hive/lib/commons-codec-1.4.jar:/home/zkkafka/hive/lib/commons-collections-3.2.2.jar:/home/zkkafka/hive/lib/commons-compiler-2.7.6.jar:/home/zkkafka/hive/lib/commons-compress-1.9.jar:/home/zkkafka/hive/lib/commons-dbcp-1.4.jar:/home/zkkafka/hive/lib/commons-dbcp2-2.0.1.jar:/home/zkkafka/hive/lib/commons-el-1.0.jar:/home/zkkafka/hive/lib/commons-httpclient-3.0.1.jar:/home/zkkafka/hive/lib/commons-io-2.4.jar:/home/zkkafka/hive/lib/commons-lang-2.6.jar:/home/zkkafka/hive/lib/commons-lang3-3.1.jar:/home/zkkafka/hive/lib/commons-logging-1.2.jar:/home/zkkafka/hive/lib/commons-math-2.2.jar:/home/zkkafka/hive/lib/commons-math3-3.6.1.jar:/home/zkkafka/hive/lib/commons-pool-1.5.4.jar:/home/zkkafka/hive/lib/commons-pool2-2.2.jar:/home/zkkafka/hive/lib/commons-vfs2-2.0.jar:/home/zkkafka/hive/lib/compress-lzf-1.0.3.jar:/home/zkkafka/hive/lib/config-magic-0.9.jar:/home/zkkafka/hive/lib/curator-client-2.7.1.jar:/home/zkkafka/hive/lib/curator-framework-2.7.1.jar:/home/zkkafka/hive/lib/curator-recipes-2.7.1.jar:/home/zkkafka/hive/lib/curator-x-discovery-2.11.0.jar:/home/zkkafka/hive/lib/datanucleus-api-jdo-4.2.4.jar:/home/zkkafka/hive/lib/datanucleus-core-4.1.17.jar:/home/zkkafka/hive/lib/datanucleus-rdbms-4.1.19.jar:/home/zkkafka/hive/lib/derby-10.10.2.0.jar:/home/zkkafka/hive/lib/derbyclient-10.11.1.1.jar:/home/zkkafka/hive/lib/derbynet-10.11.1.1.jar:/home/zkkafka/hive/lib/disruptor-3.3.0.jar:/home/zkkafka/hive/lib/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/zkkafka/hive/lib/druid-api-0.9.2.jar:/home/zkkafka/hive/lib/druid-common-0.9.2.jar:/home/zkkafka/hive/lib/druid-console-0.0.2.jar:/home/zkkafka/hive/lib/druid-hdfs-storage-0.9.2.jar:/home/zkkafka/hive/lib/druid-processing-0.9.2.jar:/home/zkkafka/hive/lib/druid-server-0.9.2.jar:/home/zkkafka/hive/lib/eigenbase-properties-1.1.5.jar:/home/zkkafka/hive/lib/emitter-0.3.6.jar:/home/zkkafka/hive/lib/extendedset-1.3.10.jar:/home/zkkafka/hive/lib/findbugs-annotations-1.3.9-1.jar:/home/zkkafka/h
ive/lib/geoip2-0.4.0.jar:/home/zkkafka/hive/lib/geronimo-annotation_1.0_spec-1.1.1.jar:/home/zkkafka/hive/lib/geronimo-jaspic_1.0_spec-1.0.jar:/home/zkkafka/hive/lib/geronimo-jta_1.1_spec-1.1.1.jar:/home/zkkafka/hive/lib/google-http-client-jackson2-1.15.0-rc.jar:/home/zkkafka/hive/lib/groovy-all-2.4.4.jar:/home/zkkafka/hive/lib/gson-2.2.4.jar:/home/zkkafka/hive/lib/guava-14.0.1.jar:/home/zkkafka/hive/lib/guice-multibindings-4.1.0.jar:/home/zkkafka/hive/lib/guice-servlet-4.1.0.jar:/home/zkkafka/hive/lib/hbase-annotations-1.1.1.jar:/home/zkkafka/hive/lib/hbase-client-1.1.1.jar:/home/zkkafka/hive/lib/hbase-common-1.1.1.jar:/home/zkkafka/hive/lib/hbase-common-1.1.1-tests.jar:/home/zkkafka/hive/lib/hbase-hadoop2-compat-1.1.1.jar:/home/zkkafka/hive/lib/hbase-hadoop2-compat-1.1.1-tests.jar:/home/zkkafka/hive/lib/hbase-hadoop-compat-1.1.1.jar:/home/zkkafka/hive/lib/hbase-prefix-tree-1.1.1.jar:/home/zkkafka/hive/lib/hbase-procedure-1.1.1.jar:/home/zkkafka/hive/lib/hbase-protocol-1.1.1.jar:/home/zkkafka/hive/lib/hbase-server-1.1.1.jar:/home/zkkafka/hive/lib/hibernate-validator-5.1.3.Final.jar:/home/zkkafka/hive/lib/HikariCP-2.5.1.jar:/home/zkkafka/hive/lib/hive-accumulo-handler-2.3.5.jar:/home/zkkafka/hive/lib/hive-beeline-2.3.5.jar:/home/zkkafka/hive/lib/hive-cli-2.3.5.jar:/home/zkkafka/hive/lib/hive-common-2.3.5.jar:/home/zkkafka/hive/lib/hive-contrib-2.3.5.jar:/home/zkkafka/hive/lib/hive-druid-handler-2.3.5.jar:/home/zkkafka/hive/lib/hive-exec-2.3.5.jar:/home/zkkafka/hive/lib/hive-hbase-handler-2.3.5.jar:/home/zkkafka/hive/lib/hive-hcatalog-core-2.3.5.jar:/home/zkkafka/hive/lib/hive-hcatalog-server-extensions-2.3.5.jar:/home/zkkafka/hive/lib/hive-hplsql-2.3.5.jar:/home/zkkafka/hive/lib/hive-jdbc-2.3.5.jar:/home/zkkafka/hive/lib/hive-jdbc-handler-2.3.5.jar:/home/zkkafka/hive/lib/hive-llap-client-2.3.5.jar:/home/zkkafka/hive/lib/hive-llap-common-2.3.5.jar:/home/zkkafka/hive/lib/hive-llap-common-2.3.5-tests.jar:/home/zkkafka/hive/lib/hive-llap-ext-client-2.3.5.jar:/home/zkkafka/hive/lib/hive-llap-server-2.3.5.jar:/home/zkkafka/hive/lib/hive-llap-tez-2.3.5.jar:/home/zkkafka/hive/lib/hive-metastore-2.3.5.jar:/home/zkkafka/hive/lib/hive-serde-2.3.5.jar:/home/zkkafka/hive/lib/hive-service-2.3.5.jar:/home/zkkafka/hive/lib/hive-service-rpc-2.3.5.jar:/home/zkkafka/hive/lib/hive-shims-0.23-2.3.5.jar:/home/zkkafka/hive/lib/hive-shims-2.3.5.jar:/home/zkkafka/hive/lib/hive-shims-common-2.3.5.jar:/home/zkkafka/hive/lib/hive-shims-scheduler-2.3.5.jar:/home/zkkafka/hive/lib/hive-storage-api-2.4.0.jar:/home/zkkafka/hive/lib/hive-testutils-2.3.5.jar:/home/zkkafka/hive/lib/hive-vector-code-gen-2.3.5.jar:/home/zkkafka/hive/lib/htrace-core-3.1.0-incubating.jar:/home/zkkafka/hive/lib/http-client-1.0.4.jar:/home/zkkafka/hive/lib/httpclient-4.4.jar:/home/zkkafka/hive/lib/httpcore-4.4.jar:/home/zkkafka/hive/lib/icu4j-4.8.1.jar:/home/zkkafka/hive/lib/irc-api-1.0-0014.jar:/home/zkkafka/hive/lib/ivy-2.4.0.jar:/home/zkkafka/hive/lib/jackson-annotations-2.6.0.jar:/home/zkkafka/hive/lib/jackson-core-2.6.5.jar:/home/zkkafka/hive/lib/jackson-databind-2.6.5.jar:/home/zkkafka/hive/lib/jackson-dataformat-smile-2.4.6.jar:/home/zkkafka/hive/lib/jackson-datatype-guava-2.4.6.jar:/home/zkkafka/hive/lib/jackson-datatype-joda-2.4.6.jar:/home/zkkafka/hive/lib/jackson-jaxrs-1.9.13.jar:/home/zkkafka/hive/lib/jackson-jaxrs-base-2.4.6.jar:/home/zkkafka/hive/lib/jackson-jaxrs-json-provider-2.4.6.jar:/home/zkkafka/hive/lib/jackson-jaxrs-smile-provider-2.4.6.jar:/home/zkkafka/hive/lib/jackson-module-jaxb-annotations-2.4.6.jar:/home/zkkafka/hive/lib
/jackson-xc-1.9.13.jar:/home/zkkafka/hive/lib/jamon-runtime-2.3.1.jar:/home/zkkafka/hive/lib/janino-2.7.6.jar:/home/zkkafka/hive/lib/jasper-compiler-5.5.23.jar:/home/zkkafka/hive/lib/jasper-runtime-5.5.23.jar:/home/zkkafka/hive/lib/java-util-0.27.10.jar:/home/zkkafka/hive/lib/javax.el-3.0.0.jar:/home/zkkafka/hive/lib/javax.el-api-3.0.0.jar:/home/zkkafka/hive/lib/javax.inject-1.jar:/home/zkkafka/hive/lib/javax.jdo-3.2.0-m3.jar:/home/zkkafka/hive/lib/javax.servlet-3.0.0.v201112011016.jar:/home/zkkafka/hive/lib/javax.servlet-api-3.1.0.jar:/home/zkkafka/hive/lib/javolution-5.5.1.jar:/home/zkkafka/hive/lib/jboss-logging-3.1.3.GA.jar:/home/zkkafka/hive/lib/jcodings-1.0.8.jar:/home/zkkafka/hive/lib/jcommander-1.32.jar:/home/zkkafka/hive/lib/jdbi-2.63.1.jar:/home/zkkafka/hive/lib/jdo-api-3.0.1.jar:/home/zkkafka/hive/lib/jersey-client-1.9.jar:/home/zkkafka/hive/lib/jersey-guice-1.19.jar:/home/zkkafka/hive/lib/jersey-server-1.14.jar:/home/zkkafka/hive/lib/jettison-1.1.jar:/home/zkkafka/hive/lib/jetty-6.1.26.jar:/home/zkkafka/hive/lib/jetty-all-7.6.0.v20120127.jar:/home/zkkafka/hive/lib/jetty-client-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-continuation-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-http-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-io-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-proxy-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-security-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-server-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-servlet-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-servlets-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jetty-sslengine-6.1.26.jar:/home/zkkafka/hive/lib/jetty-util-6.1.26.jar:/home/zkkafka/hive/lib/jetty-util-9.2.5.v20141112.jar:/home/zkkafka/hive/lib/jline-2.12.jar:/home/zkkafka/hive/lib/joda-time-2.8.1.jar:/home/zkkafka/hive/lib/jol-core-0.2.jar:/home/zkkafka/hive/lib/joni-2.1.2.jar:/home/zkkafka/hive/lib/jpam-1.1.jar:/home/zkkafka/hive/lib/json-1.8.jar:/home/zkkafka/hive/lib/json-path-2.1.0.jar:/home/zkkafka/hive/lib/jsp-2.1-6.1.14.jar:/home/zkkafka/hive/lib/jsp-api-2.0.jar:/home/zkkafka/hive/lib/jsp-api-2.1-6.1.14.jar:/home/zkkafka/hive/lib/jsp-api-2.1.jar:/home/zkkafka/hive/lib/jsr305-3.0.0.jar:/home/zkkafka/hive/lib/jta-1.1.jar:/home/zkkafka/hive/lib/libfb303-0.9.3.jar:/home/zkkafka/hive/lib/libthrift-0.9.3.jar:/home/zkkafka/hive/lib/log4j-1.2-api-2.6.2.jar:/home/zkkafka/hive/lib/log4j-api-2.6.2.jar:/home/zkkafka/hive/lib/log4j-core-2.6.2.jar:/home/zkkafka/hive/lib/log4j-jul-2.5.jar:/home/zkkafka/hive/lib/log4j-slf4j-impl-2.6.2.jar:/home/zkkafka/hive/lib/log4j-web-2.6.2.jar:/home/zkkafka/hive/lib/lz4-1.3.0.jar:/home/zkkafka/hive/lib/mail-1.4.1.jar:/home/zkkafka/hive/lib/mapdb-1.0.8.jar:/home/zkkafka/hive/lib/maven-aether-provider-3.1.1.jar:/home/zkkafka/hive/lib/maven-model-3.1.1.jar:/home/zkkafka/hive/lib/maven-model-builder-3.1.1.jar:/home/zkkafka/hive/lib/maven-repository-metadata-3.1.1.jar:/home/zkkafka/hive/lib/maven-scm-api-1.4.jar:/home/zkkafka/hive/lib/maven-scm-provider-svn-commons-1.4.jar:/home/zkkafka/hive/lib/maven-scm-provider-svnexe-1.4.jar:/home/zkkafka/hive/lib/maven-settings-3.1.1.jar:/home/zkkafka/hive/lib/maven-settings-builder-3.1.1.jar:/home/zkkafka/hive/lib/maxminddb-0.2.0.jar:/home/zkkafka/hive/lib/metrics-core-2.2.0.jar:/home/zkkafka/hive/lib/metrics-core-3.1.0.jar:/home/zkkafka/hive/lib/metrics-json-3.1.0.jar:/home/zkkafka/hive/lib/metrics-jvm-3.1.0.jar:/home/zkkafka/hive/lib/mysql-connector-java-5.1.47.jar:/home/zkkafka/hive/lib/mysql-metadata-storage-0.9.2.jar:/home/zkkafka/hive/lib/netty-
3.6.2.Final.jar:/home/zkkafka/hive/lib/netty-all-4.0.52.Final.jar:/home/zkkafka/hive/lib/okhttp-1.0.2.jar:/home/zkkafka/hive/lib/opencsv-2.3.jar:/home/zkkafka/hive/lib/orc-core-1.3.4.jar:/home/zkkafka/hive/lib/org.abego.treelayout.core-1.0.1.jar:/home/zkkafka/hive/lib/paranamer-2.3.jar:/home/zkkafka/hive/lib/parquet-hadoop-bundle-1.8.1.jar:/home/zkkafka/hive/lib/pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar:/home/zkkafka/hive/lib/plexus-interpolation-1.19.jar:/home/zkkafka/hive/lib/plexus-utils-3.0.15.jar:/home/zkkafka/hive/lib/postgresql-9.4.1208.jre7.jar:/home/zkkafka/hive/lib/postgresql-metadata-storage-0.9.2.jar:/home/zkkafka/hive/lib/protobuf-java-2.5.0.jar:/home/zkkafka/hive/lib/regexp-1.3.jar:/home/zkkafka/hive/lib/rhino-1.7R5.jar:/home/zkkafka/hive/lib/RoaringBitmap-0.5.18.jar:/home/zkkafka/hive/lib/server-metrics-0.2.8.jar:/home/zkkafka/hive/lib/servlet-api-2.4.jar:/home/zkkafka/hive/lib/servlet-api-2.5-6.1.14.jar:/home/zkkafka/hive/lib/slice-0.29.jar:/home/zkkafka/hive/lib/slider-core-0.90.2-incubating.jar:/home/zkkafka/hive/lib/snappy-java-1.0.5.jar:/home/zkkafka/hive/lib/spymemcached-2.11.7.jar:/home/zkkafka/hive/lib/ST4-4.0.4.jar:/home/zkkafka/hive/lib/stax-api-1.0.1.jar:/home/zkkafka/hive/lib/super-csv-2.2.0.jar:/home/zkkafka/hive/lib/tempus-fugit-1.1.jar:/home/zkkafka/hive/lib/tesla-aether-0.0.5.jar:/home/zkkafka/hive/lib/tools.jar:/home/zkkafka/hive/lib/transaction-api-1.1.jar:/home/zkkafka/hive/lib/validation-api-1.1.0.Final.jar:/home/zkkafka/hive/lib/velocity-1.5.jar:/home/zkkafka/hive/lib/wagon-provider-api-2.4.jar:/home/zkkafka/hive/lib/zookeeper-3.4.6.jar::/home/zkkafka/hadoop/share/hadoop/tools/lib/hadoop-distcp-2.6.5.jar:/home/zkkafka/hbase/conf:/home/zkkafka/hbase/lib/hbase-common-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-mapreduce-2.0.5.jar:/home/zkkafka/hbase/lib/metrics-core-3.2.1.jar:/home/zkkafka/hbase/lib/hbase-server-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-hadoop-compat-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-hadoop2-compat-2.0.5.jar:/home/zkkafka/hbase/lib/commons-lang3-3.6.jar:/home/zkkafka/hbase/lib/hbase-metrics-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-shaded-netty-2.1.0.jar:/home/zkkafka/hbase/lib/hbase-client-2.0.5.jar:/home/zkkafka/hbase/lib/jackson-databind-2.9.2.jar:/home/zkkafka/hbase/lib/jackson-annotations-2.9.0.jar:/home/zkkafka/hbase/lib/hbase-shaded-protobuf-2.1.0.jar:/home/zkkafka/hbase/lib/htrace-core4-4.2.0-incubating.jar:/home/zkkafka/hbase/lib/jackson-core-2.9.2.jar:/home/zkkafka/hbase/lib/hbase-protocol-shaded-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-protocol-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-metrics-api-2.0.5.jar:/home/zkkafka/hbase/lib/hbase-shaded-miscellaneous-2.1.0.jar:/home/zkkafka/hadoop/contrib/capacity-scheduler/*.jar:/home/zkkafka/hadoop/etc/hadoop:/home/zkkafka/hadoop/share/hadoop/common/lib/activation-1.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/java-xmlbuilder-0.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/guava-11.0.2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-net-3.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/servlet-api-2.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/httpclient-4.2.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/xz-1.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-cli-1.2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-api-1.7.5.jar:/ho
me/zkkafka/hadoop/share/hadoop/common/lib/jersey-server-1.9.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/stax-api-1.0-2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jersey-json-1.9.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/htrace-core-3.0.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/asm-3.2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/hadoop-annotations-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-collections-3.2.2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/curator-framework-2.6.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-configuration-1.6.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-math3-3.1.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-digester-1.8.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jasper-runtime-5.5.23.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/httpcore-4.2.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/curator-recipes-2.6.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/api-util-1.0.0-M20.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jsr305-1.3.9.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/junit-4.11.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jettison-1.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jets3t-0.9.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jsp-api-2.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/paranamer-2.3.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-io-2.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/zookeeper-3.4.6.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-el-1.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jersey-core-1.9.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jetty-util-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-lang-2.6.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/hadoop-auth-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/avro-1.7.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jsch-0.1.42.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jasper-compiler-5.5.23.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/netty-3.6.2.Final.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/gson-2.2.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-codec-1.4.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jetty-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-httpclient-3.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-logging-1.1.3.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/curator-client-2.6.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/xmlenc-0.52.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/hamcrest-core-1.3.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/log4j-1.2.17.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/commons-compress-1.4.1.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/mockito-all-
1.8.5.jar:/home/zkkafka/hadoop/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/home/zkkafka/hadoop/share/hadoop/common/hadoop-common-2.6.5-tests.jar:/home/zkkafka/hadoop/share/hadoop/common/hadoop-nfs-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/common/hadoop-common-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/hdfs:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/guava-11.0.2.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/htrace-core-3.0.4.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/asm-3.2.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jasper-runtime-5.5.23.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jsr305-1.3.9.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jsp-api-2.1.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-io-2.4.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-el-1.0.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.6.5-tests.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/hdfs/hadoop-hdfs-nfs-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/activation-1.1.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/guava-11.0.2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/servlet-api-2.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/javax.inject-1.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/xz-1.0.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-cli-1.2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jersey-server-1.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/stax-api-1.0-2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jersey-json-1.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/asm-3.2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-collections-3.2.2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/aopalliance-1.0.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jsr305-1.3.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jettison-1.1.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-io-2.4.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/zookeeper-3.4.6.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jersey-core-1.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1
.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jersey-client-1.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jetty-util-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-lang-2.6.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/guice-3.0.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-codec-1.4.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jetty-6.1.26.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-httpclient-3.1.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-logging-1.1.3.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jackson-core-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/log4j-1.2.17.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar:/home/zkkafka/hadoop/share/hadoop/yarn/lib/jline-2.12.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-common-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-api-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-tests-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-registry-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-client-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-server-common-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/javax.inject-1.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/xz-1.0.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/asm-3.2.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/hadoop-annotations-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/junit-4.11.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/commons-io-2.4.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/jersey-core-1.9.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/guice-3.0.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/leveldbjni-all-1.8.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/jackson-core-asl-1.9.13.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/home
/zkkafka/hadoop/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.6.5-tests.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.6.5.jar:/home/zkkafka/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.6.5.jar
2019-05-20T17:13:40,128  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.library.path=/home/zkkafka/hadoop/lib/native
2019-05-20T17:13:40,128  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.io.tmpdir=/tmp
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:os.name=Linux
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:os.arch=amd64
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:os.version=3.10.0-862.el7.x86_64
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:user.name=zkkafka
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:user.home=/home/zkkafka
2019-05-20T17:13:40,129  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Client environment:user.dir=/home/zkkafka
2019-05-20T17:13:40,132  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] zookeeper.ZooKeeper: Initiating client connection, connectString=master1:2181,master2:2181,slaver1:2181 sessionTimeout=1200000 watcher=org.apache.curator.ConnectionState@73809e7
2019-05-20T17:13:40,250  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main-SendThread(yanfabu2-35.base.app.dev.yf:2181)] zookeeper.ClientCnxn: Opening socket connection to server yanfabu2-35.base.app.dev.yf/10.156.50.35:2181. Will not attempt to authenticate using SASL (unknown error)
2019-05-20T17:13:40,251  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main-SendThread(yanfabu2-35.base.app.dev.yf:2181)] zookeeper.ClientCnxn: Socket connection established to yanfabu2-35.base.app.dev.yf/10.156.50.35:2181, initiating session
2019-05-20T17:13:40,306  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main-SendThread(yanfabu2-35.base.app.dev.yf:2181)] zookeeper.ClientCnxn: Session establishment complete on server yanfabu2-35.base.app.dev.yf/10.156.50.35:2181, sessionid = 0x16a53c49c90003f, negotiated timeout = 40000
2019-05-20T17:13:40,317  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main-EventThread] state.ConnectionStateManager: State change: CONNECTED
2019-05-20T17:13:40,349  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Executing command(queryId=zkkafka_20190520171337_b42df449-6aec-4100-a1b8-eedb6d51cbeb): show databases
2019-05-20T17:13:40,395  WARN [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] session.SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
2019-05-20T17:13:40,388  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b, clientType=HIVECLI]
2019-05-20T17:13:40,396  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Mestastore configuration hive.metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook
2019-05-20T17:13:40,408  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Closed a connection to metastore, current connections: 0
2019-05-20T17:13:40,414  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Trying to connect to metastore with URI thrift://master1:9083
2019-05-20T17:13:40,414  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Opened a connection to metastore, current connections: 1
2019-05-20T17:13:40,422  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] hive.metastore: Connected to metastore.
2019-05-20T17:13:40,457  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2019-05-20T17:13:40,777  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] exec.DDLTask: results : 1
2019-05-20T17:13:40,806  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: Completed executing command(queryId=zkkafka_20190520171337_b42df449-6aec-4100-a1b8-eedb6d51cbeb); Time taken: 0.458 seconds
2019-05-20T17:13:40,806  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] ql.Driver: OK
2019-05-20T17:13:40,852  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] Configuration.deprecation: mapred.input.dir is deprecated. Instead, use mapreduce.input.fileinputformat.inputdir
2019-05-20T17:13:41,002  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] mapred.FileInputFormat: Total input paths to process : 1
2019-05-20T17:13:41,063  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] exec.ListSinkOperator: Closing operator LIST_SINK[0]
2019-05-20T17:13:41,076  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] conf.HiveConf: Using the default value passed in for log id: d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b
2019-05-20T17:13:41,077  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] session.SessionState: Resetting thread name to  main
2019-05-20T17:13:41,076  INFO [d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b main] CliDriver: Time taken: 3.275 seconds, Fetched: 1 row(s)

 

 

4.7 Hive SQL

show databases;
create database test;
use test;
create table app (appid int, appname string) COMMENT 'app' ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
create table device (deviceid int, appid int,register_time string) COMMENT 'device' ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
drop table device;

vim app.txt
1	kaxinxiaoxiaole
2	wangzherognyao
3	cijizhanchang
vim device.txt
1	1	2019-05-20+17:21:44
2	2	2019-05-20+17:21:44
3	3	2019-05-20+17:21:44

load data local inpath '/home/zkkafka/hive/data/app.txt' into table app; 
load data local inpath '/home/zkkafka/hive/data/device.txt' into table device;

select * from app;
select * from device;
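
The same statements can also be run non-interactively, which is handy for scripting; the file name in the second line is a hypothetical script containing the statements above:

hive -e "use test; select * from app; select * from device;"
hive -f /home/zkkafka/hive/data/queries.sql    # hypothetical script file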

 
hive> select b.* from app  a , device b where  a.appid=b.appid and b.deviceid =1;
WARNING: Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
Query ID = zkkafka_20190520173906_f03af066-edc3-44c9-bb6b-0f94bec4019e
Total jobs = 1
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/zkkafka/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
2019-05-20 17:39:31	Starting to launch local task to process map join;	maximum memory = 477626368
2019-05-20 17:39:36	Dump the side-table for tag: 1 with group count: 1 into file: file:/home/zkkafka/hive/tmp/HiveJobsLog/d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b/hive_2019-05-20_17-39-06_709_1947770512124771535-1/-local-10004/HashTable-Stage-3/MapJoin-mapfile01--.hashtable
2019-05-20 17:39:36	Uploaded 1 File to: file:/home/zkkafka/hive/tmp/HiveJobsLog/d9ba8f8f-2fb4-4882-864f-d20d8bf2f57b/hive_2019-05-20_17-39-06_709_1947770512124771535-1/-local-10004/HashTable-Stage-3/MapJoin-mapfile01--.hashtable (299 bytes)
2019-05-20 17:39:36	End of local task; Time Taken: 5.115 sec.
Execution completed successfully
MapredLocal task succeeded
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_1557299349575_0003, Tracking URL = http://master1:8088/proxy/application_1557299349575_0003/
Kill Command = /home/zkkafka/hadoop/bin/hadoop job  -kill job_1557299349575_0003
Hadoop job information for Stage-3: number of mappers: 1; number of reducers: 0
2019-05-20 17:39:53,939 Stage-3 map = 0%,  reduce = 0%
2019-05-20 17:40:00,879 Stage-3 map = 100%,  reduce = 0%, Cumulative CPU 2.85 sec
MapReduce Total cumulative CPU time: 2 seconds 850 msec
Ended Job = job_1557299349575_0003
MapReduce Jobs Launched: 
Stage-Stage-3: Map: 1   Cumulative CPU: 2.85 sec   HDFS Read: 6746 HDFS Write: 123 SUCCESS
Total MapReduce CPU Time Spent: 2 seconds 850 msec
OK
1	1	2019-05-20+17:21:44
Time taken: 56.647 seconds, Fetched: 1 row(s)

 

4.8 beeline

beeline -u jdbc:hive2://10.156.50.36:10000 -n hive

beeline
!connect jdbc:hive2://10.156.50.36:10000/test  


select b.* from app  a , device b where  a.appid=b.appid and b.deviceid =1;
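
beeline can likewise run a single statement and exit, which makes a convenient smoke test; the JDBC URL and user are the same as above:

beeline -u "jdbc:hive2://10.156.50.36:10000/test" -n hive -e "select b.* from app a, device b where a.appid = b.appid and b.deviceid = 1;"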

 

 

4.9 Spring Boot + HiveServer2 (HS2)

4.9.1 Java client log

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/F:/yunrepository/repository/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/F:/yunrepository/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.11.1/log4j-slf4j-impl-2.11.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/F:/yunrepository/repository/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [ch.qos.logback.classic.util.ContextSelectorStaticBinder]
14:17:05.896 [main] DEBUG org.springframework.test.context.junit4.SpringJUnit4ClassRunner - SpringJUnit4ClassRunner constructor called with [class bigdata.hivedemo.HiveTemplateTest]
14:17:05.907 [main] DEBUG org.springframework.test.context.BootstrapUtils - Instantiating CacheAwareContextLoaderDelegate from class [org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate]
14:17:05.933 [main] DEBUG org.springframework.test.context.BootstrapUtils - Instantiating BootstrapContext using constructor [public org.springframework.test.context.support.DefaultBootstrapContext(java.lang.Class,org.springframework.test.context.CacheAwareContextLoaderDelegate)]
14:17:05.972 [main] DEBUG org.springframework.test.context.BootstrapUtils - Instantiating TestContextBootstrapper for test class [bigdata.hivedemo.HiveTemplateTest] from class [org.springframework.boot.test.context.SpringBootTestContextBootstrapper]
14:17:05.998 [main] INFO org.springframework.boot.test.context.SpringBootTestContextBootstrapper - Neither @ContextConfiguration nor @ContextHierarchy found for test class [bigdata.hivedemo.HiveTemplateTest], using SpringBootContextLoader
14:17:06.007 [main] DEBUG org.springframework.test.context.support.AbstractContextLoader - Did not detect default resource location for test class [bigdata.hivedemo.HiveTemplateTest]: class path resource [bigdata/hivedemo/HiveTemplateTest-context.xml] does not exist
14:17:06.008 [main] DEBUG org.springframework.test.context.support.AbstractContextLoader - Did not detect default resource location for test class [bigdata.hivedemo.HiveTemplateTest]: class path resource [bigdata/hivedemo/HiveTemplateTestContext.groovy] does not exist
14:17:06.008 [main] INFO org.springframework.test.context.support.AbstractContextLoader - Could not detect default resource locations for test class [bigdata.hivedemo.HiveTemplateTest]: no resource found for suffixes {-context.xml, Context.groovy}.
14:17:06.118 [main] DEBUG org.springframework.test.context.support.ActiveProfilesUtils - Could not find an 'annotation declaring class' for annotation type [org.springframework.test.context.ActiveProfiles] and class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.397 [main] DEBUG org.springframework.boot.test.context.SpringBootTestContextBootstrapper - @TestExecutionListeners is not present for class [bigdata.hivedemo.HiveTemplateTest]: using defaults.
14:17:06.397 [main] INFO org.springframework.boot.test.context.SpringBootTestContextBootstrapper - Loaded default TestExecutionListener class names from location [META-INF/spring.factories]: [org.springframework.boot.test.mock.mockito.MockitoTestExecutionListener, org.springframework.boot.test.mock.mockito.ResetMocksTestExecutionListener, org.springframework.boot.test.autoconfigure.restdocs.RestDocsTestExecutionListener, org.springframework.boot.test.autoconfigure.web.client.MockRestServiceServerResetTestExecutionListener, org.springframework.boot.test.autoconfigure.web.servlet.MockMvcPrintOnlyOnFailureTestExecutionListener, org.springframework.boot.test.autoconfigure.web.servlet.WebDriverTestExecutionListener, org.springframework.test.context.web.ServletTestExecutionListener, org.springframework.test.context.support.DirtiesContextBeforeModesTestExecutionListener, org.springframework.test.context.support.DependencyInjectionTestExecutionListener, org.springframework.test.context.support.DirtiesContextTestExecutionListener, org.springframework.test.context.transaction.TransactionalTestExecutionListener, org.springframework.test.context.jdbc.SqlScriptsTestExecutionListener]
14:17:06.427 [main] INFO org.springframework.boot.test.context.SpringBootTestContextBootstrapper - Using TestExecutionListeners: [org.springframework.test.context.web.ServletTestExecutionListener@71e9ddb4, org.springframework.test.context.support.DirtiesContextBeforeModesTestExecutionListener@394df057, org.springframework.boot.test.mock.mockito.MockitoTestExecutionListener@4961f6af, org.springframework.boot.test.autoconfigure.SpringBootDependencyInjectionTestExecutionListener@5aebe890, org.springframework.test.context.support.DirtiesContextTestExecutionListener@65d09a04, org.springframework.test.context.transaction.TransactionalTestExecutionListener@33c911a1, org.springframework.test.context.jdbc.SqlScriptsTestExecutionListener@75db5df9, org.springframework.boot.test.mock.mockito.ResetMocksTestExecutionListener@707194ba, org.springframework.boot.test.autoconfigure.restdocs.RestDocsTestExecutionListener@1190200a, org.springframework.boot.test.autoconfigure.web.client.MockRestServiceServerResetTestExecutionListener@6a2f6f80, org.springframework.boot.test.autoconfigure.web.servlet.MockMvcPrintOnlyOnFailureTestExecutionListener@45b4c3a9, org.springframework.boot.test.autoconfigure.web.servlet.WebDriverTestExecutionListener@399c4be1]
14:17:06.429 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.430 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.431 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.432 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.445 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.445 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.446 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.447 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.447 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.447 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.453 [main] DEBUG org.springframework.test.context.support.AbstractDirtiesContextTestExecutionListener - Before test class: context [DefaultTestContext@3c0be339 testClass = HiveTemplateTest, testInstance = [null], testMethod = [null], testException = [null], mergedContextConfiguration = [WebMergedContextConfiguration@15ca7889 testClass = HiveTemplateTest, locations = '{}', classes = '{class bigdata.hivedemo.App, class bigdata.hivedemo.App}', contextInitializerClasses = '[]', activeProfiles = '{}', propertySourceLocations = '{}', propertySourceProperties = '{org.springframework.boot.test.context.SpringBootTestContextBootstrapper=true}', contextCustomizers = set[org.springframework.boot.test.context.filter.ExcludeFilterContextCustomizer@25359ed8, org.springframework.boot.test.json.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer@80ec1f8, org.springframework.boot.test.mock.mockito.MockitoContextCustomizer@0, org.springframework.boot.test.web.client.TestRestTemplateContextCustomizer@77be656f, org.springframework.boot.test.autoconfigure.properties.PropertyMappingContextCustomizer@0, org.springframework.boot.test.autoconfigure.web.servlet.WebDriverContextCustomizerFactory$Customizer@345965f2], resourceBasePath = 'src/main/webapp', contextLoader = 'org.springframework.boot.test.context.SpringBootContextLoader', parent = [null]], attributes = map['org.springframework.test.context.web.ServletTestExecutionListener.activateListener' -> true]], class annotated with @DirtiesContext [false] with mode [null].
14:17:06.453 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved @ProfileValueSourceConfiguration [null] for test class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.453 [main] DEBUG org.springframework.test.annotation.ProfileValueUtils - Retrieved ProfileValueSource type [class org.springframework.test.annotation.SystemProfileValueSource] for class [bigdata.hivedemo.HiveTemplateTest]
14:17:06.500 [main] DEBUG org.springframework.test.context.support.TestPropertySourceUtils - Adding inlined properties to environment: {spring.jmx.enabled=false, org.springframework.boot.test.context.SpringBootTestContextBootstrapper=true, server.port=-1}

  .   ____          _            __ _ _
 /\\ / ___'_ __ _ _(_)_ __  __ _ \ \ \ \
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
 \\/  ___)| |_)| | | | | || (_| |  ) ) ) )
  '  |____| .__|_| |_|_| |_\__, | / / / /
 =========|_|==============|___/=/_/_/_/
 :: Spring Boot ::        (v2.1.0.RELEASE)

2019-05-24 14:17:07.081  WARN 19476 --- [           main] ory$DuplicateJsonObjectContextCustomizer : 

Found multiple occurrences of org.json.JSONObject on the class path:

	jar:file:/F:/yunrepository/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
	jar:file:/F:/yunrepository/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2019-05-24 14:17:07.116  INFO 19476 --- [           main] bigdata.hivedemo.HiveTemplateTest        : Starting HiveTemplateTest on DESKTOP-DQPVTLL with PID 19476 (started by baoy in C:\Users\baoy\git\bigdataby\bigdata.hivedemo)
2019-05-24 14:17:07.117  INFO 19476 --- [           main] bigdata.hivedemo.HiveTemplateTest        : No active profile set, falling back to default profiles: default
2019-05-24 14:17:10.357  INFO 19476 --- [           main] o.s.s.concurrent.ThreadPoolTaskExecutor  : Initializing ExecutorService 'applicationTaskExecutor'
2019-05-24 14:17:11.449  INFO 19476 --- [           main] bigdata.hivedemo.HiveTemplateTest        : Started HiveTemplateTest in 4.932 seconds (JVM running for 6.82)
2019-05-24 14:17:12.139  INFO 19476 --- [           main] org.apache.hive.jdbc.Utils               : Supplied authorities: 10.156.50.36:10000
2019-05-24 14:17:12.140  INFO 19476 --- [           main] org.apache.hive.jdbc.Utils               : Resolved authority: 10.156.50.36:10000
2019-05-24 14:17:16.325  INFO 19476 --- [           main] com.alibaba.druid.pool.DruidDataSource   : {dataSource-1} inited
>> 1 1 2019-05-20+17:21:44
2019-05-24 14:17:48.772  INFO 19476 --- [       Thread-2] o.s.s.concurrent.ThreadPoolTaskExecutor  : Shutting down ExecutorService 'applicationTaskExecutor'
2019-05-24 14:17:48.780  INFO 19476 --- [       Thread-2] com.alibaba.druid.pool.DruidDataSource   : {dataSource-1} closed
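For reference, the kind of test that produces the client-side output above might look like the sketch below. This is only a minimal sketch, not the exact code of HiveTemplateTest: the Druid pool and the HiveServer2 address (10.156.50.36:10000) are taken from the log, while the credentials, table name and columns are hypothetical placeholders.

package bigdata.hivedemo;

import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.junit4.SpringRunner;

import com.alibaba.druid.pool.DruidDataSource;

@RunWith(SpringRunner.class)
@SpringBootTest
public class HiveTemplateTest {

    @Test
    public void queryHive() throws Exception {
        // Druid pool over HiveServer2; the address matches the one resolved in the log above.
        DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName("org.apache.hive.jdbc.HiveDriver");
        ds.setUrl("jdbc:hive2://10.156.50.36:10000/default");
        ds.setUsername("hive");   // hypothetical credentials
        ds.setPassword("hive");

        // Hypothetical query and table; the server log in 4.9.2 suggests the real test
        // runs a join that Hive converts into a map join.
        JdbcTemplate jdbc = new JdbcTemplate(ds);
        List<Map<String, Object>> rows = jdbc.queryForList(
                "select id, age, create_time from user_t");
        for (Map<String, Object> row : rows) {
            // Print each row with the ">>" prefix seen in the test output above.
            StringBuilder line = new StringBuilder(">>");
            for (Object value : row.values()) {
                line.append(' ').append(value);
            }
            System.out.println(line);
        }
        ds.close();
    }
}

Creating the pool inside the test keeps the sketch self-contained; in the real project the DataSource is more likely declared as a Spring bean, which is why the log shows the container initializing and closing {dataSource-1}.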

 

4.9.2 Hive server-side log

The corresponding HiveServer2 log for this query shows the join being converted into a map join: the small side is dumped into a hash table by a local task, and the query then runs as a single map-only MapReduce job (no reducers).

WARNING: Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
Query ID = zkkafka_20190524141713_9c0be3e7-7a7e-4bea-a77a-903dbce14494
Total jobs = 1
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/zkkafka/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/zkkafka/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
2019-05-24 14:17:22	Starting to launch local task to process map join;	maximum memory = 477626368
2019-05-24 14:17:24	Dump the side-table for tag: 0 with group count: 3 into file: file:/home/zkkafka/hive/tmp/HiveJobsLog/dcb2c15c-eca8-403b-9d03-6f9ca738b466/hive_2019-05-24_14-17-13_893_1274073688230085866-1/-local-10004/HashTable-Stage-3/MapJoin-mapfile00--.hashtable
2019-05-24 14:17:24	Uploaded 1 File to: file:/home/zkkafka/hive/tmp/HiveJobsLog/dcb2c15c-eca8-403b-9d03-6f9ca738b466/hive_2019-05-24_14-17-13_893_1274073688230085866-1/-local-10004/HashTable-Stage-3/MapJoin-mapfile00--.hashtable (314 bytes)
2019-05-24 14:17:24	End of local task; Time Taken: 2.273 sec.
Execution completed successfully
MapredLocal task succeeded
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_1558676658010_0005, Tracking URL = http://master1:8088/proxy/application_1558676658010_0005/
Kill Command = /home/zkkafka/hadoop/bin/hadoop job  -kill job_1558676658010_0005
Hadoop job information for Stage-3: number of mappers: 1; number of reducers: 0
2019-05-24 14:17:36,247 Stage-3 map = 0%,  reduce = 0%
2019-05-24 14:17:43,832 Stage-3 map = 100%,  reduce = 0%, Cumulative CPU 2.78 sec
MapReduce Total cumulative CPU time: 2 seconds 780 msec
Ended Job = job_1558676658010_0005
MapReduce Jobs Launched: 
Stage-Stage-3: Map: 1   Cumulative CPU: 2.78 sec   HDFS Read: 6897 HDFS Write: 123 SUCCESS
Total MapReduce CPU Time Spent: 2 seconds 780 msec
OK

Support the developer

Writing something free, driven purely by interest, brings both joy and sweat. I hope you like my work and will consider supporting it: if you can, chip in a little money (Alipay, WeChat, and the QQ group are supported); if not, your encouragement is just as welcome. Thank you all.

Personal homepage: http://knight-black-bob.iteye.com/


 
 
Thank you for your support; I will keep making this better!