$ sudo yum install hive hive-metastore hive-server
$ sudo yum install mysql-server
$ sudo service mysqld start
$ cd ~
$ wget 'http://cdn.mysql.com/Downloads/Connector-J/mysql-connector-java-5.1.25.tar.gz'
 $ tar xzf mysql-connector-java-5.1.25.tar.gz
$ sudo cp mysql-connector-java-5.1.25/mysql-connector-java-5.1.25-bin.jar /usr/lib/hive/lib/
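An optional sanity check, just to confirm the JDBC driver landed where Hive will look for it:
$ ls -l /usr/lib/hive/lib/mysql-connector-java-5.1.25-bin.jar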
$ sudo /usr/bin/mysql_secure_installation
[...]
Enter current password for root (enter for none):
 OK, successfully used password, moving on ...
 [...]
Set root password? [Y/n] Y
New password: hadoophive
Re-enter new password: hadoophive
 Remove anonymous users? [y/n] Y
 [...]
 Disallow root login remotely? [y/n] N
 [...]
Remove test database and access to it? [Y/n] Y
 [...]
 Reload privilege tables now? [y/n] Y
 All done!
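The service command above only starts MySQL for the current session; if the metastore database should survive a reboot, it may also be worth enabling the service at boot time (CentOS/RHEL 6 style):
$ sudo chkconfig mysqld on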
$ mysql -u root -phadoophive
mysql> CREATE DATABASE metastore;
mysql> USE metastore;
mysql> SOURCE /usr/lib/hive/scripts/metastore/upgrade/mysql/hive-schema-0.10.0.mysql.sql;
mysql> CREATE USER 'hive'@'%' IDENTIFIED BY 'hadoophive';
mysql> CREATE USER 'hive'@'localhost' IDENTIFIED BY 'hadoophive';
mysql> REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'hive'@'%';
mysql> REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'hive'@'localhost';
mysql> GRANT SELECT, INSERT, UPDATE, DELETE, LOCK TABLES, EXECUTE ON metastore.* TO 'hive'@'%';
mysql> GRANT SELECT, INSERT, UPDATE, DELETE, LOCK TABLES, EXECUTE ON metastore.* TO 'hive'@'localhost';
mysql> FLUSH PRIVILEGES;
mysql> quit;
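Before moving on, it is worth confirming that the hive account can actually reach the new schema. A quick check (the exact table list depends on the schema version, but metastore tables such as DBS and TBLS should appear):
$ mysql -u hive -phadoophive metastore -e 'SHOW TABLES;'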
$ sudo mv /etc/hive/conf/hive-site.xml /etc/hive/conf/hive-site.xml.bak
$ sudo vim /etc/hive/conf/hive-site.xml
<?xml version="1.0"?>
<configuration>
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost/metastore</value>
    <description>The URL of the MySQL database</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>hadoophive</value>
  </property>
  <property>
    <name>datanucleus.autoCreateSchema</name>
    <value>false</value>
  </property>
  <property>
    <name>datanucleus.fixedDatastore</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.uris</name>
    <value>thrift://127.0.0.1:9083</value>
    <description>IP address (or fully-qualified domain name) and port of the metastore host</description>
  </property>
  <property>
    <name>hive.aux.jars.path</name>
    <value>file:///usr/lib/hive/lib/zookeeper.jar,file:///usr/lib/hive/lib/hbase.jar,file:///usr/lib/hive/lib/hive-hbase-handler-0.10.0-cdh4.2.0.jar,file:///usr/lib/hive/lib/guava-11.0.2.jar</value>
  </property>
</configuration>
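Before starting the services, a quick well-formedness check of the edited file can save a failed startup; a minimal sketch, assuming xmllint (from the libxml2 package) is available:
$ xmllint --noout /etc/hive/conf/hive-site.xml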
$ sudo service hive-metastore start
Starting (hive-metastore): [ OK ]
$ sudo service hive-server start
Starting (hive-server): [ OK ]
$ sudo -u hdfs hadoop fs -mkdir /user/hive
$ sudo -u hdfs hadoop fs -chown hive /user/hive
$ sudo -u hdfs hadoop fs -mkdir /tmp
$ sudo -u hdfs hadoop fs -chmod 777 /tmp
$ sudo -u hdfs hadoop fs -chmod o+t /tmp
$ sudo -u hdfs hadoop fs -mkdir /data
$ sudo -u hdfs hadoop fs -chown hdfs /data
$ sudo -u hdfs hadoop fs -chmod 777 /data
$ sudo -u hdfs hadoop fs -chmod o+t /data
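To verify the directories just created (the listing should show /data, /tmp and /user with the ownership and modes set above):
$ sudo -u hdfs hadoop fs -ls /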
$ sudo chown -R hive:hive /var/lib/hive
$ sudo vim /tmp/kv1.txt
1 www.baidu.com
2 www.google.com
 3 www.sina.com.cn
 4 www.163.com
 5 heylinx.com
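Note that the two columns must be separated by a tab character to match the FIELDS TERMINATED BY "\t" clause in the CREATE TABLE statement below. If inserting literal tabs in vim is awkward, a sketch that writes the same file with explicit tab separators:
$ printf '1\twww.baidu.com\n2\twww.google.com\n3\twww.sina.com.cn\n4\twww.163.com\n5\theylinx.com\n' | sudo tee /tmp/kv1.txt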
$ sudo -u hive hive
Logging initialized using configuration in file:/etc/hive/conf.dist/hive-log4j.properties
Hive history file=/tmp/root/hive_job_log_root_201305140801_825709760.txt
hive> CREATE TABLE IF NOT EXISTS pokes (foo INT, bar STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY "\t" LINES TERMINATED BY "\n";
hive> SHOW TABLES;
OK
pokes
Time taken: 0.415 seconds
hive> LOAD DATA LOCAL INPATH '/tmp/kv1.txt' OVERWRITE INTO TABLE pokes;
Copying data from file:/tmp/kv1.txt
Copying file: file:/tmp/kv1.txt
Loading data to table default.pokes
rmr: DEPRECATED: Please use 'rm -r' instead.
Deleted /user/hive/warehouse/pokes
Table default.pokes stats: [num_partitions: 0, num_files: 1, num_rows: 0, total_size: 79, raw_data_size: 0]
OK
Time taken: 1.681 seconds
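While still in the Hive shell, a simple query confirms the rows were loaded (a plain SELECT * does not launch a MapReduce job, so it should return the five rows from kv1.txt almost immediately):
hive> SELECT * FROM pokes;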
$ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce

4. Install Impala

$ cd /etc/yum.repos.d/
 $ sudo wget http://archive.cloudera.com/impala/redhat/6/x86_64/impala/cloudera-impala.repo
$ sudo yum install impala impala-shell
$ sudo yum install impala-server impala-state-store
$ sudo vim /etc/hadoop/conf/hdfs-site.xml
...
<property>
  <name>dfs.client.read.shortcircuit</name>
  <value>true</value>
</property>
<property>
  <name>dfs.domain.socket.path</name>
  <value>/var/run/hadoop-hdfs/dn._PORT</value>
</property>
<property>
  <name>dfs.client.file-block-storage-locations.timeout</name>
  <value>3000</value>
</property>
<property>
  <name>dfs.datanode.hdfs-blocks-metadata.enabled</name>
  <value>true</value>
</property>
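The dfs.domain.socket.path value points under /var/run/hadoop-hdfs; that directory is normally created by the hadoop-hdfs packages, but it does no harm to confirm it exists before restarting the DataNode:
$ ls -ld /var/run/hadoop-hdfs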
$ sudo cp -rpa /etc/hadoop/conf/core-site.xml /etc/impala/conf/
$ sudo cp -rpa /etc/hadoop/conf/hdfs-site.xml /etc/impala/conf/
$ sudo service hadoop-hdfs-datanode restart
$ sudo service impala-state-store restart
$ sudo service impala-server restart
$ sudo /usr/java/default/bin/jps
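As a final smoke test, you can connect with impala-shell (which targets the local impalad on its default port) and query the table created through Hive. Depending on the Impala release, you may first need to run REFRESH (older releases) or INVALIDATE METADATA so that Impala picks up metastore changes made outside of it:
$ impala-shell
> SHOW TABLES;
> SELECT * FROM pokes;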