If Hadoop has HA (high availability) enabled, you need to set some additional parameters when connecting to HDFS with the Java client:
package cn.itcast.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Hdfs_HA {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the logical nameservice, not a single NameNode
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        // The two NameNodes backing the ns1 nameservice
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "itcast01:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "itcast02:9000");
        // Proxy provider that lets the client fail over to the active NameNode
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

        FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop");
        FileStatus[] list = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : list) {
            System.out.println(fileStatus.toString());
        }
        fs.close();
    }
}
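In practice, the same HA parameters are usually kept in hdfs-site.xml and core-site.xml on the client classpath rather than hard-coded. Below is a minimal sketch under that assumption: it opens the filesystem by its logical URI and reads a file, so it keeps working after a NameNode failover. The class name HdfsHaRead and the path /test/data.txt are hypothetical examples, not from the original post.

package cn.itcast.hadoop.hdfs;

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsHaRead {

    public static void main(String[] args) throws Exception {
        // Picks up fs.defaultFS and the dfs.* HA settings from the
        // *-site.xml files on the classpath
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop");
        // ns1 resolves to whichever NameNode is currently active
        try (InputStream in = fs.open(new Path("/test/data.txt"))) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
        fs.close();
    }
}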
Java HDFS API client connection with HA