Reading HDFS files with the Java API

The controller below lists an HDFS directory, looks for a file named jsonObj, reads it into a string, and deserializes it with fastjson into a TableStatistic bean; the main() method walks the same directory as a standalone smoke test. FileSystem handles are obtained inside UserGroupInformation.doAs() and cached per user. (BaseController and bean.TableStatistic come from the surrounding project and are not shown.)

import java.io.IOException;
import java.io.InputStream;
import java.security.PrivilegedExceptionAction;
import java.text.SimpleDateFormat;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import bean.TableStatistic;

@Controller
@RequestMapping("/dfview")
public class DataFrameViewController extends BaseController {

    // one UGI and one FileSystem handle cached per user
    private ConcurrentMap<String, UserGroupInformation> cache = new ConcurrentHashMap<String, UserGroupInformation>();
    private ConcurrentMap<String, FileSystem> fileSystemCache = new ConcurrentHashMap<String, FileSystem>();
    private Configuration hadoopConf; // lazily initialized by loadHadoopConf()
    private static final String HDFS_JSON_NAME = "jsonObj";

    @RequestMapping(value = "/getDfviewOfColumn", method = { RequestMethod.GET })
    @ResponseBody
    public TableStatistic getDfviewOfTable(String tableName) throws Exception {
        String user = "bi";
        String dirPath = "/user/cbt/datax/temp_transfer/zzzdes";
        Path homePath = new Path(dirPath);
        FileSystem fs = this.createFileSystem(user);
        FileStatus[] stats = fs.listStatus(homePath);
        StringBuffer txtContent = new StringBuffer();
        for (int i = 0; i < stats.length; ++i) {
            if (stats[i].isFile()) {
                FileStatus file = stats[i];
                if (HDFS_JSON_NAME.equalsIgnoreCase(file.getPath().getName())) {
                    InputStream in = fs.open(file.getPath());
                    byte[] b = new byte[1];
                    while (in.read(b) != -1) {
                        // string concatenation, one byte at a time
                        txtContent.append(new String(b));
                    }
                    in.close();
                    break;
                }
            }
        }
        TableStatistic ts = JSON.parseObject(txtContent.toString(), TableStatistic.class);
        return ts;
    }

    public static void main(String[] args) throws Exception {
        DataFrameViewController aaa = new DataFrameViewController();
        FileSystem fs = aaa.createFileSystem("bi");
        Path homePath = new Path("/user/cbt/datax/temp_transfer/zzzdes");
        System.out.println("***********************************");
        FileStatus[] stats = fs.listStatus(homePath);
        for (int i = 0; i < stats.length; ++i) {
            if (stats[i].isFile()) {
                FileStatus file = stats[i];
                StringBuffer txtContent = new StringBuffer();
                if ("jsonObj".equalsIgnoreCase(file.getPath().getName())) {
                    InputStream in = fs.open(file.getPath());
                    byte[] b = new byte[1];
                    while (in.read(b) != -1) {
                        // string concatenation, one byte at a time
                        txtContent.append(new String(b));
                    }
                    // IOUtils.copyBytes(fs.open(file.getPath()), System.out, 4096, false);
                    in.close();
                    // fs.close();
                }
                System.out.print(txtContent.toString());
                System.out.println("************************************************");
                JSONObject jb = JSON.parseObject(txtContent.toString());
                System.out.println("********!!!!!:" + jb.get("colUnique"));
                TableStatistic ts = JSON.parseObject(txtContent.toString(), TableStatistic.class);
                System.out.println("********!!!!! : " + ts.getColUnique().toString());
            } else if (stats[i].isDirectory()) {
                System.out.println(stats[i].getPath().toString());
            } else if (stats[i].isSymlink()) {
                System.out.println("&&&&&&&&" + stats[i].getPath().toString());
            }
        }
        FsStatus fsStatus = fs.getStatus(homePath);
    }

    public FileSystem createFileSystem(String user) throws Exception {
        final Configuration conf = loadHadoopConf();
        conf.set("hadoop.job.ugi", user);
        // conf.set("HADOOP_USER_NAME", user);
        if (fileSystemCache.get(user) != null) {
            return fileSystemCache.get(user);
        }
        UserGroupInformation ugi = getProxyUser(user);
        // obtain the FileSystem while running as the target user
        FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        });
        fileSystemCache.put(user, fs);
        return fs;
    }

    // SimpleDateFormat is not thread-safe, so keep one instance per thread
    public static final ThreadLocal<SimpleDateFormat> appDateFormat = new ThreadLocal<SimpleDateFormat>() {
        @Override
        public SimpleDateFormat initialValue() {
            return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        }
    };

    private static final String[] HADOOP_CONF_FILES = { "core-site.xml", "hdfs-site.xml" };

    private Configuration loadHadoopConf() {
        if (hadoopConf != null) {
            return hadoopConf;
        }
        Configuration conf = new Configuration();
        for (String fileName : HADOOP_CONF_FILES) {
            try {
                InputStream inputStream = DataFrameViewController.class
                        .getClassLoader().getResourceAsStream(fileName);
                conf.addResource(inputStream);
            } catch (Exception ex) {
                // ignore a missing config file and fall back to defaults
            }
        }
        hadoopConf = conf;
        return hadoopConf;
    }

    public void destroy() {
        for (UserGroupInformation ugi : cache.values()) {
            try {
                FileSystem.closeAllForUGI(ugi);
            } catch (IOException ioe) {
                // logger.error("Exception occurred while closing filesystems for "
                //         + ugi.getUserName(), ioe);
            }
        }
        cache.clear();
    }

    private UserGroupInformation getProxyUser(String user) throws IOException {
        cache.putIfAbsent(user, UserGroupInformation.createRemoteUser(user));
        return cache.get(user);
    }
}
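The one-byte read loop above works, but it makes a read() call per byte and can split multi-byte characters across String conversions. Hadoop's own org.apache.hadoop.io.IOUtils, already hinted at in the commented-out copyBytes line, does buffered copying. Below is a minimal standalone sketch of the same read; the path and the UTF-8 charset are assumptions for illustration, not taken from the original code.

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsReadSketch {
    public static void main(String[] args) throws Exception {
        // picks up core-site.xml / hdfs-site.xml from the classpath
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path file = new Path("/user/cbt/datax/temp_transfer/zzzdes/jsonObj"); // placeholder path
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        InputStream in = null;
        try {
            in = fs.open(file);
            // copy in 4 KB chunks instead of one byte per read() call
            IOUtils.copyBytes(in, out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
        String txtContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
        System.out.println(txtContent);
    }
}

The false argument tells copyBytes not to close the streams itself, so the finally block stays in charge of cleanup.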
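One note on getProxyUser(): despite its name, UserGroupInformation.createRemoteUser() simply fabricates a UGI for the given name without credentials, which only works on clusters using simple authentication. On a Kerberos-secured cluster, the closer fit is Hadoop's real proxy-user mechanism. The sketch below is an illustration under assumptions: it presumes this process is logged in as a superuser that core-site.xml allows to impersonate "bi" via the hadoop.proxyuser.<user>.hosts and hadoop.proxyuser.<user>.groups settings.

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // the real, authenticated user this process runs as
        UserGroupInformation realUser = UserGroupInformation.getLoginUser();
        // act as "bi" on top of the real user's credentials;
        // requires the hadoop.proxyuser.* settings mentioned above
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("bi", realUser);
        FileSystem fs = proxyUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        });
        System.out.println("home dir as bi: " + fs.getHomeDirectory());
    }
}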