package cn.itcast.bigdata.hdfs;

import java.net.URI;
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

/**
 * Client demo for operating on HDFS with an explicit user identity.
 *
 * <p>By default the HDFS client API reads the JVM parameter
 * {@code -DHADOOP_USER_NAME=hadoop} as its user identity. Alternatively the
 * identity can be passed in when constructing the client {@link FileSystem}
 * object, as done in {@link #init()} below.
 */
public class HdfsClientDemo {

    FileSystem fs = null;
    Configuration conf = null;

    @Before
    public void init() throws Exception {
        conf = new Configuration();
        // NOTE: Hadoop configuration keys are case-sensitive; the key must be
        // exactly "fs.defaultFS" or the setting is silently ignored.
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Get a client instance object for file system operations.
        /* fs = FileSystem.get(conf); */
        // The URI and user identity can be passed in directly;
        // the last parameter is the user name.
        fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "hadoop");
    }

    /** Uploads a local file to HDFS. */
    @Test
    public void testUpload() throws Exception {
        Thread.sleep(2000);
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }

    /** Downloads a file from HDFS to the local file system. */
    @Test
    public void testDownload() throws Exception {
        fs.copyToLocalFile(new Path("/access.log.copy"), new Path("d:/"));
        fs.close();
    }

    /** Dumps every configuration entry the client loaded. */
    @Test
    public void testConf() {
        Iterator<Entry<String, String>> iterator = conf.iterator();
        while (iterator.hasNext()) {
            Entry<String, String> entry = iterator.next();
            // BUG FIX: the original printed getValue() twice and never showed
            // the key; print "key--value" as intended.
            System.out.println(entry.getKey() + "--" + entry.getValue()); // conf loaded content
        }
    }

    /** Creates a directory (parents included). */
    @Test
    public void makdirTest() throws Exception {
        boolean mkdirs = fs.mkdirs(new Path("/AAA/BBB"));
        System.out.println(mkdirs);
    }

    /** Deletes a path. */
    @Test
    public void deleteTest() throws Exception {
        boolean delete = fs.delete(new Path("/aaa"), true); // true = recursive deletion
        System.out.println(delete);
    }

    /** Lists the root directory, then recursively lists all files under it. */
    @Test
    public void listTest() throws Exception {
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.err.println(fileStatus.getPath() + "=================" + fileStatus.toString());
        }
        // listFiles will find all the files recursively.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus next = listFiles.next();
            String name = next.getPath().getName();
            Path path = next.getPath();
            System.out.println(name + "---" + path.toString());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Get a client instance object for file system operations.
        FileSystem fs = FileSystem.get(conf);
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }
}
// NOTE(review): lines below were an exact byte-for-byte duplicate of the
// class above — almost certainly a copy-paste/extraction artifact. Two
// classes with the same name cannot coexist in one file; keep only one copy.
package cn.itcast.bigdata.hdfs;

import java.net.URI;
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

/**
 * Client demo for operating on HDFS with an explicit user identity.
 *
 * <p>By default the HDFS client API reads the JVM parameter
 * {@code -DHADOOP_USER_NAME=hadoop} as its user identity. Alternatively the
 * identity can be passed in when constructing the client {@link FileSystem}
 * object, as done in {@link #init()} below.
 */
public class HdfsClientDemo {

    FileSystem fs = null;
    Configuration conf = null;

    @Before
    public void init() throws Exception {
        conf = new Configuration();
        // Hadoop configuration keys are case-sensitive: "fs.defaultFS".
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Get a client instance object for file system operations.
        /* fs = FileSystem.get(conf); */
        // The URI and user identity can be passed in directly;
        // the last parameter is the user name.
        fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "hadoop");
    }

    /** Uploads a local file to HDFS. */
    @Test
    public void testUpload() throws Exception {
        Thread.sleep(2000);
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }

    /** Downloads a file from HDFS to the local file system. */
    @Test
    public void testDownload() throws Exception {
        fs.copyToLocalFile(new Path("/access.log.copy"), new Path("d:/"));
        fs.close();
    }

    /** Dumps every configuration entry the client loaded. */
    @Test
    public void testConf() {
        Iterator<Entry<String, String>> iterator = conf.iterator();
        while (iterator.hasNext()) {
            Entry<String, String> entry = iterator.next();
            // BUG FIX: original printed getValue() twice; show key and value.
            System.out.println(entry.getKey() + "--" + entry.getValue()); // conf loaded content
        }
    }

    /** Creates a directory (parents included). */
    @Test
    public void makdirTest() throws Exception {
        boolean mkdirs = fs.mkdirs(new Path("/AAA/BBB"));
        System.out.println(mkdirs);
    }

    /** Deletes a path. */
    @Test
    public void deleteTest() throws Exception {
        boolean delete = fs.delete(new Path("/aaa"), true); // true = recursive deletion
        System.out.println(delete);
    }

    /** Lists the root directory, then recursively lists all files under it. */
    @Test
    public void listTest() throws Exception {
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.err.println(fileStatus.getPath() + "=================" + fileStatus.toString());
        }
        // listFiles will find all the files recursively.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus next = listFiles.next();
            String name = next.getPath().getName();
            Path path = next.getPath();
            System.out.println(name + "---" + path.toString());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Get a client instance object for file system operations.
        FileSystem fs = FileSystem.get(conf);
        fs.copyFromLocalFile(new Path("G:/access.log"), new Path("/access.log.copy"));
        fs.close();
    }
}
Java API operations on HDFS for Hadoop