Access files on HDFS and write them to standard output
/*** Access the files on HDFs and write them out to the output station *@paramargs*/ Public Static voidMain (string[] args) {Try { //converts the URL of the HDFS format to a system-recognizedUrl.seturlstreamhandlerfactory (Newfsurlstreamhandlerfactory ()); URL URL=NewURL ("Hdfs://hadoop1:9000/hello"); InputStream in=Url.openstream (); /*** Writes the read data to a file, does not need to control the buffer itself, and does not need to read the input stream yourself *@paramIn input stream *@paramout Stream *@parambuffersize Change to area size *@paramClose whether to close the stream, or false to close the * Ioutils.closestream (in) in the finally; */ioutils.copybytes (in, System.out,1024,true); } Catch(Exception e) {e.printstacktrace (); } }
Get a FileSystem handle
/** * Get filesystem * Read files using hadoop filesystem * * public static throws IOException, urisyntaxexception { = filesystem.get (newnew Configuration ()); return fileSystem; }
Create a folder
/*** Create folder *@throwsurisyntaxexception*/ Public Static voidmkdir ()throwsIOException, urisyntaxexception {FileSystem FileSystem=Getfilesystem (); //Create a folder on HDFs and return the label of whether the creation was successful BooleanSuccessful = Filesystem.mkdirs (NewPath ("/dir1")); if(successful) {SYSTEM.OUT.PRINTLN ("Create folder Succeeded"); } }
Upload
/*** Upload *@throwsurisyntaxexception*/ Public Static voidPutData ()throwsIOException, FileNotFoundException, urisyntaxexception {FileSystem FileSystem=Getfilesystem (); //Create an upload path that returns the output streamFsdataoutputstream OS = filesystem.create (NewPath ("/dir1/readme")); FileInputStream in=NewFileInputStream ("D:\\Program files\\others\\2345soft\\haozip\\2345 Good Press disclaimer"); Ioutils.copybytes (in, OS,1024,true); }
Download
/*** Download *@throwsurisyntaxexception*/ Public Static voidDownload ()throwsIOException, urisyntaxexception {FileSystem FileSystem=Getfilesystem (); Fsdatainputstream in= Filesystem.open (NewPath ("Hdfs://hadoop1:9000/hello")); //closing the stream needs to be closed manually, System.out is also an output stream, and if true, it will not output theIoutils.copybytes (in, System.out, 1024,false); In.close (); }
Delete a file or folder
/**Delete a file or folder * True: Indicates whether to delete recursively, if it is a file, here is True,false is indifferent, * folder must be true, otherwise error *@throwsurisyntaxexception*/ Public Static voidDelete ()throwsIOException, urisyntaxexception {FileSystem FileSystem=Getfilesystem (); BooleanisDeleted = Filesystem.delete (NewPath ("/dir1"),true); if(isDeleted) {System.out.println ("Delete Succeeded"); } }
Traverse Directory
/**Traverse Directory * Call FileSystem's Liststatus method * View file status using Filestatus *@throwsurisyntaxexception*/ Public Static voidList ()throwsIOException, urisyntaxexception {FileSystem FileSystem=Getfilesystem (); Filestatus[] Liststatus= Filesystem.liststatus (NewPath ("/")); for(Filestatus filestatus:liststatus) {String Isdir= Filestatus.isdir ()? " Directory ":" File "; String name=Filestatus.getpath (). GetName (). toString (); System.out.println (Isdir+ "-+" +name); } }
Java Operations for Hadoop HDFS