A brief introduction to operating the HDFS file system with Java.
First, grant NameNode access rights: either modify the hdfs-site.xml file or change the permissions of the target file directory.
This walkthrough modifies hdfs-site.xml for testing; add the following property inside the <configuration> element:
<property><name>dfs.permissions.enabled</name><value>false</value></property>
Second, create a new project in the Eclipse environment to run the test.
Add the required jar packages manually; they are located under the Hadoop installation (decompressed) directory,
as follows:
hadoop-2.7.3\share\hadoop\common\hadoop-common-2.7.3.jar
hadoop-2.7.3\share\hadoop\common\lib\*.jar
hadoop-2.7.3\share\hadoop\hdfs\hadoop-hdfs-2.7.3.jar
After adding the jar packages, write the code that connects to the HDFS file system.
Connecting to HDFS requires the following steps:
1. Create an org.apache.hadoop.conf.Configuration that holds the client configuration (server address, upload/download settings). It is configured as follows:
Package com.huaqin.hdfs.conf; Import org.apache.hadoop.conf.Configuration; Public class extends configuration{ public defaultdfsclientconfigration () { this. Set ("Fs.defaultfs", "hdfs://*.*.*.*:9000"); this. Set ("Dfs.replication", "2");} }
2. Write a utils class that encapsulates the common file-operation methods.
This requires org.apache.hadoop.fs.FileSystem,
created from the configuration class above:
FileSystem FileSystem = filesystem.get (New defaultdfsclientconfigration ());
After creation you can operate on HDFS; the code is encapsulated as follows:
Packagecom.huaqin.hdfs.utils;ImportJava.io.File;ImportJava.io.FileInputStream;Importjava.io.FileNotFoundException;Importjava.io.IOException;ImportJava.util.Map;ImportOrg.apache.hadoop.fs.FSDataInputStream;ImportOrg.apache.hadoop.fs.FSDataOutputStream;ImportOrg.apache.hadoop.fs.FileStatus;ImportOrg.apache.hadoop.fs.FileSystem;ImportOrg.apache.hadoop.fs.Path;Importorg.apache.hadoop.io.IOUtils;Importcom.huaqin.hdfs.conf.DeFaultDfsClientConfigration; Public classHdfsfileutils { Public DoubleProgressBar; PublicHdfsfileutils ()throwsIOException {//loading using the default classFileSystem = Filesystem.get (Newdefaultdfsclientconfigration ()); } PublicHdfsfileutils (defaultdfsclientconfigration clientconfration)throwsIOException {//loading using the specified classFileSystem =Filesystem.get (clientconfration); } //default Client Configuration class PrivateFileSystem FileSystem; Public voidreloadclientconfigration (defaultdfsclientconfigration clientconfration) {filesystem.setconf (clientConfratio n); } PublicFilestatus[] List (String fileName)throwsFileNotFoundException, IllegalArgumentException, IOException {//TODO auto-generated Method Stubfilestatus[] Statuslist = This. Filesystem.liststatus (NewPath (fileName)); returnstatuslist; } Public voidText (String fileName)throwsIllegalArgumentException, IOException {//TODO auto-generated Method StubFsdatainputstream InputStream = This. Filesystem.open (NewPath (fileName)); Ioutils.copybytes (InputStream, System.out, filesystem.getconf ()); } //Uploading Files Public voidUpload (string src, string dest)throwsIOException {//TODO auto-generated Method StubFileInputStream in =NewFileInputStream (SRC); Fsdataoutputstream OS= This. Filesystem.create (NewPath (dest),true); Ioutils.copybytes (in, OS,4096,true); } //Deleting Files Public BooleanDeleteFile (String dest)throwsIllegalArgumentException, IOException {//TODO auto-generated Method Stub BooleanSuccess = This. 
Filesystem.delete (NewPath (dest),true); returnsuccess; } //Create a folder Public BooleanMakeDir (String dest)throwsIllegalArgumentException, IOException {return This. Filesystem.mkdirs (NewPath (dest)); } //Download Show Progress Public voidDownload2 (String dest, map<string, integer> descript)throwsIllegalArgumentException, IOException {fsdatainputstream in= Filesystem.open (NewPath (dest)); Descript.put ("ByteSize", In.available ()); Descript.put ("Current", 0); byte[] bs =New byte[1024]; while(-1! =(In.read (BS))) {Descript.put (' Current ', Descript.get ("current") + 1024); } in.close (); } //Upload Display Progress Public voidUpload2 (string src, String dest, Map<string, long>descript)throwsillegalargumentexception, ioexception {file file=NewFile (SRC); FileInputStream in=Newfileinputstream (file); Fsdataoutputstream out= This. Filesystem.create (NewPath (dest),true); Descript.put ("ByteSize", File.length ()); Descript.put ("Current", 0l); //0.5MB byte[] bs =New byte[1024 * 1024/2]; while(-1! =(In.read (BS))) {Out.write (BS); Descript.put (' Current ', Descript.get ("current") + 1024); } out.close (); In.close (); }}
Third, the following is the JUnit test harness.
Importjava.io.IOException;ImportJava.text.DecimalFormat;ImportJava.util.HashMap;ImportJava.util.Map;ImportOrg.junit.Before;Importorg.junit.Test;Importcom.huaqin.hdfs.utils.HDFSFileUtils; Public classhdfsfileutilsjut {@Before Public voidBefore ()throwsIOException {fileUtils=Newhdfsfileutils (); } hdfsfileutils fileUtils; @Test Public voidTestcreatenewfile ()throwsIOException {//fileutils.upload ("D:\\temp\\helloworld.txt", "/tmp/helloworld.txt");Fileutils.upload ("e:\\devtool\\hadoop-2.7.3.tar.gz", "/hadoop-2.7.3.tar.gz"); } @Test Public voidTesttext ()throwsIllegalArgumentException, IOException {fileutils.text ("/hello.txt"); } @Test Public voidTestdeletefile ()throwsIllegalArgumentException, IOException {BooleanSuccess = Fileutils.deletefile ("/centos-7-x86_64-dvd-1511.iso"); SYSTEM.OUT.PRINTLN (Success); } @Test Public voidTestzmikdirs ()throwsIllegalArgumentException, IOException {BooleanSuccess = Fileutils.makedir ("/tmp"); SYSTEM.OUT.PRINTLN (Success); } @Test Public voidTestdownload2 ()throwsIllegalArgumentException, IOException {Map<string, integer> desc =NewHashmap<>(); Desc.put ("Current", 0); Desc.put ("ByteSize", 0); NewThread (NewRunnable () {@Override Public voidrun () {//TODO auto-generated Method Stub while(true) { Try{Thread.Sleep (500); System.out.printf ("Maxl:%d\tcurrent:%d\tsurplus:%d\n", Desc.get ("ByteSize"), Desc.get ("current"), Desc.get ("ByteSize")-desc.get (" Current)); } Catch(interruptedexception e) {//TODO auto-generated Catch blockE.printstacktrace (); }}}). 
Start (); Fileutils.download2 ("/hadoop-2.7.3.tar.gz", DESC); } @Test Public voidTestupload2 ()throwsIllegalArgumentException, IOException {decimalformat df=NewDecimalFormat ("0%"); Map<string, long> desc =NewHashmap<string, long>(); Desc.put ("Current", 0l); Desc.put ("ByteSize", 0l); NewThread (NewRunnable () {@Override Public voidrun () {//TODO auto-generated Method Stub while(true) { Try{Thread.Sleep (500); System.out.printf ("Maxl:%d\tcurrent:%d\tsurplus:%d\tprogressbar:%s\n", Desc.get ("ByteSize"), Desc.get ("current"), Desc.get (" ByteSize ")-desc.get (" current "), Df.format ((Desc.get (" current ") +0.0)/desc.get (" ByteSize "))); } Catch(interruptedexception e) {//TODO auto-generated Catch blockE.printstacktrace (); }}}). Start (); Fileutils.upload2 ("D:\\hadoop\\centos-7-x86_64-dvd-1511.iso", "/centos-7-x86_64-dvd-1511.iso", DESC); } }
Hadoop HDFS (Java API)