The basic operations of the HDFS API are performed through the org.apache.hadoop.fs.FileSystem
class; here are some common operations:
package hdfsapi;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import junit.framework.TestCase;

/**
 * Demonstrates the basic HDFS operations exposed by
 * {@link org.apache.hadoop.fs.FileSystem}: creating directories and files,
 * renaming, uploading local files, listing a directory, and inspecting
 * block locations.
 *
 * <p>NOTE(review): every test talks to a live cluster at {@code HDFS_URL};
 * these are integration tests, not unit tests. Class name kept lowercase
 * for compatibility with the original file, though Java convention would
 * be {@code HdfsApi}.
 */
public class hdfsapi extends TestCase {

    /** Default NameNode endpoint; 8020 is the standard HDFS RPC port. */
    public static final String HDFS_URL = "hdfs://master:8020";

    /** Creates the directory /liguodong/hdfs (mkdirs is recursive, like mkdir -p). */
    @Test
    public void testHDFSMkdir() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            fs.mkdirs(new Path("/liguodong/hdfs"));
        } finally {
            fs.close(); // release RPC connection to the NameNode
        }
    }

    /** Creates a file and writes a short message into it. */
    @Test
    public void testHDFSMkFile() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            Path path = new Path("/liguodong/hdfs/liguodong.txt");
            FSDataOutputStream fdos = fs.create(path);
            try {
                // Explicit charset: getBytes() without one depends on the
                // platform default encoding.
                fdos.write("Hello Hadoop".getBytes(StandardCharsets.UTF_8));
            } finally {
                fdos.close(); // flush and complete the file on HDFS
            }
        } finally {
            fs.close();
        }
    }

    /** Renames liguodong.txt to love.txt; rename() returns false on failure. */
    @Test
    public void testHDFSRenameFile() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            Path src = new Path("/liguodong/hdfs/liguodong.txt");
            Path dst = new Path("/liguodong/hdfs/love.txt");
            System.out.println(fs.rename(src, dst));
        } finally {
            fs.close();
        }
    }

    /** Uploads a local file using the high-level copyFromLocalFile helper. */
    @Test
    public void testHDFSPutLocalFile1() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            // Debugging on Windows, so a Windows path is used here;
            // on Linux a Linux path is required.
            Path srcPath = new Path("G:/liguodong.txt");
            Path destPath = new Path("/liguodong/hdfs");
            fs.copyFromLocalFile(srcPath, destPath);
        } finally {
            fs.close();
        }
    }

    /** Uploads a local file manually: local InputStream streamed into fs.create(). */
    @Test
    public void testHDFSPutLocalFile2() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            // Debugging on Windows, so a Windows path is used here;
            // on Linux a Linux path is required.
            String srcPath = "G:/oncemore.txt";
            Path destPath = new Path("/liguodong/hdfs/kkk.txt");
            InputStream is = new BufferedInputStream(
                    new FileInputStream(new File(srcPath)));
            FSDataOutputStream fdos = fs.create(destPath);
            try {
                // 4094 reproduced from the original; presumably 4096 was
                // intended as the buffer size — TODO confirm.
                IOUtils.copyBytes(is, fdos, 4094);
            } finally {
                // copyBytes(in, out, buffSize) does not close its streams.
                IOUtils.closeStream(is);
                IOUtils.closeStream(fdos);
            }
        } finally {
            fs.close();
        }
    }

    /** Prints the path of every entry directly under /liguodong/hdfs. */
    @Test
    public void testHDFSListFiles() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            FileStatus[] files = fs.listStatus(new Path("/liguodong/hdfs"));
            for (FileStatus file : files) {
                System.out.println(file.getPath().toString());
            }
        } finally {
            fs.close();
        }
    }

    /** Prints the host names holding each block of /liguodong/hdfs/kkk.txt. */
    @Test
    public void testHDFSGetBlockInfo() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URL), conf);
        try {
            Path path = new Path("/liguodong/hdfs/kkk.txt");
            FileStatus fileStatus = fs.getFileStatus(path);
            // Query block locations over the whole file: offset 0 .. length.
            BlockLocation[] blkLoc =
                    fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            for (BlockLocation loc : blkLoc) {
                for (int i = 0; i < loc.getHosts().length; i++) {
                    // Host name of each DataNode replica holding this block.
                    // Because this file has only one block, the output is:
                    // slave2, slave1, slave5
                    System.out.println(loc.getHosts()[i]);
                }
            }
        } finally {
            fs.close();
        }
    }
}
HDFS API Basic Operations