This post demonstrates basic file operations on HDFS in Hadoop. You can create the test files yourself, or experiment by uploading an existing file directly.
The code is as follows:
package hadoop1;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Demonstrates basic HDFS file-system operations via the Hadoop {@link FileSystem} API:
 * connects to a remote NameNode and lists the entries directly under the root directory.
 *
 * <p>Earlier experiments from the original post (reading over a {@code URL} with
 * {@code FsUrlStreamHandlerFactory}, {@code mkdirs}/{@code exists}/{@code delete},
 * and uploading a local file with {@code FSDataOutputStream}) were commented-out
 * dead code and have been removed.
 */
public class HDFs {

    /**
     * Connects to HDFS and prints, for each entry under {@code /}:
     * its full path, its permission bits, and its replication factor.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Hadoop configuration keys are case-sensitive: the key must be
        // "fs.defaultFS" exactly, otherwise the setting is silently ignored
        // and the client falls back to the local file system.
        conf.set("fs.defaultFS", "hdfs://192.168.83.100:9000");

        // try-with-resources ensures the FileSystem handle is closed even on error.
        try (FileSystem fileSystem = FileSystem.get(conf)) {
            // List every file and directory directly under the root.
            FileStatus[] statuses = fileSystem.listStatus(new Path("/"));
            for (FileStatus status : statuses) {
                System.out.println(status.getPath());        // full HDFS path
                System.out.println(status.getPermission());  // permission bits (e.g. rwxr-xr-x)
                System.out.println(status.getReplication()); // replication factor
            }
        } catch (IOException e) {
            // NOTE(review): printStackTrace is acceptable for a demo; a real
            // application should log and/or propagate the failure.
            e.printStackTrace();
        }
    }
}
Hadoop file operations in Java (Part II)