import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;
/**
 * HDFS Java client programming examples.
 * The same operations are usually performed with the HDFS shell, e.g.:
 *   hadoop fs -put aa.txt /
 *   hadoop fs -get /aa.txt
 *   hadoop fs -ls /
 *   hadoop fs -cat /aa.txt
 */
public class Hdfsutil {
FileSystem fs = null;
@Before
public void Init () throws exception{
Reads the xxx-site.xml configuration file under Classpath and parses its contents into a Conf object
Configuration conf = new configuration ();
You can also manually set the configuration information in the Conf in your code, overwriting the read value in the configuration file
Conf.set ("Fs.defaultfs", "hdfs://node1:9000/");
To obtain a client action instance object for a specific file system, based on the configuration information
FS = Filesystem.get (New URI ("Hdfs://node1:9000/"), conf, "username");
}
/**
* Upload files to compare the underlying wording
*
* @throws Exception
*/
@Test
public void Upload () throws Exception {
Configuration conf = new configuration ();
Conf.set ("Fs.defaultfs", "hdfs://node1:9000/");
FileSystem fs = Filesystem.get (New URI ("Hdfs://node1:9000/"), conf, "username");
Path DST = new Path ("Hdfs://node1:9000/aa/aa2.txt");
Fsdataoutputstream OS = fs.create (DST);
FileInputStream is = new FileInputStream ("D:/aa.txt");
Ioutils.copy (is, OS);
}
/**
* Upload files, package the wording
* @throws Exception
* @throws IOException
*/
@Test
public void Upload2 () throws Exception, ioexception{
Fs.copyfromlocalfile (New Path ("D:/aa.txt"), New Path ("Hdfs://node1:9000/bb.txt"));
}
/**
* Download File
* @throws Exception
* @throws IllegalArgumentException
*/
@Test
public void Download () throws Exception {
Fs.copytolocalfile (New Path ("Hdfs://node1:9000/aa/bb.txt"), New Path ("D:/aa_down.txt"));
}
/**
* View file information
* @throws IOException
* @throws IllegalArgumentException
* @throws FileNotFoundException
*
*/
@Test
public void Listfiles () throws FileNotFoundException, IllegalArgumentException, IOException {
Listfiles lists the file information, and provides recursive traversal
remoteiterator<locatedfilestatus> files = fs.listfiles (new Path ("/"), true);
while (Files.hasnext ()) {
Locatedfilestatus file = Files.next ();
Path FilePath = File.getpath ();
String fileName = Filepath.getname ();
System.out.println (FileName);
}
System.out.println ("---------------------------------");
Liststatus can list information about files and folders, but does not provide self-recursive traversal
filestatus[] Liststatus = fs.liststatus (New Path ("/"));
for (Filestatus status:liststatus) {
String name = Status.getpath (). GetName ();
SYSTEM.OUT.PRINTLN (name + (Status.isdirectory ()? "is dir": "Is File"));
}
}
/**
* Create Folder
* @throws Exception
* @throws IllegalArgumentException
*/
@Test
public void mkdir () throws IllegalArgumentException, Exception {
Fs.mkdirs (New Path ("/AAA/BBB/CCC"));
}
/**
* Delete files or folders
* @throws IOException
* @throws IllegalArgumentException
*/
@Test
public void Rm () throws IllegalArgumentException, IOException {
Fs.delete (New Path ("/aa"), true);
}
public static void Main (string[] args) throws Exception {
Configuration conf = new configuration ();
Conf.set ("Fs.defaultfs", "hdfs://node1:9000/");
FileSystem fs = Filesystem.get (conf);
Fsdatainputstream is = Fs.open (new Path ("/jdk-7u65-linux-i586.tar.gz"));
FileOutputStream OS = new FileOutputStream ("c:/jdk7.tgz");
Ioutils.copy (is, OS);
}
}
// Hadoop HDFS Java programming