Not much to say here; straight to the code.
Code
package zhouls.bigdata.mywholehadoop.hdfs.hdfs1;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;
public class HdfsUtil {

    FileSystem fs = null;

    @Before
    public void init() throws Exception {
        // Read the xxx-site.xml configuration files on the classpath and parse
        // their contents into a Configuration object
        Configuration conf = new Configuration();
        // Configuration values can also be set manually in code, overriding
        // whatever was read from the configuration files
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000/");
        // Obtain a client instance for the concrete file system, based on
        // the configuration information
        fs = FileSystem.get(new URI("hdfs://hadoopmaster:9000/"), conf, "hadoop");
    }
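    /*
     * A hedged variant of init() (my addition, not from the original post):
     * if fs.defaultFS already points at the cluster, FileSystem.get(conf) is
     * enough, and the acting user can be supplied through the HADOOP_USER_NAME
     * property, which Hadoop's login code also reads. The user name "hadoop"
     * and the method name are only assumptions for illustration.
     */
    public void initViaDefaultFs() throws Exception {
        System.setProperty("HADOOP_USER_NAME", "hadoop"); // assumed user name
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000/");
        fs = FileSystem.get(conf);
    }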
    /**
     * Upload a file: the lower-level, stream-based way of writing it
     *
     * @throws Exception
     */
    @Test
    public void upload() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000/");
        FileSystem fs = FileSystem.get(conf);
        Path dst = new Path("hdfs://hadoopmaster:9000/aa/qingshu.txt");
        FSDataOutputStream os = fs.create(dst);
        FileInputStream is = new FileInputStream("c:/qingshu.txt");
        IOUtils.copy(is, os);
    }
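    /*
     * A hedged alternative sketch (my addition, not from the original post):
     * Hadoop ships its own org.apache.hadoop.io.IOUtils, whose copyBytes can
     * close both streams when it finishes. It is fully qualified here to avoid
     * a clash with the commons-io IOUtils imported above.
     */
    @Test
    public void uploadWithHadoopIOUtils() throws Exception {
        FSDataOutputStream os = fs.create(new Path("/aa/qingshu.txt"));
        FileInputStream is = new FileInputStream("c:/qingshu.txt");
        // copyBytes(in, out, bufferSize, close): the final 'true' closes both streams
        org.apache.hadoop.io.IOUtils.copyBytes(is, os, 4096, true);
    }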
    /**
     * Upload a file: the packaged, convenience way of writing it
     * @throws Exception
     * @throws IOException
     */
    @Test
    public void upload2() throws Exception, IOException {
        fs.copyFromLocalFile(new Path("c:/qingshu.txt"),
                new Path("hdfs://hadoopmaster:9000/aaa/bbb/ccc/qingshu2.txt"));
    }
    /**
     * Download a file
     * @throws Exception
     * @throws IllegalArgumentException
     */
    @Test
    public void download() throws Exception {
        fs.copyToLocalFile(new Path("hdfs://hadoopmaster:9000/aa/qingshu2.txt"),
                new Path("c:/qingshu2.txt"));
    }
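    /*
     * A hedged variant (my addition, not from the original post): on a Windows
     * client without the native hadoop.dll/winutils, the four-argument overload
     * with useRawLocalFileSystem=true writes through RawLocalFileSystem and
     * skips the local .crc checksum file.
     */
    @Test
    public void downloadRawLocal() throws Exception {
        // copyToLocalFile(delSrc, src, dst, useRawLocalFileSystem)
        fs.copyToLocalFile(false, new Path("/aa/qingshu2.txt"),
                new Path("c:/qingshu2.txt"), true);
    }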
    /**
     * View file information
     * @throws IOException
     * @throws IllegalArgumentException
     * @throws FileNotFoundException
     */
    @Test
    public void listFiles() throws FileNotFoundException, IllegalArgumentException, IOException {
        // listFiles lists file information only, and supports recursive traversal
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            Path filePath = file.getPath();
            String fileName = filePath.getName();
            System.out.println(fileName);
        }
        System.out.println("---------------------------------");
        // listStatus lists information about both files and directories,
        // but does not recurse by itself
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus status : listStatus) {
            String name = status.getPath().getName();
            System.out.println(name + (status.isDirectory() ? " is dir" : " is file"));
        }
    }
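    /*
     * A hedged follow-up sketch (my addition, not from the original post):
     * LocatedFileStatus already carries block locations, so listFiles can also
     * show where each block of a file is stored.
     */
    @Test
    public void listBlockLocations() throws Exception {
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            System.out.println(file.getPath().getName());
            for (org.apache.hadoop.fs.BlockLocation block : file.getBlockLocations()) {
                // offset and length of the block, plus the DataNodes holding it
                System.out.println("  offset=" + block.getOffset()
                        + " len=" + block.getLength()
                        + " hosts=" + java.util.Arrays.toString(block.getHosts()));
            }
        }
    }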
    /**
     * Create a directory
     * @throws Exception
     * @throws IllegalArgumentException
     */
    @Test
    public void mkdir() throws IllegalArgumentException, Exception {
        fs.mkdirs(new Path("/aaa/bbb/ccc"));
    }
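    /*
     * A hedged variant (my addition, not from the original post): mkdirs also
     * has an overload taking explicit permissions, roughly "hdfs dfs -mkdir"
     * followed by chmod (the effective mode is still subject to the umask).
     */
    @Test
    public void mkdirWithPermission() throws Exception {
        fs.mkdirs(new Path("/aaa/bbb/ccc"),
                new org.apache.hadoop.fs.permission.FsPermission((short) 0755));
    }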
    /**
     * Delete a file or directory
     * @throws IOException
     * @throws IllegalArgumentException
     */
    @Test
    public void rm() throws IllegalArgumentException, IOException {
        // the second argument enables recursive deletion
        fs.delete(new Path("/aa"), true);
    }
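    /*
     * A hedged aside (my addition, not from the original post): delete(path, true)
     * removes recursively and bypasses the HDFS trash. To get trash semantics from
     * Java code, the static helper Trash.moveToAppropriateTrash can be used instead.
     */
    @Test
    public void rmToTrash() throws Exception {
        org.apache.hadoop.fs.Trash.moveToAppropriateTrash(fs, new Path("/aa"), fs.getConf());
    }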
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000/");
        FileSystem fs = FileSystem.get(conf);
        // stream a file out of HDFS to the local disk
        FSDataInputStream is = fs.open(new Path("/jdk-7u65-linux-i586.tar.gz"));
        FileOutputStream os = new FileOutputStream("c:/jdk7.tgz");
        IOUtils.copy(is, os);
    }
}
package zhouls.bigdata.mywholehadoop.hdfs.hdfs1;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUtilHA {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // note the port separator: host:port, not host/port
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoopmaster:9000"), conf, "hadoop");
        fs.copyFromLocalFile(new Path("c:/test.txt"), new Path("hdfs://hadoopmaster:9000/"));
    }
}
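Despite its name, HdfsUtilHA above still addresses one fixed NameNode. A client that actually survives a NameNode failover targets a logical nameservice and lets a failover proxy provider pick the active node. Below is a minimal sketch, assuming a nameservice called ns1 with NameNodes on hadoopmaster and hadoopslave1; the nameservice, host names, and the class name HdfsUtilHaSketch are all illustrative, and in practice these keys usually live in hdfs-site.xml rather than in code.

package zhouls.bigdata.mywholehadoop.hdfs.hdfs1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUtilHaSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // point the client at a logical nameservice instead of a fixed host:port
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "hadoopmaster:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "hadoopslave1:9000");
        // proxy provider that fails over to whichever NameNode is active
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        FileSystem fs = FileSystem.get(conf);
        fs.copyFromLocalFile(new Path("c:/test.txt"), new Path("/"));
    }
}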
Hadoop HDFS Programming API Primer Series: HdfsUtil version 2 (7)