package com.gw;

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;
/**
 * @author Wangweifeng
 * @description Implements common HDFS operations: create and delete folders, create and delete
 *              files, get the name information of every node in the HDFS cluster, rename files,
 *              read the contents of an HDFS file, upload a local file to HDFS, download an HDFS
 *              file to the local file system, and list all files in a directory (when a
 *              subdirectory is listed, the files inside it are shown hierarchically).
 */
public class HdfsUtil {

    // Initialize the configuration parameters
    static Configuration conf = new Configuration();
    static {
        String path = "/home/hadoop/hadoop/etc/hadoop/";
        conf.addResource(new Path(path + "core-site.xml"));
        conf.addResource(new Path(path + "hdfs-site.xml"));
        conf.addResource(new Path(path + "mapred-site.xml"));
    }
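    // Note: FileSystem.get(conf) chooses its implementation from fs.defaultFS (fs.default.name in
    // older Hadoop releases), so the core-site.xml loaded above must point at the NameNode;
    // otherwise the local file system is returned and the DistributedFileSystem cast further down
    // would fail.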
    // Get the FileSystem
    public static FileSystem getFs() throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs;
    }
    // Create a new folder in HDFS
    public static void mkDir(String path) throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(path);
        boolean isOk = fs.mkdirs(srcPath);
        if (isOk) {
            System.out.println("create dir ok!");
        } else {
            System.out.println("create dir failure!");
        }
        fs.close();
    }
    // Delete a folder or file in HDFS
    public static void delete(String path) throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(path);
        boolean isOk = fs.delete(srcPath, true); // true: delete directories recursively
        if (isOk) {
            System.out.println("delete ok!");
        } else {
            System.out.println("delete failure!");
        }
        fs.close();
    }
    // Create a file in HDFS and write the given contents
    public static void createFile(String path, byte[] contents)
            throws IOException {
        FileSystem fs = getFs();
        Path dstPath = new Path(path); // target path
        // Open an output stream
        FSDataOutputStream outputStream = fs.create(dstPath);
        outputStream.write(contents);
        outputStream.close();
        fs.close();
        System.out.println("file is created successfully!");
    }
    // Create a file in HDFS and write the given string contents
    public static void createFile(String path, String contents)
            throws IOException {
        createFile(path, contents.getBytes("UTF-8"));
    }
    // Rename a file in HDFS
    public static void renameFile(String oldName, String newName)
            throws IOException {
        FileSystem fs = getFs();
        Path oldNamePath = new Path(oldName);
        Path newNamePath = new Path(newName);
        boolean isOk = fs.rename(oldNamePath, newNamePath);
        if (isOk) {
            System.out.println("rename ok!");
        } else {
            System.out.println("rename failure!");
        }
        fs.close();
    }
    // Read the contents of an HDFS file and print them to standard output
    public static void readFilePrint(String path) throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(path);
        // Open an input stream
        InputStream in = fs.open(srcPath);
        try {
            IOUtils.copyBytes(in, System.out, 4096, false); // copy to the standard output stream
        } finally {
            IOUtils.closeStream(in);
        }
    }
    // Read the contents of an HDFS file into a byte array
    public static byte[] readFile(String path) throws IOException {
        FileSystem fs = getFs();
        if (isExist(path)) {
            Path srcPath = new Path(path);
            FSDataInputStream is = fs.open(srcPath);
            FileStatus stat = fs.getFileStatus(srcPath);
            byte[] buffer = new byte[(int) stat.getLen()];
            is.readFully(0, buffer);
            is.close();
            fs.close();
            return buffer;
        } else {
            throw new IOException("The file is not found.");
        }
    }
    // Upload a local file to HDFS
    public static void uploadFromLocal(String hdfsPath, String localPath)
            throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(localPath);
        Path desPath = new Path(hdfsPath);
        fs.copyFromLocalFile(srcPath, desPath);
        // Print the file paths
        System.out.println("Upload to " + conf.get("fs.default.name"));
        System.out.println("------------list files------------" + "\n");
        FileStatus[] fileStatus = fs.listStatus(desPath);
        for (FileStatus status : fileStatus) {
            System.out.println(status.getPath());
        }
        fs.close();
    }
    // Download an HDFS file to the local file system
    public static void downloadToLocal(String hdfsPath, String localPath)
            throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(hdfsPath);
        Path desPath = new Path(localPath);
        fs.copyToLocalFile(srcPath, desPath);
        fs.close();
        System.out.println("Download success!");
    }
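    // Note: copyToLocalFile goes through the checksummed local file system, so a .crc file may be
    // written next to the downloaded file; if your Hadoop version provides it, the overload
    // copyToLocalFile(false, srcPath, desPath, true) uses the raw local file system and skips the
    // checksum file.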
    // Check whether a file or folder exists: returns true if it exists, otherwise false
    public static boolean isExist(String path) throws IOException {
        FileSystem fs = getFs();
        Path srcPath = new Path(path);
        boolean isExist = false;
        if (fs.isDirectory(srcPath)) {
            isExist = true;
        } else if (fs.isFile(srcPath)) {
            isExist = true;
        }
        return isExist;
    }
    // Get the status of all DataNodes in the cluster
    public static void getDataNodeInfo() throws IOException {
        FileSystem fs = getFs();
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeInfo = hdfs.getDataNodeStats();
        for (int i = 0; i < dataNodeInfo.length; i++) {
            System.out.println("DataNode_" + i + "_name: " + dataNodeInfo[i].getHostName()
                    + " DataNode_" + i + "_ip: " + dataNodeInfo[i].getInfoAddr());
        }
    }
    public static void main(String[] args) throws IOException {
        System.out.println("... start testing HDFS ...");
        // 1. Create a folder named hdtest2, and create a folder named test inside it to test deletion
        mkDir("/hdtest2");
        mkDir("/hdtest2/test");
        // 2. Delete /hdtest2/test
        delete("/hdtest2/test");
        // 3. Create a file in HDFS and write "HelloWorld"
        createFile("/hdtest2/helloworld.txt", "HelloWorld of hdfs!");
        // 4. Rename /hdtest2/helloworld.txt to /hdtest2/helloworld.ini
        renameFile("/hdtest2/helloworld.txt", "/hdtest2/helloworld.ini");
        // 5. Print the contents of the file
        readFilePrint("/hdtest2/helloworld.ini");
        // 6. Read the contents of the file and create the /hdtest2/test.txt file
        createFile("/hdtest2/test.txt", readFile("/hdtest2/helloworld.ini"));
        // 7. Upload the local desktop file my.cnf to /hdtest2
        uploadFromLocal("/hdtest2/", "/home/hadoop/desktop/my.cnf");
        // 8. Download /hdtest2/test.txt to the desktop
        downloadToLocal("/hdtest2/test.txt", "/home/hadoop/desktop");
        // 9. Print DataNode information
        getDataNodeInfo();
    }
}
HDFS Java API experiment
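If the *-site.xml files are not available at the hard-coded path loaded in the static block above, the same connection can also be configured programmatically. A minimal sketch, assuming a NameNode reachable at hdfs://localhost:9000 (a placeholder address; the class name HdfsConnectSketch is hypothetical, adjust both to your cluster):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsConnectSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // fs.defaultFS (fs.default.name in older releases) tells FileSystem.get() which file system to use
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        FileSystem fs = FileSystem.get(conf);
        System.out.println(fs.getUri()); // expected to print hdfs://localhost:9000
        fs.close();
    }
}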