There is not much to explain — let's go straight to the code.

Code:
package zhouls.bigdata.myWholeHadoop.HDFS.hdfs4;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
public class HDFStest1
{
/**
* @param args
* @throws ioexception
* @throws urisyntaxexception
*/
public static void main (St Ring[] args) throws IOException, URISyntaxException
{
//TODO auto-generated method Stub
//mkdir ();
Copytohdfs ();
//getfile ();
//listallfile ();
//getfilelocal ();
//rmdir ();
Gethdfsnodes ();
}
//Get HDFs file system
public static FileSystem Getfilesystem () throws Ioexception,urisyntaxexception
{
Configuration conf = new configuration ()//read Config file, such as Core-site.xml
//filesystem FS =filesystem.get (conf);
uri uri = new Uri ("hdfs://hadoopmaster:9000");
FileSystem fs = Filesystem.get (uri,conf);
Return FS;
}
public static void MkDir () throws Ioexception,urisyntaxexception
{
The first step is to get the file system
FileSystem FS =getfilesystem ();
Second step, create the file directory
Fs.mkdirs (New Path ("/zhouls/data"));
Step three, release the resources
Fs.close ();
}
public static void Copytohdfs () throws Ioexception,urisyntaxexception
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Path srcpath=new path ("D://data/weibo.txt");
Path dstpath=new path ("/zhouls/data");
Step Three
Fs.copyfromlocalfile (Srcpath, Dstpath);
Fourth Step
Fs.close ();
}
public static void GetFile () throws IOException, URISyntaxException
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Path srcpath=new path ("/zhouls/data/weibo.txt");
Path dstpath=new path ("D://data/test");
Step Three
Fs.copytolocalfile (Srcpath, Dstpath);
Fourth Step
Fs.close ();
}
public static void Listallfile () throws IOException, URISyntaxException
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Filestatus[] Status =fs.liststatus (New Path ("/zhouls"));
Step Three
path[] Listedpaths = fileutil.stat2paths (status);
Fourth Step
for (Path p:listedpaths)
{
SYSTEM.OUT.PRINTLN (P);
}
Fifth Step
Fs.close ();
}
public static void Getfilelocal () throws IOException, URISyntaxException
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Path path=new path ("/zhouls/data/weibo.txt");
Step Three
Filestatus filestatus=fs.getfilelinkstatus (path);
Fourth Step
blocklocation[] blklocations = fs.getfileblocklocations (filestatus, 0, Filestatus.getlen ());
Fifth Step
for (int i=0;i< blklocations.length;i++)
{
String[] hosts = blklocations[i].gethosts ();
System.out.println ("Block_" +i+ "_location:" +hosts[0]);
}
Sixth step
Fs.close ();
}
public static void RmDir () throws IOException, URISyntaxException
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Fs.delete (New Path ("/zhouls/data"), true);
Step Three
Fs.close ();
}
public static void Gethdfsnodes () throws IOException, URISyntaxException
{
The first step
FileSystem Fs=getfilesystem ();
Step Two
Distributedfilesystem HDFs = (distributedfilesystem) fs;
Step Three
datanodeinfo[] Datanodestats = Hdfs.getdatanodestats ();
Fourth Step
for (int i=0;i< datanodestats.length;i++)
{
System.out.println ("Datanode_" +i+ "_name:" +datanodestats[i].gethostname ());
}
Fifth Step
Fs.close ();
}
}
Hadoop HDFS Programming API Primer Series: Simple Synthesis, Version 1 (Part 4)