package com.ibeifeng.hadoop.senior.hdfs;

import java.io.File;
import java.io.FileInputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * HDFS Java API demo: read a file from HDFS and upload a local file to HDFS.
 *
 * @author Wangw
 */
public class HdfsApp {
	/**
	 * Get the FileSystem configured from core-site.xml on the classpath.
	 *
	 * @return the HDFS FileSystem
	 * @throws Exception
	 */
	public static FileSystem getFileSystem() throws Exception {
		// Configuration picks up core-site.xml from the classpath
		Configuration conf = new Configuration();
		// get the filesystem
		FileSystem fileSystem = FileSystem.get(conf);
		System.out.println(fileSystem);
		return fileSystem;
	}
	/**
	 * Read a file from HDFS and print it to standard output.
	 *
	 * @param fileName path of the file on HDFS
	 * @throws Exception
	 */
	public static void read(String fileName) throws Exception {
		// get the filesystem
		FileSystem fileSystem = getFileSystem();
		// read path
		Path readPath = new Path(fileName);
		// open the file
		FSDataInputStream inStream = fileSystem.open(readPath);
		try {
			// copy the stream to standard output
			IOUtils.copyBytes(inStream, System.out, 4096, false);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// close the stream
			IOUtils.closeStream(inStream);
		}
	}
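	// A variant sketch (not in the original): the same read done with
	// try-with-resources, which works because FSDataInputStream is Closeable.
	// The method name readWithResources is hypothetical.
	public static void readWithResources(String fileName) throws Exception {
		FileSystem fileSystem = getFileSystem();
		try (FSDataInputStream inStream = fileSystem.open(new Path(fileName))) {
			// the stream is closed automatically, even if copyBytes throws
			IOUtils.copyBytes(inStream, System.out, 4096, false);
		}
	}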
	public static void main(String[] args) throws Exception {
		// read a file from HDFS and print it
		String fileName = "/user/wangw/mapreduce/wordcount/wc.input";
		read(fileName);

		// get the filesystem
		FileSystem fileSystem = getFileSystem();
		// write path on HDFS
		String putFileName = "/user/wangw/put-wc.input";
		Path writePath = new Path(putFileName);
		// output stream to HDFS
		FSDataOutputStream outStream = fileSystem.create(writePath);
		// input stream for the local file
		FileInputStream inStream = new FileInputStream(new File(
				"/opt/modules/hadoop-2.5.0/wc.input"));
		// stream read/write
		try {
			// copy the local file to HDFS
			IOUtils.copyBytes(inStream, outStream, 4096, false);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// close both streams
			IOUtils.closeStream(inStream);
			IOUtils.closeStream(outStream);
		}
	}
}
This example uses the HDFS Java API to read a file from HDFS and to upload a local file to HDFS.
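For uploads like the one in main, the FileSystem API also offers a one-call alternative, copyFromLocalFile, which opens, copies, and closes the streams itself. A minimal sketch, assuming the same local and HDFS paths as above; the class name HdfsCopyApp is hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsCopyApp {
	public static void main(String[] args) throws Exception {
		// configuration is read from core-site.xml on the classpath
		FileSystem fileSystem = FileSystem.get(new Configuration());
		// copy the local file to HDFS in a single call
		fileSystem.copyFromLocalFile(
				new Path("/opt/modules/hadoop-2.5.0/wc.input"),
				new Path("/user/wangw/put-wc.input"));
		fileSystem.close();
	}
}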