Environment: Hadoop 1.2.
Example 1: Read a local file about 200 KB in size and write its 101st–120th bytes to HDFS as a new file.
Import Org.apache.hadoop.conf.configuration;import Org.apache.hadoop.fs.filesystem;import Org.apache.hadoop.fs.path;import Org.apache.hadoop.io.ioutils;import Org.apache.hadoop.util.progressable;import Java.io.bufferedinputstream;import Java.io.fileinputstream;import Java.io.inputstream;import Java.io.OutputStream Import Java.net.uri;public class Inputtest {public static void main (string[] args) throws Exception {String L OCALSRC = Args[0]; String DST = args[1]; InputStream in = new Bufferedinputstream (new FileInputStream (LOCALSRC)); Configuration conf = new configuration (); FileSystem fs = Filesystem.get (Uri.create (DST), conf); OutputStream out = fs.create (new Path (DST), new progressable () {public void progress () {System . Out.print ("."); } }); In.read (new byte[100], 0, 100); Ioutils.copybytes (in, out, +, +, true); }}
Example 2: Read a file of approximately 200 KB from HDFS and write its 101st–120th bytes to the local file system.
Import Org.apache.hadoop.conf.configuration;import Org.apache.hadoop.fs.filesystem;import Org.apache.hadoop.fs.path;import Org.apache.hadoop.io.ioutils;import Java.io.bufferedoutputstream;import Java.io.file;import Java.io.fileoutputstream;import Java.io.inputstream;import Java.io.OutputStream;import Java.net.uri;public class Outputtest {public static void main (string[] args) {try {String dst = ar Gs[0]; String localsrc = args[1]; File LocalFile = new file (LOCALSRC); if (!localfile.exists ()) {localfile.mkdirs (); } Configuration conf = new configuration (); FileSystem fs = Filesystem.get (Uri.create (DST), conf); InputStream in = Fs.open (new Path (DST)); OutputStream out = new Bufferedoutputstream (new FileOutputStream (LocalFile)); In.read (new byte[100], 0, 100); Ioutils.copybytes (in, out, +, +, true); } catch (Exception e) { E.printstacktrace (); } }}
This concludes the second part of the series on calling the Hadoop Java API.