Hadoop HDFS Java API

Source: Internet
Author: User
Tags: create directory, DateFormat, hdfs dfs

[TOC]

Hadoop HDFS Java API

This article collects some common Java code for operating on HDFS. The code follows directly:

package com.uplooking.bigdata.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Common HDFS Java API operations:
 * <ul>
 *   <li>List directory contents: {@code listStatus}</li>
 *   <li>Read a file: {@code open}</li>
 *   <li>Create a directory: {@code mkdirs}</li>
 *   <li>Create a file: {@code create}</li>
 *   <li>Delete a file or directory: {@code delete}</li>
 *   <li>Show file block locations: {@code getFileBlockLocations}</li>
 * </ul>
 */
public class HdfsTest {

    private FileSystem fs;

    // Pattern fixed from the garbled "Yyyy-mm-dd hh:mm": MM = month, HH = 24-hour
    // clock. NOTE(review): SimpleDateFormat is not thread-safe; acceptable here
    // because each JUnit test instance runs single-threaded.
    private DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm");

    /**
     * Acquire the FileSystem handle for the cluster before each test.
     *
     * @throws Exception if the URI is malformed or the filesystem cannot be reached
     */
    @Before
    public void setUp() throws Exception {
        URI uri = new URI("hdfs://uplooking01:9000");
        Configuration configuration = new Configuration();
        fs = FileSystem.get(uri, configuration);
    }

    /**
     * List the contents of a directory ({@code listStatus}), imitating:
     * <pre>
     * $ hdfs dfs -ls /
     * -rw-r--r--   1 uplooking supergroup  ... 2018-02-28 12:29 /hello
     * drwxr-xr-x   - uplooking supergroup    0 2018-02-28 12:31 /output
     * </pre>
     *
     * @throws IOException if the listing fails
     */
    @Test
    public void testList() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            // Defaults describe a directory; overridden below for regular files.
            String prefix = "d";
            String replication = "-"; // directories show "-" for replication, like hdfs dfs -ls

            if (fileStatus.isFile()) {
                prefix = "-";
            }

            // Permission triplet: user / group / other, each as its rwx symbol.
            FsPermission permission = fileStatus.getPermission();
            String uAcl = permission.getUserAction().SYMBOL;
            String gAcl = permission.getGroupAction().SYMBOL;
            String oAcl = permission.getOtherAction().SYMBOL;
            String acl = uAcl + gAcl + oAcl;

            // Replication factor only applies to files.
            if (fileStatus.isFile()) {
                replication = fileStatus.getReplication() + "";
            }

            String owner = fileStatus.getOwner();
            String group = fileStatus.getGroup();
            long len = fileStatus.getLen();
            String mTime = df.format(new Date(fileStatus.getModificationTime()));
            Path path = fileStatus.getPath();

            // Fix: the original computed `len` but never printed it; include the
            // size column so the output actually matches `hdfs dfs -ls`.
            System.out.println(prefix + acl + "\t" + replication + "\t" + owner + " " + group
                    + "\t" + len + "\t" + mTime + "\t" + path);
        }
    }

    /**
     * Read a file ({@code open}); three alternative read strategies are shown,
     * with the IOUtils variant left active.
     *
     * @throws IOException if the file cannot be opened or read
     */
    @Test
    public void testOpen() throws IOException {
        FSDataInputStream fis = fs.open(new Path("hdfs://uplooking01:9000/hello"));

        // Way 1: manual byte buffer.
        /*
        byte[] bytes = new byte[1024];
        int len = 0;
        while ((len = fis.read(bytes)) != -1) {
            System.out.println(new String(bytes, 0, len));
        }
        fis.close();
        */

        // Way 2: line-oriented reading through a BufferedReader.
        /*
        BufferedReader br = new BufferedReader(new InputStreamReader(fis));
        String line = null;
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        fis.close();
        */

        // Way 3: Hadoop helper. Buffer size 1024 (was garbled as "1024x768");
        // the final `false` means copyBytes closes the streams itself is NOT
        // requested — we must not close System.out, so close only the input.
        try {
            IOUtils.copyBytes(fis, System.out, 1024, false);
        } finally {
            fis.close();
        }
    }

    /**
     * Create a directory ({@code mkdirs}).
     *
     * @throws IOException if the RPC fails
     */
    @Test
    public void testMkdir() throws IOException {
        boolean ret = fs.mkdirs(new Path("/input/hdfs"));
        System.out.println(ret ? "Create directory Success" : "Failed to create directory");
    }

    /**
     * Create a file ({@code create}) and write two lines into it.
     *
     * @throws IOException if the file cannot be created or written
     */
    @Test
    public void testCreate() throws IOException {
        // Second parameter is `overwrite`; existing files are overwritten by
        // default, so pass false to fail instead of clobbering.
        // try-with-resources fixes the original leak when write() threw.
        try (FSDataOutputStream fos = fs.create(new Path("/input/hdfs/word.txt"), false)) {
            fos.write("hello\n".getBytes());
            fos.write("xpleaf\n".getBytes());
        }
    }

    /**
     * Delete a file or directory ({@code delete}).
     *
     * @throws IOException if the RPC fails
     */
    @Test
    public void testDelete() throws IOException {
        // Second parameter is `recursive`; required to be true when deleting a
        // non-empty directory, false is safe for a single file.
        boolean ret = fs.delete(new Path("/input/hdfs/word.txt"), false);
        System.out.println(ret ? "Delete succeeded" : "Delete failed");
    }

    /**
     * Show where a file's blocks are stored ({@code getFileBlockLocations}).
     *
     * @throws IOException if the status or locations cannot be fetched
     */
    @Test
    public void testLocations() throws IOException {
        Path path = new Path("/hadoop-2.6.4.tar.gz");
        FileStatus fileStatus = fs.getFileStatus(path);
        // Arguments: file path, starting offset, length to cover.
        BlockLocation[] locations = fs.getFileBlockLocations(path, 0, fileStatus.getLen());
        // Fix: dropped the original `System.out.println(locations)` — printing
        // an array reference only shows its identity hash, not its contents.
        for (BlockLocation location : locations) {
            System.out.println(location);
        }
        /*
         * Example output (pseudo-distributed cluster, so every block lives on
         * the single node uplooking01):
         *   0,134217728,uplooking01          (128 MB block starting at offset 0)
         *   134217728,61798247,uplooking01   (remaining ~59 MB starting at 128 MB)
         */
    }

    /**
     * Release the FileSystem handle after each test.
     *
     * @throws IOException if closing fails
     */
    @After
    public void cleanUp() throws IOException {
        fs.close();
    }
}

Hadoop HDFS Java API

Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.