A Hadoop HDFS operation class

Source: Internet
Author: User
Tags: filetime, readfile

A utility class for common Hadoop HDFS operations: creating and deleting directories and files, appending content, reading files, listing a directory, and querying block locations.


package com.viburnum.util;

import java.io.*;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsUtil {

    // HDFS URL, read from the properties file
    public static String hdfsUrl = "";

    // Car information save directory
    private static String carInfoDir = "/car/lbs/";

    // LBS file name (built in the constructor)
    private static String carInfoFile = "";

    private static String carInfoFilePrefix = "lbs_";

    // Properties file
    private static String properitesFile = "/viburnum.properties";

    private static Configuration conf = new Configuration();

    private static FileSystem hdfs;

    public HdfsUtil() {
        // PropUtil is a project-specific helper for reading the properties file (not shown here)
        hdfsUrl = PropUtil.getResourceValue(properitesFile, "hdfs.url");
        carInfoDir = PropUtil.getResourceValue(properitesFile, "car.lbs.dir");
        try {
            conf.setBoolean("dfs.support.append", true);
            hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyyMMdd");
        SimpleDateFormat sdfTime = new SimpleDateFormat("HHmmssSSS");
        String fileDate = sdfDate.format(new Date());
        String fileTime = sdfTime.format(new Date()) + System.nanoTime();
        carInfoFile = carInfoDir + "/" + fileDate + "/" + carInfoFilePrefix + fileTime + ".txt";
    }

    public static String getProperitesFile() { return properitesFile; }

    public static void setProperitesFile(String properitesFile) { HdfsUtil.properitesFile = properitesFile; }

    public static String getHdfsUrl() { return hdfsUrl; }

    public static void setHdfsUrl(String hdfsUrl) { HdfsUtil.hdfsUrl = hdfsUrl; }

    public static String getCarInfoDir() { return carInfoDir; }

    public static void setCarInfoDir(String carInfoDir) { HdfsUtil.carInfoDir = carInfoDir; }

    public static String getCarInfoFile() { return carInfoFile; }

    public static void setCarInfoFile(String carInfoFile) { HdfsUtil.carInfoFile = carInfoFile; }

    public static Configuration getConf() { return conf; }

    public static void setConf(Configuration conf) { HdfsUtil.conf = conf; }

    // Create an HDFS folder
    public static void createDir(String dirPath) throws Exception {
        hdfs.mkdirs(new Path(dirPath));
    }

    // Delete an HDFS folder
    @SuppressWarnings("deprecation")
    public static void deleteDir(String dirPath) throws Exception {
        hdfs.delete(new Path(dirPath));
    }

    // Create a file with the default name and write the content to it
    public static void createFile(String content) throws Exception {
        FSDataOutputStream out = hdfs.create(new Path(getCarInfoFile()));
        out.write((content + "\n").getBytes());
        out.close();
    }

    // Create a file with the given name and write the content to it
    public static void createFile(String fileName, String content) throws Exception {
        setCarInfoFile(fileName);
        FSDataOutputStream out = hdfs.create(new Path(fileName));
        out.write((content + "\n").getBytes());
        out.close();
    }

    // Append the content to the default file, creating it first if it does not exist
    public static void createFile(String content, boolean append) throws Exception {
        if (append) {
            Path fileName = new Path(getCarInfoFile());
            if (!hdfs.exists(fileName)) {
                hdfs.create(fileName).close();
            }
            InputStream in = new ByteArrayInputStream(content.getBytes());
            FSDataOutputStream out = hdfs.append(new Path(getCarInfoFile()));
            IOUtils.copyBytes(in, out, conf);
            out.close();
        } else {
            createFile(content);
        }
    }

    // Append content to a file identified by its path
    public static void writeFile(String fileName, String content) throws Exception {
        InputStream in = new BufferedInputStream(new ByteArrayInputStream(content.getBytes()));
        OutputStream out = hdfs.append(new Path(fileName));
        IOUtils.copyBytes(in, out, 4096, true);
    }

    // Rewrite the default file with the new content followed by its previous content
    public static void appendAll(String content) throws Exception {
        // read the old content before create() truncates the file
        byte[] oldContentBytes = readFile(getCarInfoFile());
        byte[] contentBytes = (content + "\n").getBytes();
        FSDataOutputStream out = hdfs.create(new Path(getCarInfoFile()));
        out.write(byteMerge(contentBytes, oldContentBytes));
        out.close();
    }

    // Rename a file
    public static void renameFile(String oldPath, String newPath) throws Exception {
        hdfs.rename(new Path(oldPath), new Path(newPath));
    }

    // Delete a file
    @SuppressWarnings("deprecation")
    public static boolean deleteFile(String hadoopFile) throws Exception {
        boolean isDeleted = hdfs.delete(new Path(hadoopFile));
        return isDeleted;
    }

    // Upload a local file
    public static void uploadLocalFile(String localPath, String hadoopPath) throws Exception {
        hdfs.copyFromLocalFile(new Path(localPath), new Path(hadoopPath));
    }

    // Read the file bytes
    public static byte[] readFile(String hadoopFile) throws Exception {
        Path path = new Path(hadoopFile);
        if (hdfs.exists(path)) {
            FSDataInputStream in = hdfs.open(path);
            FileStatus stat = hdfs.getFileStatus(path);
            byte[] buffer = new byte[Integer.parseInt(String.valueOf(stat.getLen()))];
            in.readFully(0, buffer);
            in.close();
            return buffer;
        } else {
            throw new Exception("The file is not found.");
        }
    }

    // List files under the folder
    public static String listFiles(String hadoopPath) throws Exception {
        Path dst = new Path(hadoopPath);
        FileStatus[] files = hdfs.listStatus(dst);
        String fileString = "";
        for (FileStatus file : files) {
            System.out.println(file.getPath().toString());
            fileString += file.getPath().toString() + " ";
        }
        return fileString;
    }

    // List block info of the file
    public static String getBlockInfo(String hadoopPath) throws Exception {
        FileStatus fileStatus = hdfs.getFileStatus(new Path(hadoopPath));
        BlockLocation[] blkLoc = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        String blockString = "";
        for (BlockLocation loc : blkLoc) {
            for (int i = 0; i < loc.getHosts().length; i++) {
                System.out.println(loc.getHosts()[i]);
            }
        }
        return blockString;
    }

    // Merge two byte arrays
    public static byte[] byteMerge(byte[] byte_1, byte[] byte_2) {
        byte[] byte_3 = new byte[byte_1.length + byte_2.length];
        System.arraycopy(byte_1, 0, byte_3, 0, byte_1.length);
        System.arraycopy(byte_2, 0, byte_3, byte_1.length, byte_2.length);
        return byte_3;
    }

    public void finalize() throws Throwable {
        hdfs.close();
        super.finalize();
    }
}
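A minimal usage sketch of the class above. It assumes viburnum.properties is on the classpath with hdfs.url and car.lbs.dir entries (for example hdfs.url=hdfs://namenode:9000 and car.lbs.dir=/car/lbs), that the PropUtil helper referenced in the constructor is available, and that the cluster allows append; all paths and file names below are hypothetical. Note that the static FileSystem handle is initialized in the constructor, so the class must be instantiated once before the static methods are used.

public class HdfsUtilDemo {
    public static void main(String[] args) throws Exception {
        // the constructor reads the properties file, opens the FileSystem,
        // and builds the default dated LBS file name
        new HdfsUtil();

        // hypothetical directory and file names for illustration
        HdfsUtil.createDir("/car/lbs/20240101");
        HdfsUtil.createFile("/car/lbs/20240101/lbs_demo.txt", "id,lng,lat");
        HdfsUtil.writeFile("/car/lbs/20240101/lbs_demo.txt", "1001,116.40,39.90");

        // list the directory and print the file contents back
        System.out.println(HdfsUtil.listFiles("/car/lbs/20240101"));
        System.out.println(new String(HdfsUtil.readFile("/car/lbs/20240101/lbs_demo.txt")));
    }
}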

