標籤:
1.讀出hadoop目錄下的hello檔案:
方案一(只能讀不能寫):
import java.net.MalformedURLException;
import java.net.URL;
import java.io.*;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Demo: read a file from HDFS through the plain java.net.URL API.
 * This approach is read-only — it cannot write to HDFS.
 *
 * Registering FsUrlStreamHandlerFactory teaches URL to understand the
 * "hdfs://" scheme; out of the box URL only recognizes protocols such as
 * http. Note: URL.setURLStreamHandlerFactory may be called at most once
 * per JVM.
 */
public class App1 {

    /** Location of the file to read on the HDFS cluster. */
    public static final String HDFS_PATH = "hdfs://mlj:9000/hello";

    public static void main(String[] args) throws Exception {
        // One-time JVM-wide setup so URL can resolve the hdfs:// protocol.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        final InputStream in = new URL(HDFS_PATH).openStream();
        // Stream the file to stdout with a 1024-byte buffer;
        // the final 'true' tells IOUtils to close the stream when done.
        IOUtils.copyBytes(in, System.out, 1024, true);
    }
}
----------------------------------
方案二(使用hdfs提供的FileSystem類可進行所有操作)(Eclipse快捷鍵:alt+shift+m 抽取方法、alt+shift+l 抽取變數):
package hdfs;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Demo: HDFS operations (mkdir, upload, download, delete, list) through the
 * Hadoop FileSystem API — unlike the URL approach this supports writes too.
 *
 * Uncomment the call you want to exercise in main(); by default only the
 * root-directory listing runs.
 */
public class App2 {

    /** URI of the HDFS cluster (used only to obtain the FileSystem handle). */
    public static final String HDFS_PATH = "hdfs://mlj:9000/hello";
    /** Directory created by mkdir(). */
    public static final String DIR_PATH = "/d";
    /** File written/read/removed by uploadFile()/downloadData()/deleteFile(). */
    public static final String FILE_PATH = "/d1";

    public static void main(String[] args) throws Exception {
        // Obtain the FileSystem handle for the cluster.
        final FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
        try {
            // Create a directory
            //mkdir(fileSystem);
            // Upload a file (the target must be created, hence fileSystem.create)
            //uploadFile(fileSystem);
            // Download a file (the source must be opened, hence fileSystem.open)
            //downloadData(fileSystem);
            // Delete a file
            //deleteFile(fileSystem);
            // List the contents of the root directory
            list(fileSystem);
        } finally {
            // FIX: the handle was never released before; close it when done.
            fileSystem.close();
        }
    }

    /**
     * Lists every entry directly under "/" and prints, tab-separated:
     * kind (directory/file), permission, replication factor, length, path.
     */
    private static void list(FileSystem fileSystem) throws IOException {
        final FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            String isDir = fileStatus.isDir() ? "檔案夾" : "檔案";
            final String permission = fileStatus.getPermission().toString();
            final short replication = fileStatus.getReplication();
            final long len = fileStatus.getLen();
            final String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + permission + "\t" + replication + "\t" + len + "\t" + path);
        }
    }

    /** Recursively deletes FILE_PATH (the 'true' flag enables recursion). */
    private static void deleteFile(final FileSystem fileSystem)
            throws IOException {
        fileSystem.delete(new Path(FILE_PATH), true);
    }

    /** Streams FILE_PATH from HDFS to stdout; IOUtils closes the stream. */
    private static void downloadData(final FileSystem fileSystem)
            throws IOException {
        final FSDataInputStream in = fileSystem.open(new Path(FILE_PATH));
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    /** Creates the directory DIR_PATH (including missing parents). */
    private static void mkdir(final FileSystem fileSystem) throws IOException {
        fileSystem.mkdirs(new Path(DIR_PATH));
    }

    /**
     * Copies the local file D:/b.txt into HDFS at FILE_PATH.
     * create() opens an output stream to the HDFS destination;
     * IOUtils closes both streams when the copy finishes.
     */
    private static void uploadFile(final FileSystem fileSystem)
            throws IOException, FileNotFoundException {
        final FSDataOutputStream out = fileSystem.create(new Path(FILE_PATH));
        final FileInputStream in = new FileInputStream("D:/b.txt");
        IOUtils.copyBytes(in, out, 1024, true);
    }
}
hdfs java操作