package com.ibeifeng.hadoop.senior.hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
*
* @author wangw
*
*/
public class HdfsApp {

    /**
     * Builds a {@link FileSystem} handle from the Hadoop configuration files
     * (core-site.xml etc.) found on the classpath.
     *
     * @return the default configured Hadoop file system
     * @throws Exception if the file system cannot be created
     */
    public static FileSystem getFileSystem() throws Exception {
        // Picks up core-site.xml / hdfs-site.xml from the classpath.
        Configuration conf = new Configuration();
        return FileSystem.get(conf);
    }

    /**
     * Streams the contents of an HDFS file to {@code System.out}.
     *
     * @param fileName absolute HDFS path of the file to read,
     *                 e.g. {@code /user/wangw/mapreduce/wordcount/wc.input}
     * @throws Exception if the file system or the file cannot be opened,
     *                   or the copy fails
     */
    public static void read(String fileName) throws Exception {
        FileSystem fileSystem = getFileSystem();
        Path readPath = new Path(fileName);
        // try-with-resources closes the stream even when the copy fails;
        // the original swallowed errors with printStackTrace() despite
        // already declaring "throws Exception".
        try (FSDataInputStream inStream = fileSystem.open(readPath)) {
            // false: copyBytes must not close the streams itself —
            // System.out has to stay open and the try block owns inStream.
            IOUtils.copyBytes(inStream, System.out, 4096, false);
        }
    }

    /**
     * Uploads a fixed local file to HDFS by copying its bytes into a newly
     * created HDFS file {@code /user/wangw/put-wc.input}.
     *
     * @param args unused
     * @throws Exception if either stream cannot be opened or the copy fails
     */
    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = getFileSystem();
        String putFileName = "/user/wangw/put-wc.input";
        Path writePath = new Path(putFileName);
        // Open both streams in ONE try-with-resources: the original opened
        // the local FileInputStream outside the try block, leaking outStream
        // if the local file was missing. Resources are closed in reverse
        // declaration order (input first, then output).
        try (FSDataOutputStream outStream = fileSystem.create(writePath);
             FileInputStream inStream = new FileInputStream(
                     new File("/opt/modules/hadoop-2.5.0/wc.input"))) {
            // false: closing is handled by try-with-resources.
            IOUtils.copyBytes(inStream, outStream, 4096, false);
        }
    }
}
// Original article: http://19921009.blog.51cto.com/8714442/1852026