Environment: Hadoop 1.2.

Example 1: read a local file of roughly 200 KB and write bytes 101-120 of its content to HDFS as a new file.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Progressable;

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.OutputStream;
import java.net.URI;

public class InputTest {
    public static void main(String[] args) throws Exception {
        String localSrc = args[0];   // path of the local source file
        String dst = args[1];        // destination HDFS path

        // Wrap the local file in a DataInputStream so we can use readFully(),
        // which, unlike a bare read(), is guaranteed to fill the buffer.
        DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream(localSrc)));

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        OutputStream out = fs.create(new Path(dst), new Progressable() {
            public void progress() {
                System.out.print(".");   // called as data is written to HDFS
            }
        });

        in.readFully(new byte[100]);     // discard bytes 1-100
        byte[] buf = new byte[20];
        in.readFully(buf);               // bytes 101-120
        out.write(buf);

        out.close();
        in.close();
    }
}
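To compile and run Example 1 against a Hadoop 1.2 cluster, something like the following should work (the jar name, file paths, and NameNode address here are illustrative, not taken from the original post):

javac -classpath $HADOOP_HOME/hadoop-core-1.2.1.jar InputTest.java
export HADOOP_CLASSPATH=.
hadoop InputTest /home/user/source.txt hdfs://localhost:9000/user/test/part.txt

The first argument is the local source file, the second the HDFS destination; a dot is printed for each progress callback while the 20 bytes are written.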
Example 2: read bytes 101-120 of an HDFS file and write them to a new local file.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.URI;

public class OutputTest {
    public static void main(String[] args) {
        try {
            String dst = args[0];        // source HDFS path
            String localSrc = args[1];   // path of the local target file

            // Ensure the parent directory of the target file exists;
            // FileOutputStream creates the file itself but not its directories.
            File localFile = new File(localSrc);
            File parent = localFile.getParentFile();
            if (parent != null && !parent.exists()) {
                parent.mkdirs();
            }

            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(URI.create(dst), conf);
            FSDataInputStream in = fs.open(new Path(dst));
            OutputStream out = new BufferedOutputStream(new FileOutputStream(localFile));

            in.readFully(new byte[100]);   // discard bytes 1-100
            byte[] buf = new byte[20];
            in.readFully(buf);             // bytes 101-120
            out.write(buf);

            out.close();
            in.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
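Since fs.open() returns an FSDataInputStream, which implements Seekable, the read-and-discard of the first 100 bytes can also be replaced by an absolute seek; a plain java.io.InputStream has no such positioning call. A minimal sketch of that variant (the class name SeekTest is made up for illustration; it prints bytes 101-120 of an HDFS file to stdout):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class SeekTest {
    public static void main(String[] args) throws Exception {
        String dst = args[0];   // HDFS path to read from
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FSDataInputStream in = fs.open(new Path(dst));

        in.seek(100);                 // position so the next read returns byte 101
        byte[] buf = new byte[20];
        in.readFully(buf);            // bytes 101-120
        System.out.write(buf);
        System.out.flush();

        in.close();
    }
}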
Original post: http://my.oschina.net/zc741520/blog/365336