[Big Data Series] Setting Up a Hadoop Development Environment on Windows: Reading Data from a Hadoop URL
After setting up the Hadoop cluster, the next step is to create a Java project in a Windows environment and test it by operating on files in HDFS.
package com.slp.hadoop274.hdfs;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

import org.junit.Test;

/**
 * HDFS operations test.
 * @author sangliping
 */
public class TestHDFS {

    /**
     * Read a file from HDFS.
     * @throws IOException
     */
    @Test
    public void readFile() throws IOException {
        URL url = new URL("hdfs://192.168.181.201:8020/user/sanglp/hadoop/copyFromLocal");
        URLConnection con = url.openConnection();
        InputStream is = con.getInputStream();
        // available() only estimates the readable bytes; acceptable for a small test file
        byte[] buf = new byte[is.available()];
        is.read(buf);
        is.close();
        String str = new String(buf, "UTF-8");
        System.out.println(str);
    }
}
Running this test fails: java.net.URL does not recognize the hdfs protocol, so constructing the URL throws java.net.MalformedURLException: unknown protocol: hdfs. The fix is to register Hadoop's URL stream handler factory:
package com.slp.hadoop274.hdfs;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.junit.Test;

/**
 * HDFS operations test.
 * @author sangliping
 */
public class TestHDFS {

    static {
        // Register Hadoop's stream handler factory so java.net.URL
        // understands the hdfs:// scheme.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Read a file from HDFS.
     * @throws IOException
     */
    @Test
    public void readFile() throws IOException {
        URL url = new URL("hdfs://192.168.181.201:8020/user/sanglp/hadoop/copyFromLocal");
        URLConnection con = url.openConnection();
        InputStream is = con.getInputStream();
        // available() only estimates the readable bytes; acceptable for a small test file
        byte[] buf = new byte[is.available()];
        is.read(buf);
        is.close();
        String str = new String(buf, "UTF-8");
        System.out.println(str);
    }
}
The test now correctly prints the contents of the HDFS file copyFromLocal. Note that URL.setURLStreamHandlerFactory may be called at most once per JVM, which is why the registration is done in a static initializer.
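One caveat with the listing above: InputStream.available() only returns an estimate of the bytes readable without blocking, and a single read() call is not guaranteed to fill the buffer, so this pattern works for a small test file but not in general. A more robust sketch of the same read uses Hadoop's IOUtils (the test name readFileWithIOUtils is mine; the URL is the same one assumed above, and org.apache.hadoop.io.IOUtils must be imported):

@Test
public void readFileWithIOUtils() throws IOException {
    InputStream is = null;
    try {
        is = new URL("hdfs://192.168.181.201:8020/user/sanglp/hadoop/copyFromLocal").openStream();
        // Stream the file to stdout in 4 KB chunks, regardless of file size.
        IOUtils.copyBytes(is, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(is);
    }
}

This still relies on the same static handler registration as the listing above.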
Tip: copy the log4j.properties file from the etc directory of the unpacked Hadoop distribution (etc/hadoop in Hadoop 2.x) into the project's src directory to make the console output more readable.
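If you would rather not copy the file from the distribution, a minimal log4j.properties along the following lines is enough for readable console output (standard log4j 1.x syntax, closely mirroring Hadoop's default; the pattern layout is just one reasonable choice):

log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n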
Original source: http://www.cnblogs.com/dream-to-pku/p/7930241.html