码迷,mamicode.com
首页 > 编程语言 > 详细

HDFS Java Client对hdfs文件增删查改

时间:2017-05-19 09:55:50      阅读:1154      评论:0      收藏:0      [点我收藏+]

标签:artifact   inpu   技术   get   cep   epo   data   static   tor   

 
step1:增加依赖
pom.xml
          ...
     <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
           <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-common</artifactId>
                <version>2.2.0</version>
                <exclusions>
                     <exclusion>
                           <artifactId>jdk.tools</artifactId>
                           <groupId>jdk.tools</groupId>
                     </exclusion>
                </exclusions>
           </dependency>
           <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>2.2.0</version>
           </dependency>
     
     ...
 
 
 
step2: 拷贝配置文件
‘hdfs-site.xml’和‘core-site.xml’
 
step3:测试代码

package cjkjcn.demo.hadoop.hdfs;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 *
 * @author junhong
 *
 * 2017年5月18日
 */
public class HDFSDao {
     private static Configuration conf = new Configuration();
     private FileSystem hdfs;
     final static String ROOT_PATH = "/user";

     /**
      * Builds the DAO: loads the cluster config files copied into the
      * classpath and opens the {@link FileSystem} handle.
      */
     public HDFSDao() {
           conf.addResource("hdfs-site.xml");
           conf.addResource("core-site.xml");
           try {
                hdfs = FileSystem.get(conf); // initialize the HDFS client
           } catch (IOException e) {
                e.printStackTrace();
           }
           System.out.println("param size=" + conf.size());
     }

     /**
      * Checks whether the test root path ({@value #ROOT_PATH}) exists
      * and prints the result to stdout.
      */
     public void scanFiles() {
           try {
                Path path = new Path(ROOT_PATH);
                System.out.println(hdfs.exists(path));
           } catch (IOException e) {
                e.printStackTrace();
           }
     }

     /**
      * Reads a text file from HDFS line by line.
      *
      * @param file HDFS path of the file to read
      * @return the file's lines in order; empty list if the file
      *         cannot be opened or read
      */
     public List<String> lines(String file) {
           List<String> list = new LinkedList<>();
           Path f = new Path(file);
           // try-with-resources closes the stream and reader on every exit
           // path (the original leaked the FSDataInputStream on each call).
           // UTF-8 is pinned explicitly; the old code silently used the
           // platform default charset.
           try (FSDataInputStream input = hdfs.open(f);
                BufferedReader read = new BufferedReader(
                          new InputStreamReader(input, StandardCharsets.UTF_8))) {
                String line;
                while ((line = read.readLine()) != null) {
                     list.add(line);
                }
           } catch (IOException e) {
                e.printStackTrace();
           }
           return list;
     }
}

 


 
注意:
 
1)若缺少依赖 
 
          <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>2.2.0</version>
           </dependency>
 
将导致如下错误!!
 java.io.IOException: No FileSystem for scheme: hdfs
技术分享
 
 
2)测试写文件或者创建目录方法接口时,可能会出现权限问题
Permission denied for test
 

HDFS Java Client对hdfs文件增删查改

标签:artifact   inpu   技术   get   cep   epo   data   static   tor   

原文地址:http://www.cnblogs.com/SeaSky0606/p/6876534.html

(0)
(0)
   
举报
评论 一句话评论(0
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!