码迷,mamicode.com
首页 > 其他好文 > 详细

HDFS基本工具类的实现

时间:2017-03-29 23:08:31      阅读:182      评论:0      收藏:0      [点我收藏+]

标签:tab   org   trace   stack   end   mkdir   rac   util   path   

package com.shikun.HdfsTool;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.util.Progressable;

/*
* hdfs平台接口类,批量上传和下载文件
*/
/*
 * HDFS utility class: batch upload/download plus basic file-system
 * management (create, delete, list, filter) against a Hadoop cluster.
 */
public class HdfsTool {

    private FileSystem f = null;

    /**
     * Initializes the HDFS client from the default Hadoop configuration,
     * connecting as user "root".
     */
    public HdfsTool() {
        // Read the (classpath) Hadoop configuration files.
        Configuration conf = new Configuration();
        try {
            f = FileSystem.get(URI.create("/"), conf, "root");
        } catch (IOException | InterruptedException e) {
            // Preserve the interrupt status so callers can still observe it.
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            e.printStackTrace();
        }
    }

    /*--------------------Local file-system helpers------------------------------------*/

    /**
     * Recursively deletes all {@code .crc} checksum files under the given
     * local directory (Hadoop writes these next to downloaded files).
     *
     * @param path local directory to clean
     */
    public void deletersomefile(String path) {
        File dir = new File(path);
        File[] entries = dir.listFiles();
        if (entries == null) {
            // path does not exist or is not a directory — nothing to clean
            return;
        }
        for (File entry : entries) {
            if (entry.isFile() && entry.getName().endsWith(".crc")) {
                entry.delete();
            } else if (entry.isDirectory()) {
                deletersomefile(entry.getAbsolutePath());
            }
        }
    }

    /**
     * Uploads a local file/directory to HDFS (batch upload supported by
     * pointing {@code src1} at a directory).
     *
     * @param src1 local source path
     * @param src2 HDFS destination path
     */
    public void upload(String src1, String src2) {
        try {
            f.copyFromLocalFile(new Path(src1), new Path(src2));
            System.out.println("上传成功");
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("路径不正确");
        }
    }

    /**
     * Downloads a file/directory from HDFS to a local path, then removes
     * the {@code .crc} checksum files Hadoop creates alongside it.
     *
     * @param src2 HDFS source path
     * @param src1 local destination path
     */
    public void download(String src2, String src1) {
        try {
            f.copyToLocalFile(new Path(src2), new Path(src1));
            deletersomefile(src1);
            System.out.println("下载成功");
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
            System.out.println("路径不正确");
        }
    }

    /**
     * Recursively deletes an HDFS file or directory.
     *
     * @param src HDFS path to delete
     */
    public void deletefiles(String src) {
        try {
            f.delete(new Path(src), true);
            System.out.println("删除完成");
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
            System.out.println("路径不存在");
        }
    }

    /**
     * Creates a directory (including missing parents) on HDFS.
     *
     * @param src HDFS directory path to create
     */
    public void createmkdir(String src) {
        try {
            f.mkdirs(new Path(src));
            System.out.println("创建完成");
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
            System.out.println("输入不合法");
        }
    }

    /**
     * Creates an empty file on HDFS, printing a dot as write progress.
     * The output stream is closed via try-with-resources (the original
     * leaked it, which could leave the file unflushed).
     *
     * @param src HDFS file path to create
     */
    public void creatfile(String src) {
        try (FSDataOutputStream out = f.create(new Path(src), new Progressable() {
            @Override
            public void progress() {
                System.out.println(".");
            }
        })) {
            System.out.println("文件创建完成");
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Lists the files under the default {@code /bigfile} directory and
     * prints their details (kept for backward compatibility; delegates to
     * {@link #listview(String)}).
     */
    public void listview() {
        listview("/bigfile");
    }

    /**
     * Lists the files under the given HDFS directory and prints access
     * time, group, block size and owner for each entry.
     *
     * @param src HDFS directory to list
     */
    public void listview(String src) {
        try {
            FileStatus[] statuses = f.listStatus(new Path(src));
            for (FileStatus s : statuses) {
                System.out.println("time:" + s.getAccessTime());
                System.out.println("group:" + s.getGroup());
                System.out.println("blocksize:" + s.getBlockSize());
                System.out.println("owner:" + s.getOwner());
            }
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
    }

    /*--------------------Block-level operations------------------------------------*/

    /**
     * Prints block-level details (length, block names, offset, host names)
     * for every file directly under the given HDFS path.
     *
     * @param src HDFS directory to inspect (non-recursive)
     */
    public void listviewnew(String src) {
        try {
            RemoteIterator<LocatedFileStatus> ri = f.listFiles(new Path(src), false);
            while (ri.hasNext()) {
                LocatedFileStatus file = ri.next();
                for (BlockLocation b : file.getBlockLocations()) {
                    System.out.println("length--" + b.getLength());
                    System.out.println("name--");
                    for (String name : b.getNames()) {
                        // fixed: original printed the array reference, not the element
                        System.out.println(name);
                    }
                    System.out.println("offset--" + b.getOffset());
                    System.out.println("hosname:");
                    for (String host : b.getHosts()) {
                        System.out.println(host);
                    }
                }
            }
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Globs HDFS paths matching {@code regex} while excluding those
     * matching {@code regex1}, and prints the surviving paths (the
     * original discarded the result, so nothing followed the banner).
     *
     * @param regex  glob pattern of paths to match
     * @param regex1 pattern of paths to exclude
     */
    public void filterfiles(String regex, String regex1) {
        try {
            FileStatus[] matched = f.globStatus(new Path(regex), new RegexExcludePathFileter(regex1));
            System.out.println("过滤完毕,结果如下");
            if (matched != null) {
                for (FileStatus s : matched) {
                    System.out.println(s.getPath());
                }
            }
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
    }

}

HDFS基本工具类的实现

标签:tab   org   trace   stack   end   mkdir   rac   util   path   

原文地址:http://www.cnblogs.com/shellshell/p/6641775.html

(0)
(0)
   
举报
评论 一句话评论(0)
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!