appendToFile
cat
checksum
chgrp
chmod
chown
copyFromLocal
copyToLocal
count
cp
createSnapshot
deleteSnapshot
df
du
dus
expunge
find
get
getfacl
getfattr
getmerge
help
ls
lsr
mkdir
moveFromLocal
moveToLocal
mv
put
renameSnapshot
rm
rmdir
rmr
setfacl
setfattr
setrep
stat
tail
test
text
touchz
truncate
usage
测试代码
package hdfs;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Demonstrates basic HDFS Java API operations against a NameNode:
 * creating/removing directories, uploading and downloading files,
 * and listing directory contents.
 *
 * <p>Note: {@link #create} recursively deletes the target directory if it
 * already exists before recreating it — it is destructive by design here.
 */
public class HdfsOperator {
    /** URI of the HDFS NameNode. */
    static final String PATH = "hdfs://Master:9000/";
    /** HDFS directory used by the demo. */
    static final String DIR = "/d1";
    /** HDFS file path used by the demo. */
    static final String FILE = "/d1/hello";
    /** Local file to upload to HDFS. */
    static final String loadFile = "F:/readme.txt";
    /** Local path where the downloaded HDFS file is saved. */
    static final String downFile = "F:/readme_tmp.txt";

    /**
     * Lists the entries directly under {@code dir}, printing type
     * (dir/file), permission, replication factor, length and full path
     * for each, tab-separated.
     *
     * @param fileSystem open HDFS handle
     * @param dir        HDFS directory to list
     * @throws Exception if the listing fails
     */
    private static void getList(FileSystem fileSystem, String dir) throws Exception {
        for (FileStatus status : fileSystem.listStatus(new Path(dir))) {
            String type = status.isDirectory() ? "dir" : "file";
            System.out.println(type + "\t" + status.getPermission()
                    + "\t" + status.getReplication()
                    + "\t" + status.getLen()
                    + "\t" + status.getPath());
        }
    }

    /**
     * Downloads the HDFS file {@code file} to the local path {@link #downFile}.
     *
     * <p>Uses try-with-resources so that neither stream leaks if the other
     * fails to open (the original closed streams only via
     * {@code IOUtils.copyBytes(..., true)}, leaking the input stream when
     * the local output file could not be created).
     *
     * @param fileSystem open HDFS handle
     * @param file       HDFS path to download
     * @throws Exception if the copy fails
     */
    private static void getData(FileSystem fileSystem, String file) throws Exception {
        try (FSDataInputStream in = fileSystem.open(new Path(file));
             FileOutputStream out = new FileOutputStream(downFile)) {
            // Streams are closed by try-with-resources, so pass close=false.
            IOUtils.copyBytes(in, out, 4096, false);
        }
        // Original message printed "saved as" fused to the path; add the space.
        System.out.println("get hdfs: " + file + " success! saved as " + downFile);
    }

    /**
     * Uploads the local file {@code src} to the HDFS path {@code dest},
     * overwriting any existing file at that path.
     *
     * @param fileSystem open HDFS handle
     * @param src        local source path
     * @param dest       HDFS destination path
     * @throws Exception if the copy fails
     */
    private static void putData(FileSystem fileSystem, String src, String dest) throws Exception {
        try (FSDataOutputStream out = fileSystem.create(new Path(dest));
             FileInputStream in = new FileInputStream(src)) {
            // Streams are closed by try-with-resources, so pass close=false.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    /**
     * Recursively deletes the HDFS path {@code dir} and reports the outcome.
     *
     * @param fileSystem open HDFS handle
     * @param dir        HDFS path (file or directory) to delete
     * @throws Exception if the delete call fails
     */
    private static void remove(FileSystem fileSystem, String dir) throws Exception {
        boolean deleted = fileSystem.delete(new Path(dir), true);
        System.out.println("del " + dir + (deleted ? " success" : " failure"));
    }

    /**
     * Creates the HDFS directory {@code dir}, first recursively deleting it
     * if it already exists. WARNING: destructive — any existing content
     * under {@code dir} is removed.
     *
     * @param fileSystem open HDFS handle
     * @param dir        HDFS directory to (re)create
     * @throws Exception if deletion or creation fails
     */
    private static void create(FileSystem fileSystem, String dir) throws Exception {
        if (fileSystem.exists(new Path(dir))) {
            remove(fileSystem, dir);
        }
        fileSystem.mkdirs(new Path(dir));
    }

    /**
     * Connects to the HDFS cluster identified by {@code uri} using the
     * default configuration.
     *
     * @param uri NameNode URI, e.g. {@code hdfs://Master:9000/}
     * @return an open {@link FileSystem} handle (caller must close it)
     * @throws Exception if the connection cannot be established
     */
    private static FileSystem getFileSystem(String uri) throws Exception {
        return FileSystem.get(new URI(uri), new Configuration());
    }

    /**
     * Demo driver: recreate {@link #DIR}, upload {@link #loadFile} to
     * {@link #FILE}, download it back, list the root, then delete the file.
     * The {@link FileSystem} handle is closed even if a step fails.
     */
    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = getFileSystem(PATH);
        try {
            create(fileSystem, DIR);
            putData(fileSystem, loadFile, FILE);
            getData(fileSystem, FILE);
            getList(fileSystem, PATH);
            remove(fileSystem, FILE);
        } finally {
            // The original never closed the handle; release cluster resources.
            fileSystem.close();
        }
    }
}
版权声明:本文为博主原创文章,未经博主允许不得转载。
HDFS学习笔记(2)hdfs_shell & JavaAPI
原文地址:http://blog.csdn.net/yew1eb/article/details/46662159