码迷,mamicode.com
首页 > Windows程序 > 详细

一步一步跟我学习hadoop(6)----hadoop利用FileSystem API 执行hadoop文件读写操作

时间:2015-08-03 01:18:23      阅读:254      评论:0      收藏:0      [点我收藏+]

标签:hadoop   filesystem   filesystem api   

    hadoop文件系统较普通的文件系统差异性主要在于其容错性,普通文件系统不能直接查看hadoop的hdfs对应的文件信息。文件存储起来之后,需要能够访问才能体现它的价值,hadoop提供了FileSystem API来进行hadoop的文件读写。

    本节我是对照hadoop的API中的FileSystem类写的demo,包含了一些主要的方法,更多的需要从相关api中进行查询编写。

package org.apache.hadoop.wyg;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * Demonstrates common HDFS operations through the Hadoop {@link FileSystem} API.
 * Each {@code @Test} method exercises exactly one FileSystem call against the
 * cluster addressed by {@code BASE_URL}.
 *
 * NOTE(review): these are integration demos, not unit tests — they require a
 * reachable HDFS namenode at 192.168.88.128:9000 to run.
 */
public class FileSystemAPI {
	private static final String BASE_URL = "hdfs://192.168.88.128:9000";

	/**
	 * Obtains a FileSystem handle for the cluster at {@code BASE_URL}.
	 * Extracted so each test no longer repeats the Configuration boilerplate.
	 * @throws IOException if the file system cannot be reached
	 */
	private static FileSystem fileSystem() throws IOException {
		return FileSystem.get(URI.create(BASE_URL), new Configuration());
	}

	/**
	 * Opens an existing HDFS file for append.
	 * Fix: the original leaked the FSDataOutputStream returned by append(),
	 * which leaves the file lease open on the namenode.
	 * NOTE(review): append requires dfs.support.append enabled on the cluster.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testAppend() throws IOException {
		FileSystem fs = fileSystem();
		// Close the stream so the lease is released and buffers are flushed.
		fs.append(new Path(BASE_URL + "/user/root/input/2.txt")).close();
	}

	/**
	 * Uploads a local file into the HDFS input directory.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testCopyFromLocalFile() throws IOException {
		FileSystem fs = fileSystem();
		fs.copyFromLocalFile(new Path("C:\\Users\\lenovo\\Desktop\\3.txt"),
				new Path(BASE_URL + "/user/root/input"));
	}

	/**
	 * Downloads an HDFS file to the local file system.
	 * NOTE: copyToLocalFile also writes a ".crc" checksum sidecar locally.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testCopyToLocalFile() throws IOException {
		FileSystem fs = fileSystem();
		fs.copyToLocalFile(new Path(BASE_URL + "/user/root/input/3.txt"),
				new Path("C:\\Users\\lenovo\\Desktop\\4.txt"));
	}

	/**
	 * Creates (or overwrites) a file and writes content to it.
	 * Fix: the original never closed the output stream, so the written bytes
	 * were not guaranteed to reach HDFS. try-with-resources closes it.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testCreate() throws IOException {
		FileSystem fs = fileSystem();
		Path path = new Path("/user/root/input/4.txt");
		// second argument true = overwrite if the file already exists
		try (FSDataOutputStream out = fs.create(path, true)) {
			// Explicit charset: bare getBytes() depends on the JVM default.
			out.write("hello hadoop".getBytes(StandardCharsets.UTF_8));
		}
	}

	/**
	 * Creates an empty file; a no-op (returns false) if it already exists.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testCreateNewFile() throws IOException {
		FileSystem fs = fileSystem();
		fs.createNewFile(new Path("/user/root/input/5.txt"));
	}

	/**
	 * Deletes a file.
	 * Fix: delete(Path) is deprecated; delete(Path, boolean) is the supported
	 * form. recursive=false is correct here because the target is a plain file.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testDelete() throws IOException {
		FileSystem fs = fileSystem();
		fs.delete(new Path("/user/root/input/5.txt"), false);
	}

	/**
	 * Prints whether the given path exists in HDFS.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testExists() throws IOException {
		FileSystem fs = fileSystem();
		System.out.println(fs.exists(new Path("/user/root/input/5.txt")));
	}

	/**
	 * Fetches a file's FileStatus and prints its modification time (epoch ms).
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testGetFileStatus() throws IOException {
		FileSystem fs = fileSystem();
		FileStatus status = fs.getFileStatus(new Path("/user/root/input/1.txt"));
		System.out.println(status.getModificationTime());
	}

	/**
	 * Lists the direct children of a directory and prints each path.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testListFile() throws IOException {
		FileSystem fs = fileSystem();
		for (FileStatus fileStatus : fs.listStatus(new Path("/user/root/input"))) {
			System.out.println(fileStatus.getPath());
		}
	}

	/**
	 * Creates a directory, including any missing parent directories.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testMkdirs() throws IOException {
		FileSystem fs = fileSystem();
		fs.mkdirs(new Path("/user/root/input/sub"));
	}

	/**
	 * Moves a local file into HDFS (upload, then delete the local copy).
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testMoveFromLocalFile() throws IOException {
		FileSystem fs = fileSystem();
		fs.moveFromLocalFile(new Path("C:\\Users\\lenovo\\Desktop\\4.txt"),
				new Path("/user/root/input/sub"));
	}

	/**
	 * Renames (moves) a file within HDFS.
	 * @throws IOException on any HDFS failure
	 */
	@Test
	public void testRename() throws IOException {
		FileSystem fs = fileSystem();
		fs.rename(new Path("/user/root/input/sub/4.txt"),
				new Path("/user/root/input/sub/4_1.txt"));
	}
}

    源代码已上传,明天公开下载地址

一步一步跟我学习hadoop(6)----hadoop利用FileSystem API 执行hadoop文件读写操作

标签:hadoop   filesystem   filesystem api   

原文地址:http://blog.csdn.net/wuyinggui10000/article/details/47156513

(0)
(0)
   
举报
评论 一句话评论(0
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!