The first post in this series (Hadoop入坑之路(一)) covered how to set up HDFS on the servers and the basic commands of the command-line client. This post focuses on the Java client: exchanging data between a Windows machine and the HDFS cluster.
To run the Java client on Windows, download the Hadoop source package from the official site, build it into a Windows distribution, and add Hadoop to the system environment variables.
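If you would rather not change the system-wide environment variables, the Hadoop client also honors the hadoop.home.dir JVM system property, which should point at the same Windows build (the directory containing bin\winutils.exe). This is only a minimal sketch; the installation path is a placeholder for your own, and the property must be set before the first Hadoop call:

public class WindowsHadoopHome {
    public static void main(String[] args) {
        // Equivalent to setting HADOOP_HOME, but only for this JVM.
        // "D:\\hadoop-2.8.5" is a placeholder; point it at your own Windows build of Hadoop.
        System.setProperty("hadoop.home.dir", "D:\\hadoop-2.8.5");
        System.out.println("hadoop.home.dir = " + System.getProperty("hadoop.home.dir"));
    }
}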
Start Eclipse and create a new Java project with a lib folder. Copy the jars under share/hadoop/common, share/hadoop/common/lib, share/hadoop/hdfs and share/hadoop/hdfs/lib into the project's lib folder and add them to the build path.
public class HdfsClient {
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        /**
         * How the Configuration object works:
         * on construction it first loads the default settings shipped in the jars (xx-default.xml),
         * then loads the user settings (xx-site.xml), which override the defaults.
         *
         * After construction, conf.set("p", "n") overrides the values from the user config files once more.
         */
        // new Configuration() automatically loads core-default.xml, hdfs-default.xml, hdfs-site.xml, etc. from the project's classpath
        Configuration conf = new Configuration();
        // number of replicas to keep for files uploaded to HDFS by this client: 2
        conf.set("dfs.replication", "2");
        // block size used when this client uploads files to HDFS: 64 MB
        conf.set("dfs.blocksize", "64m");
        // build a client for the target HDFS: arg 1 = URI of the HDFS, arg 2 = client-side configuration, arg 3 = user identity (user name)
        FileSystem fs = FileSystem.get(new URI("hdfs://222.18.157.50:9000/"), conf, "root");
        // upload a local file to HDFS
        fs.copyFromLocalFile(new Path("C:\\xxx\\xx\\xx\\xxxx.txt"), new Path("/"));
        fs.close();
    }
}
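In the version above, fs.close() is skipped if copyFromLocalFile throws. Since FileSystem implements Closeable, a slightly safer variant is try-with-resources; the sketch below shows the same upload client with the same placeholder paths and cluster address:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUploadClient {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "2");   // replicas per block for files written by this client
        conf.set("dfs.blocksize", "64m");   // block size for files written by this client
        // try-with-resources closes the FileSystem even if the upload throws
        try (FileSystem fs = FileSystem.get(new URI("hdfs://222.18.157.50:9000/"), conf, "root")) {
            fs.copyFromLocalFile(new Path("C:\\xxx\\xx\\xx\\xxxx.txt"), new Path("/"));
        }
    }
}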
public class HdfsClient {

    FileSystem fs = null;

    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        // new Configuration() automatically loads core-default.xml, hdfs-default.xml, hdfs-site.xml, etc. from the project's classpath
        Configuration conf = new Configuration();
        // number of replicas to keep for files uploaded to HDFS by this client: 2
        conf.set("dfs.replication", "2");
        // block size used when this client uploads files to HDFS: 64 MB
        conf.set("dfs.blocksize", "64m");
        // build a client for the target HDFS: arg 1 = URI of the HDFS, arg 2 = client-side configuration, arg 3 = user identity (user name)
        fs = FileSystem.get(new URI("hdfs://222.18.157.50:9000/"), conf, "root");
    }

    /**
     * Download a file from HDFS to the local disk
     * @throws Exception
     * @throws IllegalArgumentException
     */
    @Test
    public void testGet() throws IllegalArgumentException, Exception {
        fs.copyToLocalFile(new Path("/xxx.txt"), new Path("F:\\"));
        fs.close();
    }
}
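Each test here calls fs.close() itself, which works because @Before rebuilds the client before every test. An alternative sketch (assuming JUnit 4, as the @Before/@Test annotations suggest) is to close the client in an @After method so individual tests cannot forget it; the hypothetical cleanup() below would live in the same HdfsClient test class:

// Hypothetical teardown for the HdfsClient test class (requires import org.junit.After).
@After
public void cleanup() throws IOException {
    // closes the shared FileSystem after every test, so the @Test methods
    // no longer need to call fs.close() themselves
    if (fs != null) {
        fs.close();
    }
}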
/**
 * Rename (move) a file inside HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testRename() throws Exception {
    fs.rename(new Path("/office激活.txt"), new Path("/install.log"));
    fs.close();
}
/**
 * Create directories in HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testMkdir() throws IllegalArgumentException, IOException {
    fs.mkdirs(new Path("/client/java"));
    fs.mkdirs(new Path("/client/command"));
    fs.close();
}
/**
 * Delete a file or directory in HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testRm() throws IllegalArgumentException, IOException {
    // second argument: whether to delete recursively
    fs.delete(new Path("/install.log"), false);
    fs.close();
}
/**
 * List the files under a given HDFS directory
 * @throws IOException
 * @throws IllegalArgumentException
 * @throws FileNotFoundException
 */
@Test
public void testLs() throws FileNotFoundException, IllegalArgumentException, IOException {
    // returns files only, not directories; the second argument enables recursion
    RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path("/client/java"), true);
    while (iter.hasNext()) {
        LocatedFileStatus status = iter.next();
        System.out.println("Full path: " + status.getPath());
        System.out.println("Block size: " + status.getBlockSize());
        System.out.println("File length: " + status.getLen());
        System.out.println("Replication: " + status.getReplication());
        System.out.println("Block locations: " + Arrays.toString(status.getBlockLocations()));
        System.out.println("Owner: " + status.getOwner());
        System.out.println("----------------------------------------");
    }
    fs.close();
}
/**
 * List both files and directories under a given HDFS directory
 * @throws IOException
 * @throws IllegalArgumentException
 * @throws FileNotFoundException
 */
@Test
public void testLs2() throws FileNotFoundException, IllegalArgumentException, IOException {
    FileStatus[] liststatus = fs.listStatus(new Path("/client/"));
    for (FileStatus status : liststatus) {
        System.out.println("Full path: " + status.getPath());
        System.out.println(status.isDirectory() ? "This is a directory" : "This is a file");
        System.out.println("Block size: " + status.getBlockSize());
        System.out.println("File length: " + status.getLen());
        System.out.println("Replication: " + status.getReplication());
        System.out.println("----------------------------------------");
    }
    fs.close();
}
/**
 * Read the content of a file stored in HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testReadData() throws IllegalArgumentException, IOException {
    FSDataInputStream in = fs.open(new Path("/doc/HadoopReadMe.txt"));
    BufferedReader br = new BufferedReader(new InputStreamReader(in));
    String line = null;
    while ((line = br.readLine()) != null) {
        System.out.println(line);
    }
    br.close();
    in.close();
    fs.close();
}
/**
 * Read a specific byte range of a file stored in HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testRandomReadData() throws IllegalArgumentException, IOException {
    FSDataInputStream in = fs.open(new Path("/doc/HadoopReadMe.txt"));
    // seek to the starting offset
    in.seek(32);
    // read up to 64 bytes
    byte[] buf = new byte[64];
    in.read(buf);
    System.out.println(new String(buf));
    in.close();
    fs.close();
}
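Note that a single in.read(buf) call may return fewer than 64 bytes. If the whole range is required, FSDataInputStream also provides a positioned readFully, which keeps reading until the buffer is full (or throws EOFException if the file ends first). A hedged variant of the same test:

/**
 * Same range read as above, but with readFully so the 64-byte buffer
 * is guaranteed to be completely filled.
 */
@Test
public void testRandomReadDataFully() throws IllegalArgumentException, IOException {
    FSDataInputStream in = fs.open(new Path("/doc/HadoopReadMe.txt"));
    byte[] buf = new byte[64];
    // positioned read: fill buf with the 64 bytes starting at offset 32
    in.readFully(32, buf);
    System.out.println(new String(buf));
    in.close();
    fs.close();
}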
/**
 * Write data into a file in HDFS
 * @throws IOException
 * @throws IllegalArgumentException
 */
@Test
public void testWriteData() throws IllegalArgumentException, IOException {
    FSDataOutputStream out = fs.create(new Path("/client/java/testPic.jpg"));
    FileInputStream in = new FileInputStream("C:\\xxx\\xx\\Desktop\\xxxx\\xxxx\\xxxx.png");
    byte[] buf = new byte[1024];
    int read = 0;
    while ((read = in.read(buf)) != -1) {
        out.write(buf, 0, read);
    }
    in.close();
    out.close();
    fs.close();
}
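The manual read/write loop can also be replaced by Hadoop's own org.apache.hadoop.io.IOUtils.copyBytes helper, which copies the stream in fixed-size chunks and, when its last argument is true, closes both streams for you. A sketch of the same upload, with the same placeholder local path:

/**
 * Same upload as above, but letting IOUtils handle the copy loop and stream closing
 * (requires import org.apache.hadoop.io.IOUtils).
 */
@Test
public void testWriteDataWithIOUtils() throws IllegalArgumentException, IOException {
    FSDataOutputStream out = fs.create(new Path("/client/java/testPic.jpg"));
    FileInputStream in = new FileInputStream("C:\\xxx\\xx\\Desktop\\xxxx\\xxxx\\xxxx.png");
    // copy in 4 KB chunks; the final "true" closes both in and out when done
    IOUtils.copyBytes(in, out, 4096, true);
    fs.close();
}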
Original post: https://www.cnblogs.com/zhangchao162/p/9628937.html