The following program uploads every file in a local "buffer" directory to the /movie directory on HDFS, then deletes each local copy once its upload completes:

package org.apache.sxh.hadoop.fs;

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Copy {

    public static void main(String[] args) throws Exception {
        // Create a Configuration object named conf
        Configuration conf = new Configuration();
        // For a remote HDFS cluster, the following two configuration files are required
        conf.addResource(new Path("/home/sxh/hadoop-1.2.1/conf/core-site.xml"));
        conf.addResource(new Path("/home/sxh/hadoop-1.2.1/conf/hdfs-site.xml"));
        // Create two FileSystem objects: one for HDFS, one for the local file system
        FileSystem sendhdfs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);
        // Local path holding the video files to upload, i.e. the "buffer" directory
        Path localdirPath = new Path("/home/sxh/SharedDirectory");
        // Destination path: create the movie directory on HDFS to receive the video data
        Path hdfsdirPath = new Path("/movie");
        sendhdfs.mkdirs(hdfsdirPath);
        FileStatus[] fileStatus = local.listStatus(localdirPath);
        FSDataOutputStream outputStream;
        // Write each video file to HDFS in a loop
        for (int i = 0; i < fileStatus.length; i++) {
            System.out.println(fileStatus[i].getPath().getName());
            FSDataInputStream inputStream = local.open(fileStatus[i].getPath());
            outputStream = sendhdfs.create(new Path("/movie/" + fileStatus[i].getPath().getName()));
            byte[] buffer = new byte[256];
            int bytesRead = 0;
            while ((bytesRead = inputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, bytesRead);
            }
            // Close both streams once this file's upload is finished
            outputStream.close();
            inputStream.close();
            // Delete the local file after a successful upload; toUri().getPath()
            // strips the "file:" scheme prefix (more robust than substring(5))
            File file = new File(fileStatus[i].getPath().toUri().getPath());
            System.out.println(fileStatus[i].getPath().toString());
            file.delete();
        }
    }
}
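For comparison, Hadoop's FileSystem API already provides copyFromLocalFile(boolean delSrc, Path src, Path dst), which copies a local file to HDFS and, when delSrc is true, removes the source afterwards; that collapses the manual stream copy plus File.delete() into one call. Below is a minimal sketch under the same paths as above (the class name CopyWithHelper is mine, not from the original post):

package org.apache.sxh.hadoop.fs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyWithHelper {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("/home/sxh/hadoop-1.2.1/conf/core-site.xml"));
        conf.addResource(new Path("/home/sxh/hadoop-1.2.1/conf/hdfs-site.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);

        Path localDir = new Path("/home/sxh/SharedDirectory");
        Path hdfsDir = new Path("/movie");
        hdfs.mkdirs(hdfsDir);

        // Copy each file and delete the local source (delSrc = true),
        // mirroring the upload-then-delete loop in the program above
        for (FileStatus status : local.listStatus(localDir)) {
            hdfs.copyFromLocalFile(true, status.getPath(),
                    new Path(hdfsDir, status.getPath().getName()));
        }
    }
}

One behavioral difference to note: copyFromLocalFile handles opening, buffering, and closing the streams internally, so there is no hand-written read/write loop to get wrong.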
Original article: http://blog.csdn.net/sxhlovehmm/article/details/45481519