码迷,mamicode.com
首页 > 其他好文 > 详细

使用Hadoop自己的类操作HDFS

时间:2016-03-14 01:31:30      阅读:235      评论:0      收藏:0      [点我收藏+]

标签:

 

 

 1 package hdfs;
 2 
 3 import java.io.FileInputStream;
 4 import java.io.FileNotFoundException;
 5 import java.io.IOException;
 6 import java.net.URI;
 7 import java.net.URISyntaxException;
 8 
 9 import org.apache.hadoop.conf.Configuration;
10 import org.apache.hadoop.fs.FSDataInputStream;
11 import org.apache.hadoop.fs.FSDataOutputStream;
12 import org.apache.hadoop.fs.FileStatus;
13 import org.apache.hadoop.fs.FileSystem;
14 import org.apache.hadoop.fs.Path;
15 import org.apache.hadoop.io.IOUtils;
16 
17 public class App2 {
18     static final String PATH = "hdfs://chaoren:9000/";
19     static final String DIR = "/d1";
20     static final String FILE = "/d1/hello";
21     public static void main(String[] args) throws Exception {
22         FileSystem fileSystem = getFileSystem();
23         //创建文件夹     hadoop fs -mkdir   /f1
24         mkdir(fileSystem);
25         //上传文件  -put  src  des
26         putData(fileSystem);
27         //下载文件   hadoop fs -get src des
28         //getData(fileSystem);
29         //浏览文件夹
30         list(fileSystem);
31         //删除文件夹
32         //remove(fileSystem);
33     }
34     private static void list(FileSystem fileSystem) throws IOException {
35         final FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
36         for (FileStatus fileStatus : listStatus) {
37             String isDir = fileStatus.isDir()?"文件夹":"文件";
38             final String permission = fileStatus.getPermission().toString();
39             final short replication = fileStatus.getReplication();
40             final long len = fileStatus.getLen();
41             final String path = fileStatus.getPath().toString();
42             System.out.println(isDir+"\t"+permission+"\t"+replication+"\t"+len+"\t"+path);
43         }
44     }
45     private static void getData(FileSystem fileSystem) throws IOException {
46         final FSDataInputStream in = fileSystem.open(new Path(FILE));
47         IOUtils.copyBytes(in, System.out, 1024, true);
48     }
49     private static void putData(FileSystem fileSystem) throws IOException,
50             FileNotFoundException {
51         final FSDataOutputStream out = fileSystem.create(new Path(FILE));
52         final FileInputStream in = new FileInputStream("H:/kuaipan/hadoop/classes/yy131009/day2/readme.txt");
53         IOUtils.copyBytes(in, out, 1024, true);
54     }
55     private static void remove(FileSystem fileSystem) throws IOException {
56         fileSystem.delete(new Path(DIR), true);
57     }
58     private static void mkdir(FileSystem fileSystem) throws IOException {
59         fileSystem.mkdirs(new Path(DIR));
60     }
61     private static FileSystem getFileSystem() throws IOException, URISyntaxException {
62         return FileSystem.get(new URI(PATH), new Configuration());
63     }
64 }

 

使用Hadoop自己的类操作HDFS

标签:

原文地址:http://www.cnblogs.com/mrxiaohe/p/5274318.html

(0)
(0)
   
举报
评论 一句话评论(0)
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!