色婷五一,精品亚洲欧美一区二区三区日产 ,精选国产AV剧情,无码丰满少妇2在线观看

18600329666

咨詢技術專家

掃一掃
與技術(shù)專家在線溝通

Menu
JAVA使用idea遠程連接并操作Hadoop分布式存儲HDFS
      JAVA使用idea遠(yuǎn)程連接并操作Hadoop分布式存儲(chǔ)HDFS,需要安裝hadoop cdh 版本,apache版本的hadoop如果遠(yuǎn)程連接需要編譯bin文件比較麻煩,安裝完cdh版本的hadoop后需要配置允許遠(yuǎn)程訪問具體如下
        


使用java鏈接hdfs須配置為0.0.0.0 hadoopmat,并且本地配置windows系統(tǒng)的host文件111.111.11.11  hadoopmat
一、初始化添加maven依賴

<dependency>
  <!-- CDH build of the Hadoop client. The article uses CDH (not vanilla Apache)
       because the Apache build needs extra native bin/winutils files compiled
       locally before a Windows machine can connect to a remote cluster. -->
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-client</artifactId>
  <version>2.6.0-cdh5.13.0</version>
</dependency>
二、初始化hdfs連接獲得FileSystem對(duì)象
/** HDFS NameNode URI; "hadoopmat" must resolve via the local hosts file (see article above). */
public static final String HDFS_PATH="hdfs://hadoopmat:9000";
private Configuration configuration;
private FileSystem fileSystem;

/**
 * Opens the HDFS connection before each test.
 *
 * @throws URISyntaxException   if {@link #HDFS_PATH} is malformed
 * @throws IOException          if the NameNode cannot be reached
 * @throws InterruptedException if the connection attempt is interrupted
 */
@Before
public void function_before() throws URISyntaxException, IOException, InterruptedException {
    configuration = new Configuration();
    // Connect as user "root" — NOTE(review): assumes the cluster permits remote access as root.
    fileSystem = FileSystem.get(new URI(HDFS_PATH),configuration,"root");
}

/**
 * Closes the shared {@link FileSystem} after each test so the connection does not leak.
 * (Fix: the original never closed it except inside one test, which then broke the others.)
 *
 * @throws IOException if closing the connection fails
 */
@org.junit.After
public void function_after() throws IOException {
    if (fileSystem != null) {
        fileSystem.close();
    }
}
三、JAVA對(duì)hdfs的文件操作
/**
 * Creates a directory on HDFS.
 * Fix: {@code FileSystem.mkdirs} signals failure by returning {@code false} rather than
 * throwing, so the original silently ignored failures; the result is now checked.
 * @throws Exception if the HDFS call itself fails
 */
@Test
public void testMkdirs()throws Exception{
    boolean created = fileSystem.mkdirs(new Path("/springhdfs/test"));
    System.out.println("mkdirs(/springhdfs/test) returned: " + created);
}
/*
 * Prints the HDFS root path followed by each of its direct children.
 */
@Test
public void testLSR() throws IOException {
    Path root = new Path("/");
    FileStatus rootStatus = fileSystem.getFileStatus(root);
    System.out.println("*************************************");
    System.out.println("文件根目錄: "+rootStatus.getPath());
    System.out.println("文件目錄為:");
    FileStatus[] children = fileSystem.listStatus(root);
    for (int i = 0; i < children.length; i++) {
        System.out.println(children[i].getPath());
    }
}
/*
 * Uploads a local file to HDFS.
 * Fix: no longer calls fileSystem.close() here — the FileSystem instance is shared
 * with every other test, and closing it mid-suite made any later HDFS call fail.
 */
@Test
public void upload() throws Exception{
    Path srcPath = new Path("F:/hadooptst/hadoop.txt");
    Path dstPath = new Path("/springhdfs/test");
    // false = keep the local source file after the copy (do not "move")
    fileSystem.copyFromLocalFile(false, srcPath, dstPath);
    System.out.println("*************************************");
    System.out.println("上傳成功!");
}

/*
 * Downloads /springhdfs/test/hadoop.txt from HDFS to the local disk.
 * Fix: try-with-resources guarantees both streams are closed even if opening the
 * local output file throws (the original leaked `in` in that case).
 */
@Test
public void download() throws Exception{
    try (InputStream in = fileSystem.open(new Path("/springhdfs/test/hadoop.txt"));
         OutputStream out = new FileOutputStream("E://hadoop.txt")) {
        // false = copyBytes must not close the streams; the try block owns them
        IOUtils.copyBytes(in, out, 4096, false);
    }
}

/*
 * Deletes a file from HDFS (recursively, should the path be a directory).
 * Fix: FileSystem.delete returns false when nothing was deleted; the original
 * printed "success" unconditionally, so the result is now checked.
 */
@Test
public void delete() throws Exception{
    Path path = new Path("/springhdfs/test/hadoop.txt");
    boolean deleted = fileSystem.delete(path,true);
    System.out.println("*************************************");
    if (deleted) {
        System.out.println("刪除成功!");
    } else {
        System.out.println("刪除失敗: " + path);
    }
}

/*
 * Prints the content of an HDFS file to stdout.
 * Fixes: (1) the original read one byte at a time and cast each to char, which
 * corrupts any multi-byte (e.g. UTF-8 Chinese) content — streaming the raw bytes
 * lets the console decode them correctly; (2) the stream is now closed via
 * try-with-resources even when an exception is thrown mid-read.
 */
@Test
public void look() throws Exception{
    Path path = new Path("/springhdfs/test/hadoop.txt");
    System.out.println("*************************************");
    System.out.println("瀏覽文件:");
    try (FSDataInputStream fsDataInputStream = fileSystem.open(path)) {
        // false = leave System.out open; only the HDFS stream is ours to close
        IOUtils.copyBytes(fsDataInputStream, System.out, 4096, false);
    }
    System.out.println();
}