Java HDFS Operation Examples


1. Environment Setup

  1. A big data cluster; if you don't have one, you can set one up locally (reference: https://www.jianshu.com/p/2c2ae6490fa0)
  2. A JDK installed locally
  3. IDEA or Eclipse installed locally

2. Create a Maven Project

Create a Maven project in IDEA and add the following dependencies to pom.xml:

<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.apache.logging.log4j</groupId>
			<artifactId>log4j-core</artifactId>
			<version>2.8.2</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-common</artifactId>
			<version>2.7.2</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-client</artifactId>
			<version>2.7.2</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-hdfs</artifactId>
			<version>2.7.2</version>
		</dependency>
		<dependency>
			<groupId>jdk.tools</groupId>
			<artifactId>jdk.tools</artifactId>
			<version>1.8</version>
			<scope>system</scope>
			<systemPath>${env.JAVA_HOME}/lib/tools.jar</systemPath>
		</dependency>
</dependencies>
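
With the dependencies in place, you can quickly verify that they resolve and that the cluster is reachable before writing the full client class. A minimal sketch, assuming the NameNode address hdfs://hadoop102:9000 and the root user that the examples below use (HdfsSmokeTest is a hypothetical helper class, not part of the original project):

package com.lancer.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

// Hypothetical connectivity check; adjust the address and user to your cluster
public class HdfsSmokeTest {
	public static void main(String[] args) throws Exception {
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), new Configuration(), "root");
		System.out.println("Connected to: " + fs.getUri());
		fs.close();
	}
}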

3. Write the Code

Create an HDFS client operation class named HDFSClient. Below are some basic client operation examples.

package com.lancer.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

public class HDFSClient {


	/**
	 * Create a directory
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void mkdir() throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://hadoop102:9000");
		// Get the HDFS client object; passing the URI and user explicitly
		// avoids relying on fs.defaultFS and the local OS user name
		//FileSystem fs = FileSystem.get(conf);
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), conf, "root");
		
		// Create the directory on HDFS
		fs.mkdirs(new Path("/client/test4"));

		// Release resources
		fs.close();
		
		System.out.println("done");
	}
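
	/*
	 * Note: FileSystem implements Closeable, so try-with-resources is an
	 * alternative to the manual close() calls used in this class. A minimal
	 * sketch (the target path /client/test5 is just an example):
	 */
	@Test
	public void mkdirTryWithResources() throws IOException, InterruptedException, URISyntaxException {
		try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), new Configuration(), "root")) {
			fs.mkdirs(new Path("/client/test5"));
		}
		System.out.println("done");
	}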
	
	
	/**
	 * Upload a file
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testCopyFromLocalFile() throws IOException, InterruptedException, URISyntaxException {
		// 1 Get the fs object
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), new Configuration(), "root");
		// 2 Call the upload API
		fs.copyFromLocalFile(new Path("D:/学习/Hadoop权威指南(第四版).pdf"), new Path("/client/test"));
		// 3 Release resources
		fs.close();
	}
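
	/*
	 * Note: copyFromLocalFile also has an overload that controls whether the
	 * source is deleted and whether an existing destination is overwritten.
	 * A minimal sketch (same paths as above):
	 *
	 * fs.copyFromLocalFile(false, true, new Path("D:/学习/Hadoop权威指南(第四版).pdf"), new Path("/client/test"));
	 */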
	
	/**
	 * Upload a file and set the replication factor
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testCopyFromLocalFile2() throws IOException, InterruptedException, URISyntaxException {

			// 1 Get the file system; dfs.replication set on the client
			// overrides the cluster default for files this client writes
			Configuration configuration = new Configuration();
			configuration.set("dfs.replication", "2");
			FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");

			// 2 Upload the file
			fs.copyFromLocalFile(new Path("D:/学习/Hadoop权威指南(第四版).pdf"), new Path("/client/test3"));

			// 3 Release resources
			fs.close();

			System.out.println("done");
	}

	/**
	 * Download a file
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testCopyToLocalFile() throws IOException, InterruptedException, URISyntaxException{

			// 1 Get the file system
			Configuration configuration = new Configuration();
			FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
			// 2 Perform the download
			// boolean delSrc: whether to delete the source file
			// Path src: the HDFS path to download from
			// Path dst: the local path to download to
			// boolean useRawLocalFileSystem: true writes via RawLocalFileSystem,
			// which skips creating a local .crc checksum file
			fs.copyToLocalFile(false, new Path("/client/test3/Hadoop权威指南(第四版).pdf"), new Path("d:/Hadoop权威指南(第四版).pdf"), true);
			
			// 3 Release resources
			fs.close();
			System.out.println("done");
	}

	
	/**
	 * Delete a directory
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testDelete() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
		// 2 Perform the delete; the second argument enables recursive deletion
		fs.delete(new Path("/client/"), true);
			
		// 3 Release resources
		fs.close();
		
		System.out.println("done");
	}
	
	/**
	 * Rename a file
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testRename() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root"); 
			
		// 2 Rename the file
		fs.rename(new Path("/README.txt"), new Path("/README222.txt"));
			
		// 3 Release resources
		fs.close();
		
		System.out.println("done");
	}

	
	/**
	 * List file details (name, length, permission, group, block locations)
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testListFiles() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root"); 
			
		// 2 Get file details; the second argument makes the listing recursive
		RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
			
		while(listFiles.hasNext()){
			LocatedFileStatus status = listFiles.next();
				
			// Print the details
			// File name
			System.out.println("File name: " + status.getPath().getName());
			// Length
			System.out.println("Length: " + status.getLen());
			// Permission
			System.out.println("Permission: " + status.getPermission());
			// Group
			System.out.println("Group: " + status.getGroup());
				
			// Get the stored block locations
			BlockLocation[] blockLocations = status.getBlockLocations();
			System.out.println("Block locations:");
			for (BlockLocation blockLocation : blockLocations) {
					
				// Hosts storing this block
				String[] hosts = blockLocation.getHosts();
					
				for (String host : hosts) {
					System.out.println(host);
				}
			}
				
			System.out.println("----------divider----------");
		}

		// 3 Release resources
		fs.close();
		
		System.out.println("done");
	}

	
	/**
	 * Distinguish HDFS files from directories
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void testListStatus() throws IOException, InterruptedException, URISyntaxException{
			
		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
		// 2 Check whether each entry is a file or a directory
		FileStatus[] listStatus = fs.listStatus(new Path("/"));
			
		for (FileStatus fileStatus : listStatus) {
			
			// File or directory
			if (fileStatus.isFile()) {
				System.out.println("f:" + fileStatus.getPath().getName());
			} else {
				System.out.println("d:" + fileStatus.getPath().getName());
			}
		}
			
		// 3 Release resources
		fs.close();
		
		System.out.println("done");
	}

	
	/**
	 * Stream I/O: upload a file without using the wrapper methods
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void putFileToHDFS() throws IOException, InterruptedException, URISyntaxException {

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");

		// 2 Create the input stream from the local file
		FileInputStream fis = new FileInputStream(new File("D:/学习/Hadoop权威指南(第四版).pdf"));

		// 3 Get the output stream to HDFS
		FSDataOutputStream fos = fs.create(new Path("/Hadoop权威指南(第四版).pdf"));

		// 4 Copy the streams
		IOUtils.copyBytes(fis, fos, configuration);

		// 5 Release resources
		IOUtils.closeStream(fos);
		IOUtils.closeStream(fis);
	    fs.close();
	    
		System.out.println("done");
	}

	
	/**
	 * Stream I/O: download a file without using the wrapper methods
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void getFileFromHDFS() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
		// 2 Open the input stream from HDFS
		FSDataInputStream fis = fs.open(new Path("/Hadoop权威指南(第四版).pdf"));
			
		// 3 Create the output stream to the local file
		FileOutputStream fos = new FileOutputStream(new File("d:/Hadoop权威指南(第四版)222.pdf"));
			
		// 4 Copy the streams
		IOUtils.copyBytes(fis, fos, configuration);
			
		// 5 Release resources
		IOUtils.closeStream(fos);
		IOUtils.closeStream(fis);
		fs.close();
		
		System.out.println("done");
	}
	
	/**
	 * Download a file in chunks: the first 128 MB block
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void readFileSeek1() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
		// 2 Open the input stream
		FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
			
		// 3 Create the output stream
		FileOutputStream fos = new FileOutputStream(new File("d:/hadoop-2.7.2.tar.gz.part1"));
			
		// 4 Copy the first 128 MB (1 KB buffer * 128 * 1024 iterations),
		// writing only the bytes actually read on each iteration
		byte[] buf = new byte[1024];
			
		for(int i = 0; i < 1024 * 128; i++){
			int len = fis.read(buf);
			if (len == -1) {
				break;
			}
			fos.write(buf, 0, len);
		}
			
		// 5 Release resources
		IOUtils.closeStream(fis);
		IOUtils.closeStream(fos);
	    fs.close();
	    
	    System.out.println("done");
	}

	
	/**
	 * Download a file in chunks: the second block
	 * @throws IOException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	@Test
	public void readFileSeek2() throws IOException, InterruptedException, URISyntaxException{

		// 1 Get the file system
		Configuration configuration = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "root");
			
		// 2 Open the input stream
		FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
			
		// 3 Seek to the start of the second block (128 MB)
		fis.seek(1024 * 1024 * 128);
			
		// 4 Create the output stream
		FileOutputStream fos = new FileOutputStream(new File("d:/hadoop-2.7.2.tar.gz.part2"));
			
		// 5 Copy the streams
		IOUtils.copyBytes(fis, fos, configuration);
			
		// 6 Release resources
		IOUtils.closeStream(fis);
		IOUtils.closeStream(fos);
		fs.close();
		System.out.println("done");
		
		/*
		 * In a Windows command prompt, cd into D:\ (where both parts were
		 * downloaded) and merge them with:
		 *     type hadoop-2.7.2.tar.gz.part2 >> hadoop-2.7.2.tar.gz.part1
		 * Then rename hadoop-2.7.2.tar.gz.part1 to hadoop-2.7.2.tar.gz;
		 * extracting it shows the tar file is complete.
		 */
	}



}
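
As an alternative to the Windows type command mentioned at the end of readFileSeek2, the two part files can also be merged in Java. A minimal sketch, assuming the part files downloaded above (the class name MergeParts is hypothetical):

package com.lancer.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical helper: appends part2 to part1, yielding the complete tar.gz
public class MergeParts {
	public static void main(String[] args) throws IOException {
		try (FileOutputStream out = new FileOutputStream("d:/hadoop-2.7.2.tar.gz.part1", true);
				FileInputStream in = new FileInputStream("d:/hadoop-2.7.2.tar.gz.part2")) {
			byte[] buf = new byte[8192];
			int len;
			while ((len = in.read(buf)) != -1) {
				out.write(buf, 0, len);
			}
		}
		// Afterwards, rename hadoop-2.7.2.tar.gz.part1 to hadoop-2.7.2.tar.gz
	}
}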



