使用java写HDFS的操作代码

原创
2017/03/08 17:54
阅读数 89

学习hadoop,学了hadoop的shell命令,诸如:hadoop fs -put | -mkdir | -cat | -ls 等等等等。这里不再说。这里主要说说,使用java API来操作HDFS。

 一>>>配置JDK
     略

 二>>>配置maven

    配置maven的环境变量和配置eclipse的maven插件 略
    新建一个maven工程:
    配置pom.xml文件(如下):
<!-- 设定仓库 -->
	<repositories>
		<repository>
			<id>central-repos</id>
			<name>Central Repository</name>
			<url>https://repo.maven.apache.org/maven2</url>
			<releases>
				<enabled>true</enabled>
			</releases>
			<snapshots>
				<enabled>false</enabled>
			</snapshots>
		</repository>

		<repository>
			<id>central-repos2</id>
			<name>Central Repository 2</name>
			<url>https://repo1.maven.org/maven2/</url>
			<releases>
				<enabled>true</enabled>
			</releases>
			<snapshots>
				<enabled>false</enabled>
			</snapshots>
		</repository>

		<repository>
			<id>springsource-repos</id>
			<name>SpringSource Repository</name>
			<url>http://repo.springsource.org/libs-milestone-local</url>
			<releases>
				<enabled>true</enabled>
			</releases>
			<snapshots>
				<enabled>false</enabled>
			</snapshots>
		</repository>
		<repository>
			<name>osc Repository</name>
			<id>osc_thirdparty</id>
			<url>http://maven.oschina.net/content/repositories/thirdparty/</url>
		</repository>
		<repository>
			<id>bitwalker.user-agent-utils.mvn.repo</id>
			<url>https://raw.github.com/HaraldWalker/user-agent-utils/mvn-repo/</url>
			<!-- use snapshot version -->
			<snapshots>
				<updatePolicy>always</updatePolicy>
			</snapshots>
		</repository>
	</repositories>
	  
<dependencies>
	<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
	<dependency>
	    <groupId>org.apache.hadoop</groupId>
	    <artifactId>hadoop-hdfs</artifactId>
	    <version>2.6.4</version>
	</dependency>

	<dependency>
	    <groupId>org.apache.hadoop</groupId>
	    <artifactId>hadoop-common</artifactId>
	    <version>2.6.4</version>
	</dependency>
	
	<dependency>
	    <groupId>org.apache.hadoop</groupId>
	    <artifactId>hadoop-client</artifactId>
	    <version>2.6.4</version>
	</dependency>	
		
	<!-- https://mvnrepository.com/artifact/junit/junit -->
	<dependency>
	    <groupId>junit</groupId>
	    <artifactId>junit</artifactId>
	    <version>4.8.1</version>
	</dependency>
	 
	<dependency>
		<groupId>jdk.tools</groupId>
		<artifactId>jdk.tools</artifactId>
		<version>1.7</version>
		<scope>system</scope>
		<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
	</dependency>  
</dependencies>

写java代码 如下:下面代码实现了一些基本的对HDFS的操作:

public class HDFSDemo {
	static String uri="hdfs://192.168.1.101:8020";
	static FileSystem fs;
	/**
	 * 获取FileSystem
	 * @return
	 */
	public static FileSystem getFs(){
		try {
			fs = FileSystem.get(new URI(uri),new Configuration(),"root");
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} 
		return fs;
	}
	/**
	 * 创建目录
	 * @param path
	 */
	public static void mkdir(String path){
		try {
			fs = getFs(); 
			fs.mkdirs(new Path(path));
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		} 
		
	}
	/**
	 * 查看目录下文件(可能不包含目录)
	 * @param path
	 */
	public static void show(String path){
		fs = getFs();
		try {
			RemoteIterator<LocatedFileStatus> ri = fs.listFiles(new Path(path), true);
			
			while (ri.hasNext()) {
				LocatedFileStatus lfs = ri.next();
				Path fs = lfs.getPath();
				System.out.println(fs.getName());
				
			}
			fs.close();
		} catch (Exception e) {
			// TODO: handle exception
			e.printStackTrace();
		}
	}
	/**
	 * 查看所有目录 并得到当前目录
	 */
	public static String showAll(String path){
		fs = getFs();
		try {
			FileStatus[] fls = fs.listStatus(new Path(path));
			for (FileStatus file : fls) {
				System.out.println(file.getPath().getName());
			}
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
		return path;
	}
	
	
	/**
	 * 删除文件
	 * @param args
	 */
	public static void delete(String path){
		fs = getFs();
		try {
			fs.delete(new Path(path),true);
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		} 
	}
	/**
	 * 下载文件
	 * @param args
	 */
	@Test 
	public  void download(){
		
		fs = getFs();
		try {
			fs.copyToLocalFile(new Path("/jdk-7u75-linux-x64.gz"),new Path("E:\\12.gz"));
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
//		fs = getFs();
//		try {
//			FSDataInputStream fip = fs.open(new Path(path1)); //获取一个输入流,HDFS文件的输入流
//			OutputStream ops = new FileOutputStream(path2);
//			IOUtils.copyBytes(fip,ops,4096,true);
//		} catch (Exception e) {
//			e.printStackTrace();
//		}
//		
		
		
		
	}
	/**
	 * 上传文件
	 * @param args
	 */
	public static void upload(String path1,String path2){
		fs = getFs();
		try {
			fs.copyFromLocalFile(new Path(path1),new Path(path2));
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
		
	}

大概就是这些。 感谢观看!

展开阅读全文
打赏
0
0 收藏
分享
加载中
更多评论
打赏
0 评论
0 收藏
0
分享
返回顶部
顶部