Hadoop CRUD Operations (Java)

All of the required jars ship with the Hadoop distribution. The example below needs at least the following (a Maven alternative is sketched after the list):

commons-cli-1.2.jar
commons-collections-3.2.1.jar
commons-configuration-1.6.jar
commons-io-2.4.jar
commons-lang-2.6.jar
commons-logging-1.1.3.jar
guava-11.0.2.jar
hadoop-auth-2.7.1.jar
hadoop-common-2.7.1.jar
hadoop-hdfs-2.7.1.jar
htrace-core-3.1.0-incubating.jar
log4j-1.2.17.jar
protobuf-java-2.5.0.jar
servlet-api.jar
slf4j-api-1.7.10.jar
slf4j-log4j12-1.7.10.jar
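
If you build with Maven instead of collecting the jars by hand, a single dependency should pull in equivalent artifacts transitively. This is a minimal sketch, assuming a Maven build; the version must match your cluster:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.1</version>
</dependency>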

The code is as follows:

package com.neohope.hadoop.test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSTest {

	static Configuration hdfsConfig;
	static {
		// Load the cluster settings; these paths are resolved
		// relative to the program's working directory
		hdfsConfig = new Configuration();
		hdfsConfig.addResource(new Path("etc/hadoop/core-site.xml"));
		hdfsConfig.addResource(new Path("etc/hadoop/hdfs-site.xml"));
	}

	// Create a directory
	public static void createDirectory(String dirPath) throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path p = new Path(dirPath);
		try {
			fs.mkdirs(p);
		} finally {
			fs.close();
		}
	}

	// Delete a directory
	public static void deleteDirectory(String dirPath) throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path p = new Path(dirPath);
		try {
			// true = recursive: also removes the directory's contents
			fs.delete(p, true);
		} finally {
			fs.close();
		}
	}

	// Rename a directory
	public static void renameDirectory(String oldDirPath, String newDirPath)
			throws IOException {
		renameFile(oldDirPath, newDirPath);
	}

	// List the files in a directory
	public static void listFiles(String dirPath) throws IOException {
		FileSystem hdfs = FileSystem.get(hdfsConfig);
		Path listf = new Path(dirPath);
		try {
			FileStatus statuslist[] = hdfs.listStatus(listf);
			for (FileStatus status : statuslist) {
				System.out.println(status.getPath().toString());
			}
		} finally {
			hdfs.close();
		}
	}

	// Create an empty file
	public static void createFile(String filePath) throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path p = new Path(filePath);
		try {
			fs.createNewFile(p);
		} finally {
			fs.close();
		}
	}

	// Delete a file
	public static void deleteFile(String filePath) throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path p = new Path(filePath);
		try {
			// false = non-recursive, which is sufficient for a single file
			fs.delete(p, false);
		} finally {
			fs.close();
		}
	}

	// Rename a file
	public static void renameFile(String oldFilePath, String newFilePath)
			throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path oldPath = new Path(oldFilePath);
		Path newPath = new Path(newFilePath);
		try {
			fs.rename(oldPath, newPath);
		} finally {
			fs.close();
		}
	}

	// Upload a local file to HDFS
	public static void putFile(String localPath, String hdfsPath)
			throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path src = new Path(localPath);
		Path dst = new Path(hdfsPath);
		try {
			fs.copyFromLocalFile(src, dst);
		} finally {
			fs.close();
		}
	}

	// Download a file from HDFS
	public static void getFile(String hdfsPath, String localPath)
			throws IOException {
		FileSystem fs = FileSystem.get(hdfsConfig);
		Path src = new Path(hdfsPath);
		Path dst = new Path(localPath);
		try {
			// delSrc = false keeps the source; useRawLocalFileSystem = true
			// avoids writing a local .crc checksum file
			fs.copyToLocalFile(false, src, dst, true);
		} finally {
			fs.close();
		}
	}

	// Read a file and print it line by line
	public static void readFile(String hdfsPath) throws IOException {
		FileSystem hdfs = FileSystem.get(hdfsConfig);
		Path filePath = new Path(hdfsPath);

		InputStream in = null;
		BufferedReader buff = null;
		try {
			in = hdfs.open(filePath);
			buff = new BufferedReader(new InputStreamReader(in));
			String str = null;
			while ((str = buff.readLine()) != null) {
				System.out.println(str);
			}
		} finally {
			// Null checks prevent a NullPointerException from masking
			// the real error if open() fails
			if (buff != null) {
				buff.close();
			} else if (in != null) {
				in.close();
			}
			hdfs.close();
		}
	}

	public static void main(String[] args) throws IOException {
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		// createDirectory("hdfs://hadoop-master:9000/usr");
		// createDirectory("hdfs://hadoop-master:9000/usr/hansen");
		// createDirectory("hdfs://hadoop-master:9000/usr/hansen/test");
		// renameDirectory("hdfs://hadoop-master:9000/usr/hansen/test","hdfs://hadoop-master:9000/usr/hansen/test01");
		// createFile("hdfs://hadoop-master:9000/usr/hansen/test01/hello.txt");
		// renameFile("hdfs://hadoop-master:9000/usr/hansen/test01/hello.txt","hdfs://hadoop-master:9000/usr/hansen/test01/hello01.txt");
		// putFile("hello.txt","hdfs://hadoop-master:9000/usr/hansen/test01/hello02.txt");
		// getFile("hdfs://hadoop-master:9000/usr/hansen/test01/hello02.txt","hello02.txt");
		// readFile("hdfs://hadoop-master:9000/usr/hansen/test01/hello02.txt");
		listFiles("hdfs://hadoop-master:9000/usr/hansen/test01/");
	}

}
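
Since Hadoop 2.x runs on Java 7+ and FileSystem implements Closeable, the try/finally boilerplate above can also be written with try-with-resources. A minimal sketch, assuming the same configuration object as in HDFSTest (the class name here is hypothetical):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSTryWithResources {

	// Same operation as createDirectory above; the FileSystem is
	// closed automatically when the try block exits
	public static void createDirectory(Configuration conf, String dirPath)
			throws IOException {
		try (FileSystem fs = FileSystem.get(conf)) {
			fs.mkdirs(new Path(dirPath));
		}
	}
}

Note that FileSystem.get(conf) returns a process-wide cached instance by default, so closing it after every call, as both versions do, is only safe when nothing else in the process shares that instance.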
