HDFS 업로드 다운로드 예시

3850 단어 · 자바 · Hadoop · HDFS
일반 판
package club.drguo.hadoop.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;

public class HdfsClient {
	private FileSystem fs = null;

	/**
	 * Acquires an HDFS {@link FileSystem} handle before each test.
	 *
	 * @throws IOException if the filesystem cannot be initialized
	 */
	@Before
	public void getFS() throws IOException {
		// Build the client-side configuration.
		Configuration conf = new Configuration();
		// Point the client at the HDFS NameNode.
		// "fs.defaultFS" replaces the deprecated "fs.default.name" key.
		conf.set("fs.defaultFS", "hdfs://localhost:9000");
		// Replication factor for files written by this client.
		conf.set("dfs.replication", "3");
		// Obtain the HDFS client handle.
		fs = FileSystem.get(conf);
	}

	/**
	 * Uploads a local file to HDFS by copying raw streams.
	 * Both streams are closed via try-with-resources (the original leaked them).
	 *
	 * @throws IOException on read/write failure
	 */
	@Test
	public void upload() throws IOException {
		// Destination path on HDFS.
		Path destFile = new Path("hdfs://localhost:9000/up.py");
		try (FileInputStream fileInputStream = new FileInputStream("/home/guo/test.py");
				FSDataOutputStream dataOutputStream = fs.create(destFile)) {
			// commons-io IOUtils copies the entire stream.
			IOUtils.copy(fileInputStream, dataOutputStream);
		}
	}

	/**
	 * Downloads a file from HDFS to the local filesystem by copying raw streams.
	 * Both streams are closed via try-with-resources (the original leaked them).
	 *
	 * @throws IOException on read/write failure
	 */
	@Test
	public void download() throws IllegalArgumentException, IOException {
		try (FSDataInputStream dataInputStream = fs.open(new Path("hdfs://localhost:9000/up.py"));
				FileOutputStream fileOutputStream = new FileOutputStream("/home/guo/down.py")) {
			IOUtils.copy(dataInputStream, fileOutputStream);
		}
	}

}

일반 판
package club.drguo.hadoop.hdfs;

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

public class HdfsClientNormal {
	private FileSystem fs = null;

	/**
	 * Acquires an HDFS {@link FileSystem} handle before each test.
	 *
	 * @throws IOException if the filesystem cannot be initialized
	 */
	@Before
	public void getFS() throws IOException {
		// Build the client-side configuration.
		Configuration conf = new Configuration();
		// Point the client at the HDFS NameNode.
		// "fs.defaultFS" replaces the deprecated "fs.default.name" key.
		conf.set("fs.defaultFS", "hdfs://localhost:9000");
		// Replication factor for files written by this client.
		conf.set("dfs.replication", "3");
		// Obtain the HDFS client handle.
		fs = FileSystem.get(conf);
	}

	/** Uploads a local file to HDFS using the high-level copy API. */
	@Test
	public void upload() throws IOException {
		fs.copyFromLocalFile(new Path("/home/guo/test.py"), new Path("/up.py"));
	}

	/**
	 * Downloads an HDFS file to the local filesystem.
	 * NOTE(review): on clients without the Hadoop native libraries, the
	 * overload copyToLocalFile(false, src, dst, true) avoids NativeIO errors
	 * by using the raw local filesystem — confirm before changing.
	 */
	@Test
	public void download() throws IllegalArgumentException, IOException {
		fs.copyToLocalFile(new Path("/up.py"), new Path("/home/guo/down.java"));
	}

	/** Creates a directory on HDFS. */
	@Test
	public void mkDir() throws IllegalArgumentException, IOException {
		fs.mkdirs(new Path("/test"));
	}

	/** Deletes a path on HDFS; the second argument enables recursive delete. */
	@Test
	public void removeFile() throws IllegalArgumentException, IOException {
		fs.delete(new Path("/test"), true);
	}

	/** Renames a file on HDFS. */
	@Test
	public void rename() throws IllegalArgumentException, IOException {
		// Fixed: source was "/Up.py" (capital U), which does not match the
		// "/up.py" created by upload() and would fail on HDFS's
		// case-sensitive namespace.
		fs.rename(new Path("/up.py"), new Path("/up.java"));
	}

	/**
	 * Lists files recursively, then lists direct children of "/" with their type.
	 *
	 * @throws FileNotFoundException if the listed path does not exist
	 */
	@Test
	public void queryList() throws FileNotFoundException, IllegalArgumentException, IOException {
		// Recursive listing: returns files only, not directories.
		RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
		while (listFiles.hasNext()) {
			LocatedFileStatus fileStatus = listFiles.next();
			System.out.println(fileStatus.getPath().getName());
		}
		System.out.println("--------------------------------------------------------------");
		// Non-recursive listing: files AND directories directly under "/".
		FileStatus[] listStatus = fs.listStatus(new Path("/"));
		for (FileStatus fileStatus : listStatus) {
			// The original labels were garbled to whitespace by extraction;
			// restored as explicit "dir"/"file" markers.
			System.out.println(fileStatus.getPath().getName() + "----" + (fileStatus.isDirectory() ? "dir" : "file"));
		}
	}
}

좋은 웹페이지 즐겨찾기