// Test workflow:
//   1. Load the HDFS environment/configuration
//   2. Create the input and output streams
//   3. Close the streams
package hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
/**
 * JUnit tests that copy a file from the local disk to HDFS and back.
 *
 * <p>Connects to the NameNode at hdfs://192.168.146.137:9000 as user "root"
 * before each test. Local test paths (F:/...) are Windows-specific fixtures.
 */
public class FiletoHdfs {
    // Shared by all tests; assigned in init(). NOTE: the original code declared
    // new locals here in init(), shadowing these fields and leaving them null,
    // which made every test fail with a NullPointerException.
    Configuration configuration = null;
    FileSystem fs = null;

    /**
     * Loads the client configuration and opens the HDFS FileSystem handle.
     *
     * @throws IOException          if the NameNode cannot be reached
     * @throws InterruptedException if the connection attempt is interrupted
     * @throws URISyntaxException   if the NameNode URI is malformed
     */
    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        // Assign to the fields (do NOT redeclare locals, which would shadow them).
        configuration = new Configuration();
        configuration.set("dfs.replication", "2");
        configuration.set("dfs.blocksize", "128m");
        fs = FileSystem.get(new URI("hdfs://192.168.146.137:9000"), configuration, "root");
    }

    /**
     * Uploads the local file F:/test/file1/b.txt to /file1.txt on HDFS.
     *
     * @throws IOException if reading the local file or writing to HDFS fails
     */
    @Test
    public void fileToHdsf() throws IllegalArgumentException, IOException {
        // try-with-resources closes both streams even if the copy throws,
        // unlike the original happy-path-only closeStream calls.
        try (FileInputStream in = new FileInputStream(new File("F:/test/file1/b.txt"));
                FSDataOutputStream out = fs.create(new Path("/file1.txt"))) {
            IOUtils.copyBytes(in, out, configuration);
        }
        // fs is intentionally left open so other tests in this class can reuse it.
    }

    /**
     * Downloads hdfs.txt from HDFS to the local file f:/file.txt.
     *
     * @throws IOException if reading from HDFS or writing the local file fails
     */
    @Test
    public void filefromHdfs() throws IllegalArgumentException, IOException {
        try (FSDataInputStream in = fs.open(new Path("hdfs.txt"));
                FileOutputStream out = new FileOutputStream("f:/file.txt")) {
            IOUtils.copyBytes(in, out, configuration);
        }
    }
}