HDFS的Java编程API:
需求:
创建Maven工程 pom.xml引入hadoop所需jar包
编写测试类连通Hadoop
从HDFS读取a.txt并下载至本地D盘根目录
上传pom.xml至hdfs根目录,解决上传权限问题
编写连通Hadoop的第二种方法
编写创建hdfs目录/a/b
编写删除hdfs目录/a/b
代码如下:
package com.etc;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;
import org.apache.commons.math3.linear.FieldDecompositionSolver;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.verification.api.VerificationData;
/**
 * Examples of the HDFS Java API: connect to a NameNode, upload/download files,
 * and create/delete directories.
 *
 * <p>Connection details (NameNode at hdfs://192.168.43.73:9000, user "root")
 * are hard-coded for the demo environment.
 */
public class HDFSAPI {
    // Handle to the remote HDFS; (re)created before every test by init().
    private FileSystem fileSystem = null;

    /**
     * Establishes the HDFS connection before each test.
     * Only the NameNode address is needed; passing "root" as the user
     * works around the "permission denied" problem when writing to HDFS.
     *
     * @throws Exception if the connection cannot be established
     */
    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        // Alternative ways to connect / solve the upload-permission problem:
        // System.setProperty("HADOOP_USER_NAME", "root");
        // conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        // fileSystem = FileSystem.get(conf);
        // Shorthand: URI + explicit user in a single call.
        fileSystem = FileSystem.get(new URI("hdfs://192.168.43.73:9000"), conf, "root");
    }

    /**
     * Uploads the local file d:/b.txt to /b.txt on HDFS.
     * try-with-resources guarantees both streams are closed even if
     * fileSystem.create() throws (the original leaked the FileInputStream
     * in that case).
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void testUpload() throws Exception {
        try (InputStream in = new FileInputStream("d://b.txt");
             FSDataOutputStream out = fileSystem.create(new Path("/b.txt"))) {
            // Streams are closed by try-with-resources, so pass close=false.
            IOUtils.copyBytes(in, out, 1024, false);
        } finally {
            fileSystem.close();
        }
    }

    /**
     * Recursively deletes the HDFS directory /a and prints whether it succeeded.
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void testDel() throws Exception {
        // recursive=true: delete the directory and everything under it.
        boolean flag = fileSystem.delete(new Path("/a"), true);
        System.out.println(flag);
        // Close the connection, consistent with the other tests.
        fileSystem.close();
    }

    /**
     * Creates the HDFS directory /a/b (parent directories are created as needed).
     *
     * @throws IOException on any I/O or connection failure
     */
    @Test
    public void testMkdir() throws IOException {
        fileSystem.mkdirs(new Path("/a/b"));
        // Close the fileSystem connection.
        fileSystem.close();
    }

    /**
     * Downloads /a.txt from HDFS to the local file d:/123.txt.
     * Demonstrates the second way to connect: setting fs.defaultFS on the
     * Configuration instead of passing a URI.
     *
     * @param args unused
     * @throws Exception on any I/O or connection failure
     */
    public static void main(String[] args) throws Exception {
        // To connect to HDFS, only the NameNode address is required.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        // try-with-resources closes every resource exactly once
        // (the original both asked copyBytes to close the streams AND
        // called close() on them again afterwards).
        try (FileSystem fileSystem = FileSystem.get(conf);
             InputStream in = fileSystem.open(new Path("/a.txt"));
             OutputStream out = new FileOutputStream("d://123.txt")) {
            IOUtils.copyBytes(in, out, 1024, false);
        }
    }
}
package com.etc;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;

import org.apache.commons.math3.linear.FieldDecompositionSolver;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.verification.api.VerificationData;

/**
 * Examples of the HDFS Java API: connect to a NameNode, upload/download files,
 * and create/delete directories.
 *
 * <p>Connection details (NameNode at hdfs://192.168.43.73:9000, user "root")
 * are hard-coded for the demo environment.
 */
public class HDFSAPI {
    // Handle to the remote HDFS; (re)created before every test by init().
    private FileSystem fileSystem = null;

    /**
     * Establishes the HDFS connection before each test.
     * Only the NameNode address is needed; passing "root" as the user
     * works around the "permission denied" problem when writing to HDFS.
     *
     * @throws Exception if the connection cannot be established
     */
    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        // Alternative ways to connect / solve the upload-permission problem:
        // System.setProperty("HADOOP_USER_NAME", "root");
        // conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        // fileSystem = FileSystem.get(conf);
        // Shorthand: URI + explicit user in a single call.
        fileSystem = FileSystem.get(new URI("hdfs://192.168.43.73:9000"), conf, "root");
    }

    /**
     * Uploads the local file d:/b.txt to /b.txt on HDFS.
     * try-with-resources guarantees both streams are closed even if
     * fileSystem.create() throws (the original leaked the FileInputStream
     * in that case).
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void testUpload() throws Exception {
        try (InputStream in = new FileInputStream("d://b.txt");
             FSDataOutputStream out = fileSystem.create(new Path("/b.txt"))) {
            // Streams are closed by try-with-resources, so pass close=false.
            IOUtils.copyBytes(in, out, 1024, false);
        } finally {
            fileSystem.close();
        }
    }

    /**
     * Recursively deletes the HDFS directory /a and prints whether it succeeded.
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void testDel() throws Exception {
        // recursive=true: delete the directory and everything under it.
        boolean flag = fileSystem.delete(new Path("/a"), true);
        System.out.println(flag);
        // Close the connection, consistent with the other tests.
        fileSystem.close();
    }

    /**
     * Creates the HDFS directory /a/b (parent directories are created as needed).
     *
     * @throws IOException on any I/O or connection failure
     */
    @Test
    public void testMkdir() throws IOException {
        fileSystem.mkdirs(new Path("/a/b"));
        // Close the fileSystem connection.
        fileSystem.close();
    }

    /**
     * Downloads /a.txt from HDFS to the local file d:/123.txt.
     * Demonstrates the second way to connect: setting fs.defaultFS on the
     * Configuration instead of passing a URI.
     *
     * @param args unused
     * @throws Exception on any I/O or connection failure
     */
    public static void main(String[] args) throws Exception {
        // To connect to HDFS, only the NameNode address is required.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        // try-with-resources closes every resource exactly once
        // (the original both asked copyBytes to close the streams AND
        // called close() on them again afterwards).
        try (FileSystem fileSystem = FileSystem.get(conf);
             InputStream in = fileSystem.open(new Path("/a.txt"));
             OutputStream out = new FileOutputStream("d://123.txt")) {
            IOUtils.copyBytes(in, out, 1024, false);
        }
    }
}