The Maven dependencies and build plugins are as follows:
<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.0-mr1-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/junit/junit -->
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.11</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.testng</groupId>
        <artifactId>testng</artifactId>
        <version>RELEASE</version>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.0</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
                <encoding>UTF-8</encoding>
                <!-- <verbose>true</verbose> -->
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>2.4.3</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                    <configuration>
                        <minimizeJar>true</minimizeJar>
                    </configuration>
                </execution>
            </executions>
        </plugin>
        <!-- <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
                <archive>
                    <manifest>
                        <mainClass>cn.itcast.hadoop.db.DBToHdfs2</mainClass>
                    </manifest>
                </archive>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin> -->
    </plugins>
</build>
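With the shade plugin bound to the package phase, running mvn package produces a single runnable jar (minimizeJar=true strips classes that are never referenced). The commented-out maven-assembly-plugin is an alternative that produces a jar-with-dependencies instead.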
The code is as follows:
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
public class HdfsTest {
    /**
     * Access HDFS through a java.net.URL (good to know, not the main approach).
     * Note: URL.setURLStreamHandlerFactory may only be called once per JVM.
     */
    @Test
    public void hdfsDemo01() {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        String url = "hdfs://node01:8020/word.txt";
        InputStream inputStream = null;
        FileOutputStream outputStream = null;
        // open the input stream on HDFS and the output stream on the local file
        try {
            inputStream = new URL(url).openStream();
            outputStream = new FileOutputStream(new File("c:\\work\\hdfs_out.txt"));
            IOUtils.copy(inputStream, outputStream);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(inputStream);
            IOUtils.closeQuietly(outputStream);
        }
    }
    /**
     * Access data through the FileSystem API (important) - approach 1; this is the one to remember.
     */
    @Test
    public void hdfsDemo02() throws Exception {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), configuration);
        System.out.println(fileSystem);
        fileSystem.close();
    }
    /**
     * Access data through the FileSystem API (important) - approach 2.
     */
    @Test
    public void hdfsDemo03() throws Exception {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://node01:8020");
        FileSystem fileSystem = FileSystem.get(new URI("/"), configuration);
        System.out.println(fileSystem);
        fileSystem.close();
    }
    /**
     * Access data through the FileSystem API (important) - approach 3.
     */
    @Test
    public void hdfsDemo04() throws Exception {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.newInstance(new URI("hdfs://node01:8020"), configuration);
        System.out.println(fileSystem);
        fileSystem.close();
    }
    /**
     * Access data through the FileSystem API (important) - approach 4.
     */
    @Test
    public void hdfsDemo05() throws Exception {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://node01:8020");
        FileSystem fileSystem = FileSystem.newInstance(configuration);
        System.out.println(fileSystem);
        fileSystem.close();
    }
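    // A minimal sketch (not in the original write-up) of one more variant: a plain new Configuration()
    // automatically loads core-site.xml from the classpath, so if that file already sets
    // fs.defaultFS to hdfs://node01:8020 (an assumption here, e.g. a copy under src/main/resources),
    // FileSystem.get(configuration) alone is enough.
    @Test
    public void hdfsDemo05b() throws Exception {
        Configuration configuration = new Configuration();
        // fs.defaultFS is expected to come from core-site.xml on the classpath
        FileSystem fileSystem = FileSystem.get(configuration);
        System.out.println(fileSystem);
        fileSystem.close();
    }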
    /**
     * Recursively walk all files - version 1 (manual recursion).
     */
    @Test
    public void hdfsDemo06() throws Exception {
        // first obtain the file system as user root
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        // for each entry under "/", check whether it is a directory or a file
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                Path path = fileStatus.getPath();
                listFiles(path, fileSystem);
            } else {
                System.out.println("file path: " + fileStatus.getPath());
            }
        }
        fileSystem.close();
    }
    public void listFiles(Path path, FileSystem fileSystem) throws IOException {
        FileStatus[] fileStatuses = fileSystem.listStatus(path);
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                listFiles(fileStatus.getPath(), fileSystem);
            } else {
                System.out.println("file path: " + fileStatus.getPath());
            }
        }
    }
    /**
     * Recursively walk all files - version 2 (built-in API).
     */
    @Test
    public void hdfsDemo07() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator = fileSystem.listFiles(new Path("/"), true);
        while (locatedFileStatusRemoteIterator.hasNext()) {
            LocatedFileStatus fileStatus = locatedFileStatusRemoteIterator.next();
            System.out.println(fileStatus.getPath());
        }
        fileSystem.close();
    }
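    // Hedged extension of hdfsDemo07 (a sketch, not part of the original post): LocatedFileStatus
    // also exposes block metadata, so the same recursive iterator can print which DataNodes hold
    // each block of every file. Cluster address and user are the same assumptions as above.
    @Test
    public void hdfsDemo07b() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        RemoteIterator<LocatedFileStatus> iterator = fileSystem.listFiles(new Path("/"), true);
        while (iterator.hasNext()) {
            LocatedFileStatus fileStatus = iterator.next();
            System.out.println(fileStatus.getPath() + " blockSize=" + fileStatus.getBlockSize());
            for (BlockLocation blockLocation : fileStatus.getBlockLocations()) {
                // each block lists the hosts that store one of its replicas
                System.out.println("  hosts=" + java.util.Arrays.toString(blockLocation.getHosts()));
            }
        }
        fileSystem.close();
    }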
    /**
     * Download a file to the local file system (there are several ways to do this).
     */
    @Test
    public void hdfsDemo08() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        fileSystem.copyToLocalFile(false, new Path("/word.txt"), new Path("c:\\work\\java_hdfs_01.txt"));
        fileSystem.close();
    }
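    // A hedged variant of hdfsDemo08 (sketch only): the four-argument copyToLocalFile overload with
    // useRawLocalFileSystem=true skips writing the local .crc checksum file, a common workaround on
    // Windows machines that do not have the Hadoop native libraries (winutils) installed.
    // The local target path c:\work\java_hdfs_02.txt is an illustrative assumption.
    @Test
    public void hdfsDemo08b() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        // arguments: delSrc=false, src, dst, useRawLocalFileSystem=true
        fileSystem.copyToLocalFile(false, new Path("/word.txt"), new Path("c:\\work\\java_hdfs_02.txt"), true);
        fileSystem.close();
    }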
    /**
     * Create a directory on HDFS (again, there are several ways).
     */
    @Test
    public void hdfsDemo09() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        // the call below would also end up creating the path, but it actually opens an output stream
        // fileSystem.create(new Path("/test_java"));
        fileSystem.mkdirs(new Path("/test_java"));
        fileSystem.close();
    }
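    // Hedged sketch: mkdirs() behaves like "mkdir -p" and simply returns true when the directory is
    // already there, but an explicit exists() check makes the intent clearer when the test is re-run.
    @Test
    public void hdfsDemo09b() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        Path dir = new Path("/test_java");
        if (!fileSystem.exists(dir)) {
            fileSystem.mkdirs(dir);
        }
        fileSystem.close();
    }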
    /**
     * Upload a file to HDFS.
     */
    @Test
    public void hdfsDemo10() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        fileSystem.copyFromLocalFile(new Path("c:\\work\\name.txt"), new Path("/name_java.txt"));
        fileSystem.close();
    }
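    // Hedged sketch of the multi-argument upload overload: delSrc=false keeps the local source file,
    // overwrite=true replaces /name_java.txt if an earlier run already created it.
    @Test
    public void hdfsDemo10b() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        fileSystem.copyFromLocalFile(false, true, new Path("c:\\work\\name.txt"), new Path("/name_java.txt"));
        fileSystem.close();
    }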
    /**
     * Merge many small local files into one file on HDFS by streaming them into a single output stream.
     */
    @Test
    public void hdfsDemo11() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        // open an output stream for the target file on HDFS
        FSDataOutputStream outputStream = fileSystem.create(new Path("/bigFile.txt"));
        // get the local file system and list the small files
        LocalFileSystem local = FileSystem.getLocal(new Configuration());
        FileStatus[] fileStatuses = local.listStatus(new Path("c:\\work\\smallFiles_hdfs_java"));
        for (FileStatus fileStatus : fileStatuses) {
            FSDataInputStream inputStream = local.open(fileStatus.getPath());
            IOUtils.copy(inputStream, outputStream);
            IOUtils.closeQuietly(inputStream);
        }
        IOUtils.closeQuietly(outputStream);
        local.close();
        fileSystem.close();
    }
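    // Hedged companion to hdfsDemo11 (sketch only): read the merged /bigFile.txt back by opening an
    // input stream on HDFS and copying it to a local file. The local path is an illustrative assumption.
    @Test
    public void hdfsDemo12() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "root");
        FSDataInputStream inputStream = fileSystem.open(new Path("/bigFile.txt"));
        FileOutputStream outputStream = new FileOutputStream(new File("c:\\work\\bigFile_local.txt"));
        try {
            IOUtils.copy(inputStream, outputStream);
        } finally {
            IOUtils.closeQuietly(inputStream);
            IOUtils.closeQuietly(outputStream);
            fileSystem.close();
        }
    }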
}