版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/eieiei438/article/details/82112847
Spring配置Hadoop
核心
- 将Hadoop中的FileSystem的创建通过Spring的依赖注入的方式进行创建
- 其他的操作与Java操作Hadoop的API方式是一致的
项目工程
- Maven工程
Maven的setting.xml文件配置
。。。
<!-- Mirror everything except the cloudera repo through Maven Central.
     NOTE: Maven Central rejects plain HTTP since January 2020 — the URL must be HTTPS,
     otherwise every artifact download fails with HTTP 501. -->
<mirror>
    <id>nexus</id>
    <mirrorOf>*,!cloudera</mirrorOf>
    <url>https://repo1.maven.org/maven2/</url>
</mirror>
。。。
pom配置
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.peng</groupId>
    <artifactId>hdfstest</artifactId>
    <version>1.0-SNAPSHOT</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <!-- Pin the plugin version: without it Maven warns and the build
                     is not reproducible across environments. -->
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <!-- Artifact repositories.
         NOTE: Maven Central requires HTTPS since January 2020; a plain-http
         repo1.maven.org URL fails with HTTP 501. -->
    <repositories>
        <repository>
            <id>repo</id>
            <url>https://repo1.maven.org/maven2/</url>
        </repository>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/content/repositories/releases/</url>
        </repository>
    </repositories>

    <!-- Dependencies (conflicting transitive dependencies were removed by hand
         from the source pom, per the original author's note). -->
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0-cdh5.7.0</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.10</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.mortbay.jetty</groupId>
            <artifactId>jetty</artifactId>
            <version>6.1.26</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
            <scope>test</scope>
        </dependency>
        <!-- spring hadoop start -->
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop</artifactId>
            <version>2.5.0.RELEASE</version>
        </dependency>
        <!-- spring hadoop end -->
    </dependencies>
</project>
配置文件【resources下】
- application.properties【配置文件,映射键值对】
spring.hadoop.fsUri=hdfs://centos00:8020
- beans.xml【依赖注入的功能】
<?xml version="1.0" encoding="UTF-8"?> <beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:context="http://www.springframework.org/schema/context" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd"> <!--加载配置文件--> <context:property-placeholder location="application.properties"/> <!--hadoop的配置--> <hdp:configuration id="hadoopConfiguration"> fs.defaultFS=${spring.hadoop.fsUri} </hdp:configuration> <!--文件系统的配置--> <hdp:file-system id="fileSystem" configuration-ref="hadoopConfiguration" user="root"></hdp:file-system> </beans>
测试程序
- SpringHadoopTest.java
package com.peng; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; public class SpringHadoopTest { private ApplicationContext ctx; FileSystem fileSystem; @Test public void testMain() throws Exception { fileSystem.mkdirs(new Path("/springfile")); } @Before public void setUp() throws Exception { System.out.println("==================start======================"); ctx = new ClassPathXmlApplicationContext("beans.xml"); fileSystem = (FileSystem) ctx.getBean("fileSystem"); } @After public void clearTail() throws Exception { ctx = null; fileSystem.close(); System.out.println("====================end======================"); } }