// log4j configuration file
### set log levels ###
log4j.rootLogger=INFO, stdout, file, flume
log4j.logger.per.flume=INFO

### flume ###
log4j.appender.flume=org.apache.flume.clients.log4jappender.Log4jAppender
log4j.appender.flume.Encoding=UTF-8
log4j.appender.flume.layout=org.apache.log4j.PatternLayout
log4j.appender.flume.Hostname=master
log4j.appender.flume.Port=44446

### stdout ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Encoding=UTF-8
log4j.appender.stdout.Threshold=INFO
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n

### file ###
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Encoding=UTF-8
log4j.appender.file.Threshold=INFO
log4j.appender.file.File=./logs/tracker/tracker.log
log4j.appender.file.Append=true
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
// Implementation code
import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import org.apache.log4j.Logger; public class ExportData { private static Logger log = Logger.getLogger(ExportData.class); public static final String url = "jdbc:mysql://master:3306/waste?useUnicode=true&characterEncoding=utf8"; public static final String name = "com.mysql.jdbc.Driver"; public static final String user = "hadoop"; public static final String password = "hadoop"; public static Connection conn = null; public static PreparedStatement pst = null; public static void main(String[] args) { String sql = "SELECT " + "o.order_no '订单编号'," + "s.scrap_name '废品名称'," + "d0.dict_text '废品类别'," + "s.unit '回收单位'," + "s.price '回收价格(分/千克)'," + "o.amount '回收数量'," + "o.money '回收金额'," + "d3.dict_text '订单状态'," + "ow.id '业主ID'," + "ow.name '业主姓名'," + "ow.idCard '业主身份证'," + "d1.dict_text '业主性别'," + "ow.tel '业主电话'," + "ow.address '业主地址'," + "sc.id '回收员ID'," + "sc.name '回收员姓名'," + "d2.dict_text '回收员性别'," + "sc.idCard '回收员身份证'," + "sc.tel '回收员电话'," + "o.create_time '订单创建时间'" + " FROM reclamation_order o" + " LEFT JOIN scrap s" + " ON s.scrap_code = o.scrap_code" + " LEFT JOIN sys_dict d0" + " ON s.category = d0.dict_option" + " LEFT JOIN owner ow" + " ON ow.id = o.owner_id" + " LEFT JOIN sys_dict d1" + " ON ow.sex = d1.dict_option" + " LEFT JOIN scrap_collector sc" + " ON sc.id = o.sc_id" + " LEFT JOIN sys_dict d2" + " ON sc.sex = d2.dict_option" + " LEFT JOIN sys_dict d3" + " ON o.sc_status = d3.dict_option"; try { for (int i = 0; i < 101; i++) { String str = ""; try { Class.forName(name);// 指定连接类型 conn = DriverManager.getConnection(url, user, password);// 获取连接 } catch (Exception e) { e.printStackTrace(); } pst = conn.prepareStatement(sql + " LIMIT " + i * 1000 + ",1000"); ResultSet rs = pst.executeQuery(); int count = 0; while (rs.next()) { str = rs.getString(1) + "\t" + rs.getString(2) + "\t" + rs.getString(3) + "\t" + rs.getString(4) + "\t" + rs.getString(5) 
+ "\t" + rs.getString(6) + "\t" + rs.getString(7) + "\t" + rs.getString(8) + "\t" + rs.getString(9) + "\t" + rs.getString(10) + "\t" + rs.getString(11) + "\t" + rs.getString(12) + "\t" + rs.getString(13) + "\t" + rs.getString(14) + "\t" + rs.getString(15) + "\t" + rs.getString(16) + "\t" + rs.getString(17) + "\t" + rs.getString(18) + "\t" + rs.getString(19) + "\t" + rs.getString(20) ; log.info(str); count++; } // method2("D:\\大数据\\小区业主服务平台大数据需求\\待分析数据\\reclamation_order.txt",str); System.out.println("==============count:"+count); pst.close(); conn.close(); } } catch (Exception e) { e.printStackTrace(); } }}