Right now, writing a 100 MB file takes roughly 4 to 5 minutes.
Since I regularly need to write more than a million records, this speed isn't good enough. Could someone take a look at where it can still be optimized? Please leave multithreading aside for now — this is already a single thread inside a multithreaded job.
/**
 * @author liuhongtian
 */
public class ResultFileTools {

    private static Logger log = Logger.getLogger(ResultFileTools.class);

    /**
     * Generates the result file content from a query result set.
     *
     * @param rs
     *            query result set
     * @param fileFullName
     *            result file name (including path)
     * @param columnNames
     *            query column names
     * @param propertys
     *            query properties
     * @return success or failure
     */
    public static boolean writeFile(ResultSet rs, String fileFullName, String[] columnNames, String[] propertys) {
        Handler baseHandler = new BaseHandler();
        Handler calcHandler = new CalcHandler(baseHandler);
        Handler specialHandler = new SpecialHandler(calcHandler);

        File file = new File(fileFullName);

        // create the file
        try (BufferedWriter bos = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "gbk"));) {
            // file.createNewFile();

            // write the file header
            StringBuffer hBuffer = new StringBuffer();
            for (String string : columnNames) {
                hBuffer.append("\"" + string + "\",");
            }
            String cols = hBuffer.toString().substring(0, hBuffer.toString().length() - 1);
            bos.write(cols);
            bos.newLine();
            bos.flush();

            // write the query result records
            try {
                int i = 0;
                while (rs.next()) {
                    StringBuffer buffer = new StringBuffer();
                    if (i != 0) {
                        buffer.append("\n");
                    }
                    for (String property : propertys) {
                        String s = specialHandler.toHandle(property, rs);
                        buffer.append("\"" + s + "\",");
                    }
                    bos.write(buffer.toString().substring(0, buffer.toString().length() - 1));
                    buffer = null;
                    i++;
                }
            } catch (SQLException e) {
                log.error("result set process failed!", e);
                return false;
            } catch (IOException e) {
                log.error("record write failed!", e);
                return false;
            }
        } catch (IOException e1) {
            log.error("open file failed!", e1);
            return false;
        }

        return true;
    }
}
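For reference, here is a minimal sketch of the kind of changes usually tried first on a loop like this: give the JDBC driver a fetch-size hint, reuse a single StringBuilder instead of allocating a StringBuffer per row, enlarge the BufferedWriter buffer, and drop the early flush. The class name FastResultWriter is made up for the example, and the Handler chain from the original code is replaced by a plain rs.getString(...) call purely to keep the sketch self-contained; whether any of this helps depends on whether the real bottleneck is the JDBC fetch, the Handler processing, or the disk write.

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.sql.ResultSet;
import java.sql.SQLException;

public class FastResultWriter {

    /**
     * Sketch of a tighter write loop. The original Handler chain is replaced
     * by rs.getString(...) here only so the example compiles on its own.
     */
    public static void write(ResultSet rs, String fileFullName, String[] columnNames, String[] propertys)
            throws IOException, SQLException {
        // Ask the driver to fetch rows in larger batches (effect is driver-dependent).
        rs.setFetchSize(5000);

        try (BufferedWriter out = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(fileFullName), "gbk"), 1 << 20)) { // 1 MB buffer

            // Header: build once in a reusable builder, no substring() copies.
            StringBuilder line = new StringBuilder();
            for (String col : columnNames) {
                if (line.length() > 0) {
                    line.append(',');
                }
                line.append('"').append(col).append('"');
            }
            out.write(line.toString());
            out.newLine(); // no flush here; let the buffer fill up

            // Records: reuse the same builder for every row.
            while (rs.next()) {
                line.setLength(0);
                for (String property : propertys) {
                    if (line.length() > 0) {
                        line.append(',');
                    }
                    line.append('"').append(rs.getString(property)).append('"');
                }
                out.write(line.toString());
                out.newLine();
            }
        }
    }
}

If timing shows the Handler chain itself (or the database fetch) dominates, rewriting the output loop alone will not change much, so it is worth measuring the two halves separately before optimizing further.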