标签:
2015年12月,XX项目中需要做一个数据导出功能,当时所有页面的导出功能均已经实现,但有个页面数据量太大,导出过程中导出页面直接卡死。不得已我准备选用ADO.NET来重新完成这个功能,因为考虑到越偏向底层操作速度越快这个逻辑,我选用从SqlCommand中直接读取数据,每两万条数据写一次文件,避免一次写入过多页面直接卡死,最后测试可以导出25G的数据,满足系统需求。
using System;
using System.Data;
using System.Data.SqlClient;
using System.IO;
using System.Text;

public class ADO_NET
{
    /// <summary>
    /// Streams a large query result to a GB2312-encoded CSV file on disk,
    /// flushing the buffer roughly every 20,000 rows so the whole result set
    /// is never held in memory, then returns the file to the client.
    /// </summary>
    /// <returns>The generated file as a download result, or a fallback result on failure.</returns>
    public ActionResult ExportData()
    {
        string sAbsolutePath = "xxx";
        string FileName = string.Format("AQ_{0}.csv", DateTime.Now.ToString("yyyyMMddHHmmss"));
        string fullPath = sAbsolutePath + "\\" + FileName;
        try
        {
            FinCapDbContext db = DbContextFactory.GetCurrentContext();
            // using-statements guarantee the connection, command, reader and
            // writer are released even when the export throws mid-stream; the
            // original leaked all four on any exception.
            using (SqlConnection conn = new SqlConnection(db.CurrentConnectionString))
            {
                conn.Open();
                using (SqlCommand cmd = new SqlCommand("SQL Statement", conn))
                {
                    // Long timeout: the export query can scan a very large table.
                    cmd.CommandTimeout = 1200;
                    using (SqlDataReader sdr = cmd.ExecuteReader())
                    using (StreamWriter sw = new StreamWriter(fullPath, false, Encoding.GetEncoding("GB2312")))
                    {
                        StringBuilder sb = new StringBuilder();

                        // Header row. NOTE(review): every field (and every data
                        // cell below) is followed by a comma, so each line ends
                        // with a trailing comma — kept byte-identical to the
                        // original output; confirm downstream consumers expect
                        // that extra empty column before changing it.
                        for (int m = 0; m < sdr.FieldCount; m++)
                        {
                            sb.Append(sdr.GetName(m) + ",");
                        }
                        sb.Append(Environment.NewLine);

                        int k = 0; // rows buffered since the last flush
                        while (sdr.Read())
                        {
                            k++;
                            for (int n = 0; n < sdr.FieldCount; n++)
                            {
                                sb.Append(sdr[n] + ",");
                            }
                            sb.Append(Environment.NewLine);

                            // Flush in ~20,000-row chunks so the StringBuilder
                            // never grows unboundedly for multi-gigabyte exports.
                            if (k > 20000)
                            {
                                k = 0;
                                sw.Write(sb.ToString());
                                sb.Length = 0;
                            }
                        }

                        // Write whatever is left. The original guarded this with
                        // "if (k <= 20000)", which is always true after the loop
                        // (k is reset whenever it exceeds 20000); checking the
                        // buffer length states the real intent directly.
                        if (sb.Length > 0)
                        {
                            sw.Write(sb.ToString());
                        }
                        sw.Flush();
                    }
                }
            }

            // NOTE(review): "application/zip-x-compressed" is a non-standard
            // MIME type for a plain CSV; "text/csv" is presumably intended —
            // confirm clients do not depend on the current value before changing.
            return File(fullPath, "application/zip-x-compressed", FileName);
        }
        catch (Exception)
        {
            // The original bare "catch { }" swallowed every error silently.
            // TODO(review): log the exception before returning the fallback.
            return File("xxx");
        }
    }
}
标签:
原文地址:http://www.cnblogs.com/sccd/p/5466686.html