How can millions of rows of data be imported into a database efficiently?
Here's an efficient way to do this:
1. Read the CSV file (DB.csv) into a DataTable:
using System.Data;
using System.IO;
using System.Text;

public class CSVFileHelper
{
    /// <summary>
    /// Read data from a CSV file into a DataTable.
    /// </summary>
    /// <param name="filePath">CSV file path</param>
    /// <returns>The populated DataTable</returns>
    public static DataTable OpenCSV(string filePath)
    {
        Encoding encoding = Encoding.GetEncoding("UTF-8"); // Encoding.ASCII;
        DataTable dt = new DataTable();
        FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read);
        StreamReader sr = new StreamReader(fs, encoding);

        // One line of the file per read
        string strLine = "";
        // The fields of the current line
        string[] aryLine = null;
        string[] tableHead = null;
        // Number of columns
        int columnCount = 0;
        // Whether the line being read is the first (header) line
        bool isFirst = true;

        // Read the CSV data line by line
        while ((strLine = sr.ReadLine()) != null)
        {
            // strLine = Common.ConvertStringUTF8(strLine); // optional re-encoding helper from the original project
            if (isFirst)
            {
                tableHead = strLine.Split(',');
                isFirst = false;
                columnCount = tableHead.Length;
                // Create the columns from the header row
                for (int i = 0; i < columnCount; i++)
                {
                    DataColumn dc = new DataColumn(tableHead[i]);
                    dt.Columns.Add(dc);
                }
            }
            else if (!string.IsNullOrEmpty(strLine))
            {
                aryLine = strLine.Split(',');
                DataRow dr = dt.NewRow();
                for (int j = 0; j < columnCount; j++)
                {
                    dr[j] = aryLine[j];
                }
                dt.Rows.Add(dr);
            }
        }

        if (aryLine != null && aryLine.Length > 0)
        {
            dt.DefaultView.Sort = tableHead[0] + " ASC";
        }

        sr.Close();
        fs.Close();
        return dt;
    }
}
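The helper above loads every column as a string. Since the table-valued parameter type in step 3 declares col1 as bigint and col2 as nvarchar(10), it can help to build the DataTable with matching column types up front. The following is only a rough sketch under those assumptions (a header row is present, values are comma-separated with no quoting); the class and method names are made up for illustration:

using System.Data;
using System.IO;

public class TypedCsvHelper
{
    // Sketch only: reads the CSV into a DataTable whose columns match dbo.BulkUDT
    // (col1 bigint, col2 nvarchar(10)). Assumes a header row and simple
    // comma-separated fields with no quoting.
    public static DataTable OpenTypedCSV(string filePath)
    {
        DataTable dt = new DataTable();
        dt.Columns.Add("col1", typeof(long));    // maps to bigint
        dt.Columns.Add("col2", typeof(string));  // maps to nvarchar(10)

        using (StreamReader sr = new StreamReader(filePath))
        {
            string line;
            bool isFirst = true;
            while ((line = sr.ReadLine()) != null)
            {
                if (isFirst) { isFirst = false; continue; }   // skip the header row
                if (string.IsNullOrEmpty(line)) continue;

                string[] fields = line.Split(',');
                dt.Rows.Add(long.Parse(fields[0]), fields[1]);
            }
        }
        return dt;
    }
}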
2. Save the DataTable to the database using a table-valued parameter:
// Requires System, System.Configuration, System.Data and System.Data.SqlClient.
public static void TableValuedToDB(DataTable dt)
{
    SqlConnection sqlConn = new SqlConnection(
        ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString);

    // "table" is a placeholder for the real target table name.
    const string TSqlStatement =
        "INSERT INTO table (col1, col2) " +
        "SELECT nc.col1, nc.col2 " +
        "FROM @NewBulkTestTvp AS nc";

    SqlCommand cmd = new SqlCommand(TSqlStatement, sqlConn);
    SqlParameter catParam = cmd.Parameters.AddWithValue("@NewBulkTestTvp", dt);
    catParam.SqlDbType = SqlDbType.Structured;
    // The table-valued parameter type is dbo.BulkUDT, created in step 3 below.
    catParam.TypeName = "dbo.BulkUDT";

    try
    {
        sqlConn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            cmd.ExecuteNonQuery();
        }
    }
    catch (Exception ex)
    {
        throw ex;
    }
    finally
    {
        sqlConn.Close();
    }
}
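Because the whole DataTable travels to SQL Server as a single structured parameter, the INSERT ... SELECT runs as one set-based statement in one round trip, which is where the speed comes from. Before pushing millions of rows through this method, it can be worth a quick smoke test with a couple of hand-made rows to confirm that the DataTable columns line up with dbo.BulkUDT and the target table. This snippet is only illustrative (the sample values are made up) and assumes TableValuedToDB from step 2 is in scope:

DataTable testDt = new DataTable();
testDt.Columns.Add("col1", typeof(long));     // maps to bigint in dbo.BulkUDT
testDt.Columns.Add("col2", typeof(string));   // maps to nvarchar(10)
testDt.Rows.Add(1L, "alpha");
testDt.Rows.Add(2L, "beta");
TableValuedToDB(testDt);                      // should insert exactly two rows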
3. Create the table-valued parameter type in the database:
CREATE TYPE dbo.BulkUDT AS TABLE (col1 bigint, col2 nvarchar(10));
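For completeness, the INSERT in step 2 also needs a real target table whose columns match this type. The table name below is an assumption purely for illustration (the article only uses the placeholder name "table"); adjust it and the INSERT statement to your actual schema:

CREATE TABLE dbo.BulkTestTable
(
    col1 bigint NOT NULL,
    col2 nvarchar(10) NULL
);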
4. Start importing data:
Stopwatch sw = new Stopwatch();
string filePath = @"C:\DB.csv";
DataTable dt = CSVFileHelper.OpenCSV(filePath);
sw.Start();
TableValuedToDB(dt);
sw.Stop();
Trace.WriteLine(string.Format("Elapsed time is {0} milliseconds", sw.ElapsedMilliseconds));
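If the CSV really holds millions of rows, loading the whole file into one DataTable may use a lot of memory. A common variation is to stream the file and send the rows in batches, reusing TableValuedToDB from step 2 for each batch. This is only a sketch under stated assumptions (header row, two comma-separated columns matching dbo.BulkUDT, and an arbitrarily chosen batch size of 100,000); it also assumes TableValuedToDB is defined in the same class:

// Requires System.Data and System.IO.
public static void ImportInBatches(string filePath, int batchSize = 100000)
{
    DataTable batch = new DataTable();
    batch.Columns.Add("col1", typeof(long));    // bigint
    batch.Columns.Add("col2", typeof(string));  // nvarchar(10)

    using (StreamReader sr = new StreamReader(filePath))
    {
        string line;
        bool isFirst = true;
        while ((line = sr.ReadLine()) != null)
        {
            if (isFirst) { isFirst = false; continue; }  // skip the header row
            if (string.IsNullOrEmpty(line)) continue;

            string[] fields = line.Split(',');
            batch.Rows.Add(long.Parse(fields[0]), fields[1]);

            if (batch.Rows.Count >= batchSize)
            {
                TableValuedToDB(batch);   // one round trip per batch
                batch.Clear();
            }
        }
    }

    if (batch.Rows.Count > 0)
    {
        TableValuedToDB(batch);           // flush the final partial batch
    }
}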
Transferred from: http://www.dengyukeji.com/thread-564-1-1.html