Generating ten million and one million rows of data with SQL
-- Time a single statement (milliseconds):
DECLARE @d datetime
SET @d = GETDATE()
SELECT * FROM dbo.chunkSegmentMapping
SELECT [statement execution time (ms)] = DATEDIFF(ms, @d, GETDATE())

-- Generate 1,000,000 unique random 8-character ids:
USE tempdb
GO
CREATE TABLE tb (id char(8))
CREATE UNIQUE INDEX IX_tb ON tb (id) WITH IGNORE_DUP_KEY   -- silently discard rows with duplicate ids
GO
DECLARE @dt datetime
SET @dt = GETDATE()

SET NOCOUNT OFF
DECLARE @row int
SET @row = 1000000                              -- total number of rows to generate

WHILE @row > 0
BEGIN
    RAISERROR('need %d rows', 10, 1, @row) WITH NOWAIT
    SET ROWCOUNT @row                           -- insert at most the rows still missing
    INSERT tb
    SELECT id = RIGHT(100000000 + CONVERT(bigint, ABS(CHECKSUM(NEWID()))), 8)
    -- FROM syscolumns c1, syscolumns c2        -- with SET NOCOUNT ON: 49 seconds
    FROM syscolumns c1, sysobjects o            -- with SET NOCOUNT ON: 47 seconds
    SET @row = @row - @@ROWCOUNT                -- rows still needed on the next pass
END

SELECT BeginDate = @dt, EndDate = GETDATE(),
       second = DATEDIFF(second, @dt, GETDATE())
GO

SELECT COUNT(*) FROM tb
GO
SELECT * FROM tb
DROP TABLE tb
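For comparison, below is a minimal sketch of the same IGNORE_DUP_KEY technique written for SQL Server 2005 and later. It is an assumed variant, not the original script: TOP (@row) stands in for SET ROWCOUNT (which is deprecated for INSERT), and sys.all_columns stands in for syscolumns. The table tb and index IX_tb are the same names used above; no timings are claimed for this version.

-- Sketch (assumed variant): TOP instead of SET ROWCOUNT, catalog views instead of syscolumns
USE tempdb
GO
CREATE TABLE tb (id char(8))
CREATE UNIQUE INDEX IX_tb ON tb (id) WITH IGNORE_DUP_KEY
GO
SET NOCOUNT ON
DECLARE @row int, @dt datetime
SET @row = 1000000                           -- total number of unique rows wanted
SET @dt = GETDATE()

WHILE @row > 0
BEGIN
    RAISERROR('need %d rows', 10, 1, @row) WITH NOWAIT

    INSERT tb (id)
    SELECT TOP (@row)
           RIGHT(100000000 + CONVERT(bigint, ABS(CHECKSUM(NEWID()))), 8)
    FROM sys.all_columns c1
    CROSS JOIN sys.all_columns c2            -- the cross join only supplies enough candidate rows

    SET @row = @row - @@ROWCOUNT             -- @@ROWCOUNT counts only rows that survived IGNORE_DUP_KEY
END

SELECT second = DATEDIFF(second, @dt, GETDATE())
GO
DROP TABLE tb

Either version converges because each pass asks only for the rows still missing; the unique index with IGNORE_DUP_KEY is what guarantees the generated ids end up distinct.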