-- Define the loop variables
declare @rows int, @rows_limit int, @rows_batch int, @rows_count int;
-- Date range of the rows to migrate (set actual values before running)
declare @fromdate datetime, @enddate datetime;

-- Set the loop variable parameters
select @rows = 0, @rows_limit = 9517792, @rows_batch = 1000, @rows_count = @rows_batch;

-- Create a temporary table to record all primary key values of the data to be processed
if object_id('tempdb..#temp001') is not null
begin
    drop table #temp001
end
-- Create a temporary table to record the primary key values migrated in each batch
if object_id('tempdb..#temp002') is not null
begin
    drop table #temp002
end

create table #temp001 (transactionnumber int primary key);
create table #temp002 (transactionnumber int primary key);

-- Record the primary keys of the source data (table01 is assumed to be the source table)
insert into #temp001
select transactionnumber from table01;

while @rows_count = @rows_batch and @rows < @rows_limit
begin
    -- Migrate one batch into the destination table (table02 is assumed to be the target;
    -- replace the select list with the explicit column list of the destination table)
    insert into table02
    output inserted.transactionnumber into #temp002
    select top (@rows_batch) B.*
    from table01 B
    inner join #temp001 C on B.transactionnumber = C.transactionnumber
    where B.indate between @fromdate and @enddate;

    -- Remove the keys of the migrated batch from the pending-key table
    delete A
    from #temp001 A
    where exists (select 1 from #temp002 B where A.transactionnumber = B.transactionnumber);

    -- Capture how many rows this batch handled and update the running total
    set @rows_count = @@rowcount;
    set @rows = @rows + @rows_count;

    -- Clear the per-batch key table and pause briefly before the next batch
    truncate table #temp002;
    waitfor delay '00:00:10';
end;

When processing large data volumes, you can process the data in batches following this approach.
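The same batching idea also applies to simpler maintenance tasks. The sketch below is a minimal illustration under stated assumptions: the table dbo.EventLog and its EventDate column are hypothetical names, not part of the script above. It purges rows older than one year in fixed-size batches, pausing between batches so the transaction log and concurrent sessions are not put under sustained pressure.

-- Minimal sketch of batched deletion (dbo.EventLog and EventDate are hypothetical)
declare @batch_size int = 5000,
        @deleted    int = 1;   -- seed so the loop runs at least once

while @deleted > 0
begin
    -- Remove one batch of rows older than one year
    delete top (@batch_size)
    from dbo.EventLog
    where EventDate < dateadd(year, -1, getdate());

    set @deleted = @@rowcount;     -- rows removed in this pass

    waitfor delay '00:00:05';      -- brief pause to reduce blocking and log growth
end;

Each iteration touches only @batch_size rows, so locks are released and the log can be reused between batches instead of the whole purge running as one long transaction.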