This Python example simulates splitting a large file into parts and processing those parts with multiple threads.
It demonstrates how to queue one task per file part and drain the queue with a pool of worker threads. The details are as follows:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Simulate splitting a large file and processing the parts with worker threads.

Each part file becomes one ``MyTask``; a pool of ``MyThread`` workers drains
the shared queue until it is empty.
"""
from random import randint
from time import ctime
from time import sleep
import queue
import threading


class MyTask(object):
    """A unit of work: pretends to process one file part by sleeping."""

    def __init__(self, name):
        # name: label for log output (here, a part-file name).
        self.name = name
        # Simulated processing time in seconds (1-5, chosen at creation).
        self._work_time = randint(1, 5)

    def work(self):
        """Run the simulated task, logging its start and end times."""
        print("Task %s is start: %s, sleep time = %d"
              % (self.name, ctime(), self._work_time))
        sleep(self._work_time)
        print("Task %s is end: %s" % (self.name, ctime()))


class MyThread(threading.Thread):
    """Worker thread that drains tasks from a shared queue until it is empty."""

    def __init__(self, my_queue):
        # NOTE: base class is threading.Thread (the original's
        # "threading.thread" would raise AttributeError).
        super(MyThread, self).__init__()
        self.my_queue = my_queue

    def run(self):
        while True:
            try:
                # get_nowait() + Empty replaces the original
                # "if qsize() > 0: get()" pattern, which races: another
                # worker can take the last task between the size check and
                # the blocking get(), hanging this thread forever.
                task = self.my_queue.get_nowait()
            except queue.Empty:
                break
            task.work()


def print_split_line(num=30):
    """Print a separator line of *num* asterisks (default 30)."""
    print("*" * num)


if __name__ == "__main__":
    print_split_line()
    # Project-local helper that splits a big file into part files.
    import my_read_file

    # Split the source file into parts of 300 lines each.
    sf = my_read_file.SplitFiles(r"F:\multiple_thread_read_file.txt",
                                 line_count=300)
    file_num = sf.split_file()

    # One queued task per part file. The class is LifoQueue — the
    # original's "queue.lifoQueue" would raise AttributeError.
    queue_length = file_num
    my_queue = queue.LifoQueue(queue_length)
    for i in range(queue_length):
        my_queue.put_nowait(MyTask(sf.get_part_file_name(i)))

    # One worker per task; start them all, then wait for every one.
    threads = [MyThread(my_queue) for _ in range(queue_length)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print_split_line()