#!/usr/bin/env python
# @Time: 2018/6/6 10:10
# @Author: pgidysq
# @File: filebackup2.py
"""Scheduled incremental backup.

Mirrors a source directory tree into a destination tree on a timed
cycle, copying only files that are new or whose content differs.
"""
import os
import filecmp
import shutil
import sys
import time
import sched

# Single scheduler instance driving the periodic backup passes.
schedule = sched.scheduler(time.time, time.sleep)

# Seconds between backup passes (was the hard-coded 10 in each enter()).
INTERVAL = 10


def autobackup(scrdir, dstdir):
    """Recursively mirror *scrdir* into *dstdir*.

    Both arguments must be existing directories given as absolute,
    normalized paths; otherwise usage() is called and the process exits.
    Missing destination folders are created, and a file is copied only
    when it is absent on the destination side or its content differs
    (byte-by-byte comparison via filecmp.cmp(shallow=False)).
    """
    if (not os.path.isdir(scrdir) or not os.path.isdir(dstdir)
            or os.path.abspath(scrdir) != scrdir
            or os.path.abspath(dstdir) != dstdir):
        usage()
    for item in os.listdir(scrdir):
        scritem = os.path.join(scrdir, item)
        # Map the source entry onto the destination tree; count=1 so only
        # the leading source prefix is rewritten, never a later repeat of
        # the same substring inside the path.
        dstitem = scritem.replace(scrdir, dstdir, 1)
        if os.path.isdir(scritem):
            # Create newly added folders to keep the destination
            # structure consistent with the source.
            if not os.path.exists(dstitem):
                os.makedirs(dstitem)
                print('Make directory' + dstitem)
            # Recurse into the subdirectory.
            autobackup(scritem, dstitem)
        elif os.path.isfile(scritem):
            # Copy only new or modified files.
            if (not os.path.exists(dstitem)
                    or not filecmp.cmp(scritem, dstitem, shallow=False)):
                shutil.copyfile(scritem, dstitem)
                # Portable timestamp; the original shelled out to the
                # Windows-only `echo %time%`.
                print('File:' + scritem + '==>' + dstitem,
                      time.strftime('%H:%M:%S'))


def scheduled_backup(scrdir, dstdir):
    """Run one backup pass and queue the next one INTERVAL seconds out.

    Bug fix: the original called schedule.enter() at the end of
    autobackup() itself, so every *recursive* call queued yet another
    full pass and the event queue grew without bound. Re-scheduling now
    happens exactly once per pass, here.
    """
    autobackup(scrdir, dstdir)
    schedule.enter(INTERVAL, 0, scheduled_backup, (scrdir, dstdir))


def usage():
    """Print a short usage hint and exit the process."""
    print('Error')
    print('For example:{0}'.format(sys.argv[0]))
    sys.exit(0)


if __name__ == "__main__":
    # Command-line form kept from the original, still disabled:
    # if len(sys.argv) != 3:
    #     usage()
    # scrdir, dstdir = sys.argv[1], sys.argv[2]
    scrdir = r'E:\PyCharm\WorkSpace\TestPkg\base\src'
    dstdir = r'E:\PyCharm\WorkSpace\TestPkg\base\dest'
    # Perform the backup task on a timed cycle.
    schedule.enter(INTERVAL, 0, scheduled_backup, (scrdir, dstdir))
    schedule.run()  # runs until the scheduled event queue becomes empty
#!/usr/bin/env python
# @Time: 2018/6/6 10:10
# @Author: pgidysq
# @File: filebackup.py
"""Full backup every Monday, incremental backup the rest of the time.

State between runs is a pickled {file path: md5 hex digest} dict stored
in the md5file; an incremental run copies only files that are new or
whose digest changed since the previous run.
"""
import time
import os
import tarfile
import shutil
import pickle as p
import hashlib


def md5check(fname):
    """Return the MD5 hex digest of file *fname*, read in 4 KiB chunks.

    Bug fix: the original opened the file in text mode and re-encoded
    each chunk, which crashes on non-UTF-8 (binary) files. Reading in
    binary mode hashes the raw bytes directly.
    """
    m = hashlib.md5()
    with open(fname, 'rb') as fobj:
        while True:
            data = fobj.read(4096)
            if not data:
                break
            m.update(data)
    return m.hexdigest()


def full_backup(src_dir, dst_dir, md5file):
    """Replace *dst_dir* with a fresh copy of *src_dir* and record the
    MD5 digest of every file under *src_dir* into *md5file* (pickle).

    The original's tar.gz archiving path stayed disabled because a tar
    of Chinese file names may come out garbled; a plain tree copy is
    used instead. Starting from a clean destination also drops files
    deleted from the source since the last full backup.
    """
    if os.path.exists(dst_dir):
        shutil.rmtree(dst_dir)
    shutil.copytree(src_dir, dst_dir)  # copy files
    md5dict = {}
    for path, folders, files in os.walk(src_dir):
        for fname in files:
            full_path = os.path.join(path, fname)
            md5dict[full_path] = md5check(full_path)
    # 'wb' creates the file when it is missing, so the original's
    # exists()/'xb' branching was redundant.
    with open(md5file, 'wb') as fobj:
        p.dump(md5dict, fobj)


def incr_backup(src_dir, dst_dir, md5file):
    """Copy into *dst_dir* every file under *src_dir* that is new or
    whose content changed since the digests recorded in *md5file*,
    then refresh *md5file* with the current digests.
    """
    md5new = {}
    for path, folders, files in os.walk(src_dir):
        for fname in files:
            full_path = os.path.join(path, fname)
            md5new[full_path] = md5check(full_path)
    with open(md5file, 'rb') as fobj:
        md5old = p.load(fobj)
    with open(md5file, 'wb') as fobj:
        p.dump(md5new, fobj)
    for key in md5new:
        # Bug fixes: md5old[key] raised KeyError for newly added files
        # (use .get so a new file compares unequal and gets copied), and
        # shutil.copyfile cannot write to a directory target — mirror the
        # source path under dst_dir instead, creating parents as needed.
        if md5old.get(key) != md5new[key]:
            dst_path = key.replace(src_dir, dst_dir, 1)
            os.makedirs(os.path.dirname(dst_path), exist_ok=True)
            shutil.copyfile(key, dst_path)


if __name__ == '__main__':
    src_dir = 'base/src'
    dst_dir = 'base/dest/backupfile'
    md5file = 'base/md5.data'
    full_backup(src_dir, dst_dir, md5file)
    # Intended weekly schedule, kept disabled as in the original:
    # if time.strftime('%a') == 'Mon':
    #     full_backup(src_dir, dst_dir, md5file)
    # else:
    #     incr_backup(src_dir, dst_dir, md5file)
Python implementation of full and incremental file backups