4 APR 18
Review of last lesson: functions are reused within a single program, while modules can be shared across several programs.
First, the software development directory structure
conf → settings.py
core (primary logic) → src.py
db → db.txt
lib (shared library) → common.py
bin (entry point, start) → start.py (see the sketch after this list)
log → access.log
readme (instruction manual)
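A minimal sketch of what bin/start.py could look like under this layout (it adds the project root to sys.path so conf, lib and core can be imported; the run() function in core/src.py is an assumption for illustration):

import os
import sys

# The project root is one level above bin/
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)

from core import src  # core/src.py holds the primary logic (run() is a hypothetical entry function)

if __name__ == '__main__':
    src.run()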
Second, using the logging module
Logs are divided into five levels: DEBUG (10), INFO (20), WARNING (30), ERROR (40), CRITICAL (50).
If the log level is set to 10 (DEBUG), every message at level 10 or above is emitted.
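A quick sketch of how the level threshold works with the standard library (the messages are just examples):

import logging

logging.basicConfig(
    level=logging.DEBUG,  # threshold 10: DEBUG and everything above gets through
    format='%(asctime)s %(levelname)s %(message)s',
)

logging.debug('debug message')        # 10 -> printed
logging.info('info message')          # 20 -> printed
logging.warning('warning message')    # 30 -> printed
logging.error('error message')        # 40 -> printed
logging.critical('critical message')  # 50 -> printed
# With level=logging.WARNING (30), the debug and info calls above would be dropped.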
The logging module has four kinds of objects (wired together in the sketch below):
Logger: produces the log records
Filter: filters log records (rarely used)
Handler: controls where the log output goes: to a file or to the terminal
Formatter: controls the format of each log line
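A minimal sketch of wiring these objects together by hand (the logger name is illustrative; access.log is taken from the directory layout above):

import logging

logger = logging.getLogger('access')  # Logger: produces the log records
fh = logging.FileHandler('access.log', encoding='utf-8')  # Handler: send records to a file
ch = logging.StreamHandler()  # Handler: send records to the terminal
fmt = logging.Formatter('[%(asctime)s][%(levelname)s] %(message)s')  # Formatter: controls the line format

fh.setFormatter(fmt)
ch.setFormatter(fmt)
logger.addHandler(fh)
logger.addHandler(ch)
logger.setLevel(logging.DEBUG)

logger.info('account egon logged in')  # goes to both the file and the screen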
Configuration file for logging
"""
Logging configuration
"""
Import OS
Import Logging.config
# define three log output formats to start
Standard_format = ' [% (asctime) s][% (threadname) s:% (thread) d][task_id:% (name) s][% (filename) s:% (Lineno) d] ' \
' [% (levelname) s][% (message) s] ' #其中name为getlogger指定的名字
Simple_format = ' [% (LevelName) s][% (asctime) s][% (filename) s:% (Lineno) d]% (message) s '
Id_simple_format = ' [% (LevelName) s][% (asctime) s]% (message) s '
# define the end of the log output format
Logfile_dir = Os.path.dirname (Os.path.abspath (__file__)) # log file directory
Logfile_name = ' all2.log ' # log file name
# If a defined log directory is not present, create a
If not Os.path.isdir (Logfile_dir):
Os.mkdir (Logfile_dir)
# Full path to log file
Logfile_path = Os.path.join (Logfile_dir, Logfile_name)
# log Config dictionary
Logging_dic = {
' Version ': 1,
' Disable_existing_loggers ': False,
' formatters ': {
' Standard ': {
' Format ': Standard_format
},
' Simple ': {
' Format ': Simple_format
},
},
' Filters ': {},
' Handlers ': {
#打印到终端的日志
' Console ': {
' Level ': ' DEBUG ',
' class ': ' Logging. Streamhandler ', # Print to screen
' Formatter ': ' Simple '
},
#打印到文件的日志, collect logs of info and above
' Default ': {
' Level ': ' DEBUG ',
' Class ': ' Logging.handlers.RotatingFileHandler ', # Save to File
' Formatter ': ' Standard ',
' filename ': logfile_path, # log file
' MaxBytes ': 1024*1024*5, # log size 5M
' Backupcount ': 5,
' Encoding ': ' Utf-8 ', # Log file encoding, no need to worry about Chinese log garbled
},
},
' Loggers ': {
#logging. GetLogger (__name__) Get the logger configuration
‘‘: {
' Handlers ': [' Default ', ' Console '], # This adds the two handler defined above, that is, the log data is written to the file and printed to the screen
' Level ': ' DEBUG ',
' Propagate ': True, # Logger up (higher level)
},
},
}
Def load_my_logging_cfg ():
Logging.config.dictConfig (logging_dic) # Import the logging configuration defined above
Logger = Logging.getlogger (__name__) # generates a log instance
Logger.info (' It works! ') # Record the running status of the file
if __name__ = = ' __main__ ':
Load_my_logging_cfg ()
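A minimal sketch of how a business module would use this configuration once it is loaded (the module path lib.my_logging and the transfer() function are assumptions for illustration; only load_my_logging_cfg and the dictConfig call come from the code above):

# core/src.py (hypothetical location)
import logging
from lib import my_logging  # assumption: the configuration above is saved as lib/my_logging.py

my_logging.load_my_logging_cfg()  # apply the dictConfig once at program start
logger = logging.getLogger(__name__)  # inherits the handlers of the '' logger configured above

def transfer(src_user, dst_user, amount):
    logger.info('%s transferred %s to %s', src_user, amount, dst_user)

transfer('egon', 'alex', 100)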
Third, serialization
Serialization: in-memory data structure → intermediate format (a string) → stored in a file
dic = {'name': 'egon', 'age': 18}
with open('db.txt', 'w', encoding='utf-8') as f:
    f.write(str(dic))

Deserialization: file → read the intermediate format (a string) → eval turns it back into an in-memory data structure
# Note: eval is not really a deserialization tool; it just evaluates whatever expression is in the string
with open('db.txt', 'r', encoding='utf-8') as f:
    data = f.read()
    dic = eval(data)
    print(dic, type(dic))
Why eval is not enough: other languages serialize values with notations such as null/true/false, which eval cannot read but json.loads can.
l = [1, True, None, False]
x = str(l)  # "[1, True, None, False]"

import json
x = "[null, true, false, 1]"  # JSON notation: eval would fail here
res = json.loads(x)
print(res, type(res))  # [None, True, False, 1] <class 'list'>
Serialization / deserialization → json.dumps, json.loads
# json is highly cross-language, but it only recognizes a few commonly used data types (see the sketch after the block below for an unsupported type); it turns data into a str for saving
import json

Serialization: in-memory data structure → intermediate format (a string) → stored in a file
dic = {'name': 'egon', 'age': 18}
res = json.dumps(dic)  # the JSON format uses double quotes throughout
print(res, type(res))
with open('db.json', 'wb') as f:
    f.write(res.encode('utf-8'))

Deserialization: file → read the intermediate format (a string) → json.loads turns it back into an in-memory data structure
with open('db.json', 'r', encoding='utf-8') as f:
    data = f.read()
    dic = json.loads(data)
    print(dic, type(dic), dic['name'])
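A quick sketch of the "only a few data types" limitation mentioned above (the set is just an example of an unsupported type):

import json

s = {1, 2, 3}
try:
    json.dumps(s)  # sets are not a JSON type
except TypeError as e:
    print(e)  # e.g. "Object of type set is not JSON serializable"
# pickle (covered below) handles this case, at the cost of being Python-only.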
Verify that the JSON format uses only double quotes: db.txt was written above with str(dic), which produces single quotes, so json.loads fails on it
with open('db.txt', 'r', encoding='utf-8') as f:
    data = f.read()
    dic = json.loads(data)  # raises json.decoder.JSONDecodeError: property names must be double-quoted
    print(dic, type(dic), dic['name'])
Serialization / deserialization → json.dump, json.load
import json
dic = {'name': 'egon', 'age': 18}
with open('db1.json', 'wt', encoding='utf-8') as f:
    json.dump(dic, f)   # dump = dumps + write to the file object
with open('db1.json', 'rt', encoding='utf-8') as f:
    dic = json.load(f)  # load = read from the file object + loads
    print(dic['name'])
Serialization with pickle → pickle.dumps, pickle.loads
# pickle can serialize almost any Python data type, but it is Python-only (poor cross-language support); it turns data into bytes for writing to disk
import pickle

Serialization
s = {1, 2, 3, 4}
res = pickle.dumps(s)
print(res, type(res))
with open('db.pkl', 'wb') as f:
    f.write(res)

Deserialization
with open('db.pkl', 'rb') as f:
    data = f.read()
    print(data)
    s = pickle.loads(data)
    print(s, type(s))
pickle.dump and pickle.load
import pickle
s = {1, 2, 3}
with open('db1.pkl', 'wb') as f:
    pickle.dump(s, f)
with open('db1.pkl', 'rb') as f:
    s = pickle.load(f)
    print(s, type(s))
Fourth, the os module
os.system  # returns 0 if the command runs normally and a non-zero value otherwise; the command's output cannot be captured, it is only printed to the terminal
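A minimal sketch of that behavior (the echo command is just an example):

import os

ret = os.system('echo hello')  # the command's output goes straight to the terminal
print(ret)                     # 0 on success, non-zero on failure; the output itself is not returned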
# Focus on mastering the os.path family of functions
Among them, the highest-priority ones are:
print(os.path.dirname(r'C:\a\b\c\d.txt'))   # C:\a\b\c (on Windows)
print(os.path.basename(r'C:\a\b\c\d.txt'))  # d.txt
os.path.exists: True if the path exists, whether it is a file or a directory
print(os.path.exists(r'D:\code\SH_fullstack_s1\day15\pm\json.py'))
print(os.path.exists(r'D:\code\SH_fullstack_s1\day15'))
os.path.isfile: returns True if the path is an existing file, otherwise False
print(os.path.isfile(r'D:\code\SH_fullstack_s1\day15\pm'))
os.path.isdir: returns True only if the path is an existing directory
print(os.path.join('c:\\', 'a', 'b', 'a.txt'))          # c:\a\b\a.txt (on Windows)
print(os.path.join('c:\\', 'a', 'd:\\', 'b', 'a.txt'))  # d:\b\a.txt -- an absolute component discards everything before it
print(os.path.join('a', 'b', 'a.txt'))                  # a\b\a.txt
res = os.path.getsize(r'D:\code\SH_fullstack_s1\day15\am\settings.py')  # size in bytes
print(res)