Implementing simple multi-task MySQL-to-XML conversion in Python
This article describes how to implement simple multi-task mysql-to-xml Conversion Using Python. We will share this with you for your reference. The details are as follows:
The exported format should match the XML that Navicat produces when exporting a table.
Note that even when gevent is used, file I/O operations still block, so the tasks are not completely asynchronous.
1. mysql2xml.py:
# -*- coding: utf-8 -*-
"""Export MySQL tables to Navicat-style XML files, one gevent task per table.

@author: Yoki
"""
import codecs
import re

import gevent
import pymysql
from pymysql.cursors import DictCursor

# Module-level connection shared by all export tasks; set by init_mysql_connect().
db_conn = None


def init_mysql_connect(*args, **kwargs):
    """Open the module-level MySQL connection (args forwarded to pymysql.connect)."""
    global db_conn
    db_conn = pymysql.connect(*args, **kwargs)


def list_to_xml(result_cur, key_list):
    """Render a result set as Navicat-style XML and return it as one string.

    Non-standard export: values are not XML-escaped, and duplicate node names
    are not supported by a strict XML DOM.

    :param result_cur: iterable of row dicts
    :param key_list: field names, in column order
    :return: the XML document as a string
    """
    # Accumulate parts and join once — quadratic '+=' concatenation was the
    # original bottleneck.
    parts = ['<?xml version="1.0" encoding="UTF-8"?>\r\n', '<RECORDS>\r\n']
    for item in result_cur:
        parts.append('\t<RECORD>\r\n')
        for k in key_list:
            real_value = item.get(k, '')
            parts.append('\t<%s>%s</%s>\r\n' % (k, real_value, k))
        parts.append('\t</RECORD>\r\n')
    parts.append('</RECORDS>\r\n')
    return ''.join(parts)


def get_table_rows(tb_name):
    """Fetch every row of *tb_name* as a list of dicts.

    :param tb_name: table name; interpolated directly into SQL, so it must
        come from trusted code (see the fixed tb_list below), never user input.
    :return: list of row dicts
    """
    cursor = db_conn.cursor(cursor=DictCursor)
    cursor.execute('SELECT * FROM %s' % tb_name)
    return list(cursor)


def get_table_keys(tb_name):
    """Return the table's field names in declaration order.

    Parses the backtick-quoted identifiers out of SHOW CREATE TABLE; the first
    backtick token is the table name itself and is skipped.

    :param tb_name: table name (trusted, see get_table_rows)
    :return: list of unique field names, or [] if nothing was returned
    """
    cursor = db_conn.cursor(cursor=DictCursor)
    if cursor.execute('SHOW CREATE TABLE %s' % tb_name) != 1:
        raise Exception('unexpected SHOW CREATE TABLE result for %s' % tb_name)
    for r in cursor:
        # NOTE(review): pymysql returns the DDL under the 'Create Table' key —
        # confirm against the server/driver version in use.
        create_sql = r['Create Table']
        fields = re.findall(r'`(.*?)`', create_sql)
        result = []
        for field in fields[1:]:  # skip the table name, keep first occurrence only
            if field not in result:
                result.append(field)
        return result
    return []


def mysql_to_xml(tb_name, output_dir='xml', postfix='xml'):
    """Export one table to <output_dir>/<tb_name>.<postfix> as UTF-8 XML.

    :param tb_name: database table name
    :param output_dir: destination directory (must already exist)
    :param postfix: output file extension
    """
    rows = get_table_rows(tb_name)
    keys = get_table_keys(tb_name)
    content = list_to_xml(rows, keys)
    # 'with' guarantees the file is closed even if write() raises.
    with codecs.open('%s/%s.%s' % (output_dir, tb_name, postfix), 'w', 'utf-8') as fp:
        fp.write(content)


tb_list = ['tb_item', 'tb_state']

if __name__ == '__main__':
    init_mysql_connect(host='localhost', user='user', password='password',
                       database='test', port=3306, charset='utf8')
    jobs = [gevent.spawn(mysql_to_xml, tb_name) for tb_name in tb_list]
    gevent.joinall(jobs)
2. A modified list_to_xml that streams output straight to the file instead of building one huge string, which speeds the export up by orders of magnitude:
def list_to_xml(result_cur, key_list, out_path='test.xml'):
    """Stream a result set to *out_path* as Navicat-style UTF-8 XML.

    Writes each record directly to the file instead of accumulating one big
    string, which is what makes this version far faster than the original.

    :param result_cur: iterable of row dicts
    :param key_list: field names, in column order
    :param out_path: destination file (default kept for backward compatibility)
    :return: None
    """
    import codecs
    import html
    # 'with' replaces the bare open/close so the file is closed on exceptions;
    # the original also had a misplaced paren (codecs.open('test.xml'), ...)
    # that made it a syntax error.
    with codecs.open(out_path, 'w', 'utf-8') as fp:
        fp.write('<?xml version="1.0" encoding="UTF-8" ?>\r\n')
        fp.write('<RECORDS>\r\n')
        for item in result_cur:
            fp.write('\t<RECORD>\r\n')
            for k in key_list:
                v = item.get(k, '')
                if v is None:
                    real_value = ''
                elif isinstance(v, str):
                    # quote=False matches the old cgi.escape default
                    # (escapes & < > but not quotes); cgi.escape is removed
                    # in modern Python, unicode is Python-2-only.
                    real_value = html.escape(v, quote=False)
                else:
                    real_value = v
                fp.write('\t\t<%s>%s</%s>\r\n' % (k, real_value, k))
            fp.write('\t</RECORD>\r\n')
        fp.write('</RECORDS>\r\n')