import json
import pymysql

# Read the Yelp review data and write it to the database.
# After the import completes there are 4,736,897 records in total.

def prem(db):
    cursor = db.cursor()
    cursor.execute("SELECT VERSION()")
    data = cursor.fetchone()
    print("Database version: %s" % data)  # a version string here means the connection succeeded
    cursor.execute("DROP TABLE IF EXISTS review")
    # Create the table as needed.
    # The VARCHAR lengths below are assumed defaults; the original listing did not preserve them.
    sql = """CREATE TABLE review (
                 review_id VARCHAR(255),
                 user_id VARCHAR(255),
                 business_id VARCHAR(255),
                 stars INT,
                 text VARCHAR(10000) NOT NULL,
                 useful INT,
                 funny INT,
                 cool INT)"""
    cursor.execute(sql)

def reviewdata_insert(db):
    with open('E:/data/yelp_data/dataset/review.json', encoding='utf-8') as f:
        i = 0
        while True:
            i += 1
            print('loading line %s ...' % i)
            try:
                line = f.readline()               # read the file line by line
                review_text = json.loads(line)    # parse one JSON record per line
                result = [(review_text['review_id'],
                           review_text['user_id'],
                           review_text['business_id'],
                           review_text['stars'],
                           review_text['text'],
                           review_text['useful'],
                           review_text['funny'],
                           review_text['cool'])]
                print(result)
                insert_re = ("INSERT INTO review "
                             "(review_id, user_id, business_id, stars, text, useful, funny, cool) "
                             "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
                cursor = db.cursor()
                cursor.executemany(insert_re, result)
                db.commit()
            except Exception as e:
                db.rollback()
                print(str(e))
                break

if __name__ == "__main__":  # entry point: initialize the connection, then call the functions above
    db = pymysql.connect(host='localhost', user='root',
                         password='your password', database='your database name',
                         charset='utf8')
    cursor = db.cursor()
    prem(db)
    reviewdata_insert(db)
    cursor.close()
    db.close()
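The script above commits a single row per loop iteration, which is slow for several million records. Below is a minimal sketch of a batched variant under the same assumptions (same review table and file path as above); the insert_reviews_batched helper and the batch_size parameter are illustrative additions, not part of the original script.

import json
import pymysql

INSERT_SQL = ("INSERT INTO review "
              "(review_id, user_id, business_id, stars, text, useful, funny, cool) "
              "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")

def insert_reviews_batched(db, path, batch_size=1000):
    # Hypothetical helper: accumulate rows and commit them in batches
    # instead of once per line, which cuts the number of round trips.
    cursor = db.cursor()
    batch = []
    with open(path, encoding='utf-8') as f:
        for line in f:  # iterate lazily so the file is never loaded fully into memory
            r = json.loads(line)
            batch.append((r['review_id'], r['user_id'], r['business_id'],
                          r['stars'], r['text'], r['useful'], r['funny'], r['cool']))
            if len(batch) >= batch_size:
                cursor.executemany(INSERT_SQL, batch)
                db.commit()
                batch.clear()
        if batch:  # flush the final partial batch
            cursor.executemany(INSERT_SQL, batch)
            db.commit()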
Working with large JSON files