1.1 Requirements
The database contains 3 million user records. We must traverse all users, combine each user's associated data into a new JSON document, and save it to Redis.
1.2 Difficulties
The dataset is too large: a single-threaded query cannot load all the data into memory.
1.3 Solutions
Use multi-threaded reading: a producer fetches 200 records at a time (using MySQL paging to get the next 200 rows), and multiple consumers process them concurrently.
1.4 Code 1.4.1 Calling method
/*** Thread Start*/ Public voidUpdate () {
Redis Operation class Hashredisutil Redisutil=hashredisutil.getinstance (); Producer Consumer Producerconsumer pc=NewProducerconsumer (); Data Warehouse Storage s= PC.NewStorage (); Executorservice Service=Executors.newcachedthreadpool (); //a thread to queryProducer p = pc.NewProducer (S,usermapper); Service.submit (P); System.err.println ("The production line is in the process ... "); //is a thread to modify for(inti=0;i<10;i++) {System.err.println ("Consumer thread" +i+ "is in consumption .... "); Service.submit (PC.NewConsumer (redisutil,usermapper,s)); } }
1.4.2 Main Core Class
PackageCom.ypp.thread;ImportJava.math.BigDecimal;ImportJava.util.Calendar;ImportJava.util.HashMap;Importjava.util.List;ImportJava.util.Map;ImportJava.util.Set;ImportJava.util.concurrent.BlockingQueue;ImportJava.util.concurrent.LinkedBlockingQueue;Importorg.apache.commons.lang.StringUtils;ImportOrg.apache.log4j.Logger;ImportOrg.joda.time.LocalDateTime;ImportCom.alibaba.fastjson.JSONObject;Importcom.ypp.constants.Constants;ImportCom.ypp.mapper.UserMapper;ImportCom.ypp.model.User;ImportCom.ypp.model.UserAlis;ImportCom.ypp.model.UserBaseModel;ImportCom.ypp.model.UserVip;ImportCom.ypp.util.HashRedisUtil;Importcom.ypp.util.JsonUtils;ImportCom.ypp.util.PHPSerializer; Public classProducerconsumer {Private StaticLogger Logger = Logger.getlogger (Producerconsumer.class);
This page is the core, global variables, when the producer production once, get 200 users, will put this page++, next fetch is the last 200 usersPrivate StaticInteger page = 0; Consumers
Public classConsumerImplementsRunnable {PrivateHashredisutil Redisutil; PrivateUsermapper Usermapper; PrivateStorage s =NULL; PublicConsumer (Hashredisutil redisutil, Usermapper usermapper, Storage s) {Super(); This. Redisutil =Redisutil; This. Usermapper =Usermapper; This. S =s; } Public voidrun () {Try { while(true) {User users=S.pop (); LongBBB =System.currenttimemillis (); //get a user's fan list coexisting to Redis Try{fansupdate (Users.gettoken (), Users.getuserid (), redisutil); } Catch(Exception E1) {e1.printstacktrace (); } //get a list of users ' concerns and coexist to Redis Try{followupdate (Users.gettoken (), Users.getuserid (), redisutil); } Catch(Exception e) {e.printstacktrace (); } //get a blacklist of users and coexist to Redis Try{blackupdate (Users.gettoken (), Users.getuserid (), redisutil); } Catch(Exception e) {e.printstacktrace (); } //User basic Information Try{userbaseupdate (Users.gettoken (), Users.getuserid (), redisutil); } Catch(Exception e) {e.printstacktrace (); } LongCCC =System.currenttimemillis (); System.out.println ("User:" + users.gettoken () + "Total time Spent:" + (CCC-BBB) + "milliseconds"); Thread.Sleep (500); } } Catch(interruptedexception e) {e.printstacktrace (); } } PublicList<user>GetUserInfo (Integer ithread) {returnUsermapper.finduserinfo ((iThread-1) * 200 + 1); } /*** User basic information modification * *@paramtoken *@parammyUserID *@paramRedisutil *@throwsException*/ Private voidUserbaseupdate (String token, string myuserid, Hashredisutil redisutil)throwsException { } /*** Update a user's blacklist (original token changed to userid) * *@paramtoken *@paramString *@paramRedisutil *@throwsException*/ Private voidBlackupdate (String token, string myuserid, Hashredisutil redisutil)throwsException { } /*** Get a user's attention * *@paramtoken *@paramString *@paramRedisutil *@throwsException*/ Private voidFollowupdate (String token, string myuserid, Hashredisutil redisutil)throwsException { } /*** Get a user's fan 
list * *@paramtoken *@paramUserId *@paramRedisutil *@throwsException*/ Private voidFansupdate (String token, string myuserid, Hashredisutil redisutil)throwsException { } //producer Public classProducerImplementsRunnable {PrivateStorage s =NULL; PrivateUsermapper Mapper; PublicProducer (Storage s, Usermapper mapper) { This. S =s; This. Mapper =mapper; } Public voidrun () {Try { while(true) {System.err.println ("Current paging is:" +page+ "****************************************"); List<User> list=mapper.finduserinfo (page); S.push (list); Page++; } } Catch(interruptedexception E1) {e1.printstacktrace (); } } }
Data Warehouse Public classStorage {blockingqueue<User> queues =NewLinkedblockingqueue<user> (200); /*** Production * *@paramp * Products *@throwsinterruptedexception*/ Public voidPush (List<user> p)throwsinterruptedexception { for(user user:p) {queues.put (user); } } /*** Consumption * *@returnProduct *@throwsinterruptedexception*/ PublicUser pop ()throwsinterruptedexception {returnQueues.take (); } }}
Java thread pool + producer/consumer + MySQL: reading 3 million records.