Monitoring tools written in Python

Source: Internet
Author: User

Recently there was a requirement to record the actual working time of data-entry staff. The idea: a client program records the title of the user's current foreground window and periodically uploads the records to MySQL; a second script then analyzes the records to compute the actual working time. I'm a beginner in Python and haven't written much before, so if there are mistakes, corrections are welcome.


The client is as follows:

# -- Client: record the current foreground window title, batch-upload to MySQL --
#
# NOTE(review): this source was recovered from a whitespace-mangled paste;
# indentation and identifier casing have been reconstructed.  It targets
# Python 2 on Windows (pyHook / pywin32 / MySQLdb); syntax is kept
# compatible with both Python 2.6+ and Python 3.

import getpass
import sys
import threading   # BUG FIX: original imported "Threading" (wrong case)
import time

import MySQLdb     # BUG FIX: original imported "mysqldb" (wrong case)
import pythoncom
import pyHook      # BUG FIX: the package is "pyHook", not "pyhook"
import win32gui

log_list = []    # buffered log entries awaiting upload
last_log = None  # timestamp of the most recent log entry (None until first log)


class mysql:
    """Thin wrapper around a MySQLdb connection for batched inserts."""

    def __init__(self):
        try:
            self.conn = MySQLdb.connect(
                host='192.168.100.250',
                user='hook_agent',
                passwd='qwqwqw480066',
                db='qc_upload',
                port=3306,
                charset='utf8')
        except MySQLdb.Error as e:
            print('Connect to mysql failed', e)
            sys.exit(5)
        else:
            self.cursor = self.conn.cursor()

    def __del__(self):
        # BUG FIX: original defined "__del __" (stray space), so this
        # cleanup method was never invoked as a destructor.
        self.cursor.close()
        self.conn.close()

    def insert(self, sql, params):
        """Insert many rows; commit only if every row succeeded."""
        print('insert to db.')
        try:
            self.cursor.executemany(sql, params)
        except Exception as e:
            print('failed to write db', e)
        else:
            self.conn.commit()


def get_window_name():
    """Return the title (first 50 chars) of the window that has focus.

    For fault tolerance: when the cursor is in the Start menu,
    GetWindowText() may return None, and NoneType has no decode method,
    which would raise AttributeError — return the string 'None' instead.
    """
    window_name = win32gui.GetWindowText(win32gui.GetForegroundWindow())
    try:
        return window_name.decode('GBK')[:50]
    except AttributeError:
        return 'None'


def upload(data):
    """Upload buffered rows to the server.

    On failure, retry every 2 minutes, at most 10 times in total.
    """
    print(data)
    sql_helper = None
    retry = 0
    # BUG FIX: "while retry <= 10" performed 11 attempts for a documented
    # total of 10; "del sql_helper" in the finally clause raised NameError
    # whenever mysql() itself failed to construct.
    while retry < 10:
        try:
            sql_helper = mysql()
            sql = ('insert into qc_worklog (uid, date, time_stmp, win_title) '
                   'values (%s, %s, %s, %s)')
            sql_helper.insert(sql, data)
            break
        except Exception:
            # BUG FIX: original called time.sleep() with no argument
            # (TypeError); the docstring says "retry every 2 minutes".
            time.sleep(120)
            retry += 1
        finally:
            if sql_helper is not None:
                del sql_helper
                sql_helper = None


def log(event):
    """pyHook callback: at most every 120 seconds, record the window title.

    When 30 entries have accumulated, hand them to upload() on a background
    thread and reset the buffer.  Must return True — returning a falsy
    value makes the hook swallow the event and the cursor stops moving.
    """
    global log_list, last_log
    time_now = int(time.time())
    if not last_log or time_now - last_log >= 120:
        # BUG FIX: the original assigned the new entry *to* log_list and then
        # appended log_list to itself; build the entry separately instead.
        entry = [getpass.getuser(),
                 time.strftime('%Y-%m-%d'),  # BUG FIX: was %y (2-digit year)
                 time_now,
                 get_window_name()]
        log_list.append(entry)
        if len(log_list) >= 30:
            upload_t = threading.Thread(target=upload, args=(log_list,))
            upload_t.start()
            log_list = []  # re-init the buffer
        last_log = time_now
    return True


def main():
    """Install mouse and keyboard hooks, then pump Windows messages forever."""
    hm = pyHook.HookManager()
    hm.MouseAll = log
    hm.HookMouse()
    hm.KeyDown = log
    hm.HookKeyboard()
    pythoncom.PumpMessages()


if __name__ == '__main__':
    main()


The server-side script that determines working time is as follows:

#!/bin/env python
# -- Server: analyse the uploaded window-title log and compute work time --
#
# NOTE(review): recovered from a whitespace-mangled paste; indentation and
# identifier casing reconstructed.  Targets Python 2 (urllib2 / MySQLdb).

import json
import sys
import time
import urllib
import urllib2

import MySQLdb


class mysql:
    """Thin wrapper around a MySQLdb connection for read-only queries."""

    def __init__(self):
        try:
            self.conn = MySQLdb.connect(
                host='127.0.0.1',
                user='root',
                passwd='qwqwqw',
                db='qc_upload',
                port=3306,
                charset='utf8')
        except MySQLdb.Error as e:
            print('Connect to mysql failed', e)
            log(e)
            sys.exit(5)
        else:
            self.cursor = self.conn.cursor()

    def __del__(self):
        self.cursor.close()
        self.conn.close()

    def query(self, sql, params):
        """Run a parameterized SELECT and return all rows (None on error)."""
        try:
            self.cursor.execute(sql, params)
        except Exception as e:
            print(human_time(), 'Failed to read db', e)
            log(e)
        else:
            return self.cursor.fetchall()


def human_time():
    """Return the current local time as 'YYYY-MM-DD HH:MM:SS'."""
    # BUG FIX: original format was "%Y-%m-%d %h:%m:%s" — lowercase %h/%m/%s
    # are not hour/minute/second directives.
    return time.strftime('%Y-%m-%d %H:%M:%S')


def log(e):
    """Append a timestamped message to logs.log."""
    content = human_time() + str(e) + '\n'
    with open('logs.log', 'a') as f:
        f.write(content)


def calculate(username, day):
    """Analyse one user's log for *day* and return the work time.

    If the keyword 'Linkdoc' appears in the window title, the gap since the
    previous entry (only when <= 300 s) is counted as work time.
    Returns {'username': ..., 'qc_time': seconds, 'date': day}.
    """
    # select the passed username's log, oldest first
    sql_helper = mysql()
    sql = ('select time_stmp, win_title from qc_worklog '
           'where uid=%s and date=%s order by time_stmp')
    logs = sql_helper.query(sql, (username[0], day))
    work_time = 0
    # BUG FIX: the original tested "'last_time_stmp' not in dir()" to detect
    # the first iteration; use an explicit None sentinel instead.
    last_time_stmp = None
    for row in logs:
        if 'Linkdoc' in row[1]:
            if last_time_stmp is None:
                last_time_stmp = row[0]
            delta_time = row[0] - last_time_stmp
            if delta_time <= 300:
                work_time += delta_time
        # NOTE(review): reconstructed indentation places this at loop level
        # (gap measured against the previous entry of any kind) — confirm
        # against the original intent.
        last_time_stmp = row[0]
    return {'username': username[0], 'qc_time': work_time, 'date': day}


def analyse(day=None, action='upload'):
    """Analyse every user's work time for *day* (default: today).

    action='upload' posts the result to the dig service; action='print'
    prints it to stdout.

    BUG FIX: the original default was day=time.strftime(...), which is
    evaluated once at import time and goes stale in a long-lived process;
    a None sentinel resolves "today" at call time instead.
    """
    if day is None:
        day = time.strftime('%Y-%m-%d')
    sql_helper = mysql()
    # get all distinct usernames seen on that day
    sql = 'select distinct uid from qc_worklog where date=%s'
    usernames = sql_helper.query(sql, (day,))
    result = [calculate(u, day) for u in usernames]
    if action == 'upload':
        upload(result)
    elif action == 'print':
        print(result)


def get_token():
    """Obtain an auth token from the dig service, retrying every 2 minutes."""
    url = ('http://192.168.10.38:8089/login/collectorlogin'
           '?collecter_name=dig_api_auth&password=wei712372_knil')
    while True:
        try:
            req = urllib2.Request(url)
            response = urllib2.urlopen(req, timeout=10)
            res_data = json.loads(response.read())
            break
        except urllib2.URLError as e:
            log(e)
            # BUG FIX: original called time.sleep() with no argument.
            time.sleep(120)
            continue
    return res_data['data']['token']


def upload(result):
    """POST the analysis result to the dig service, retrying every 2 minutes."""
    post_value = {'data': json.dumps(result), 'token': get_token()}
    post = urllib.urlencode(post_value)
    # BUG FIX: original URL was malformed ("HTTP// ...").
    url = 'http://192.168.10.38:8089/api/saveuserworktime'
    while True:
        try:
            req = urllib2.Request(url)
            response = urllib2.urlopen(req, post, timeout=10)
            res = response.read()
            break
        except urllib2.URLError as e:
            log(e)
            time.sleep(120)  # BUG FIX: time.sleep() had no argument
            continue
    log(res)


def print_usage():
    print("""Usage: -p for print today's workload.
    -p yyyy-mm-dd for print specific day's workload
    run the script without any args, upload the workload of today
    run the script with yyyy-mm-dd, upload the workload of the specific day""")


def main():
    """Dispatch on command-line arguments; see print_usage()."""
    args = sys.argv
    if len(args) > 1:
        # print the workload of today
        if '-p' in args and len(args) == 2:
            analyse(action='print')
        # print the workload of a specific day
        elif '-p' in args and len(args) == 3 and len(args[2]) == 10:
            analyse(day=args[2], action='print')
        # upload the workload of a specific day
        elif len(args[1]) == 10:
            analyse(day=args[1])
        else:
            print_usage()
    else:
        # BUG FIX: the original's trailing "else: print_usage()" was
        # unreachable because len(sys.argv) is always >= 1; with no extra
        # args, upload today's workload.
        analyse()


if __name__ == '__main__':
    main()


This article is from "Lao Guo's Journal" blog, please be sure to keep this source http://laoguo.blog.51cto.com/11119466/1786824

Python-Written monitoring tools

Related Article

Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.