MySQL slow query monitoring: script and code walkthrough
1. Edit my.cnf
# Overall effect: table and file output are enabled globally, but the general log
# is only written to a table, while the slow query log goes to both a table and a file.
general_log=1                       # log every SQL statement MySQL executes
slow_query_log=1                    # log slow SQL statements
# log_output affects both general_log and slow_query_log
log_output=TABLE,FILE               # write logs to tables as well as files; tables are easier for a program to aggregate
# general_log_file is not set here, so the general log is written to the table only
# On MySQL 5.1.29 and later, the following would also write executed SQL to a file:
# general_log_file=/log/general.log
# Before 5.1.29 the option was:
# log=/var/lib/mysql/sql_row.log
long_query_time=1                   # queries taking longer than 1 second are treated as slow
slow_query_log_file=/log/slow.log
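The same switches can also be flipped at runtime without restarting MySQL (they revert on restart unless they are also kept in my.cnf); a minimal sketch:
SET GLOBAL general_log = 1;
SET GLOBAL slow_query_log = 1;
SET GLOBAL log_output = 'TABLE,FILE';
SET GLOBAL long_query_time = 1;
SET GLOBAL slow_query_log_file = '/log/slow.log';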
2. Change the engine of MySQL's log tables (in the mysql database)
# general_log uses the CSV engine by default; switching it to MyISAM makes queries against it much faster
set global general_log = off;
alter table general_log engine = MyISAM;
set global general_log = on;
# slow_log uses the CSV engine by default; switching it to MyISAM makes queries against it much faster
set global slow_query_log = off; -- setting it to 0 has the same effect
alter table slow_log engine = MyISAM;
set global slow_query_log = on; -- setting it to 1 has the same effect
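A quick sanity check (not part of the original steps) to confirm the settings and engine changes took effect:
SHOW VARIABLES LIKE 'log_output';
SHOW VARIABLES LIKE 'general_log%';
SHOW VARIABLES LIKE 'slow_query_log%';
SELECT table_name, engine FROM information_schema.tables
WHERE table_schema = 'mysql' AND table_name IN ('general_log', 'slow_log');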
3. MySQL's own log tables general_log and slow_log cannot be modified directly, so create copies that can be altered and purged (the log tables grow large, so data older than n days has to be cleaned up periodically)
Create the slow_log_dba table:
CREATE TABLE `slow_log_dba` (
  `start_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `user_host` mediumtext NOT NULL,
  `query_time` time NOT NULL,
  `lock_time` time NOT NULL,
  `rows_sent` int(11) NOT NULL,
  `rows_examined` int(11) NOT NULL,
  `db` varchar(512) NOT NULL,
  `last_insert_id` int(11) NOT NULL,
  `insert_id` int(11) NOT NULL,
  `server_id` int(10) unsigned NOT NULL,
  `sql_text` mediumtext NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Slow log for dba';
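Since the daily purge in step 4 filters on start_time, an index on that column may be worth adding; this is an optional tweak mirroring the event_time key on general_log_dba below:
ALTER TABLE slow_log_dba ADD KEY `start_time` (`start_time`);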
Create the general_log_dba table:
CREATE TABLE `general_log_dba` (
  `event_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `user_host` mediumtext NOT NULL,
  `thread_id` int(11) NOT NULL,
  `server_id` int(10) unsigned NOT NULL,
  `command_type` varchar(64) NOT NULL,
  `argument` mediumtext NOT NULL,
  KEY `user_host` (`user_host`(200)),
  KEY `event_time` (`event_time`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='general log for dba op';
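Once data lands in slow_log_dba, a daily slow-query summary can be pulled with a simple aggregation; a rough sketch (the LEFT() truncation length and the LIMIT are arbitrary choices):
SELECT db,
       LEFT(sql_text, 100) AS sql_head,
       COUNT(*) AS exec_count,
       MAX(query_time) AS max_query_time,
       SUM(rows_examined) AS total_rows_examined
FROM slow_log_dba
WHERE start_time >= CURDATE() - INTERVAL 1 DAY
GROUP BY db, sql_head
ORDER BY exec_count DESC
LIMIT 20;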
4. The reporting program reads from the general_log_dba and slow_log_dba tables, so the data in general_log and slow_log has to be copied into them on a schedule
The report is generated once a day, so this copy only needs to run once a day.
# The script below keeps 10 days of data: every day it copies general_log and slow_log into general_log_dba and slow_log_dba, then purges old rows
# Run mysqllogtable.sh once a day as a scheduled job (a sample crontab entry is shown after the script)
#!/bin/sh
NDaysAgo=$(date -d '-10 days' "+%F %H:%M:%S")
/usr/local/mysql/bin/mysql -uXXXX -p'xxxxxxxx' -D'mysql' -e "insert general_log_dba select * from general_log;
truncate general_log;
delete from general_log_dba where event_time < \"$NDaysAgo\";
insert slow_log_dba select * from slow_log;
truncate slow_log;
delete from slow_log_dba where start_time < \"$NDaysAgo\""
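A possible crontab entry for the script above (the paths and schedule are placeholders; adjust to your environment):
# copy yesterday's log rows and purge data older than 10 days, once a day at 01:00
0 1 * * * /path/to/mysqllogtable.sh >> /path/to/mysqllogtable.log 2>&1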
5. A Python script that summarizes each day's SQL operations and MySQL slow queries (some parts could be factored into shared helper functions; adapt as you see fit)
Script that summarizes MySQL's daily execution log:
# -*- coding: utf-8 -*-
__author__ = 'river'
import re
import smtplib
from datetime import datetime, timedelta
from email.mime.text import MIMEText

import MySQLdb as mysql


def sendHtmlMail(mailcontent, myip):
    """Send the generated HTML report by mail."""
    try:
        yestoday = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
        sender = 'xxx@xxx.com'
        receiver = ['xxx@xxx.com']
        subject = myip + ' mysql operation report ' + yestoday
        smtpserver = 'smtp.exmail.xx.com'
        username = 'xxx@xxx.com'
        password = 'xxxxx'
        msg = MIMEText(mailcontent, 'html', 'utf-8')  # for a plain-text mail use MIMEText(..., 'plain', 'utf-8')
        msg['Subject'] = subject
        msg['From'] = sender
        msg['To'] = 'xxx@xxxxxxxx.com'
        smtp = smtplib.SMTP()
        smtp.connect(smtpserver)
        smtp.login(username, password)
        smtp.sendmail(sender, receiver, msg.as_string())
        smtp.quit()
    except Exception, e:
        print e, 'send mail error'


if __name__ == '__main__':
    htmlfile = 'mysqlLogMon.html'
    myiplist = ['192.168.10.10', '192.168.10.19']
    yestoday = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d 00:00:00")
    today = datetime.now().strftime("%Y-%m-%d 00:00:00")
    for myip in myiplist:
        result = None
        sql = "select user_host,argument from general_log_dba where event_time >='%s' and event_time <='%s'" % (yestoday, today)
        try:
            dbcon = mysql.connect(host=myip, user='xxxxx', passwd='xxxxx', db='mysql', port=3306, charset='utf8')
            cur = dbcon.cursor()
            print "step 1," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            cur.execute(sql)
            result = cur.fetchall()
            cur.close()
            dbcon.close()
        except Exception, e:
            print e, 'conn mysql error'
        print "step 2," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        allhash = {}
        if result:
            for user_host, argument in result:
                # strip /* ... */ comments and NUL bytes, lower-case the statement
                argument_delcom = re.compile(r'(\/\*(\s|.)*?\*\/)').sub("", argument).strip().replace(u"\x00", '').lower()
                if re.compile(r'^access.*').match(argument_delcom) or re.compile(r'^.*@.*on.*').match(argument_delcom) or re.compile(r'^grant.*').match(argument_delcom):
                    tmpargument = argument_delcom.strip()
                else:
                    # otherwise keep only the first word (select/insert/update/...)
                    tmpargument = argument_delcom.split(' ')[0].strip()
                if len(tmpargument) > 30:
                    # some statements look like u'select\n\t\t\t\t\tcount(m.enquirymainid)'; use print repr(tmpargument) to inspect them
                    tmpargument = argument_delcom.split('\n')[0].strip()
                # if the statement was nothing but a comment, do not count it
                if not tmpargument or tmpargument.strip() == '' or tmpargument.strip() == ' ':
                    continue
                if allhash.has_key(user_host):
                    allhash[user_host][tmpargument] = allhash[user_host].get(tmpargument, 0) + 1
                else:
                    allhash[user_host] = {tmpargument: 1}
            print "step 3," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            # minimal HTML header and table head; columns: 用户 (user), 执行sql (statement), 执行次数 (count)
            headhtml = '''<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"></head>
<body><table border="1" cellspacing="0" cellpadding="2">
<tr><th>用户</th><th>执行sql</th><th>执行次数</th></tr>'''
            print "step 4," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            with open(htmlfile, 'w') as htmlfileobj:
                htmlfileobj.write(headhtml)
                htmlfileobj.flush()
            print "step 5," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            with open(htmlfile, 'a') as htmlfileobj:
                for hostkey in allhash.keys():
                    # sort this user's statements by execution count, descending
                    listtmp = sorted(allhash[hostkey].iteritems(), key=lambda labkey: labkey[1], reverse=True)
                    rowspan = len(allhash[hostkey])
                    tmpline = '<tr><td rowspan="%s">%s</td>' % (rowspan, hostkey.encode('utf-8'))
                    htmlfileobj.write(tmpline)
                    countn = 0
                    for runsql, count in listtmp:
                        if countn == 0:
                            # the first row continues the <tr> opened with the rowspan cell
                            tmpline = '<td>%s</td><td>%s</td></tr>' % (runsql.encode('utf-8'), count)
                        else:
                            tmpline = '<tr><td>%s</td><td>%s</td></tr>' % (runsql.encode('utf-8'), count)
                        countn += 1
                        htmlfileobj.write(tmpline)
                tmpline = '''</table></body></html>'''
                htmlfileobj.write(tmpline)
            with open(htmlfile, 'r') as htmlfileobj:
                mailcontent = htmlfileobj.read()
            sendHtmlMail(mailcontent, myip)
        else:
            print 'sql result is None,exit ing'
        print "step 6," + myip + ',' + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
That is all for this article. Hopefully it helps with your learning, and thanks for supporting 狼蚁SEO.