Zabbix in the enterprise: pulling monitoring data on a schedule for reports

Author: summer_zhou | Published 2020-03-24 14:21

Recently a project team came to us with a new requirement: every Friday at 18:00, pull the hourly average monitoring data (00:00-24:00) for each day of the past week for their monitored hosts, write it to an Excel file with one sheet per day, and email the summary to the business side.

They use the data for a weekly business-usage report. The requirement is a bit of a hassle, but as ops we still have to deliver. Below is a screenshot of the resulting email.


[Screenshot: the emailed Excel report]

Each day gets its own sheet containing the hourly average monitoring data for that project's hosts from 00:00 to 24:00.
The approach: query the monitoring data with SQL (a minimal sketch of the idea follows below), aggregate it into an Excel file with Python, and send it on a schedule via crontab.
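For context, Zabbix keeps hourly aggregates (value_min / value_avg / value_max, one row per item per hour) in its trends and trends_uint tables, which is what the script queries. A minimal sketch of that kind of query, with the item key and date range as placeholder examples:

select from_unixtime(t.clock, '%Y-%m-%d %H:00') as Hour, h.host as Host, round(t.value_avg, 2) as Avg_Value
from trends t
join items i on i.itemid = t.itemid
join hosts h on h.hostid = i.hostid
where i.key_ = 'system.cpu.util[,idle]'
  and t.clock >= UNIX_TIMESTAMP('2020-03-23 00:00:00')
  and t.clock <  UNIX_TIMESTAMP('2020-03-24 00:00:00')
order by h.host, t.clock;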

Here is the script; just change the hosts/projects being queried and the mail settings to your own (I use a 139 mailbox).

#!/usr/bin/env python
# coding=utf-8
# Author: summer_zhl
# Email: zhouhongli@richinfo.cn

import MySQLdb
import datetime
import xlwt
import smtplib
import sys

from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText

from email.utils import COMMASPACE, formatdate
from email import encoders

import os

'''
Send an email with attachments.
'''
def send_mail(server, fro, to, subject, text, files=[]):
    assert type(server) == dict
    assert type(to) == list
    assert type(files) == list

    msg = MIMEMultipart()
    msg['From'] = fro
    msg['Subject'] = subject
    msg['To'] = COMMASPACE.join(to)  # COMMASPACE==', '
    msg['Date'] = formatdate(localtime=True)
    msg.attach(MIMEText(text,_charset="utf-8"))

    for file in files:
        part = MIMEBase('application', 'octet-stream')  # 'octet-stream': binary data
        part.set_payload(open(file, 'rb').read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(file))
        msg.attach(part)

    smtp = smtplib.SMTP(server['name'], server['port'])
    # smtp.ehlo()
    # smtp.starttls()
    # smtp.ehlo()
    smtp.login(server['user'], server['passwd'])
    smtp.sendmail(fro, to, msg.as_string())
    smtp.close()

'''
Run a SQL statement and return all result rows.
'''
def get_mysql_data(sql):
    cur.execute(sql)          # run the SQL statement
    results = cur.fetchall()  # fetch all result rows
    return results

'''
Write one day's data into its own Excel worksheet.
'''
def cover_excel(msg, start_time):
    ws = wb_excel.add_sheet(start_time, cell_overwrite_ok=True)  # add one sheet per day, named after the date
    title = ['时间'.encode('utf8'), '主机IP'.encode('utf8'), 'CPU逻辑核数(单位:个)'.encode('utf8'),
             'CPU空闲值(单位:%)'.encode('utf8'), '可用内存值(单位:GB)'.encode('utf8'), '总内存值(单位:GB)'.encode('utf8'),
             '公网进入流量(单位:kbps)'.encode('utf8'), '公网流出流量(单位:kbps)'.encode('utf8')]
    x = msg
    x.insert(0, title)  # row 0 is the header row
    count = len(x)      # counted after the insert, so the last data row is not dropped
    print count
    for i in range(0, count):      # i = row (0 is the header)
        for j in range(0, 8):      # j = column
            ws.write(i, j, x[i][j])
    # the workbook is saved once in __main__ after all sheets have been added

'''
Query the Zabbix trends tables for one day and assemble one row per host.
'''
def run_select(start_time, end_time):
    get_cpu_idle_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,round(hi.value_avg,2) as Cpu_Idle  \
                        from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                        join trends hi on  i.itemid = hi.itemid  where  i.key_='system.cpu.util[,idle]' and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00') \
                        and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    cpu_idle_result = get_mysql_data(get_cpu_idle_sql)
    print ("cpu_idle_result:", cpu_idle_result)

    get_cpu_num_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,hi.value_avg as Cpu_Number \
                       from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                       join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='system.cpu.num' and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00') \
                       and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    cpu_num_result = get_mysql_data(get_cpu_num_sql)
    print ("cpu_num_result:", cpu_num_result)

    get_mem_avai_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,round(hi.value_avg/1024/1024/1024,2) as Memory_Avaiable\
                        from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                        join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='vm.memory.size[available]'  and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00') \
                        and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    mem_avai_result = get_mysql_data(get_mem_avai_sql)
    print ("mem_avai_result:", mem_avai_result)
    # get_mem_free_sql="select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,hi.value_avg/1024/1024/1024 as Memory_Avaiable  from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='vm.memory.size[free]'  and  hi.clock >= UNIX_TIMESTAMP('%s') and  hi.clock < UNIX_TIMESTAMP('%s') and g.name like '%%广告%%';"%(start_time,end_time)
    # mem_free_result=get_mysql_data(get_mem_free_sql)

    get_mem_total_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,round(hi.value_avg/1024/1024/1024,2) as Memory_Total \
                         from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                         join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='vm.memory.size[total]' and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00') \
                         and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    mem_total_result = get_mysql_data(get_mem_total_sql)
    print ("mem_total_result:", mem_total_result)

    get_ens33_in_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,round(hi.value_avg/1000,2) as Network_Eth0_In \
                      from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                      join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='net.if.in[\"ens33\"]' and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00')\
                      and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    ens33_in_result = get_mysql_data(get_ens33_in_sql)
    print ("ens33_in_result:", ens33_in_result)

    get_ens33_out_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,h.host as Host,round(hi.value_avg/1000,2) as Network_Eth0_Out \
                       from hosts_groups hg join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid \
                       join trends_uint hi on  i.itemid = hi.itemid  where  i.key_='net.if.out[\"ens33\"]' and  hi.clock >= UNIX_TIMESTAMP('%s 00:00:00') \
                       and  hi.clock < UNIX_TIMESTAMP('%s 00:00:00') group by h.host;" % (start_time, end_time)
    ens33_out_result = get_mysql_data(get_ens33_out_sql)
    print ("ens33_out_result:", ens33_out_result)

    # Base row: (Date, Host, Cpu_Number); the other metrics are appended by matching date and host,
    # which yields rows shaped like the 8-column header written in cover_excel().
    msg = [list(i) for i in cpu_num_result]  # convert result tuples to lists so values can be appended
    for i in msg:
        for ii in cpu_idle_result:
            if i[0] == ii[0] and i[1] == ii[1]:
                i.append(round(float(ii[2]), 2))
        for iii in mem_avai_result:
            if i[0] == iii[0] and i[1] == iii[1]:
                i.append(round(float(iii[2]), 2))
        for iiii in mem_total_result:
            if i[0] == iiii[0] and i[1] == iiii[1]:
                i.append(round(float(iiii[2]), 2))
        for a in ens33_in_result:
            if i[0] == a[0] and i[1] == a[1]:
                i.append(int(a[2]))
        if len(i) == 6:  # no inbound traffic value for this host, pad with 0
            i.append(0)
        for b in ens33_out_result:
            if i[0] == b[0] and i[1] == b[1]:
                i.append(int(b[2]))
        if len(i) == 7:  # no outbound traffic value for this host, pad with 0
            i.append(0)
    # print msg  # e.g. msg[0]: [u'2020-03-23 18:00:00', u'Linux_zabbix_server', 4, 99, 1.11, 1, 2, 17]

    cover_excel(msg, start_time)

'''
Build one worksheet per day for the previous 7 days.
'''
def main():
    for i in range(7, 0, -1):
        start_time = (datetime.datetime.now() - datetime.timedelta(days=i)).strftime("%Y-%m-%d")
        end_time = (datetime.datetime.now() - datetime.timedelta(days=i - 1)).strftime("%Y-%m-%d")
        # e.g. ('2020-03-17', '2020-03-18') ... ('2020-03-23', '2020-03-24')
        run_select(start_time, end_time)  # one worksheet per day

if __name__ == "__main__":
    default_encoding = 'utf-8'
    if sys.getdefaultencoding() != default_encoding:
        reload(sys)
        sys.setdefaultencoding(default_encoding)
    if os.path.exists("/usr/local/python/") is False:
        os.mkdir("/usr/local/python/")
    conn = MySQLdb.connect(host='192.168.67.xx', user='root', passwd='xxx', port=3306, charset="utf8")  # connect to the Zabbix database
    cur = conn.cursor()  # create a cursor
    conn.select_db('zabbix')  # use the zabbix schema
    wb_excel = xlwt.Workbook(encoding='utf-8')  # create the workbook with utf-8 encoding
    main()

    wb_excel.save('/usr/local/python/zabbix_monitor_hour_avg.xls')
    cur.close()
    conn.close()

    # finally, build the mail and send the workbook as an attachment
    server = {'name': 'smtp.139.com', 'user': 'xxx@139.com', 'passwd': 'xxx', 'port': 25}
    fro = 'xxx@139.com'
    to = ['xxx@139.com']#, 'xxx@qq.com']
    now_time = ((datetime.datetime.now() - datetime.timedelta(days=1))).strftime("%Y/%m/%d")
    last_time = ((datetime.datetime.now() - datetime.timedelta(days=7))).strftime("%Y/%m/%d")
    subject = "Zabbix平台监控数据 [%s-%s]" %(last_time,now_time)
    text = "你好,附件是Zabbix平台上一周的监控数据,请查收!\n有问题请联系Summer."
    files = ['/usr/local/python/zabbix_monitor_hour_avg.xls']
    send_mail(server, fro, to, subject, text, files=files)

To pull other monitoring items, adjust the SQL accordingly, and the rest of the script can be adapted in the same way; an example follows below.
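For instance, a float-valued item such as used space on the root filesystem could be reported the same way. The item key below is only an example and depends on your templates; note that float items live in trends while integer items live in trends_uint, and if you paste this into the script's %-formatted strings you must double the % signs in the date format:

select from_unixtime(hi.clock, '%Y-%m-%d %T') as Date, h.host as Host, round(hi.value_avg, 2) as Disk_Used_Pct
from hosts_groups hg
join items i on hg.hostid = i.hostid
join hosts h on h.hostid = i.hostid
join trends hi on i.itemid = hi.itemid
where i.key_ = 'vfs.fs.size[/,pused]'
  and hi.clock >= UNIX_TIMESTAMP('2020-03-23 00:00:00')
  and hi.clock <  UNIX_TIMESTAMP('2020-03-24 00:00:00')
order by h.host, hi.clock;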

To run it on a schedule, drop the script into crontab and set the time you want.

Here is my crontab entry:

00 18 * * 5 /usr/bin/python /usr/local/python/zabbix_hour_avg_monitor.py
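If you want a log of each run for troubleshooting, you can redirect the script's output in the same entry (the log path here is just an example):

00 18 * * 5 /usr/bin/python /usr/local/python/zabbix_hour_avg_monitor.py >> /tmp/zabbix_monitor_report.log 2>&1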
