diff --git a/README.md b/README.md
index 087f407..74a1ce4 100644
--- a/README.md
+++ b/README.md
@@ -6,3 +6,5 @@ sa-scripts
@……@ Enjoy yourself
博客:http://www.simlinux.com
By Geekwolf
+
+
diff --git a/ops-scripts/lua-ngx-log.md b/ops-scripts/lua-ngx-log.md
new file mode 100644
index 0000000..bcbeb7f
--- /dev/null
+++ b/ops-scripts/lua-ngx-log.md
@@ -0,0 +1,117 @@
+#### 1. install softs
+
+```
+wget https://github.com/openresty/lua-nginx-module/archive/v0.10.15.tar.gz
+wget http://tengine.taobao.org/download/tengine-2.3.2.tar.gz
+wget https://github.com/simplresty/ngx_devel_kit/archive/v0.3.1.tar.gz
+wget https://openresty.org/download/openresty-1.15.8.1.tar.gz
+```
+#### 2. install openresty luajit2 lib
+```
+./configure --prefix=/usr/local/openresty-1.15.8.1
+make
+make install
+ln -s /usr/local/openresty-1.15.8.1/luajit/lib/libluajit-5.1.so.2 /lib64/libluajit-5.1.so.2
+export LUAJIT_LIB=/usr/local/openresty-1.15.8.1/luajit/lib/
+export LUAJIT_INC=/usr/local/openresty-1.15.8.1/luajit/include/luajit-2.1/
+
+```
+#### 3. install tengine
+
+```
+./configure --prefix=/usr/local/tengine/ --with-http_gzip_static_module --add-module=/data/data/softs/lua-nginx-module-0.10.15/ --add-module=/data/data/softs/ngx_devel_kit-0.3.1/
+make
+make install
+```
+
+#### 4. Nginx Configure: log_by_lua_file
+```
+log_by_lua_file /usr/local/tengine/conf/lua/ngx_log.lua;
+```
+
+#### 5. ngx_log.lua
+```
+
+
+-- local file_name
+
+-- if ngx.var.file_type == 'audit' then
+-- file_name = '/usr/local/apps/nginx/logs/audit.log'
+-- elseif ngx.var.file_type == 'mapi'then
+-- file_name = '/usr/local/apps/nginx/logs/access.log'
+-- end
+
+
+local file_audit = '/usr/local/apps/nginx/logs/audit.log'
+local file_mapi = '/usr/local/apps/nginx/logs/access.log'
+
+
+function write_content(file_name, content)
+ local f = assert(io.open(file_name,'a'))
+ f:write(content)
+ f:close()
+end
+
+function urlDecode(s)
+ if(nil ~= s)
+ then
+ s = string.gsub(s, '%%(%x%x)', function(h) return string.char(tonumber(h, 16)) end)
+ return s
+ end
+end
+
+function handle_body(s)
+ s = urlDecode(s)
+ if(nil ~= s)
+ then
+ return string.gsub(s,'payMethodInfo=(.-)&','payMethodInfo=***&')
+ end
+end
+
+
+local remote_addr = ngx.var.remote_addr
+local http_x_forwarded_for = ngx.var.http_x_forwarded_for
+local time_local = ngx.var.time_local
+local server_name = ngx.var.server_name
+local request_method = ngx.var.request_method
+local scheme = ngx.var.scheme
+local host = ngx.var.host
+local request_uri = ngx.var.request_uri
+local status = ngx.var.status
+local referer = ngx.var.referer
+local body = handle_body(ngx.var.request_body)
+local http_user_agent = ngx.var.http_user_agent
+local upstream_status = ngx.var.upstream_status
+local request_time = ngx.var.request_time
+local upstream_response_time = ngx.var.upstream_response_time
+local http_host = ngx.var.http_host
+local scheme_http_host_request_uri = ngx.var.scheme..'://'..http_host..request_uri
+local body_bytes_sent = ngx.var.body_bytes_sent
+local http_referer = ngx.var.http_referer
+local upstream_addr = ngx.var.upstream_addr
+
+
+
+-- if ngx.var.file_type == 'audit' then
+-- local extend = string.format('srcip=%s&x_srcip=%s&time="%s"&server=%s&server_ip=%s&method=%s&link="%s://%s%s&status=%s&referer="%s"&post="%s"&user_agent="%s"',remote_addr,http_x_forwarded_for,time_local,server_name,server_addr,request_method,scheme,host,request_uri,status,referer,body,http_user_agent)
+-- if(ngx.req.get_method() == 'POST')
+-- then
+-- write_content(file_audit, extend..'\n')
+-- end
+-- elseif ngx.var.file_type == 'mapi' then
+-- local extend = string.format('{"upstream_status":%s,"request_time":%s,"upstream_response_time":%s,"remote_addr":%s,"time_local":%s,"scheme_http_host_request_uri":%s,"status":%s,"body_bytes_sent":%s,"http_referer":%s,"request_body":%s,"http_user_agent":%s,"http_x_forwarded_for":%s,"upstream_addr":%s}',upstream_status,request_time,upstream_response_time,remote_addr,time_local,scheme_http_host_request_uri,status,body_bytes_sent,http_referer,request_body,http_user_agent,http_x_forwarded_for,upstream_addr)
+-- write_content(file_mapi, extend..'\n')
+-- end
+
+local extend_audit = string.format('srcip=%s&x_srcip=%s&time="%s"&server=%s&server_ip=%s&method=%s&link="%s://%s%s&status=%s&referer="%s"&post="%s"&user_agent="%s"',remote_addr,http_x_forwarded_for,time_local,server_name,ngx.var.server_addr,request_method,scheme,host,request_uri,status,referer,body,http_user_agent)
+local extend_mapi = string.format('{"upstream_status":%s,"request_time":%s,"upstream_response_time":%s,"remote_addr":%s,"time_local":%s,"scheme_http_host_request_uri":%s,"status":%s,"body_bytes_sent":%s,"http_referer":%s,"request_body":%s,"http_user_agent":%s,"http_x_forwarded_for":%s,"upstream_addr":%s}',upstream_status,request_time,upstream_response_time,remote_addr,time_local,scheme_http_host_request_uri,status,body_bytes_sent,http_referer,body,http_user_agent,http_x_forwarded_for,upstream_addr)
+
+if(ngx.req.get_method() == 'POST')
+then
+ write_content(file_audit, extend_audit..'\n')
+end
+write_content(file_mapi, extend_mapi..'\n')
+
+```
+#### 6.Lua加载顺序
+
diff --git a/ops-scripts/migratetoconfluence.py b/ops-scripts/migratetoconfluence.py
new file mode 100644
index 0000000..f8e40ad
--- /dev/null
+++ b/ops-scripts/migratetoconfluence.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+# @Author: Geekwolf
+# @Date: 2018-05-24 17:58:16
+# @Last Modified by: Geekwolf
+# @Last Modified time: 2018-05-25 19:50:37
+
+import pymysql
+import collections
+import requests
+import json
+import markdown
+
+
+class DBHelper(object):
+ """docstring for DBHelper"""
+
+ def __init__(self):
+ self.host = '192.168.1.1'
+ self.user = 'wiki'
+ self.password = 'password'
+ self.database = 'db'
+ self.conn = None
+ self.cur = None
+
+ def ConnDB(self):
+ try:
+ self.conn = pymysql.connect(self.host, self.user, self.password, self.database, charset='utf8')
+ except Exception as e:
+ print(str(e))
+ return False
+ self.cur = self.conn.cursor()
+ return True
+
+ def Close(self):
+ if self.conn and self.cur:
+ self.cur.close()
+ self.conn.close()
+ return True
+
+ def Execute(self, sql, params=None):
+ self.ConnDB()
+ try:
+ if self.conn and self.cur:
+ self.cur.execute(sql, params)
+ self.conn.commit()
+ except Exception as e:
+ print(str(e))
+ self.Close()
+ return False
+ return True
+
+ def Select(self, sql, params=None):
+ self.Execute(sql, params)
+ return self.cur.fetchall()
+
+
+class SyncWiki(object):
+ """docstring for SyncWiki"""
+
+ def __init__(self, ):
+
+ # The Space:autotest Key Name
+ self.space = 'ops'
+ self.url = 'http://confluence'
+ self.username = 'geekwolf'
+ self.password = 'geekwolf'
+ self.session = self.GetSession()
+ # self.home_page = '{} Home'.format(self.space)
+ self.home_page='ops'
+ self.headers = {'Content-Type': 'application/json'}
+ self.dbhelper = DBHelper()
+
+ def GetSession(self):
+ session = requests.session()
+ data = {'os_username': self.username, 'os_password': self.password, 'login': 'Log in'}
+ res = session.post(self.url, data)
+ return session
+
+ def MarkdownToHtml(self, content):
+ # convert_url = "{}/rest/api/contentbody/convert/storage".format(self.url)
+ # print(convert_url)
+ # data = {"value": content, "representation": "wiki"}
+ # ret = self.session.post(convert_url, json.dumps(data), headers=self.headers)
+
+ ret = markdown.markdown(content, extensions=['fenced_code', 'codehilite', 'extra', 'abbr', 'attr_list', 'def_list', 'footnotes',
+ 'tables', 'smart_strong', 'admonition', 'codehilite', 'headerid', 'meta', 'nl2br', 'sane_lists', 'smarty', 'toc', 'wikilinks'])
+ return ret
+
+ def GetPageId(self, title):
+ '''
+ 通过分类名称获取在Confluence中的id
+ '''
+ content_url = '{}/rest/api/content?spaceKey={}&title={}'.format(self.url, self.space, title)
+ data = self.session.get(content_url).json()
+ id = data['results'][0]['id']
+ return id
+
+ def CreatePageMethod(self, id, title, value=None):
+
+ page_url = '{}/rest/api/content'.format(self.url)
+ data = {"type": "page", "ancestors": [{"id": id}], "title": title, "space": {
+ "key": self.space}, "body": {"storage": {"value": value, "representation": "storage"}}}
+ self.session.post(page_url, json.dumps(data), headers=self.headers)
+
+ def CreateTypePage(self):
+ '''
+ 创建分类页面(二级分类)
+ '''
+ group_page_url = '{}/rest/api/content'.format(self.url)
+ group_info = self.GetGroupInfo()
+
+ try:
+ for k, v in group_info.items():
+ self.CreatePageMethod(self.GetPageId(self.home_page), k)
+ for i in v:
+ self.CreatePageMethod(self.GetPageId(k), i)
+ ret = True
+ except Exception as e:
+ print(str(e))
+ ret = False
+ return ret
+
+ def CreatePage(self):
+ '''
+ 根据标题和内容创建对应子类的页面
+ '''
+ content = self.GetWiki()
+ for i in content:
+ try:
+ id = self.GetPageId(i[0])
+ title = i[1]
+ value = self.MarkdownToHtml(i[2])
+ self.CreatePageMethod(id, title, value=value)
+ print('{}------{}已经创建'.format(i[0], i[1]))
+ except Exception as e:
+ print('{}------{}创建失败:{}'.format(i[0], i[1], str(e)))
+
+ def GetGroupInfo(self):
+
+ sql = 'select * from wiki_group;'
+ result = self.dbhelper.Select(sql)
+ _group_info = collections.defaultdict(list)
+ _group_dict = dict([(r[0], r[1]) for r in result])
+
+ for r in result:
+ if r[3] is None:
+ _group_info[r[1]] = []
+ else:
+ _group_info[_group_dict[r[3]]].append(r[1])
+ return _group_info
+
+ def GetWiki(self):
+
+ sql = 'SELECT g.name,w.title,w.content from wiki_wiki as w LEFT JOIN wiki_group as g ON w.group_id = g.id'
+ result = self.dbhelper.Select(sql)
+ return result
+
+
+if __name__ == '__main__':
+
+ ins = SyncWiki()
+ if ins.CreateTypePage():
+ ins.CreatePage()
+
diff --git a/ops-scripts/plogstash/README.md b/ops-scripts/plogstash/README.md
new file mode 100644
index 0000000..f0e4c85
--- /dev/null
+++ b/ops-scripts/plogstash/README.md
@@ -0,0 +1,10 @@
+#### 用途
+
+- 基于Redis List日志消息,归档日志文件
+- 解决Logstash(新版本单线程可解决)归档乱序问题
+- 架构: Filebeat->Redis->Plogstash->Files
+#### 用法:
+```
+python3 plogstash.py
+Usage: plogstash.py [start|stop|restart|status]
+```
diff --git a/ops-scripts/plogstash/plogstash.py b/ops-scripts/plogstash/plogstash.py
new file mode 100644
index 0000000..a9b34b7
--- /dev/null
+++ b/ops-scripts/plogstash/plogstash.py
@@ -0,0 +1,244 @@
+# -*- coding: utf-8 -*-
+# @Author: Geekwolf
+# @Date: 2018-01-29 14:23:04
+# @Last Modified by: Geekwolf
+# @Last Modified time: 2018-01-31 10:55:01
+
+#!/usr/bin/env python3
+# daemon.py
+
+import os
+import sys
+import time
+import redis
+import json
+import re
+import atexit
+import signal
+# import collections
+
+
+class Base(object):
+
+ def __init__(self, *args, **kwargs):
+
+ self.pidfile = '/var/run/plogstash.pid'
+ self.service_name = 'Plogstash'
+ self.path = '/var/log/plogstash'
+ os.makedirs(self.path, exist_ok=True)
+ self.logfile = '%s/%s.log' % (self.path, self.service_name)
+
+ self.redis_host = '127.0.0.1'
+ self.redis_password = 'geekwolf'
+ self.redis_port = 5044
+ self.redis_db = 0
+ self.redis_key = 'filebeat'
+ self.batch_size = 5000
+ self.expires = 5 # second
+ self.archive_time = 1 # how long time to archive
+ self.base_dir = '/data/logs'
+ # self._tmp = '/tmp/.%s' % self.service_name
+
+
+class Daemon(Base):
+
+ def __init__(self, *args, **kwargs):
+ super(Daemon, self).__init__(*args, **kwargs)
+
+ def daemonize(self):
+
+ # First fork (detaches from parent)
+ try:
+ if os.fork() > 0:
+ raise SystemExit(0) # Parent exit
+ except OSError as e:
+ raise RuntimeError('fork #1 failed.')
+
+ os.chdir('/')
+ # set this will 777
+ # os.umask(0)
+ os.setsid()
+ # Second fork (relinquish session leadership)
+ try:
+ if os.fork() > 0:
+ raise SystemExit(0)
+ except OSError as e:
+ raise RuntimeError('fork #2 failed.')
+
+ # Flush I/O buffers
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ # Replace file descriptors for stdin, stdout, and stderr
+ with open(self.logfile, 'ab', 0) as f:
+ os.dup2(f.fileno(), sys.stdout.fileno())
+ with open(self.logfile, 'ab', 0) as f:
+ os.dup2(f.fileno(), sys.stderr.fileno())
+ with open(self.logfile, 'rb', 0) as f:
+ os.dup2(f.fileno(), sys.stdin.fileno())
+
+ # Write the PID file
+ print(os.getpid())
+ with open(self.pidfile, 'w') as f:
+ print(os.getpid(), file=f)
+
+ # Arrange to have the PID file removed on exit/signal
+ atexit.register(lambda: os.remove(self.pidfile))
+
+ # Signal handler for termination (required)
+ def sigterm_handler(signo, frame):
+ raise SystemExit(1)
+
+ signal.signal(signal.SIGTERM, sigterm_handler)
+
+ def get_now_date(self):
+
+ return time.strftime('%Y-%m-%d', time.localtime(time.time()))
+
+ def get_now_timestamp(self):
+
+ return time.time()
+
+ def get_now_time(self):
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+
+ def logging(self, msg):
+
+        with open(self.logfile, 'a') as f:
+            print('%s %s' % (self.get_now_time(), msg), file=f)
+
+ def append_log(self):
+ pass
+
+ def start(self):
+
+ if os.path.exists(self.pidfile):
+ raise RuntimeError('Already running')
+ else:
+ try:
+ self.daemonize()
+ self.append_log()
+ self.status()
+ except RuntimeError as e:
+ print(e, file=sys.stderr)
+ raise SystemExit(1)
+
+ def stop(self):
+
+ # f = os.open(self.pipe_path, os.O_RDONLY | os.O_NONBLOCK)
+ # ret = os.read(f, 1024).decode('utf-8')
+ # print(ret.split('\n'))
+ # os.close(f)
+
+ if os.path.exists(self.pidfile):
+ # with open(self._tmp) as f:
+ # _data = f.read()
+ # if _data is not None and len(eval(_data)) > 0:
+ # for k, v in eval(_data).items():
+ # v = v['fd'].rstrip('\n')
+ # v.close()
+ with open(self.pidfile) as f:
+ os.kill(int(f.read()), signal.SIGTERM)
+ print('Plogstash is stopped')
+ else:
+ print('Not running', file=sys.stderr)
+ raise SystemExit(1)
+
+ def restart(self):
+
+ self.stop()
+ self.start()
+
+ def status(self):
+
+ try:
+ with open(self.pidfile, 'r') as f:
+ pid = int(f.read().strip())
+ except:
+ pid = None
+
+ if pid:
+ print('%s is running as pid:%s' % (self.service_name, pid))
+ else:
+ print('%s is not running' % self.service_name)
+
+
+class Worker(Daemon):
+
+ def __init__(self, *args, **kwargs):
+        super(Worker, self).__init__(*args, **kwargs)
+
+ def _redis(self):
+
+ pool = redis.ConnectionPool(host=self.redis_host, password=self.redis_password, port=self.redis_port, db=self.redis_db, socket_timeout=10000)
+ rc = redis.StrictRedis(connection_pool=pool)
+ return rc
+
+ def get_redis_data(self):
+
+ _data = self._redis().lrange(self.redis_key, 0, self.batch_size - 1)
+ # 删除数据(可考虑处理完再删除)
+ return _data
+
+ def del_redis_data(self):
+
+ _data = self._redis().ltrim(self.redis_key, self.batch_size, -1)
+
+ def append_log(self):
+
+ file_meta = {}
+ # file_handler = collections.defaultdict(dict)
+ # try:
+ # os.mkfifo(self.pipe_path)
+ # except Exception as e:
+ # print(str(e))
+
+ # pipe_ins = os.open(self.pipe_path, os.O_SYNC | os.O_CREAT | os.O_RDWR)
+ while True:
+ time.sleep(self.archive_time)
+ _data = self.get_redis_data()
+ if _data:
+ for _d in _data:
+ try:
+ _d = json.loads(_d.decode('utf-8'))
+ _path = '%s/%s/%s/%s' % (self.base_dir, _d['fields']['env'], self.get_now_date(), _d['fields']['ip_address'])
+ os.makedirs(_path + '/logs', exist_ok=True)
+ file_name = _d['source'].split('/')[-1]
+ # _path = '%s/%s/%s/%s' % (self.base_dir, _d['fields']['env'],self.get_now_date(), _d['fields']['ip_address'])
+
+ if re.match('nohup', file_name):
+ file_path = '%s/%s' % (_path, file_name)
+ else:
+ file_path = '%s/logs/%s' % (_path, file_name)
+
+ with open(file_path, 'a') as f:
+ f.write(_d['message'] + '\n')
+ # if 'fd' not in file_handler[file_path]:
+ # f = open(file_path, 'a', buffering=1024000)
+ # file_handler[file_path]['fd'] = str(f)
+ # file_handler[file_path]['time'] = self.get_now_timestamp()
+ except Exception as e:
+ self.logging(str(e))
+ self.del_redis_data()
+ # with open(self._tmp, 'w') as f:
+ # f.write(json.dumps(file_handler))
+
+if __name__ == '__main__':
+
+ if len(sys.argv) != 2:
+ print('Usage: {} [start|stop|restart|status]'.format(sys.argv[0]), file=sys.stderr)
+ raise SystemExit(1)
+
+ daemon = Worker()
+ if sys.argv[1] == 'start':
+ daemon.start()
+ elif sys.argv[1] == 'stop':
+ daemon.stop()
+ elif sys.argv[1] == 'restart':
+ print("Restart ...")
+ daemon.restart()
+ elif sys.argv[1] == 'status':
+ daemon.status()
+ else:
+ print('Unknown command {!r}'.format(sys.argv[1]), file=sys.stderr)
+ raise SystemExit(1)
diff --git a/ops-scripts/zabbix/zabbix_report_email/README.md b/ops-scripts/zabbix/zabbix_report_email/README.md
new file mode 100644
index 0000000..7cc3f98
--- /dev/null
+++ b/ops-scripts/zabbix/zabbix_report_email/README.md
@@ -0,0 +1,16 @@
+#### 版本
+```
+ Python2.7
+```
+#### 安装依赖
+```
+ pip install -r requirements.txt
+```
+#### 使用说明
+1. 邮件图文告警
+ 在Zabbix配置邮件发送告警脚本
+2. 报表
+ 在config.ini中graph配置info(要出报表的主机及对应的graphid)
+```
+ python report.py report
+```
diff --git a/ops-scripts/zabbix/zabbix_report_email/config.ini b/ops-scripts/zabbix/zabbix_report_email/config.ini
new file mode 100644
index 0000000..6cc567d
--- /dev/null
+++ b/ops-scripts/zabbix/zabbix_report_email/config.ini
@@ -0,0 +1,28 @@
+[monitor]
+zbx_url = http://zbx.simlinux.com/
+graph_url = chart2.php
+item_graph_url = chart.php
+username = geekwolf
+password = geekwolf
+temp_dir = tmp
+log_file = zbx.log
+
+[graph]
+#显示执行时前一天的数据,报表使用
+period = 86400
+width = 580
+height = 600
+info = [{"name":"HOST1","graphids":[1304,1306,1301,1302]},{"name":"HOST2","graphids":[1296,1298,1293,1294]},{"name":"HOST3","graphids":[1247,1263,1251,1267]}]
+
+[ftp]
+host = 1.1.1.1
+port = 21
+username = ftpuser
+password = ftpuser
+
+[email]
+smtpserver = email.simlinux.com
+username = geekwolf@simlinux.com
+password = test
+port = 25
+
diff --git a/ops-scripts/zabbix/zabbix_report_email/default.docx b/ops-scripts/zabbix/zabbix_report_email/default.docx
new file mode 100644
index 0000000..85201dd
Binary files /dev/null and b/ops-scripts/zabbix/zabbix_report_email/default.docx differ
diff --git a/ops-scripts/zabbix/zabbix_report_email/report.py b/ops-scripts/zabbix/zabbix_report_email/report.py
new file mode 100644
index 0000000..b6a94e4
--- /dev/null
+++ b/ops-scripts/zabbix/zabbix_report_email/report.py
@@ -0,0 +1,241 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# @Author: Geekwolf
+# @Date: 2018-05-07 13:26:12
+# @Last Modified by: Geekwolf
+# @Last Modified time: 2018-07-11 14:11:00
+
+import ConfigParser
+import cookielib
+import urllib2
+import urllib
+import ast
+import datetime,time
+from docx import Document
+from docx.shared import Pt, RGBColor
+from docx.enum.text import WD_ALIGN_PARAGRAPH
+from docx.oxml.ns import qn
+from docx.enum.style import WD_STYLE_TYPE
+from docx.shared import Inches
+from io import BytesIO
+import collections
+from ftplib import FTP
+import os
+import mimetypes
+import sys
+import smtplib
+from email.header import Header
+from email.mime.text import MIMEText
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.utils import parseaddr, formataddr
+
+config = ConfigParser.RawConfigParser()
+config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)),'./config.ini'))
+
+class ZabbixGraph(object):
+
+ def __init__(self):
+
+ self.url = config.get('monitor', 'zbx_url')
+ self.username = config.get('monitor', 'username')
+ self.password = config.get('monitor', 'password')
+ self.graph_url = self.url + config.get('monitor', 'graph_url')
+ self.item_graph_url = self.url + config.get('monitor', 'item_graph_url')
+ self.width = config.get('graph', 'width')
+ self.height = config.get('graph', 'height')
+ self.period = config.get('graph', 'period')
+ self.temp_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),config.get('monitor', 'temp_dir'))
+ self.log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),config.get('monitor','log_file'))
+ if not os.path.exists(self.temp_dir):
+ os.makedirs(self.temp_dir)
+ self.urlOpener = self.GetSession()
+
+ def GetSession(self):
+
+ cookiejar = cookielib.CookieJar()
+ urlOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))
+ values = {"name": self.username, 'password': self.password, 'autologin': 1, "enter": 'Sign in'}
+ data = urllib.urlencode(values)
+ request = urllib2.Request(self.url, data)
+ try:
+ urlOpener.open(request, timeout=10)
+ self.urlOpener = urlOpener
+ except urllib2.HTTPError, e:
+ print e
+ return urlOpener
+
+ def Log(self,rec):
+ t = time.strftime('%Y-%m-%d %H:%M:%S')
+ with open(self.log_file,'a') as f:
+ f.write('{} {}'.format(str(t),str(rec)))
+
+
+ def GetRequest(self,values,url,id):
+
+ _data = urllib.urlencode(values)
+ request = urllib2.Request(url, _data)
+ url = self.urlOpener.open(request)
+ ext = mimetypes.guess_extension(url.headers['content-type'])
+ imagename = '{}/{}{}'.format(self.temp_dir, str(id), ext)
+ with open(imagename, 'wb') as f:
+ f.write(url.read())
+ return imagename
+
+ def GetItemGraph(self,id):
+
+ values = {'itemids': id, 'width': self.width, 'height': self.height, 'period': self.period}
+ imagename = self.GetRequest(values,self.item_graph_url,id)
+ return imagename
+
+ def GetGraph(self):
+
+ info = ast.literal_eval(config.get('graph', 'info'))
+ data = collections.defaultdict(list)
+ for i in info:
+ values = {}
+ for j in i['graphids']:
+ values = {'graphid': j, 'width': self.width, 'height': self.height, 'period': self.period}
+ imagename = self.GetRequest(values,self.graph_url,j)
+ # image = BytesIO()
+ # image.write(url.read())
+ data[i['name']].append(imagename)
+ # imagename = "%s/%s.png" % (self.temp_dir, str(j) + i['name'])
+ # f = open(imagename, 'wb')
+ # f.write(image)
+ self.WriteDoc(data)
+
+ def GetStyles(self):
+
+ # doc = Document()
+ # 在脚本打包成二进制时,需要指定default.docx路径,否则会报错
+ doc = Document(docx=os.path.join(os.getcwd(), 'default.docx'))
+ style_head = doc.styles.add_style('style_head', WD_STYLE_TYPE.PARAGRAPH)
+ style_head.font.size = Pt(25)
+ style_head.font.name = u'微软雅黑'
+ style_head.font.bold = True
+ style_head._element.rPr.rFonts.set(qn('w:eastAsia'), u'微软雅黑')
+ style_head.paragraph_format.alignment = WD_ALIGN_PARAGRAPH.CENTER
+
+ style_title = doc.styles.add_style('style_title', WD_STYLE_TYPE.PARAGRAPH)
+ style_title.font.size = Pt(15)
+ style_title.font.name = u'微软雅黑'
+ style_title.font.bold = True
+ style_title._element.rPr.rFonts.set(qn('w:eastAsia'), u'微软雅黑')
+
+ sub_title = doc.styles.add_style('sub_title', WD_STYLE_TYPE.PARAGRAPH)
+ sub_title.font.size = Pt(10)
+ sub_title.font.name = u'微软雅黑'
+ sub_title.font.bold = True
+ sub_title._element.rPr.rFonts.set(qn('w:eastAsia'), u'微软雅黑')
+
+ return doc, style_head, style_title, sub_title
+
+ @staticmethod
+ def GetYesterdayTime():
+
+ _time = datetime.date.today() - datetime.timedelta(days=1)
+ return str(_time)
+
+ def WriteDoc(self, data):
+
+ doc, style_head, style_title, sub_title = self.GetStyles()
+ _dict = {0: '一', 1: '二', 2: '三'}
+ _time = ZabbixGraph.GetYesterdayTime()
+ head = doc.add_paragraph(u'zbx监控报表', style='style_head')
+ sub_head = doc.add_paragraph(_time)
+ sub_head.paragraph_format.alignment = WD_ALIGN_PARAGRAPH.CENTER
+ for d in enumerate(data):
+ title = '{}、{}'.format(_dict[d[0]], d[1])
+ doc.add_paragraph(title.decode('utf8'), style='style_title')
+ for idx, val in enumerate(data[d[1]]):
+ #sub_title = u'内存' if idx%2 == 1 else u'CPU'
+ if idx >=2:
+ sub_title = u'内存'
+ else:
+ sub_title = u'CPU'
+ if idx%2 != 1:
+ doc.add_paragraph(sub_title, style='sub_title')
+ doc.add_picture(val, width=Inches(6.5), height=Inches(3))
+ file = 'report-{}.docx'.format(('').join(_time.split('-')))
+ doc.save(file)
+ #如果将报表上传ftp,可以去掉注释
+ #self.FtpUpload(file)
+
+ def DelTemp(self):
+
+ os.system('rm -rf report-* {}'.format(self.temp_dir))
+
+ def FtpUpload(self, file):
+
+ host = config.get('ftp', 'host')
+ port = config.get('ftp', 'port')
+ username = config.get('ftp', 'username')
+ password = config.get('ftp', 'password')
+ ftp = FTP(host=host)
+ ftp.login(user=username, passwd=password)
+ ftp.storbinary('STOR ' + file, open(file, 'rb'))
+ ftp.quit()
+ self.DelTemp()
+
+class AlarmInfo(ZabbixGraph):
+
+ def format(content):
+
+ name, addr = parseaddr(content)
+ return formataddr((Header(name, 'utf-8').encode(), addr))
+
+ def Email(self):
+
+ smtpserver = config.get('email', 'smtpserver')
+ username = config.get('email','username')
+ password = config.get('email','password')
+        port = config.get('email','port')
+ try:
+ smtp = smtplib.SMTP()
+ smtp.connect(smtpserver)
+ smtp.login(username,password)
+ return smtp
+ except Exception as e:
+ self.Log(str(e))
+
+ def SendEmail(self,_info):
+
+ itemid = _info[2].split('|')[0]
+ imagename = self.GetItemGraph(itemid)
+ fro = config.get('email','username')
+        _content = ('<br>').join(_info[3].split('\n'))
+        content = '{}<br><img src="cid:image1">'.format(_content)
+ msg = MIMEMultipart()
+ #msg['From'] = '监控告警<{}>'.format(fro).decode('utf-8')
+ msg['From'] = "%s<%s>" % (Header("监控告警","utf-8"),fro)
+ msg['Subject'] = Header((': ').join(_info[2].split('|')[-2:]),'utf-8')
+ msg['To'] = _info[1]
+ msg.attach(MIMEText(content,'html','utf-8'))
+ with open(imagename,'rb') as f:
+ img = MIMEImage(f.read())
+            img.add_header('Content-ID', '<image1>')
+ msg.attach(img)
+ try:
+ email = self.Email()
+ email.sendmail(fro,_info[1],msg.as_string())
+ email.quit()
+ except Exception as e:
+ self.Log(str(e))
+
+ def main(self,_info):
+ try:
+ if len(_info) == 4:
+ rec = '{}\t{}\n{}\n'.format(_info[1],_info[2],_info[3])
+ self.Log(rec)
+ self.SendEmail(_info)
+ elif len(_info) == 2 and _info[1] == 'report':
+ self.GetGraph()
+ except Exception as e:
+ self.Log(str(e))
+
+
+if __name__ == '__main__':
+
+ ins = AlarmInfo()
+ ins.main(sys.argv)
diff --git a/ops-scripts/zabbix/zabbix_report_email/requirements.txt b/ops-scripts/zabbix/zabbix_report_email/requirements.txt
new file mode 100644
index 0000000..cbc7840
--- /dev/null
+++ b/ops-scripts/zabbix/zabbix_report_email/requirements.txt
@@ -0,0 +1,3 @@
+ConfigParser
+python-docx
+email
diff --git a/ops-scripts/zabbix/zabbix_report.py b/ops-scripts/zabbix/zabbix_report_excel.py
similarity index 100%
rename from ops-scripts/zabbix/zabbix_report.py
rename to ops-scripts/zabbix/zabbix_report_excel.py
diff --git "a/\345\274\200\346\272\220\345\267\245\345\205\267\344\270\200\350\247\210\350\241\250.md" "b/\345\274\200\346\272\220\345\267\245\345\205\267\344\270\200\350\247\210\350\241\250.md"
index 356add7..eb1a292 100644
--- "a/\345\274\200\346\272\220\345\267\245\345\205\267\344\270\200\350\247\210\350\241\250.md"
+++ "b/\345\274\200\346\272\220\345\267\245\345\205\267\344\270\200\350\247\210\350\241\250.md"
@@ -1,17 +1,18 @@
Blog:[http://www.simlinux.com](http://www.simlinux.com)
WeiXin: Geekwolf
+##### 欢迎补充~
**Bootstrapping:** [云霁X86装机工具](http://github.com/idcos/osinstall)、Kickstart、Cobbler、rpmbuild/xen、kvm、lxc、Openstack、 Cloudstack、Opennebula、Eucalyplus、RHEV
**配置类工具:** Capistrano、Chef、puppet、func、salstack、Ansible、rundeck、CFengine、Rudder
**web管理平台:** [Redis云管理平台-CacheCloud](https://github.com/sohutv/cachecloud)
**自动化构建和测试:** Ant、Maven、Selenium、PyUnit、QUnit、JMeter、Gradle、PHPUnit
-**监控类工具:** Cacti、Nagios(Icinga)、Zabbix([模板大全](https://monitoringartist.github.io/zabbix-searcher/))、基于时间监控前端Grafana、Mtop、MRTG(网络流量监控图形工具)、[Monit](https://mmonit.com/) 、Diamond+Graphite+Grafana
+**监控类工具:** Cacti、Nagios(Icinga)、Zabbix([模板大全](https://monitoringartist.github.io/zabbix-searcher/))、基于时间监控前端Grafana、Mtop、MRTG(网络流量监控图形工具)、[Monit](https://mmonit.com/) 、Diamond+Graphite+Grafana、[netdata](https://my-netdata.io/)
**微服务平台:** OpenShift、Cloud Foundry、Kubernetes、Mesosphere
**性能监控工具:** dstat(多类型资源统计)、atop(htop/top)、nmon(类Unix系统性能监控)、slabtop(内核slab缓存信息)、sar(性能监控和瓶颈检查)、sysdig(系统进程高级视图)、tcpdump(网络抓包)、iftop(类似top的网络连接工具)、iperf(网络性能工具)、smem)(高级内存报表工具)、collectl(性能监控工具)、[TCP优化监控工具tcpdive](https://github.com/fastos/tcpdive)
**响应时间统计工具:** [tcprstat](https://github.com/Lowercases/tcprstat)
-**免费APM工具:** [mmtrix(见过的最全面的分析工具)](http://www.mmtrix.com/evaluate/result)、[alibench](http://alibench.com/)、[JAVA性能监控pinpoint](https://github.com/naver/pinpoint)、[cat](https://github.com/dianping/cat)
+**免费APM工具:** [mmtrix(见过的最全面的分析工具)](http://www.mmtrix.com/evaluate/result)、[alibench](http://alibench.com/)、[JAVA性能监控pinpoint](https://github.com/naver/pinpoint)、[cat](https://github.com/dianping/cat)、[skywalking](http://skywalking.org/)、[UAVStack](https://uavorg.github.io/main/)、[Google Opencensus](http://opencensus.io/)、[OpenZipkin](https://zipkin.io/)
**进程监控:** [mmonit](http://mmonit.com/monit/documentation/monit.html)、Supervisor、[frigga](https://github.com/xiaomi-sa/frigga)、 [StrongLoop Process Manager](http://strong-pm.io/compare/)
**日志系统:** Logstash、Scribe、Graylog、ELKStack
**绘图工具:** RRDtool、Gnuplot
@@ -19,7 +20,7 @@ WeiXin: Geekwolf
**数据库可视化:** zeppelin、metabase、Heka、redash、superset
**流控系统:** Panabit、[在线数据包分析工具Pcap Analyzer](http://le4f.net/post/post/pcap-online-analyzer)
**安全检查:** chrootkit、rkhunter
-**PaaS:** Cloudify、Cloudfoundry、Openshift、[Deis](http://www.deis.io/) (Docker、CoreOS、[Atomic](https://access.redhat.com/articles/rhel-atomic-getting-started)、[ubuntu core/Snappy](http://www.ubuntu.com/cloud/tools/snappy)、[RancherOS](http://rancher.com))
+**PaaS:** Cloudify、Cloudfoundry、Openshift、[Deis](http://www.deis.io/) (Docker、CoreOS、[Atomic](https://access.redhat.com/articles/rhel-atomic-getting-started)、[ubuntu core/Snappy](http://www.ubuntu.com/cloud/tools/snappy)、[RancherOS](http://rancher.com))、[DomeOS](http://domeos.org)、[Rainbond](http://www.rainbond.com/)
**Troubleshooting:**[Sysdig](http://www.sysdig.org/) 、Systemtap、Perf
**服务发现:** [SmartStack](http://nerds.airbnb.com)、etcd
**持续集成:** [Go](http://www.go.cd)、Jenkins、Gitlab、[facebook代码审查工具phabricator](http://phabricator.org/)、[spinnaker](http://spinnaker.io/)、[PHP代码持续集成工具PHPCI](https://www.phptesting.org)
@@ -33,5 +34,9 @@ WeiXin: Geekwolf
**MySQL逻辑备份工具**: mysqldump、mysqlhotcopy、mydumper、MySQLDumper 、mk-parallel-dump/mk-parallel-restore
**MySQL物理备份工具**: Xtrabackup、LVM Snapshot
**MongoDB压测:** [iibench&sysbench](https://github.com/tmcallaghan)
-**数据库管理:** [数据库迁移工具flyway](https://flywaydb.org/)、表结构对比工具sqllog
-
+**数据库管理:** [数据库迁移工具flyway](https://flywaydb.org/)、表结构对比工具sqllog
+**大数据管理套件: ** [Ambari](http://incubator.apache.org/ambari/)、[Cloudera Manger(CDH)](https://www.cloudera.com)
+#### DevOps Tool
+**开源运维系统:** [autoops](https://github.com/hequan2017/autoops)、[OpsManage](https://github.com/welliamcao/OpsManage)、[opman-django](https://github.com/hgz6536/opman-django)
+**CMDB:** [CMDB(hequan)](https://github.com/pengzihe/cmdb)、[CMDB(voilet)](https://github.com/voilet/cmdb)、[roncoo-cmdb](https://github.com/roncoo/roncoo-cmdb)
+**故障管理&监控二次开发:** [fms](https://github.com/geekwolf/fms)