diff --git a/kippo.cfg.dist b/kippo.cfg.dist
index ffd407f..084904c 100644
--- a/kippo.cfg.dist
+++ b/kippo.cfg.dist
@@ -199,3 +199,17 @@ interact_port = 5123
 
 #[database_textlog]
 #logfile = kippo-textlog.log
+
+# ElasticSearch logging module
+#
+# Log login attempts to an ElasticSearch instance/cluster in realtime.
+# To transfer existing Kippo data to ES see: http://bruteforce.gr/kippo2elasticsearch
+#
+# To enable this module, remove the comments below, including the
+# [database_elasticsearch] line.
+
+#[database_elasticsearch]
+#host = 127.0.0.1
+#port = 9200
+#index = kippo
+#type = kippo
\ No newline at end of file
diff --git a/kippo/commands/base.py b/kippo/commands/base.py
index 89a798e..0b21796 100644
--- a/kippo/commands/base.py
+++ b/kippo/commands/base.py
@@ -79,9 +79,10 @@ def call(self):
 class command_uname(HoneyPotCommand):
     def call(self):
         if len(self.args) and self.args[0].strip() in ('-a', '--all'):
-            self.writeln(
-                'Linux %s 2.6.26-2-686 #1 SMP Wed Nov 4 20:45:37 UTC 2009 i686 GNU/Linux' % \
-                self.honeypot.hostname)
+            uname_list = list(os.uname())
+            uname_list[1] = self.honeypot.hostname
+            uname_str = " ".join(uname_list)
+            self.writeln(uname_str)
         else:
             self.writeln('Linux')
 commands['/bin/uname'] = command_uname
diff --git a/kippo/dblog/elasticsearch.py b/kippo/dblog/elasticsearch.py
new file mode 100644
index 0000000..17535d4
--- /dev/null
+++ b/kippo/dblog/elasticsearch.py
@@ -0,0 +1,154 @@
+import collections
+import GeoIP
+import time
+import json
+import uuid
+import os
+import datetime
+
+import pyes
+import pyes.exceptions
+from twisted.python import log
+
+from kippo.core import dblog
+
+
+# This is the ES mapping, we mostly need it to mark specific fields as "not_analyzed"
+kippo_mapping = {
+    "client": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "country": {
+        "type": "string"
+    },
+    "input": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "ip": {
+        "type": "string",
+        "index": "not_analyzed",
+        "fields": {
+            "ipv4": {
+                "type": "ip",
+            }
+        }
+    },
+    "log_type": {
+        "type": "string"
+    },
+    "outfile": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "password": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "sensor": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "session": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "success": {
+        "type": "boolean"
+    },
+    "timestamp": {
+        "type": "date",
+        "format": "dateOptionalTime"
+    },
+    "url": {
+        "type": "string",
+        "index": "not_analyzed"
+    },
+    "username": {
+        "type": "string",
+        "index": "not_analyzed"
+    }
+}
+
+
+class DBLogger(dblog.DBLogger):
+    def start(self, cfg):
+        self.es_host = cfg.get('database_elasticsearch', 'host')
+        self.es_port = cfg.get('database_elasticsearch', 'port')
+        self.es_index = cfg.get('database_elasticsearch', 'index')
+        self.es_type = cfg.get('database_elasticsearch', 'type')
+        self.es_conn = pyes.ES('{0}:{1}'.format(self.es_host, self.es_port))
+        self.run(cfg)
+
+    def run(self, cfg):
+        self.geoip = GeoIP.open(os.path.join(os.path.dirname(__file__), "geoip/GeoIP.dat"), GeoIP.GEOIP_STANDARD)
+        self.es_conn.indices.create_index_if_missing(self.es_index)
+        self.es_conn.indices.put_mapping(self.es_type, {'properties': kippo_mapping}, [self.es_index])
+
+    def createSession(self, peerIP, peerPort, hostIP, hostPort):
+        self.remote_ip = peerIP
+        self.sensor_ip = self.getSensor() or hostIP
+        sid = uuid.uuid1().hex
+        return sid
+
+    def handleClientVersion(self, session, args):
+        self.client_version = args['version']
+
+    def send_to_elasticsearch(self, json_doc):
+        self.es_conn.index(json_doc, self.es_index, self.es_type)
+
+    def handleLoginAttempt(self, session, args, success):
+        login_dict = collections.OrderedDict()
+        login_dict['log_type'] = "login_attempt"
+        login_dict['session'] = session
+        login_dict['success'] = success
+        login_dict['username'] = args['username']
+        login_dict['password'] = args['password']
+        login_dict['timestamp'] = datetime.datetime.utcnow().isoformat() + 'Z'
+        login_dict['country'] = self.geoip.country_code_by_addr(self.remote_ip)
+        login_dict['ip'] = self.remote_ip
+        login_dict['client'] = self.client_version
+        login_dict['sensor'] = self.sensor_ip
+        login_json = json.dumps(login_dict)
+        self.send_to_elasticsearch(login_json)
+
+    def handleLoginFailed(self, session, args):
+        self.handleLoginAttempt(session, args, 0)
+
+    def handleLoginSucceeded(self, session, args):
+        self.handleLoginAttempt(session, args, 1)
+
+    def handleCommandAttempt(self, session, args, success):
+        command_dict = collections.OrderedDict()
+        command_dict['log_type'] = "command"
+        command_dict['session'] = session
+        command_dict['success'] = success
+        command_dict['input'] = args['input']
+        command_dict['timestamp'] = datetime.datetime.utcnow().isoformat() + 'Z'
+        command_dict['country'] = self.geoip.country_code_by_addr(self.remote_ip)
+        command_dict['ip'] = self.remote_ip
+        command_dict['client'] = self.client_version
+        command_dict['sensor'] = self.sensor_ip
+        command_json = json.dumps(command_dict)
+        self.send_to_elasticsearch(command_json)
+
+    def handleCommand(self, session, args):
+        self.handleCommandAttempt(session, args, 1)
+
+    def handleUnknownCommand(self, session, args):
+        self.handleCommandAttempt(session, args, 0)
+
+    def handleFileDownload(self, session, args):
+        download_dict = collections.OrderedDict()
+        download_dict['log_type'] = "download"
+        download_dict['session'] = session
+        download_dict['url'] = args['url']
+        download_dict['outfile'] = args['outfile']
+        download_dict['timestamp'] = datetime.datetime.utcnow().isoformat() + 'Z'
+        download_dict['country'] = self.geoip.country_code_by_addr(self.remote_ip)
+        download_dict['ip'] = self.remote_ip
+        download_dict['client'] = self.client_version
+        download_dict['sensor'] = self.sensor_ip
+        download_json = json.dumps(download_dict)
+        self.send_to_elasticsearch(download_json)
diff --git a/kippo/dblog/geoip/GeoIP.dat b/kippo/dblog/geoip/GeoIP.dat
new file mode 100644
index 0000000..3a5cccd
Binary files /dev/null and b/kippo/dblog/geoip/GeoIP.dat differ