add: new attack family - proc

add: new attack for redis - discover-dbs
This commit is contained in:
cr0hn
2016-02-17 15:42:26 +01:00
parent 1e1b3ba36e
commit 46afa101cb
9 changed files with 392 additions and 268 deletions

View File

@@ -1,102 +0,0 @@
# -*- coding: utf-8 -*-
import pickle
import logging
from time import sleep
from modules import IModule
from kombu import Connection
from kombu.simple import Empty
from kombu.exceptions import SerializationError
from ...libs.core.structs import CommonData, AppSettings
from ...libs.core.models import IntegerField, StringField, SelectField, validators
log = logging.getLogger()
REDIS = "10.211.55.69"
class ModuleModel(CommonData):
    """Input model for the 'dump' module: target broker and polling options."""
    # Seconds to wait between polls of the remote queue.
    interval = IntegerField(default=4)
    # Host/IP of the broker to attack (mandatory).
    target = StringField([validators.required()])
    # Destination to export results to ("" -> do not export).
    export_results = StringField(default="")
    # Source to import previous results from (None -> do not import).
    import_results = StringField(default=None)
    # Broker protocol used to build the connection URL (scheme://target).
    broker_type = SelectField(default="redis", choices=[
        ("redis", "Redis server"),
        ("zmq", "ZeroMQ"),
        ("amqp", "RabbitMQ broker")
    ])
# ----------------------------------------------------------------------
class RemoteProcessModule(IModule):
    """
    Try to extract information from remote processes.
    """
    __model__ = ModuleModel

    name = "dump"
    description = "connect to remote server/s and dumps all available information"

    # ----------------------------------------------------------------------
    def run(self, config):
        """
        Entry point: silence kombu's stderr chatter, then dump broker info.

        :param config: validated ModuleModel instance with connection options
        """
        # --------------------------------------------------------------------------
        # Very dirty monkey patch to avoid kombu writing onto the screen.
        # BUGFIX: /dev/null must be opened for WRITING -- the original used
        # the default read mode, so any later write to sys.stderr raised
        # IOError instead of being discarded.
        # --------------------------------------------------------------------------
        try:
            import os
            import sys
            sys.stderr = open(os.devnull, "w")
        except IOError:
            pass

        dump_from_celery(config)
# ----------------------------------------------------------------------
def dump_from_celery(config):
    """
    Poll the remote 'celery' queue forever, deserializing each message and
    reporting the remote task name and its input arguments.

    :param config: object with .broker_type, .target and .interval attributes
    """
    url = '%s://%s' % (config.broker_type, config.target)

    with Connection(url) as conn:
        in_queue = conn.SimpleQueue('celery')

        while 1:
            try:
                while 1:
                    message = in_queue.get(block=False, timeout=1)

                    # --------------------------------------------------------------------------
                    # Try to deserialize
                    # --------------------------------------------------------------------------
                    # SECURITY NOTE: pickle.loads on data read from a remote
                    # broker executes arbitrary code if the payload is hostile.
                    try:
                        deserialized = pickle.loads(message.body)
                    except SerializationError:
                        # BUGFIX: not pickle data -> skip this message instead
                        # of falling through and using a stale or undefined
                        # 'deserialized' value (NameError on first message).
                        continue

                    # Read info
                    remote_process = deserialized['task'].split(".")[-1]
                    remote_args = deserialized['args']

                    # Show info
                    _show_info(remote_process, remote_args)
            except Empty:
                # Queue is empty -> wait.
                # BUGFIX: sleep the configured interval; the old code reported
                # config.interval in the message but always slept 2 seconds.
                log.error("No more messages from server. Waiting for %s seconds and try again.." % config.interval)
                sleep(config.interval)
# ----------------------------------------------------------------------
def _show_info(process, args):
    """Log the discovered remote task name and each of its input parameters."""
    header = (
        "Found process information:",
        " - Remote process name: '%s'" % process,
        " - Input parameters:",
    )
    for line in header:
        log.error(line)
    for index, value in enumerate(args):
        log.error(" -> P%s: %s" % (index, value))

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
import logging
from modules import IModule
from libs.core.structs import CommonData
from libs.core.models import IntegerField, StringField, SelectField
from .proc_raw_dump import action_proc_raw_dump
from .cmd_actions import parser_proc_raw_dump
log = logging.getLogger()
# ----------------------------------------------------------------------
class ModuleModel(CommonData):
    """Input model for the 'proc' module: connection options for the target broker."""
    # Host/IP of the broker to attack (mandatory).
    target = StringField(required=True)
    # Destination to export results to ("" -> do not export).
    export_results = StringField(default="")
    # Source to import previous results from (None -> do not import).
    import_results = StringField(default=None)
    # NOTE(review): presumably a numeric Redis DB index despite being a
    # StringField -- confirm against the actions that consume it.
    db = StringField(default=None, label="only for Redis: database to use")
    # Broker protocol used to build the connection URL (scheme://target).
    broker_type = SelectField(default="redis", choices=[
        ("redis", "Redis server"),
        ("zmq", "ZeroMQ"),
        ("amqp", "RabbitMQ broker")
    ])
# ----------------------------------------------------------------------
class RemoteProcessModule(IModule):
    """
    Try to extract information from remote processes.
    """
    __model__ = ModuleModel

    # Sub-commands exposed by this module, each mapping to its argparse
    # configurator and its action callable.
    __submodules__ = {
        'raw-dump': dict(
            help="dump raw remote information process",
            cmd_args=parser_proc_raw_dump,
            action=action_proc_raw_dump
        ),
    }

    name = "proc"
    description = "try to discover and handle processes in remote MQ/Brokers"

    # ----------------------------------------------------------------------
    def run(self, config):
        """
        Silence kombu's stderr output, then dispatch to the selected
        sub-module through the parent implementation.

        :param config: validated ModuleModel instance with connection options
        """
        # --------------------------------------------------------------------------
        # Very dirty monkey patch to avoid kombu writing onto the screen.
        # BUGFIX: /dev/null must be opened for WRITING -- the original used
        # the default read mode, so any later write to sys.stderr raised
        # IOError instead of being discarded.
        # --------------------------------------------------------------------------
        try:
            import os
            import sys
            sys.stderr = open(os.devnull, "w")
        except IOError:
            pass

        super(RemoteProcessModule, self).run(config)

View File

@@ -0,0 +1,13 @@
# -*- coding: utf-8 -*-
"""
This file contains command line actions for argparser
"""
# ----------------------------------------------------------------------
def parser_proc_raw_dump(parser):
    """
    Register the command line options of the 'raw-dump' sub-command.

    :param parser: argparse parser (or sub-parser) to populate
    """
    # Help-text fixes only: "tow" -> "two" and a grammatical rewrite of the
    # --tail description; option names, defaults and types are unchanged.
    parser.add_argument("--tail", action="store_true", dest="tail_mode", default=False,
                        help="keep waiting for new messages after all information has been dumped")
    parser.add_argument("-I", dest="interval", type=float, default=4,
                        help="timeout interval between two connections")

View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
import six
import logging
from time import sleep
from kombu import Connection
from kombu.simple import Empty
from six.moves.cPickle import loads
from kombu.exceptions import SerializationError
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_raw_dump(config):
    """
    Dump every pending message in the remote 'celery' queue, then re-inject
    the consumed messages so the queue is left as it was found.

    :param config: object with .broker_type, .target, .tail_mode and
                   .interval attributes
    """
    url = '%s://%s' % (config.broker_type, config.target)

    with Connection(url) as conn:
        in_queue = conn.SimpleQueue('celery')

        to_inject = []
        already_processed = set()

        while 1:
            try:
                while 1:
                    message = in_queue.get(block=False, timeout=1)

                    # --------------------------------------------------------------------------
                    # Try to deserialize
                    # --------------------------------------------------------------------------
                    # SECURITY NOTE: unpickling data from a remote broker
                    # executes arbitrary code if the payload is hostile.
                    try:
                        deserialized = loads(message.body)
                    except SerializationError:
                        # BUGFIX: not pickle data -> skip it instead of
                        # reusing a stale or undefined 'deserialized' value.
                        continue

                    msg_id = deserialized['id']

                    # Only report each message once, even in tail mode.
                    if msg_id not in already_processed:
                        remote_process = deserialized['task'].split(".")[-1]
                        remote_args = deserialized['args']

                        # Show info
                        log.error("Found process information:")
                        log.error(" - Remote process name: '%s'" % remote_process)
                        log.error(" - Input parameters:")
                        for i, x in enumerate(remote_args):
                            log.error(" -> P%s: %s" % (i, x))

                        # Store as processed
                        already_processed.add(msg_id)

                    # --------------------------------------------------------------------------
                    # Store message to re-send
                    # --------------------------------------------------------------------------
                    to_inject.append(deserialized)
            except Empty:
                # When Queue is Empty -> reinject all removed messages
                for pending in to_inject:
                    in_queue.put(pending, serializer="pickle")

                # BUGFIX: clear the buffer after re-injection; without this,
                # tail mode re-read the re-injected messages, appended them
                # again, and re-injected an ever-growing duplicated set on
                # every cycle.
                del to_inject[:]

                # Queue is empty -> wait (tail mode) or finish.
                if config.tail_mode:
                    log.error("No more messages from server. Waiting for %s seconds and try again.." % config.interval)
                    sleep(config.interval)
                else:
                    log.error("No more messages from server. Exiting...")
                    return

View File

@@ -12,6 +12,7 @@ from .redis_info import action_redis_server_info
from .redis_clients import action_redis_server_connected
from .redis_disconnect import action_redis_server_disconnect
from .redis_shell import action_redis_shell
from .redis_discover_db import action_redis_discover_dbs
log = logging.getLogger()
@@ -50,6 +51,10 @@ class RedisModule(IModule):
cmd_args=parser_redis_server_disconnect,
action=action_redis_server_disconnect
),
'discover-dbs': dict(
help="discover all redis DBs at server",
action=action_redis_discover_dbs
),
# 'shell': dict(
# help="open a remote os shell through the Redis server",
# action=action_redis_shell

View File

@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
import six
import redis
import logging
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_redis_discover_dbs(config):
    """
    Enumerate all databases of a remote Redis server: report the key count
    of every DB that holds keys and flag the remaining DBs as empty.

    :param config: object with .target, .port and .db attributes
    """
    log.warning("Trying to connect with redis server...")

    # Connection with redis
    con = redis.StrictRedis(host=config.target, port=config.port, db=config.db)

    # Hoist the two server round-trips out of the loops (the original
    # re-queried CONFIG GET / INFO on every iteration).
    total_dbs = int(con.config_get("databases")['databases'])
    keyspace = con.info("keyspace")  # only DBs that hold keys appear here

    # BUGFIX: the original format string had its arguments swapped -- it
    # printed the host as the DB count and the count as the host.
    log.error("Discovered '%s' DBs at '%s':" % (total_dbs, config.target))

    for db_name, db_content in six.iteritems(keyspace):
        log.error(" - %s - %s keys" % (db_name.upper(), db_content['keys']))

    # BUGFIX: report the indexes that are actually absent from the keyspace;
    # the original blindly numbered the first (total - used) DBs as empty,
    # mislabelling any used DB with a low index.
    for i in six.moves.range(total_dbs):
        if ("db%s" % i) not in keyspace:
            log.error(" - DB%s - Empty" % str(i))

View File

@@ -7,6 +7,7 @@ import pprint
log = logging.getLogger()
# ----------------------------------------------------------------------
def dump_keys(con):
for key in con.keys('*'):