Retire repository
See http://lists.openstack.org/pipermail/openstack-discuss/2019-July/007708.html

Change-Id: If97f10290c16903d550c66163b082bd23357fca2

parent df49ebf749
commit 8c881bcbe5
@@ -1,3 +0,0 @@
[report]
include = syntribos/*
omit = syntribos/tests/unit/*
.gitignore (vendored, 67 lines)
@@ -1,67 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# pbr makes these
ChangeLog
AUTHORS

cover/
.testrepository/

# other
.DS_Store
@@ -1,7 +0,0 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
             OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
             ${PYTHON:-python} -m subunit.run discover ./tests $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
@@ -1,7 +0,0 @@
- project:
    templates:
      - openstack-python-jobs
      - openstack-python35-jobs
      - openstack-python36-jobs
      - publish-openstack-docs-pti
      - check-requirements
@@ -1,43 +0,0 @@
=======================
Contributing Guidelines
=======================

Syntribos is an open source project and contributions are always
welcome. If you have any questions, we can be found in the
#openstack-security channel on Freenode IRC.

1. Follow all the `OpenStack Style Guidelines <https://docs.openstack.org/hacking/latest/>`__
   (e.g. PEP8, Py3 compatibility)
2. All new classes/functions should have appropriate docstrings in
   `RST format <https://pythonhosted.org/an_example_pypi_project/sphinx.html>`__
3. All new code should have appropriate unittests (place them in the
   ``tests/unit`` folder)
4. Any change you make can be tested using tox:

   ::

      pip install tox
      tox -e pep8
      tox -e py27
      tox -e py35
      tox -e cover

Anyone wanting to contribute to OpenStack must follow
`the OpenStack development workflow <https://docs.openstack.org/infra/manual/developers.html#development-workflow>`__

All changes should be submitted through the code review process in Gerrit
described above. All pull requests on Github will be closed/ignored.

Bugs should be filed on the `syntribos launchpad site <https://bugs.launchpad.net/syntribos>`__,
and not on Github. All Github issues will be closed/ignored.

Breaking changes, feature requests, and other unprioritized work should first be
submitted as a blueprint `here <https://blueprints.launchpad.net/syntribos>`__
for review.


**Note:** README.rst is an auto-generated file, built from the rst files in the
docs directory. It can be regenerated by running ``python readme.py``
from the ``syntribos/scripts`` directory. When the README needs to be
updated, modify the corresponding rst file in ``syntribos/doc/source``
and regenerate it by running the script.
LICENSE (202 lines)
@@ -1,202 +0,0 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
@@ -1 +0,0 @@
include README.md LICENSE requirements.txt HISTORY.rst
README.rst (1073 lines): file diff suppressed because it is too large.
@@ -1,3 +0,0 @@
# Extraction from Python source files
[python: **.py]
encoding = utf-8
@@ -1,183 +0,0 @@
=================================================
Syntribos, An Automated API Security Testing Tool
=================================================

Syntribos is an open source automated API security testing tool that is
maintained by members of the `OpenStack Security Project <https://wiki.openstack.org/wiki/Security>`_.

Given a simple configuration file and an example HTTP request, syntribos
can replace any API URL, URL parameter, HTTP header, and request body
field with a given set of strings. Syntribos iterates through each position
in the request automatically. Syntribos aims to automatically detect common
security defects such as SQL injection, LDAP injection, buffer overflow, etc.
In addition, syntribos can be used to help identify new security defects
by automated fuzzing.

Syntribos has the capability to test any API, but is designed with
`OpenStack <https://www.openstack.org/>`__ applications in mind.

List of Tests
~~~~~~~~~~~~~

With syntribos, you can initiate automated testing of any API with minimal
configuration effort. Syntribos is ideal for testing OpenStack APIs, as it
can automatically download a set of request templates for some of the bigger
OpenStack projects such as nova, neutron, and keystone.

A short list of tests that can be run using syntribos is given below:

* Buffer Overflow
* Command Injection
* CORS Wildcard
* Integer Overflow
* LDAP Injection
* SQL Injection
* String Validation
* XML External Entity
* Cross Site Scripting (XSS)
* Regex Denial of Service (ReDoS)
* JSON Parser Depth Limit
* User Defined

Buffer Overflow
---------------

`Buffer overflow`_ attacks, in the context of a web application,
force an application to handle more data than it can hold in a buffer.
In syntribos, a buffer overflow test is attempted by injecting a large
string into the body of an HTTP request.
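
The minimal sketch below is an illustration of that idea and is not taken from
syntribos itself; the URL, the ``name`` field, and the use of the ``requests``
library are placeholders chosen for the example:

::

    import requests

    # Illustration only: send one oversized body field and observe how the
    # service reacts (status code, response size, error text).
    oversized = "A" * 65536
    resp = requests.post("http://localhost:5000/v3/example",
                         json={"name": oversized},
                         timeout=10)
    print(resp.status_code, len(resp.content))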

Command Injection
-----------------

`Command injection`_ attacks inject arbitrary commands in an
attempt to execute them on a remote system. In syntribos, this is
achieved by injecting a set of strings that have proven successful at
triggering command injection attacks.

CORS Wildcard
-------------

`CORS wildcard`_ tests verify whether a web server allows cross-domain
resource sharing from any external URL (wildcarding of the
`Access-Control-Allow-Origin` header) rather than a whitelist of URLs.
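
As a rough illustration, independent of syntribos internals, a wildcard policy
can be spotted by sending a request with an arbitrary ``Origin`` header and
inspecting the response; the endpoint and origin below are placeholders:

::

    import requests

    # If the server answers with "*" in Access-Control-Allow-Origin,
    # any site may make cross-domain requests to it.
    resp = requests.get("http://localhost:5000/v3",
                        headers={"Origin": "https://attacker.example"},
                        timeout=10)
    if resp.headers.get("Access-Control-Allow-Origin") == "*":
        print("wildcard CORS policy detected")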

Integer Overflow
----------------

`Integer overflow`_ tests in syntribos attempt to inject numeric values that
the remote application may fail to represent within its storage, for example,
injecting a 64-bit number into a 32-bit integer type.
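
A small standard-library sketch of the underlying failure mode (not syntribos
code) is:

::

    import struct

    # 2**31 cannot be represented as a signed 32-bit integer, so packing it
    # into a 32-bit field fails; a service that stores user-supplied numbers
    # in fixed-width types has to handle such values explicitly.
    try:
        struct.pack("<i", 2**31)
    except struct.error as exc:
        print("overflow rejected:", exc)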

LDAP Injection
--------------

Syntribos attempts `LDAP injection`_ attacks by injecting LDAP statements
into HTTP requests; if an application fails to properly sanitize the
request content, it may be possible to execute arbitrary commands.

SQL Injection
-------------

`SQL injection`_ attacks are one of the most common web application attacks.
If user input is not properly sanitized, it is fairly easy to
execute SQL queries that may result in an attacker reading sensitive
information or gaining control of the SQL server. In syntribos,
an application is tested for SQL injection vulnerabilities by injecting
SQL strings into the HTTP request.

String Validation
-----------------

Some string patterns are not sanitized effectively by the input validator and
may cause the application to crash. String validation attacks in syntribos
try to exploit this by sending characters that may trigger string validation
vulnerabilities, for example, special unicode characters, emoji, etc.

XML External Entity
-------------------

`XML external entity`_ attacks target the web application's XML parser.
If an XML parser allows processing of external entities referenced in an
XML document, an attacker might be able to cause a denial of service or
leakage of information. Syntribos tries to inject a few malicious
strings into an XML body while sending requests to an application in an
attempt to obtain an appropriate response.

Cross Site Scripting (XSS)
--------------------------

`XSS`_ attacks inject malicious JavaScript into a web
application. Syntribos tries to find potential XSS issues by injecting
strings containing ``<script>`` and other HTML tags into request fields.

Regex Denial of Service (ReDoS)
-------------------------------

`ReDoS`_ attacks attempt to produce a denial of service by
providing a regular expression that takes a very long time to evaluate.
This can cause the regex engine to backtrack heavily, which can
slow down some parsers or even cause a processing halt. The attack
exploits the fact that most regular expression implementations have
exponential worst-case time complexity.
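
The classic textbook pattern below (shown for intuition, not a syntribos
payload) demonstrates how a nested quantifier plus a non-matching suffix
triggers catastrophic backtracking:

::

    import re

    # "(a+)+$" can split a run of 'a's in exponentially many ways; the
    # trailing '!' guarantees every split is tried and rejected.
    evil = re.compile(r"^(a+)+$")
    evil.match("a" * 30 + "!")   # on the order of 2**30 backtracking steps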

JSON Parser Depth Limit
-----------------------

A JSON parser may hit its depth limit and crash when handed a deeply nested
document, resulting in a denial-of-service vulnerability. Syntribos checks
for this and raises an issue if the parser crashes.
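
For intuition, the standard-library sketch below (not syntribos code) shows a
parser giving up on a deeply nested document; the exact failure mode depends
on the parser and runtime:

::

    import json

    # 100,000 nested lists: a recursive-descent parser must recurse once
    # per nesting level.
    payload = "[" * 100000 + "]" * 100000
    try:
        json.loads(payload)
    except RecursionError:
        print("parser hit its depth limit")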

User Defined Test
-----------------

This test gives users the ability to fuzz with user-defined fuzz data and
provides an option to look for failure strings provided by the user. The fuzz
data needs to be provided using the config option :option:`[user_defined]`.

Example::

    [user_defined]
    payload=<payload_file>
    failure_strings=<list_of_failure_strings>  # optional

Other than these built-in tests, you can extend syntribos by writing
your own custom tests. To do this, download the source code and look at
the tests in the ``syntribos/tests`` directory. The CORS test may be an easy
one to emulate. In the same way, you can also add different extensions
to the tests. To see how extensions can be written, please see the
``syntribos/extensions`` directory.
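
For orientation, a custom test might look roughly like the sketch below. The
module path comes from the code documentation in this repository, but the
class attributes shown are assumptions for illustration only; check the
existing tests under ``syntribos/tests`` for the real interface:

::

    from syntribos.tests.fuzz import base_fuzz


    class ExampleFuzzTest(base_fuzz.BaseFuzzTestCase):
        """Hypothetical test fuzzing requests with a custom payload file."""

        test_name = "EXAMPLE_FUZZ"          # assumed: name used to register the test
        data_key = "example-payloads.txt"   # assumed: payload file to inject
        failure_keys = ["example failure"]  # assumed: strings that mark a failure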

.. _buffer overflow: https://en.wikipedia.org/wiki/Buffer_overflow
.. _Command injection: https://www.owasp.org/index.php/Command_Injection
.. _CORS wildcard: https://www.owasp.org/index.php/Test_Cross_Origin_Resource_Sharing_(OTG-CLIENT-007)
.. _Integer overflow: https://en.wikipedia.org/wiki/Integer_overflow
.. _LDAP injection: https://www.owasp.org/index.php/LDAP_injection
.. _SQL injection: https://www.owasp.org/index.php/SQL_Injection
.. _XML external entity: https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
.. _XSS: https://www.owasp.org/index.php/Cross-site_Scripting_(XSS)
.. _ReDoS: https://en.wikipedia.org/wiki/ReDoS

**Details**

* `Documentation`_
* Free software: `Apache license`_
* `Launchpad project`_
* `Blueprints`_
* `Bugs`_
* `Source code`_

Supported Operating Systems
~~~~~~~~~~~~~~~~~~~~~~~~~~~

Syntribos has been developed primarily in Linux and Mac environments and would
work on most Unix and Linux based operating systems. At this point, we are not
supporting Windows, but this may change in the future.

.. _Documentation: https://docs.openstack.org/developer/syntribos/
.. _Apache license: https://github.com/openstack/syntribos/blob/master/LICENSE
.. _Launchpad project: https://launchpad.net/syntribos
.. _Blueprints: https://blueprints.launchpad.net/syntribos
.. _Bugs: https://bugs.launchpad.net/syntribos
.. _Source code: https://github.com/openstack/syntribos
@@ -1,144 +0,0 @@
============================
Syntribos Code Documentation
============================

Configuration
~~~~~~~~~~~~~

This section describes the configuration specified in the second argument to
the runner, your configuration file.

.. automodule:: syntribos.config
   :members:
   :undoc-members:
   :show-inheritance:

..
   .. automodule:: syntribos.arguments
      :members:
      :undoc-members:
      :show-inheritance:

.. automodule:: syntribos.runner
   :members:
   :undoc-members:
   :show-inheritance:

Signals
~~~~~~~

This section describes Signals (:class:`syntribos.signal.SynSignal`) and
SignalHolders (:class:`syntribos.signal.SignalHolder`).

.. autoclass:: syntribos.signal.SynSignal
   :members:

.. autoclass:: syntribos.signal.SignalHolder
   :members:
   :special-members: __init__, __contains__

Checks
~~~~~~

This section describes the checks, which analyze an HTTP response and
return a signal if they detect something they know about. Checks are
intended to make it easier to inspect HTTP responses.

.. automodule:: syntribos.checks.content_validity
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.fingerprint
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.header
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.http
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.length
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.ssl
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.stacktrace
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.string
   :members:
   :undoc-members:

.. automodule:: syntribos.checks.time
   :members:
   :undoc-members:

Tests
~~~~~

This section describes the components involved in writing your own tests with
syntribos.

All syntribos tests inherit from :class:`syntribos.tests.base.BaseTestCase`,
either directly, or through a subclass such as
:class:`syntribos.tests.fuzz.base_fuzz.BaseFuzzTestCase`.

All tests are aggregated in the ``syntribos.tests.base.test_table`` variable.

.. automodule:: syntribos.tests.base
   :members:
   :undoc-members:
   :show-inheritance:

.. automodule:: syntribos.tests.fuzz.datagen
   :members:
   :undoc-members:
   :show-inheritance:

Issues
~~~~~~

This section describes the representation of issues that are uncovered by
syntribos.

.. automodule:: syntribos.issue
   :members:
   :undoc-members:
   :show-inheritance:

Results
~~~~~~~

This section describes the representation of results (collections of issues)
from a given syntribos run.

.. automodule:: syntribos.result
   :members:
   :undoc-members:
   :show-inheritance:

HTTP Requests
~~~~~~~~~~~~~

This section describes the components related to generating, fuzzing, and
making HTTP requests.

.. automodule:: syntribos.clients.http.client
   :members:
   :undoc-members:
   :show-inheritance:

.. automodule:: syntribos.clients.http.parser
   :members:
   :undoc-members:
   :show-inheritance:

Extensions
~~~~~~~~~~

This section describes syntribos extensions, which are called by the
``CALL_EXTERNAL`` field in the request template.

.. automodule:: syntribos.extensions.identity.models.base
   :members:
   :undoc-members:
   :private-members:
   :show-inheritance:
@@ -1,69 +0,0 @@
========
Commands
========

Below is the set of commands that can be specified while
using syntribos:

- **init**

  This command sets up the syntribos environment after installation. Running
  this command creates the necessary folders for templates, payloads,
  and logs, as well as a sample configuration file.

  ::

     $ syntribos init

  To learn more about ``syntribos init``, see the installation instructions
  `here <installation.html>`_.

- **run**

  This command runs syntribos with the given config options.

  ::

     $ syntribos --config-file keystone.conf -t SQL run

- **dry_run**

  This command ensures that the template files given for this run parse
  successfully and without errors. It then runs a debug test which sends no
  requests of its own.

  ::

     $ syntribos --config-file keystone.conf dry_run

  .. Note::
     If any external calls referenced inside the template file do make
     requests, the parser will still make those requests even for a dry run.

- **list_tests**

  This command will list the names of all the tests
  that can be executed by the ``run`` command, along with their descriptions.

  ::

     $ syntribos --config-file keystone.conf list_tests

- **download**

  This command will download templates and payload files. By default, it will
  download a set of OpenStack template files (with the ``--templates``
  flag), or a set of payloads (with the ``--payloads`` flag) to your
  syntribos root directory. However, the behavior of this command can be
  configured in the ``[remote]`` section of your config file.

  ::

     $ syntribos download --templates

.. Important::
   All these commands, except ``init``, will only work if a configuration file
   is specified. If a configuration file is present in the default
   path (``~/.syntribos/syntribos.conf``), then you
   do not need to explicitly specify a config file and
   can run syntribos using the command ``syntribos run``.
@@ -1,80 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

sys.path.insert(0, os.path.abspath("../../"))

# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named "sphinx.ext.*") or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "oslosphinx"]

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# The suffix of source filenames.
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "syntribos"
copyright = "2015-present, OpenStack Foundation"

# If true, "()" will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# -- Options for man page output --------------------------------------------

# Grouping the document tree for man pages.
# List of tuples: "sourcefile", "target", u"title", u"Authors name", "manual"
man_pages = [("man/syntribos", "syntribos",
              "Automated API security testing tool",
              ["OpenStack Security Group"], 1)]

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently "default" and "sphinxdoc".
# html_theme_path = ["."]
# html_theme = "_theme"
# html_static_path = ["static"]
html_theme_options = {}

# Output file base name for HTML help builder.
htmlhelp_basename = "%sdoc" % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [("index", "%s.tex" % project, "%s Documentation" % project,
                    "OpenStack Foundation", "manual"), ]

# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {"http://docs.python.org/": None}
intersphinx_mapping = {
    "requests": ("http://docs.python-requests.org/en/master", None)
}
@@ -1,152 +0,0 @@
=============
Configuration
=============

All configuration files should have a ``[syntribos]`` section.
Add other sections depending on what extensions you are using
and what you are testing. For example, if you are using the
built-in identity extension, you would need the ``[user]``
section. The sections ``[logging]`` and ``[remote]`` are optional.

The basic structure of a syntribos configuration
file is given below::

    [syntribos]
    #
    # End point URLs and versions of the services to be tested.
    #
    endpoint=http://localhost:5000
    # Set payload and templates path
    templates=<location_of_templates_dir/file>
    payloads=<location_of_payloads_dir>

    [user]
    #
    # User credentials and endpoint URL to get an AUTH_TOKEN
    # This section is only needed if you are using the identity extension.
    #
    endpoint=
    username=<yourusername>
    password=<yourpassword>

    [remote]
    #
    # Optional, to define remote URI and cache_dir explicitly
    #
    templates_uri=<URI to a tar file of set of templates>
    payloads_uri=<URI to a tar file of set of payloads>
    cache_dir=<a local path to save the downloaded files>

    [logging]
    log_dir=<location_to_save_debug_logs>

The endpoint URL specified in the ``[syntribos]`` section is the endpoint URL
tested by syntribos. The endpoint URL in the ``[user]`` section is used to
get an AUTH_TOKEN. To test any project, update the endpoint URL under
``[syntribos]`` to point to the API and also modify the user
credentials if needed.

Downloading templates and payloads remotely
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Payload and template files can be downloaded remotely in syntribos.
If the ``templates`` and ``payloads`` options are not set in the
``[syntribos]`` section of the config file, by default syntribos will
download all the latest payloads and the templates for a few OpenStack
projects.

To specify a URI to download custom templates and payloads
from, use the ``[remote]`` section in the config file.
Available options under ``[remote]`` are ``cache_dir``, ``templates_uri``,
``payloads_uri``, and ``enable_cache``. The ``enable_cache`` option is
``True`` by default; set it to ``False`` to disable caching of remote
content while syntribos is running. If ``cache_dir`` is set to a path,
syntribos will attempt to use it as a base directory to save downloaded
template and payload files.

The advantage of using these options is that you get
the latest payloads from the official repository, and if you are
using syntribos to test OpenStack projects, in most cases you
can directly use the well-defined templates available this way.

This option also makes it easy to manage different versions of templates
remotely, without the need to maintain a set of different versions offline.

Testing OpenStack keystone API
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

A sample config file is given in ``examples/configs/keystone.conf``.
Copy this file to a location of your choice (the default file path for the
configuration file is ``~/.syntribos/syntribos.conf``) and update the
necessary fields, such as user credentials, log, template directory, etc.

::

    $ vi examples/configs/keystone.conf


    [syntribos]
    #
    # As keystone is being tested in the example, enter your
    # keystone auth endpoint url.
    #
    endpoint=http://localhost:5000
    # Set payload and templates path
    templates=<location_of_templates_dir/file>
    payloads=<location_of_payloads_dir>

    [user]
    #
    # User credentials
    #
    endpoint=http://localhost:5000
    username=<yourusername>
    password=<yourpassword>
    # Optional, only needed if Keystone V3 API is used
    #user_id=<youruserid>
    # Optional, api version if required
    #version=v2.0
    # Optional, for getting scoped tokens
    #user_id=<alt_userid>
    # If user id is not known
    # For V3 API
    #domain_name=<name_of_the_domain>
    #project_name=<name_of_the_project>
    # For Keystone V2 API
    #tenant_name=<name_of_the_project>

    #[alt_user]
    #
    # Optional, Used for cross auth tests (-t AUTH)
    #
    #endpoint=http://localhost:5000
    #username=<alt_username>
    #password=<alt_password>
    # Optional, for getting scoped tokens
    #user_id=<alt_userid>
    # If user id is not known
    # For V3 API
    #domain_name=<name_of_the_domain>
    #project_name=<name_of_the_project>
    # For Keystone V2 API
    #tenant_name=<name_of_the_project>

    [remote]
    #
    # Optional, Used to specify URLs of templates and payloads
    #
    #cache_dir=<a local path to save the downloaded files>
    #templates_uri=https://github.com/your_project/templates.tar
    #payloads_uri=https://github.com/your_project/payloads.tar
    # To disable caching of these remote contents, set the following variable to False
    #enable_caching=True

    [logging]
    #
    # Logger options go here
    #
    log_dir=<location_to_store_log_files>
    # Optional, compresses http_request_content,
    # if you don't want this, set this option to False.
    http_request_compression=True
@@ -1,42 +0,0 @@
=======================
Contributing Guidelines
=======================

Syntribos is an open source project and contributions are always
welcome. If you have any questions, we can be found in the
#openstack-security channel on Freenode IRC.

1. Follow all the `OpenStack Style Guidelines <https://docs.openstack.org/developer/hacking/>`__
   (e.g. PEP8, Py3 compatibility)
2. Follow `secure coding guidelines <https://security.openstack.org/#secure-development-guidelines>`__
3. Ensure all classes/functions have appropriate `docstrings <https://www.python.org/dev/peps/pep-0257/>`__
   in `RST format <http://docutils.sourceforge.net/docs/user/rst/quickref.html>`__
4. Include appropriate unit tests for all new code (place them in the
   ``tests/unit`` folder)
5. Test any change you make using tox:

   ::

      pip install tox
      tox -e pep8
      tox -e py27
      tox -e py35
      tox -e cover

Anyone wanting to contribute to OpenStack must follow
`the OpenStack development workflow <https://docs.openstack.org/infra/manual/developers.html#development-workflow>`__

Submit all changes through the code review process in Gerrit
described above. All pull requests on Github will be closed/ignored.

File bugs on the `syntribos launchpad site <https://bugs.launchpad.net/syntribos>`__,
and not on Github. All Github issues will be closed/ignored.

Submit blueprints `here <https://blueprints.launchpad.net/syntribos>`__ for all
breaking changes, feature requests, and other unprioritized work.


.. Note:: README.rst is a generated file. It can be regenerated by running
   ``python readme.py`` from the ``syntribos/scripts`` directory. When the
   README needs to be updated, modify the corresponding rst file in
   ``syntribos/doc/source`` and regenerate it by running the script.
@@ -1,58 +0,0 @@
=========
Syntribos
=========

Syntribos is an automated API security testing tool.

Given a simple configuration file and an example HTTP request, syntribos
can replace any API URL, URL parameter, HTTP header and request body
field with a given set of strings. Syntribos iterates through each position
in the request automatically. Syntribos aims to automatically detect common
security defects such as SQL injection, LDAP injection, buffer overflow, etc.
In addition, syntribos can be used to help identify new security defects
by automated fuzzing.

Syntribos has the capability to test any API, but is designed with
`OpenStack <https://www.openstack.org/>`__ applications in mind.

Index
~~~~~

.. toctree::
   :maxdepth: 1

   about
   installation
   configuration
   commands
   running
   logging
   test-anatomy

For Developers
~~~~~~~~~~~~~~

.. toctree::
   :maxdepth: 1

   structure
   contributing
   code-docs
   unittests

Project information
~~~~~~~~~~~~~~~~~~~

* `Documentation`_
* Free software: `Apache license`_
* `Launchpad project`_
* `Blueprints`_
* `Bugs`_
* `Source code`_

.. _Documentation: https://docs.openstack.org/developer/syntribos/
.. _Apache license: https://github.com/openstack/syntribos/blob/master/LICENSE
.. _Launchpad project: https://launchpad.net/syntribos
.. _Blueprints: https://blueprints.launchpad.net/syntribos
.. _Bugs: https://bugs.launchpad.net/syntribos
.. _Source code: https://github.com/openstack/syntribos
@@ -1,62 +0,0 @@
============
Installation
============

Syntribos can be installed directly from `pypi with pip <https://pypi.python.org/pypi/pip>`__.

::

   pip install syntribos

For the latest changes, install syntribos from `source <https://www.github.com/openstack/syntribos.git>`__
with `pip <https://pypi.python.org/pypi/pip>`__.

Clone the repository::

   $ git clone https://github.com/openstack/syntribos.git

Change directory into the repository clone and install with pip::

   $ cd syntribos
   $ pip install .

======================================
Initializing the syntribos Environment
======================================

Once syntribos is installed, you must initialize the syntribos environment.
This can be done manually, or with the ``init`` command.

::

   $ syntribos init

.. Note::
   By default, ``syntribos init`` fetches a set of default payload files
   from a `remote repository <https://github.com/openstack/syntribos-payloads>`_
   maintained by our development team. These payload files are necessary for
   our fuzz tests to run. To disable this behavior, run syntribos with the
   ``--no_downloads`` flag. Payload files can also be fetched by running
   ``syntribos download --payloads`` at any time.

To install syntribos under a custom root directory,
specify the ``--custom_root`` flag. This will skip
prompts for information from the terminal, which can be handy for
Jenkins jobs and other situations where user input cannot be retrieved.

If you've already run the ``init`` command but want to start over with a fresh
environment, you can specify the ``--force`` flag to overwrite existing files.
The ``--custom_root`` and ``--force`` flags can be combined to
overwrite files in a custom install root.

Note: if you install syntribos to a custom install root, you must supply the
``--custom_root`` flag when running syntribos.

**Example:**

::

   $ syntribos --custom_root /your/custom/path init --force
   $ syntribos --custom_root /your/custom/path run
@@ -1,173 +0,0 @@
===================
Logging and Results
===================

There are two types of logs generated by syntribos:

#. The results log is a collection of issues generated at the end of a
   syntribos run to represent results.
#. The debug log contains debugging information captured during a particular
   run. Debug logs may include exception messages, warnings, raw
   but sanitized request/response data, and a few more details. A modified
   version of the Python logger is used for collecting debug logs in syntribos.

Results Log
~~~~~~~~~~~

The results log is displayed at the end of every syntribos run; it can be
written to a file by using the ``-o`` flag on the command line.

The results log includes failures and errors. The ``"failures"`` key represents
tests that have failed, indicating a possible security vulnerability. The
``"errors"`` key gives information on any unhandled exceptions, such as
connection errors, encountered on that run.

Example failure object:

::

    {
      "defect_type": "xss_strings",
      "description": "The string(s): '[\"<STYLE>@import'http://xss.rocks/xss.css';</STYLE>\"]',
          known to be commonly returned after a successful XSS attack, have been found in the
          response. This could indicate a vulnerability to XSS attacks.",
      "failure_id": 33,
      "instances": [
        {
          "confidence": "LOW",
          "param": {
            "location": "data",
            "method": "POST",
            "type": null,
            "variables": [
              "type",
              "details/name"
            ]
          },
          "severity": "LOW",
          "signals": {
            "diff_signals": [
              "LENGTH_DIFF_OVER"
            ],
            "init_signals": [
              "HTTP_CONTENT_TYPE_JSON",
              "HTTP_STATUS_CODE_2XX_201"
            ],
            "test_signals": [
              "FAILURE_KEYS_PRESENT",
              "HTTP_CONTENT_TYPE_JSON",
              "HTTP_STATUS_CODE_2XX_201"
            ]
          },
          "strings": [
            "<STYLE>@import'http://xss.rocks/xss.css';</STYLE>"
          ]
        }
      ],
      "url": "127.0.0.1/test"
    }

Error form:

::

    ERROR:
    {
      "error": "Traceback (most recent call last):\n  File \"/Users/test/syntribos/tests/fuzz/base_fuzz.py\",
          line 58, in tearDownClass\n    super(BaseFuzzTestCase, cls).tearDownClass()\n
          File \"/Users/test/syntribos/tests/base.py\", line 166, in tearDownClass\n
          raise sig.data[\"exception\"]\nReadTimeout: HTTPConnectionPool(host='127.0.0.1', port=8080):
          Read timed out. (read timeout=10)\n",
      "test": "tearDownClass (syntribos.tests.fuzz.sql.image_data_image_data_get.template_SQL_INJECTION_HEADERS_sql-injection.txt_str21_model1)"
    }

Debug Logs
~~~~~~~~~~

Debug logs include details about HTTP requests, HTTP responses, and other
debugging information such as errors and warnings across the project. The
path where debug logs are saved by default is ``.syntribos/logs/``.
Debug logs are arranged in directories named after the run timestamp, and
the files in each directory are named after the request templates.

For example:

::

    $ ls .syntribos/logs/
    2016-09-15_11:06:37.198412  2016-09-16_10:11:37.834892  2016-09-16_13:31:36.362584
    2016-09-15_11:34:33.271606  2016-09-16_10:38:55.820827  2016-09-16_13:36:43.151048
    2016-09-15_11:41:53.859970  2016-09-16_10:39:50.501820  2016-09-16_13:40:23.203920

::

    $ ls .syntribos/logs/2016-09-16_13:31:36.362584
    API_Versions::list_versions_template.log
    API_Versions::show_api_details_template.log
    availability_zones::get_availability_zone_detail_template.log
    availability_zones::get_availability_zone_template.log
    cells::delete_os_cells_template.log
    cells::get_os_cells_capacities_template.log
    cells::get_os_cells_data_template.log

Each log file includes some essential debugging information such as the string
representation of the request object, the signals and checks used by the tests, etc.

Example request::

    ------------
    REQUEST SENT
    ------------
    request method.......: PUT
    request url..........: http://127.0.0.1/api
    request params.......:
    request headers size.: 7
    request headers......: {'Content-Length': '0', 'Accept-Encoding': 'gzip, deflate',
        'Accept': 'application/json',
        'X-Auth-Token': <uuid>, 'Connection': 'keep-alive',
        'User-Agent': 'python-requests/2.11.1', 'content-type': 'application/xml'}
    request body size....: 0
    request body.........: None

Example response::

    -----------------
    RESPONSE RECEIVED
    -----------------
    response status..: <Response [415]>
    response headers.: {'Content-Length': '70',
        'X-Compute-Request-Id': <random id>,
        'Vary': 'OpenStack-API-Version, X-OpenStack-Nova-API-Version',
        'Openstack-Api-Version': 'compute 2.1', 'Connection': 'close',
        'X-Openstack-Nova-Api-Version': '2.1', 'Date': 'Fri, 16 Sep 2016 14:15:27 GMT',
        'Content-Type': 'application/json; charset=UTF-8'}
    response time....: 0.036277
    response size....: 70
    response body....: {"badMediaType": {"message": "Unsupported Content-Type", "code": 415}}
    -------------------------------------------------------------------------------
    [2590] : XSS_BODY
    (<syntribos.clients.http.client.SynHTTPClient object at 0x102c65f10>, 'PUT',
        'http://127.0.0.1/api')
    {'headers': {'Accept': 'application/json', 'X-Auth-Token': <uuid>},
        'params': {}, 'sanitize': False, 'data': '', 'requestslib_kwargs': {'timeout': 10}}
    Starting new HTTP connection (1): 127.0.0.1
    "PUT http://127.0.0.1/api HTTP/1.1" 501 93

Example signals captured::

    Signals: ['HTTP_STATUS_CODE_4XX_400', 'HTTP_CONTENT_TYPE_JSON']
    Checks used: ['HTTP_STATUS_CODE', 'HTTP_CONTENT_TYPE']

Debug logs are sanitized so that secrets are not written to log files.
Passwords and other sensitive information are masked with asterisks using a
slightly modified version of `oslo_utils.strutils.mask_password <https://docs.openstack.org/developer/oslo.utils/api/strutils.html#oslo_utils.strutils.mask_password>`__.

Debug logs also use string compression: long fuzz strings are
compressed before being written to the logs. The threshold for data
compression is 512 characters. Although it is not recommended to turn
off compression, you can do so by setting the
``http_request_compression`` option, under the logging section of the
config file, to ``False``.
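
A minimal ``[logging]`` section using the options named above (the same
options appear in the sample keystone config elsewhere in these docs) would
look like this:

::

    [logging]
    log_dir=<location_to_store_log_files>
    # Long fuzz strings are compressed once they exceed 512 characters;
    # disable only if you need raw request bodies in the debug logs.
    http_request_compression=False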
@@ -1,101 +0,0 @@
=========
syntribos
=========

SYNOPSIS
~~~~~~~~

syntribos [-h] [--colorize] [--config-dir DIR] [--config-file PATH]
          [--excluded-types EXCLUDED_TYPES] [--format OUTPUT_FORMAT]
          [--min-confidence MIN_CONFIDENCE]
          [--min-severity MIN_SEVERITY] [--nocolorize]
          [--outfile OUTFILE] [--test-types TEST_TYPES]
          [--syntribos-endpoint SYNTRIBOS_ENDPOINT]
          [--syntribos-exclude_results SYNTRIBOS_EXCLUDE_RESULTS]
          [--syntribos-payloads SYNTRIBOS_PAYLOADS_DIR]
          [--syntribos-templates SYNTRIBOS_TEMPLATES]
          {list_tests,run,dry_run} ...

DESCRIPTION
~~~~~~~~~~~

Syntribos is an automated API security testing tool.

Given a simple configuration file and an example HTTP request, syntribos
can replace any API URL, URL parameter, HTTP header and request body
field with a given set of strings. Syntribos aims to automatically detect
common security defects such as SQL injection, LDAP injection, buffer
overflow, etc. In addition, syntribos can be used to help identify new
security defects by fuzzing.

Syntribos has the capability to test any API, but is designed with
OpenStack applications in mind.

OPTIONS
~~~~~~~

  -h, --help            show this help message and exit
  --colorize, -cl       Enable color in syntribos terminal output
  --config-dir DIR      Path to a config directory to pull ``*.conf`` files
                        from. This file set is sorted, so as to provide a
                        predictable parse order if individual options are
                        over-ridden. The set is parsed after the file(s)
                        specified via previous --config-file arguments, hence
                        over-ridden options in the directory take precedence.
  --config-file PATH    Path to a config file to use. Multiple config files
                        can be specified, with values in later files taking
                        precedence. Defaults to None.
  --excluded-types EXCLUDED_TYPES, -e EXCLUDED_TYPES
                        Test types to be excluded from the current run against
                        the target API
  --format OUTPUT_FORMAT, -f OUTPUT_FORMAT
                        The format for outputting results
  --min-confidence MIN_CONFIDENCE, -C MIN_CONFIDENCE
                        Select a minimum confidence for reported defects
  --min-severity MIN_SEVERITY, -S MIN_SEVERITY
                        Select a minimum severity for reported defects
  --nocolorize          The inverse of --colorize
  --outfile OUTFILE, -o OUTFILE
                        File to print output to
  --test-types TEST_TYPES, -t TEST_TYPES
                        Test types to run against the target API

Main Syntribos Config:
  --syntribos-endpoint SYNTRIBOS_ENDPOINT
                        The target host to be tested
  --syntribos-exclude_results SYNTRIBOS_EXCLUDE_RESULTS
                        Defect types to exclude from the results output
  --syntribos-payloads SYNTRIBOS_PAYLOADS_DIR
                        The location where we can find syntribos' payloads
  --syntribos-templates SYNTRIBOS_TEMPLATES
                        A directory of template files, or a single template
                        file, to test on the target API

Syntribos Commands:
  {list_tests,run,dry_run}
                        Available commands
    list_tests          List all available tests
    run                 Run syntribos with given config options
    dry_run             Dry run syntribos with given config options

FILES
~~~~~

~/.syntribos/syntribos.conf
    syntribos configuration file

EXAMPLES
~~~~~~~~

To run syntribos against all the available tests, just specify the
command ``syntribos run`` with the configuration file, without
specifying any test type.

::

    $ syntribos --config-file keystone.conf run

SEE ALSO
~~~~~~~~

bandit(1)
@ -1,40 +0,0 @@
|
||||
=================
|
||||
Running syntribos
|
||||
=================
|
||||
|
||||
By default, syntribos looks in the syntribos home directory (the directory
|
||||
specified when running the ``syntribos init`` command on install) for config
|
||||
files, payloads, and templates. This can all be overridden through command
|
||||
line options. For a full list of command line options available, run
|
||||
``syntribos --help`` from the command line.
|
||||
|
||||
To run syntribos against all the available tests, specify the
command ``syntribos run`` with the configuration file (if needed), without
specifying any test type.
|
||||
|
||||
::
|
||||
|
||||
$ syntribos --config-file keystone.conf run
|
||||
|
||||
Fuzzy-matching test names
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
It is possible to limit syntribos to run a specific test type using
|
||||
the ``-t`` flag.
|
||||
|
||||
::
|
||||
|
||||
$ syntribos --config-file keystone.conf -t SQL run
|
||||
|
||||
|
||||
This will match all tests that contain ``SQL`` in their name. For example:
|
||||
``SQL_INJECTION_HEADERS``, ``SQL_INJECTION_BODY``, etc.
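
Conversely, test types can be excluded with the ``-e`` / ``--excluded-types``
option. For example (an illustrative command, assuming the same
``keystone.conf`` used above):

::

    $ syntribos --config-file keystone.conf -e SQL run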
|
||||
|
||||
Specifying a custom root directory
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you set up the syntribos environment with a custom root (i.e. with
|
||||
``syntribos --custom_root init``), you can point to it with the
|
||||
``--custom_root`` configuration option. Syntribos will look for a
|
||||
``syntribos.conf`` file inside this directory, and will read further
|
||||
configuration information from there.
|
@ -1,33 +0,0 @@
|
||||
=================
|
||||
Project Structure
|
||||
=================
|
||||
|
||||
- ``data/`` (text files containing data for use by syntribos tests)
|
||||
- ``doc/source/`` (Sphinx documentation files)
|
||||
- ``examples/`` (example syntribos request templates, config files)
|
||||
- ``configs/`` (example syntribos configs)
|
||||
- ``templates/`` (example request templates)
|
||||
- ``scripts/`` (helper Python scripts for managing the project)
|
||||
- ``readme.py`` (Python file for creating/updating the README.rst)
|
||||
- ``syntribos/`` (core syntribos code)
|
||||
- ``clients/`` (clients for making calls, e.g. HTTP)
|
||||
- ``http/`` (clients for making HTTP requests)
|
||||
- ``checks/`` (for analyzing an HTTP response and returning a signal if
|
||||
it detects something that it knows about)
|
||||
- ``extensions/`` (extensions that can be called in request templates)
|
||||
- ``identity/`` (extension for interacting with keystone/Identity)
|
||||
- ``random_data/`` (extension for generating random test data)
|
||||
- ``cinder/`` (extension for interacting with cinder/Block Storage)
|
||||
- ``glance/`` (extension for interacting with glance/Image)
|
||||
- ``neutron/`` (extension for interacting with neutron/Network)
|
||||
- ``nova/`` (extension for interacting with nova/Compute)
|
||||
- ``formatters/`` (output formatters, e.g. JSON, XML/XUnit)
|
||||
- ``tests/`` (location of tests that syntribos can run against a target)
|
||||
- ``auth/`` (tests related to authentication/authorization)
|
||||
- ``fuzz/`` (tests that "fuzz" API requests)
|
||||
- ``debug/`` (internal syntribos tests, these will not be included in a
|
||||
normal run of syntribos)
|
||||
- ``headers/`` (tests related to insecure HTTP headers)
|
||||
- ``transport_layer/`` (tests related to SSL and TLS vulnerabilities)
|
||||
- ``utils/`` (utility methods)
|
||||
- ``tests/unit/`` (unit tests for testing syntribos itself)
|
@ -1,286 +0,0 @@
|
||||
=============================
|
||||
Anatomy of a request template
|
||||
=============================
|
||||
|
||||
This section describes how to write templates and how to run specific tests.
|
||||
Templates are input files that contain raw HTTP requests and may be
|
||||
supplemented with variable data using extensions.
|
||||
|
||||
In general, a request template is a marked-up raw HTTP request. It's possible
|
||||
for you to test your application by using raw HTTP requests as your request
|
||||
templates, but syntribos allows you to mark up your request templates for
|
||||
further functionality.
|
||||
|
||||
A request template looks something like this:
|
||||
|
||||
::
|
||||
|
||||
POST /users/{user1} HTTP/1.1
|
||||
Content-Type: application/json
|
||||
X-Auth-Token: CALL_EXTERNAL|syntribos.extensions.vAPI.client:get_token:[]|
|
||||
|
||||
{"newpassword": "qwerty123"}
|
||||
|
||||
For fuzz tests, syntribos will automatically detect URL parameters, headers,
|
||||
and body content as fields to fuzz. It will not automatically detect URL path
|
||||
elements as fuzz fields, but they can be specified with curly braces ``{}``.
|
||||
|
||||
Note: The name of a template file must end with the extension ``.template``.
Otherwise, syntribos will skip the file and will not attempt to parse it.
|
||||
|
||||
Using external functions in templates
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Extensions can be used to supplement syntribos template files with variable
|
||||
data, or data retrieved from external sources.
|
||||
|
||||
Extensions are found in ``syntribos/extensions/``.
|
||||
|
||||
Calls to extensions are made in the form below:
|
||||
|
||||
::
|
||||
|
||||
CALL_EXTERNAL|{extension dot path}:{function name}:[arguments]
|
||||
|
||||
One example packaged with syntribos enables the tester to obtain an AUTH
|
||||
token from keystone. The code is located in ``identity/client.py``.
|
||||
|
||||
To use this extension, you can add the following to your template file:
|
||||
|
||||
::
|
||||
|
||||
X-Auth-Token: CALL_EXTERNAL|syntribos.extensions.identity.client:get_token_v3:["user"]|
|
||||
|
||||
The ``"user"`` string indicates the data from the configuration file we
|
||||
added in ``examples/configs/keystone.conf``.
|
||||
|
||||
Another example is found in ``random_data/client.py``. This returns a UUID
when random but unique data is needed. The UUID can be used in place of a
username when fuzzing a create-user call.
|
||||
|
||||
::
|
||||
|
||||
"username": "CALL_EXTERNAL|syntribos.extensions.random_data.client:get_uuid:[]|"
|
||||
|
||||
The extension function can return one value, or be used as a generator if
|
||||
you want it to change for each test.
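
As a rough sketch (the module path and function names below are hypothetical,
not extensions shipped with syntribos), a value-returning extension function
and a generator-based one might look like this:

::

    # my_extension/client.py -- illustrative sketch only
    import itertools
    import uuid


    def get_fixed_uuid():
        # Returns a single value; every test that references this
        # extension sees the same UUID.
        return str(uuid.uuid4())


    def unique_username():
        # Generator: each test pulls the next value, so every test
        # gets a different username.
        for i in itertools.count():
            yield "fuzz_user_{0}".format(i)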
|
||||
|
||||
Built-in functions
|
||||
------------------
|
||||
|
||||
Syntribos comes with a number of utility functions/extensions; these functions
can be used to dynamically inject data into templates.
|
||||
|
||||
.. list-table:: **Utility Functions**
|
||||
:widths: 15 35 40
|
||||
:header-rows: 1
|
||||
|
||||
* - Method
|
||||
- Parameters
|
||||
- Description
|
||||
* - hash_it
|
||||
- [data, hash_type (optional hash type, default being SHA256)]
|
||||
- Returns hashed value of data
|
||||
* - hmac_it
|
||||
- [data, key, hash_type (optional hash type, default being SHA256)]
|
||||
- Returns an HMAC based on the hash algorithm, data, and key provided
|
||||
* - epoch_time
|
||||
- [offset (optional integer offset value, default is zero)]
|
||||
- Returns the current time minus offset since epoch
|
||||
* - utc_datetime
|
||||
- []
|
||||
- Returns current UTC date time
|
||||
* - base64_encode
|
||||
- [data]
|
||||
- Returns base 64 encoded value of data supplied
|
||||
* - url_encode
|
||||
- [url]
|
||||
- Returns encoded URL
|
||||
|
||||
All these utility functions can be called using the following syntax:
|
||||
|
||||
::
|
||||
|
||||
CALL_EXTERNAL|common_utils.client:{method_name}:{comma separated parameters in square brackets}|
|
||||
|
||||
For example:
|
||||
|
||||
::
|
||||
|
||||
"encoded_url": "CALL_EXTERNAL|common_utils.client:url_encode:['http://localhost:5000']|
|
||||
|
||||
Other functions that return random values can be seen below:
|
||||
|
||||
.. list-table:: **Random Functions**
|
||||
:widths: 15 35 40
|
||||
:header-rows: 1
|
||||
|
||||
* - Method
|
||||
- Parameters
|
||||
- Description
|
||||
* - get_uuid
|
||||
- []
|
||||
- Returns a random UUID
|
||||
* - random_port
|
||||
- []
|
||||
- Returns random port number between 0 and 65535
|
||||
* - random_ip
|
||||
- []
|
||||
- Returns random ipv4 address
|
||||
* - random_mac
|
||||
- []
|
||||
- Returns random mac address
|
||||
* - random_integer
|
||||
- [beg (optional beginning value, default is 0), end (optional end value)]
|
||||
- Returns an integer value between 0 and 1468029570 by default
|
||||
* - random_utc_datetime
|
||||
- []
|
||||
- Returns random UTC datetime
|
||||
|
||||
These can be called using:
|
||||
|
||||
::
|
||||
|
||||
CALL_EXTERNAL|random_data.client:{method_name}:{comma separated parameters in square brackets}|
|
||||
|
||||
For example:
|
||||
|
||||
::
|
||||
|
||||
"address": "CALL_EXTERNAL|random_data.client:random_ip:[]|"
|
||||
|
||||
Action Field
|
||||
~~~~~~~~~~~~
|
||||
|
||||
While syntribos is designed to test all fields in a request, it can also
|
||||
ignore specific fields through the use of Action Fields. If you want to
|
||||
fuzz against a static object ID, use the Action Field indicator as
|
||||
follows:
|
||||
|
||||
::
|
||||
|
||||
"ACTION_FIELD:id": "1a16f348-c8d5-42ec-a474-b1cdf78cf40f"
|
||||
|
||||
The ID provided will remain static for every test.
|
||||
|
||||
Meta Variable File
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Syntribos allows for templates to read in variables from a user-specified
|
||||
meta variable file. These files contain JSON objects that define variables
|
||||
to be used in one or more request templates.
|
||||
|
||||
The file must be named `meta.json`, and it takes the following form:
|
||||
::
|
||||
|
||||
{
|
||||
"user_password": {
|
||||
"val": 1234
|
||||
},
|
||||
"user_name": {
|
||||
"type": config,
|
||||
"val": "user.username"
|
||||
"fuzz_types": ["ascii"]
|
||||
},
|
||||
"user_token": {
|
||||
"type": "function",
|
||||
"val": "syntribos.extensions.identity:get_scoped_token_v3",
|
||||
"args": ["user"],
|
||||
"fuzz": false
|
||||
}
|
||||
}
|
||||
|
||||
To reference a meta variable from a request template, reference the variable
|
||||
name surrounded by `|` (pipe). An example request template with meta
|
||||
variables is as follows:
|
||||
::
|
||||
|
||||
POST /user HTTP/1.1
|
||||
X-Auth-Token: |user_token|
|
||||
|
||||
{
|
||||
"user": {
|
||||
"username": "|user_name|",
|
||||
"password": "|user_password|"
|
||||
}
|
||||
}
|
||||
|
||||
Note: Meta-variable usage in templates should take the form `|user_name|`, not
|
||||
`user_|name|` or `|user|_|name|`. This is to avoid ambiguous behavior when the
|
||||
value is fuzzed.
|
||||
|
||||
Meta Variable Attributes
|
||||
------------------------
|
||||
* val - All meta variable objects must define a value, which can be of any
  JSON data type. Unlike the other attributes, this attribute is not optional.
|
||||
* type - Defining a type instructs syntribos to interpret the variable in a
|
||||
certain way. Any variables without a type defined will be read in directly
|
||||
from the value. The following types are allowed:
|
||||
|
||||
* config - syntribos reads the config value specified by the "val"
|
||||
attribute and returns that value.
|
||||
* function - syntribos calls the function named in the "val" attribute
|
||||
with any arguments given in the optional "args" attribute, and returns the
|
||||
value from calling the function. This value is cached, and will be returned
|
||||
on subsequent calls.
|
||||
* generator - Works the same way as the function type, but its results are
|
||||
not cached and the function will be called every time.
|
||||
|
||||
* args - A list of function arguments (if any) which can be defined here if the
|
||||
variable is a generator or a function
|
||||
* fuzz - A boolean value that, if set to false, instructs syntribos to
|
||||
ignore this variable for any fuzz tests
|
||||
* fuzz_types - A list of strings which instructs syntribos to only use certain
|
||||
fuzz strings when fuzzing this variable. More than one fuzz type can be
|
||||
defined. The following fuzz types are allowed:
|
||||
|
||||
* ascii - strings that can be encoded as ascii
|
||||
* url - strings that contain only url safe characters
|
||||
|
||||
* min_length/max_length - An integer that instructs syntribos to only use fuzz
  strings that meet certain length requirements (see the example after this
  list)
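
As a minimal illustration (the variable name is hypothetical), an entry that
restricts fuzzing to short, ASCII-safe strings could look like:

::

    {
        "display_name": {
            "val": "example",
            "fuzz_types": ["ascii"],
            "min_length": 1,
            "max_length": 32
        }
    }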
|
||||
|
||||
Inheritance
|
||||
-----------
|
||||
|
||||
Meta variable files inherit based on the directory they are in. That is, if you
|
||||
have `foo/meta.json` and `foo/bar/meta.json`, templates in `foo/bar/` will take
|
||||
their meta variable values from `foo/bar/meta.json`, but they can also
|
||||
reference meta variables that are defined only in `foo/meta.json`. This also
|
||||
means that templates in `foo/baz/` cannot reference variables defined only in
|
||||
`foo/bar/meta.json`.
|
||||
|
||||
Each directory can have no more than one file named `meta.json`.
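
For example, given the layout below (file and directory names are
illustrative), templates in `foo/bar/` can use variables from both
`meta.json` files, while templates in `foo/baz/` can only use the top-level
`foo/meta.json`:

::

    foo/
        meta.json                 # visible to templates in foo/, foo/bar/, foo/baz/
        bar/
            meta.json             # visible only to templates in foo/bar/
            create_user.template
        baz/
            list_users.template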
|
||||
|
||||
Running a specific test
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
As mentioned above, some tests included with syntribos by default
|
||||
are: LDAP injection, SQL injection, integer overflow, command injection,
|
||||
XML external entity, reflected cross-site scripting,
|
||||
Cross Origin Resource Sharing (CORS), SSL, Regex Denial of Service,
|
||||
JSON Parser Depth Limit, and User defined.
|
||||
|
||||
In order to run a specific test, use the :option:`-t, --test-types`
|
||||
option and provide ``syntribos`` with a keyword, or keywords, to match from
|
||||
the test files located in ``syntribos/tests/``.
|
||||
|
||||
For SQL injection tests, see below:
|
||||
|
||||
::
|
||||
|
||||
$ syntribos --config-file keystone.conf -t SQL run
|
||||
|
||||
To run SQL injection tests against the template body only, see below:
|
||||
|
||||
::
|
||||
|
||||
$ syntribos --config-file keystone.conf -t SQL_INJECTION_BODY run
|
||||
|
||||
For all tests against HTTP headers only, see below:
|
||||
|
||||
::
|
||||
|
||||
$ syntribos --config-file keystone.conf -t HEADERS run
|
||||
|
@ -1,26 +0,0 @@
|
||||
============
|
||||
Unit testing
|
||||
============
|
||||
|
||||
To execute unit tests automatically, navigate to the ``syntribos`` root
|
||||
directory and install the test requirements.
|
||||
|
||||
::
|
||||
|
||||
$ pip install -r test-requirements.txt
|
||||
|
||||
Now, run the unit tests with ``unittest`` discovery as shown below:
|
||||
|
||||
::
|
||||
|
||||
$ python -m unittest discover tests/unit -p "test_*.py"
|
||||
|
||||
If you have configured tox, you can also run the following:
|
||||
|
||||
::
|
||||
|
||||
$ tox -e py27
|
||||
$ tox -e py35
|
||||
|
||||
This will run all the unit tests and give you a result output
|
||||
containing the status and coverage details of each test.
|
@ -1,54 +0,0 @@
|
||||
[syntribos]
|
||||
# As keystone is being tested in the example, enter your
|
||||
# keystone auth endpoint url.
|
||||
endpoint=http://localhost:5000
|
||||
# Set payload and templates path
|
||||
templates=<location_of_templates_dir/file>
|
||||
payloads=<location_of_payloads_dir>
|
||||
|
||||
[user]
|
||||
#
|
||||
# User credentials
|
||||
#
|
||||
endpoint=http://localhost:5000
|
||||
username=<yourusername>
|
||||
password=<yourpassword>
|
||||
# Optional, only needed if Keystone V3 API is used
|
||||
#user_id=<youruserid>
|
||||
# Optional, api version if required
|
||||
#version=v2.0
|
||||
# Optional, for getting scoped tokens
|
||||
#user_id=<alt_userid>
|
||||
# If user id is not known
|
||||
# For V3 API
|
||||
#domain_name=<name_of_the_domain>
|
||||
#project_name=<name_of_the_project>
|
||||
# For Keystone V2 API
|
||||
#tenant_name=<name_of_the_project>
|
||||
|
||||
#[alt_user]
|
||||
#
|
||||
# Optional, Used for cross auth tests (-t AUTH)
|
||||
#
|
||||
|
||||
#endpoint=http://localhost:5000
|
||||
#username=<alt_username>
|
||||
#password=<alt_password>
|
||||
# Optional, for getting scoped tokens
|
||||
#user_id=<alt_userid>
|
||||
# If user id is not known
|
||||
# For V3 API
|
||||
#domain_name=<name_of_the_domain>
|
||||
#project_name=<name_of_the_project>
|
||||
# For Keystone V2 API
|
||||
#tenant_name=<name_of_the_project>
|
||||
|
||||
[logging]
|
||||
#
|
||||
# Logger option goes here
|
||||
#
|
||||
|
||||
log_dir=<location_to_store_log_files>
|
||||
# Optional, compresses http_request_content,
|
||||
# if you don't want this, set this option to False.
|
||||
http_request_compression=True
|
@ -1,2 +0,0 @@
|
||||
GET /examples?query=yes HTTP/1.1
|
||||
Accept: application/json
|
@ -1,13 +0,0 @@
|
||||
POST /examples HTTP/1.1
|
||||
Accept: application/json
|
||||
Content-type: application/json
|
||||
|
||||
{
|
||||
"id": 24601,
|
||||
"name": "myname",
|
||||
"password": "letmein",
|
||||
"params": {
|
||||
"string": "aaa",
|
||||
"array": [1,2,3,4,5]
|
||||
}
|
||||
}
|
187
pylintrc
@ -1,187 +0,0 @@
|
||||
[MASTER]
|
||||
|
||||
# Specify a configuration file.
|
||||
ignore=git
|
||||
ignore-patterns=
|
||||
persistent=yes
|
||||
load-plugins=
|
||||
jobs=4
|
||||
unsafe-load-any-extension=no
|
||||
extension-pkg-whitelist=
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
#disable=missing-docstring,invalid-name,too-many-locals,too-many-branches,no-self-use,too-many-nested-blocks,too-many-arguments,superfluous-parens,redefined-variable-type,blacklisted-name,bad-mcs-classmethod-argument,abstract-method,protected-access,broad-except,logging-format-interpolation,global-variable-not-assigned,unused-variable,fixme,redefined-outer-name,too-many-format-args,global-statement,arguments-differ,import-error,cyclic-import,attribute-defined-outside-init,unpacking-non-sequence,too-many-instance-attributes,no-member,unused-argument,unexpected-keyword-arg,undefined-loop-variable,unused-import,dangerous-default-value,undefined-loop-variable,fixme
|
||||
|
||||
disable=all
|
||||
|
||||
enable=bad-indentation,bad-builtin,pointless-statement,bad-continuation,unidiomatic-typecheck,method-hidden,lost-exception,attribute-defined-outside-init,expression-not-assigned,anomalous-backslash-in-string,wildcard-import,unreachable,blacklisted-name,logging-format-interpolation,cyclic-import
|
||||
[REPORTS]
|
||||
output-format=text
|
||||
reports=yes
|
||||
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
[BASIC]
|
||||
|
||||
good-names=i,j,k,ex,Run,val,key,item_
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
# bad-names=foo,bar,baz,toto,tutu,tata
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=yes
|
||||
|
||||
# Regular expression matching correct attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for attribute names
|
||||
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Naming hint for module names
|
||||
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression matching correct constant names
|
||||
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Naming hint for constant names
|
||||
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Regular expression matching correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for method names
|
||||
method-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for argument names
|
||||
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for variable names
|
||||
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for function names
|
||||
function-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct inline iteration names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Naming hint for inline iteration names
|
||||
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Regular expression matching correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Naming hint for class names
|
||||
class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression matching correct class attribute names
|
||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Naming hint for class attribute names
|
||||
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
|
||||
[ELIF]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=79
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
single-line-if-stmt=no
|
||||
|
||||
no-space-check=trailing-comma,dict-separator
|
||||
|
||||
max-module-lines=1000
|
||||
indent-string=' '
|
||||
indent-after-paren=4
|
||||
expected-line-ending-format=
|
||||
|
||||
[LOGGING]
|
||||
|
||||
logging-modules=logging
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
notes=FIXME,XXX,TODO
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
min-similarity-lines=10
|
||||
ignore-comments=yes
|
||||
ignore-docstrings=yes
|
||||
ignore-imports=no
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
ignore-mixin-members=yes
|
||||
|
||||
ignored-modules=
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
generated-members=
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
init-import=no
|
||||
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
|
||||
additional-builtins=
|
||||
callbacks=cb_,_cb
|
||||
redefining-builtins-modules=six.moves,future.builtins
|
||||
|
||||
[CLASSES]
|
||||
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
valid-classmethod-first-arg=cls
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make
|
||||
|
||||
[DESIGN]
|
||||
|
||||
max-args=10
|
||||
ignored-argument-names=_.*
|
||||
max-locals=15
|
||||
max-returns=6
|
||||
max-branches=12
|
||||
max-statements=100
|
||||
max-parents=7
|
||||
max-attributes=10
|
||||
min-public-methods=0
|
||||
max-public-methods=20
|
||||
max-bool-expr=5
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
deprecated-modules=optparse
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
overgeneral-exceptions=Exception
|
@ -1,13 +0,0 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
oslo.i18n>=3.15.3 # Apache-2.0
|
||||
six>=1.10.0 # MIT
|
||||
requests>=2.14.2 # Apache-2.0
|
||||
oslo.config>=5.2.0 # Apache-2.0
|
||||
oslo.utils>=3.33.0 # Apache-2.0
|
||||
python-cinderclient>=3.3.0 # Apache-2.0
|
||||
python-glanceclient>=2.8.0 # Apache-2.0
|
||||
python-neutronclient>=6.7.0 # Apache-2.0
|
||||
python-novaclient>=9.1.0 # Apache-2.0
|
||||
PyYAML>=3.12 # MIT
|
@ -1,81 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import os
|
||||
|
||||
repository_tags = """
|
||||
========================
|
||||
Team and repository tags
|
||||
========================
|
||||
|
||||
.. image:: https://governance.openstack.org/tc/badges/syntribos.svg
|
||||
:target: https://governance.openstack.org/tc/reference/tags/index.html
|
||||
|
||||
|
||||
.. image:: https://img.shields.io/badge/docs-latest-brightgreen.svg?style=flat
|
||||
:target: https://docs.openstack.org/syntribos/latest/
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/syntribos.svg
|
||||
:target: https://pypi.python.org/pypi/syntribos/
|
||||
|
||||
.. image:: https://img.shields.io/pypi/pyversions/syntribos.svg
|
||||
:target: https://pypi.python.org/pypi/syntribos/
|
||||
|
||||
.. image:: https://img.shields.io/pypi/wheel/syntribos.svg
|
||||
:target: https://pypi.python.org/pypi/syntribos/
|
||||
|
||||
.. image:: https://img.shields.io/irc/%23openstack-security.png
|
||||
:target: https://webchat.freenode.net/?channels=openstack-security
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def find_docs():
|
||||
"""Yields files as per the whitelist."""
|
||||
loc = "../doc/source/{}.rst"
|
||||
whitelist = [
|
||||
"about", "installation",
|
||||
"configuration", "commands",
|
||||
"running", "logging",
|
||||
"test-anatomy", "unittests",
|
||||
"contributing"]
|
||||
|
||||
for fname in whitelist:
|
||||
fpath = loc.format(fname)
|
||||
if os.path.isfile(fpath):
|
||||
yield fpath
|
||||
|
||||
|
||||
def concat_docs():
|
||||
"""Concatinates files yielded by the generator `find_docs`."""
|
||||
file_path = os.path.dirname(os.path.realpath(__file__))
|
||||
head, tail = os.path.split(file_path)
|
||||
outfile = head + "/README.rst"
|
||||
if not os.path.isfile(outfile):
|
||||
print("../README.rst not found, exiting!")
|
||||
exit(1)
|
||||
with open(outfile, 'w') as readme_handle:
|
||||
readme_handle.write(repository_tags)
|
||||
for doc in find_docs():
|
||||
with open(doc, 'r') as doc_handle:
|
||||
for line in doc_handle:
|
||||
readme_handle.write(line)
|
||||
readme_handle.write("\n")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
"""Generate README.rst from docs."""
|
||||
concat_docs()
|
||||
print("\nREADME.rst created!\n")
|
54
setup.cfg
@ -1,54 +0,0 @@
|
||||
[metadata]
|
||||
name = syntribos
|
||||
summary = API Security Scanner
|
||||
description-file =
|
||||
README.rst
|
||||
license = Apache License, Version 2.0
|
||||
author = OpenStack Security Group
|
||||
author-email = openstack-dev@lists.openstack.org
|
||||
home-page = https://docs.openstack.org/syntribos/latest
|
||||
classifier =
|
||||
Environment :: Console
|
||||
Intended Audience :: Information Technology
|
||||
Intended Audience :: Developers
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Natural Language :: English
|
||||
Operating System :: POSIX :: Linux
|
||||
Operating System :: MacOS :: MacOS X
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.5
|
||||
Topic :: Security
|
||||
Topic :: Software Development :: Testing
|
||||
Topic :: Utilities
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
syntribos = syntribos.runner:entry_point
|
||||
|
||||
oslo.config.opts =
|
||||
syntribos.config = syntribos.config:list_opts
|
||||
|
||||
[build_sphinx]
|
||||
all_files = 1
|
||||
build-dir = doc/build
|
||||
source-dir = doc/source
|
||||
|
||||
[files]
|
||||
packages = syntribos
|
||||
|
||||
[compile_catalog]
|
||||
directory = syntribos/locale
|
||||
domain = syntribos
|
||||
|
||||
[update_catalog]
|
||||
domain = syntribos
|
||||
output_dir = syntribos/locale
|
||||
input_file = syntribos/locale/syntribos.pot
|
||||
|
||||
[extract_messages]
|
||||
keywords = _ gettext ngettext l_ lazy_gettext
|
||||
mapping_file = babel.cfg
|
||||
output_file = syntribos/locale/syntribos.pot
|
29
setup.py
@ -1,29 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||
import setuptools
|
||||
|
||||
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||
# setuptools if some other modules registered functions in `atexit`.
|
||||
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||
try:
|
||||
import multiprocessing # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
setuptools.setup(
|
||||
setup_requires=['pbr>=2.0.0'],
|
||||
pbr=True)
|
@ -1,17 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: skip-file
|
||||
from syntribos.issue import Issue # noqa
|
||||
from syntribos.constants import * # noqa
|
||||
from syntribos.result import IssueTestResult # noqa
|
@ -1,48 +0,0 @@
|
||||
# Copyright 2017 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""oslo.i18n integration module.
|
||||
|
||||
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
|
||||
|
||||
"""
|
||||
|
||||
import oslo_i18n
|
||||
|
||||
DOMAIN = 'syntribos'
|
||||
|
||||
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
|
||||
|
||||
# The translation function using the well-known name "_"
|
||||
_ = _translators.primary
|
||||
|
||||
# The contextual translation function using the name "_C"
|
||||
# requires oslo.i18n >=2.1.0
|
||||
_C = _translators.contextual_form
|
||||
|
||||
# The plural translation function using the name "_P"
|
||||
# requires oslo.i18n >=2.1.0
|
||||
_P = _translators.plural_form
|
||||
|
||||
|
||||
def enable_lazy():
|
||||
return oslo_i18n.enable_lazy()
|
||||
|
||||
|
||||
def translate(value, user_locale):
|
||||
return oslo_i18n.translate(value, user_locale)
|
||||
|
||||
|
||||
def get_available_languages():
|
||||
return oslo_i18n.get_available_languages(DOMAIN)
|
@ -1,20 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# flake8: noqa
|
||||
from syntribos.checks.length import max_body_length as max_length
|
||||
from syntribos.checks.length import percentage_difference as length_diff
|
||||
from syntribos.checks.ssl import https_check as https_check
|
||||
from syntribos.checks.string import has_string as has_string
|
||||
from syntribos.checks.time import percentage_difference as time_diff
|
||||
from syntribos.checks.time import absolute_time as time_abs
|
@ -1,75 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import json
|
||||
import xml.etree.ElementTree as etree
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def valid_content(test):
|
||||
"""Checks if the response.content is valid.
|
||||
|
||||
Checks if the response.content is either xml or json
|
||||
and returns a signal based on if the content is valid
|
||||
or not.
|
||||
|
||||
:returns: SynSignal
|
||||
"""
|
||||
check_name = "VALID_CONTENT"
|
||||
strength = 1.0
|
||||
tags = []
|
||||
validity = "VALID"
|
||||
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
|
||||
data = {"response_content": resp.content}
|
||||
|
||||
if "Content-type" in resp.headers:
|
||||
content_type = resp.headers["Content-type"]
|
||||
data["content_type"] = content_type
|
||||
|
||||
if "application/xml" in content_type or "text/html" in content_type:
|
||||
try:
|
||||
etree.fromstring(resp.text)
|
||||
except Exception as e:
|
||||
validity = "INVALID"
|
||||
tags = ['APPLICATION_FAIL']
|
||||
text = str(e)
|
||||
|
||||
text = "\n\tContent is: {0} xml".format(validity.lower())
|
||||
slug = "{0}_XML".format(validity)
|
||||
|
||||
elif "application/json" in content_type or "text/json" in content_type:
|
||||
try:
|
||||
json.loads(resp.text)
|
||||
except Exception as e:
|
||||
validity = "INVALID"
|
||||
tags = ['APPLICATION_FAIL']
|
||||
text = str(e)
|
||||
|
||||
text = "\n\tContent is: {0} json".format(validity.lower())
|
||||
slug = "{0}_JSON".format(validity)
|
||||
|
||||
else:
|
||||
return None
|
||||
return syntribos.signal.SynSignal(
|
||||
data=data,
|
||||
tags=tags,
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=strength,
|
||||
check_name=check_name)
|
@ -1,93 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def server_software(test):
|
||||
"""Fingerprints the server and possible version.
|
||||
|
||||
Reads response headers and if server software information is present,
|
||||
returns a signal with server software slug.
|
||||
|
||||
:returns: SynSignal
|
||||
"""
|
||||
check_name = "FINGERPRINT"
|
||||
strength = 1.0
|
||||
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
|
||||
servers = {
|
||||
'Apache': 'APACHE',
|
||||
'nginx': 'NGINX',
|
||||
'Microsoft-IIS': 'IIS',
|
||||
'Oracle': 'ORACLE',
|
||||
'IBM_HTTP_Server': 'IBM',
|
||||
'AmazonS3': 'AMAZON',
|
||||
'GSE': 'GSE',
|
||||
'lightpd': 'LIGHTPD',
|
||||
'WSGIServer': 'WSGI',
|
||||
'Express': 'EXPRESS',
|
||||
'Servlet': 'TOMCAT',
|
||||
'Unknown': 'UNKNOWN'
|
||||
}
|
||||
|
||||
if 'Server' in resp.headers:
|
||||
server = resp.headers['Server']
|
||||
elif 'Powered-by' in resp.headers:
|
||||
server = resp.headers['Powered-by']
|
||||
elif 'x-server-name' in resp.headers:
|
||||
server = resp.headers['x-server-name']
|
||||
else:
|
||||
server = 'Unknown'
|
||||
|
||||
server_name = servers.get(server, 'UNKNOWN')
|
||||
|
||||
if '/' in server:
|
||||
version = server.split('/')[1]
|
||||
else:
|
||||
version = 0
|
||||
|
||||
text = (
|
||||
"Server Details:\n"
|
||||
"\tServer Software: {0}\n"
|
||||
"\tServer Version: {1}\n").format(server_name, version)
|
||||
|
||||
slug = "SERVER_SOFTWARE_{0}".format(server_name)
|
||||
|
||||
return syntribos.signal.SynSignal(text=text, slug=slug,
|
||||
strength=strength, check_name=check_name)
|
||||
|
||||
|
||||
def remote_os(test):
|
||||
"""Returns remote OS info.
|
||||
|
||||
Tries to identity which OS is running on the remote server
|
||||
|
||||
:returns: SynSignal
|
||||
"""
|
||||
check_name = "REMOTE_OS"
|
||||
strength = 1.0
|
||||
remote_os = test.init_resp.headers.get('X-Distribution', 'UNKNOWN')
|
||||
remote_os = remote_os.replace(' ', '_').upper()
|
||||
|
||||
text = (
|
||||
'Remote OS Details:\n'
|
||||
'\tServer OS: {0}\n').format(remote_os)
|
||||
slug = 'SERVER_OS_{0}'.format(remote_os)
|
||||
|
||||
return syntribos.signal.SynSignal(text=text, slug=slug,
|
||||
strength=strength, check_name=check_name)
|
@ -1,16 +0,0 @@
|
||||
# Copyright 2017 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# flake8: noqa
|
||||
from syntribos.checks.header.header import cors as cors
|
||||
from syntribos.checks.header.xst import validate_content as xst
|
@ -1,46 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def cors(test):
|
||||
"""Checks if the response header has any CORS headers.
|
||||
|
||||
If any cross origin resource sharing headers (CORS) are found,
|
||||
checks if any is set to wild characters, if so returns a Signal.
|
||||
|
||||
:param test: the test object whose response headers are checked
:returns: Signal if a CORS vulnerability is found, otherwise None
|
||||
:rtype: :class:`syntribos.signal.SynSignal, None`
|
||||
"""
|
||||
check_name = "HEADER_CORS"
|
||||
strength = 1.0
|
||||
slug = "HEADER_CORS{0}_WILDCARD"
|
||||
cors_type = ""
|
||||
places = ['Origin', 'Methods', 'Headers']
|
||||
cors_headers = ["Access-Control-Allow-{0}".format(p) for p in places]
|
||||
headers = test.test_resp.headers
|
||||
|
||||
for cors_header in cors_headers:
|
||||
if headers.get(cors_header) == '*':
|
||||
cors_type += "_" + cors_header.upper().split('-')[-1]
|
||||
text = ("A wildcard CORS header policy with these details "
|
||||
"was detected: {head}: {value}.\n".format(
|
||||
head=cors_header, value=headers[cors_header]))
|
||||
if cors_type == "":
|
||||
return None
|
||||
|
||||
slug = slug.format(cors_type)
|
||||
return syntribos.signal.SynSignal(text=text, slug=slug, strength=strength,
|
||||
check_name=check_name)
|
@ -1,52 +0,0 @@
|
||||
# Copyright 2017 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def validate_content(test):
|
||||
"""Checks if the API is responding to TRACE requests
|
||||
|
||||
Checks if the response body contains the request header
|
||||
"TRACE_THIS".
|
||||
|
||||
:returns: SynSignal
|
||||
"""
|
||||
check_name = "VALID_CONTENT"
|
||||
strength = 1.0
|
||||
tags = []
|
||||
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
|
||||
data = {"response_content": resp.text}
|
||||
# vulnerable to XST if response body has the request header
|
||||
xst_header = "TRACE_THIS: XST_Vuln"
|
||||
if "Content-type" in resp.headers:
|
||||
content_type = resp.headers["Content-type"]
|
||||
data["content_type"] = content_type
|
||||
|
||||
if data["response_content"]:
|
||||
if data["response_content"].find(xst_header) != -1:
|
||||
text = "Request header in response: {}".format(xst_header)
|
||||
slug = "HEADER_XST"
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
data=data,
|
||||
tags=tags,
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=strength,
|
||||
check_name=check_name)
|
@ -1,193 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import re
|
||||
|
||||
import requests.exceptions as rex
|
||||
from six.moves import http_client as httplib
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def check_fail(exception):
|
||||
"""Checks for a requestslib exception, returns a signal if found.
|
||||
|
||||
If this Exception is an instance of
|
||||
:class:`requests.exceptions.RequestException`, determine what kind of
|
||||
exception was raised. If not, return the results of from_generic_exception.
|
||||
|
||||
:param Exception exception: An Exception object
|
||||
:returns: Signal with exception details
|
||||
:rtype: :class:`syntribos.signal.SynSignal`
|
||||
"""
|
||||
check_name = "HTTP_CHECK_FAIL"
|
||||
|
||||
def uncamel(string):
|
||||
string = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string)
|
||||
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", string).upper()
|
||||
|
||||
if not isinstance(exception, rex.RequestException):
|
||||
return syntribos.signal.from_generic_exception(exception)
|
||||
|
||||
data = {
|
||||
"response": exception.response,
|
||||
"request": exception.request,
|
||||
"exception": exception,
|
||||
"exception_name": uncamel(exception.__class__.__name__)
|
||||
}
|
||||
text = "An exception was encountered when sending the request. {desc}"
|
||||
slug = "HTTP_FAIL_{exc}".format(exc=data["exception_name"])
|
||||
tags = set(["EXCEPTION_RAISED"])
|
||||
|
||||
invalid_request_exceptions = (rex.URLRequired, rex.MissingSchema,
|
||||
rex.InvalidSchema, rex.InvalidURL)
|
||||
|
||||
if exception.__doc__:
|
||||
text = text.format(desc=exception.__doc__)
|
||||
else:
|
||||
text = text.format(
|
||||
desc="An unknown exception was raised. Please report this.")
|
||||
|
||||
# CONNECTION FAILURES
|
||||
if isinstance(exception, (rex.ProxyError, rex.SSLError,
|
||||
rex.ChunkedEncodingError, rex.ConnectionError)):
|
||||
tags.update(["CONNECTION_FAIL"])
|
||||
# TIMEOUTS
|
||||
elif isinstance(exception, (rex.ConnectTimeout, rex.ReadTimeout)):
|
||||
tags.update(["CONNECTION_TIMEOUT", "SERVER_FAIL"])
|
||||
# INVALID REQUESTS
|
||||
elif isinstance(exception, invalid_request_exceptions):
|
||||
tags.update(["INVALID_REQUEST", "CLIENT_FAIL"])
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=1.0,
|
||||
tags=list(tags),
|
||||
data=data,
|
||||
check_name=check_name)
|
||||
|
||||
|
||||
def check_status_code(response):
|
||||
"""Returns a signal with info about a response's HTTP status code
|
||||
|
||||
:param response: A `Response` object
|
||||
:type response: :class:`requests.Response`
|
||||
:returns: Signal with status code details
|
||||
:rtype: :class:`syntribos.signal.SynSignal`
|
||||
"""
|
||||
check_name = "HTTP_STATUS_CODE"
|
||||
codes = httplib.responses
|
||||
|
||||
data = {
|
||||
"response": response,
|
||||
"status_code": response.status_code,
|
||||
"reason": response.reason,
|
||||
}
|
||||
if codes.get(response.status_code, None):
|
||||
data["details"] = codes[response.status_code]
|
||||
else:
|
||||
data["details"] = "Unknown"
|
||||
|
||||
text = ("A {code} HTTP status code was returned by the server, with reason"
|
||||
" '{reason}'. This status code usually means '{details}'.").format(
|
||||
code=data["status_code"],
|
||||
reason=data["reason"],
|
||||
details=data["details"])
|
||||
|
||||
slug = "HTTP_STATUS_CODE_{range}"
|
||||
tags = []
|
||||
|
||||
if data["status_code"] in range(200, 300):
|
||||
slug = slug.format(range="2XX")
|
||||
|
||||
elif data["status_code"] in range(300, 400):
|
||||
slug = slug.format(range="3XX")
|
||||
|
||||
# CCNEILL: 304 == use local cache; not really a redirect
|
||||
if data["status_code"] != 304:
|
||||
tags.append("SERVER_REDIRECT")
|
||||
|
||||
elif data["status_code"] in range(400, 500):
|
||||
slug = slug.format(range="4XX")
|
||||
tags.append("CLIENT_FAIL")
|
||||
|
||||
elif data["status_code"] in range(500, 600):
|
||||
slug = slug.format(range="5XX")
|
||||
tags.append("SERVER_FAIL")
|
||||
|
||||
slug = (slug + "_{code}").format(code=data["status_code"])
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=1,
|
||||
tags=tags,
|
||||
data=data,
|
||||
check_name=check_name)
|
||||
|
||||
|
||||
def check_content_type(response):
|
||||
"""Returns a signal with info about a response's content type
|
||||
|
||||
:param response:
|
||||
:type response: :class:`requests.Response`
|
||||
:returns: Signal with content type info
|
||||
:rtype: :class:`syntribos.signal.SynSignal`
|
||||
"""
|
||||
|
||||
check_name = "HTTP_CONTENT_TYPE"
|
||||
# LOOKUP MAPS
|
||||
known_subtypes = ["xml", "json", "javascript", "html", "plain"]
|
||||
known_suffixes = ["xml", "json"] # RFC6838
|
||||
|
||||
raw_type = response.headers.get("Content-Type", "unknown/unknown").lower()
|
||||
fuzzy_type = None
|
||||
|
||||
# valid headers should be in form type/subtype
|
||||
if "/" not in raw_type:
|
||||
raise Exception("Not a valid content type. What happened?")
|
||||
|
||||
# chop off encodings, etc (ex: application/json[; charset=utf-8])
|
||||
if ";" in raw_type:
|
||||
raw_type = raw_type.split(";")[0]
|
||||
|
||||
_, subtype = raw_type.split("/")
|
||||
|
||||
# if subtype is known, return that (ex: application/[json])
|
||||
if subtype in known_subtypes:
|
||||
fuzzy_type = subtype.upper()
|
||||
|
||||
# check for known 'suffixes' (ex: application/atom+[xml])
|
||||
elif "+" in subtype:
|
||||
_, suffix = subtype.split("+")
|
||||
if suffix in known_suffixes:
|
||||
fuzzy_type = suffix.upper()
|
||||
|
||||
# fuzzy search for other types (ex: text/[xml]-external-parsed-entity)
|
||||
else:
|
||||
for s in known_subtypes:
|
||||
if s in subtype:
|
||||
fuzzy_type = s.upper()
|
||||
break
|
||||
|
||||
text = ("The content type returned by the server was {raw}. We determined"
|
||||
" this is of the general type {fuzzy_type}.").format(
|
||||
raw=raw_type, fuzzy_type=fuzzy_type)
|
||||
|
||||
slug = "HTTP_CONTENT_TYPE_{fuzzy_type}".format(fuzzy_type=fuzzy_type)
|
||||
|
||||
data = {"raw_type": raw_type, "fuzzy_type": fuzzy_type}
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text, slug=slug, strength=1.0, data=data, check_name=check_name)
|
@ -1,109 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def percentage_difference(test):
|
||||
"""Validates length of two responses
|
||||
|
||||
Compares the length of a fuzzed response with a response to the
|
||||
baseline request. If the response is longer than expected, returns
|
||||
a `LengthPercentageDiffSignal`
|
||||
|
||||
:returns: SynSignal or None
|
||||
"""
|
||||
check_name = "LENGTH_DIFF"
|
||||
data = {
|
||||
"req1": test.init_req,
|
||||
"req2": test.test_req,
|
||||
"resp1": test.init_resp,
|
||||
"resp2": test.test_resp,
|
||||
"req1_len": len(test.init_req.body or ""),
|
||||
"req2_len": len(test.test_req.body or ""),
|
||||
"resp1_len": len(test.init_resp.content or ""),
|
||||
"resp2_len": len(test.test_resp.content or ""),
|
||||
}
|
||||
data["req_diff"] = data["req2_len"] - data["req1_len"]
|
||||
data["resp_diff"] = data["resp2_len"] - data["resp1_len"]
|
||||
data["percent_diff"] = abs(
|
||||
float(data["resp_diff"]) / (data["resp1_len"] + 1)) * 100
|
||||
data["dir"] = "UNDER" if data["resp1_len"] > data["resp2_len"] else "OVER"
|
||||
|
||||
if data["resp1_len"] == data["resp2_len"]:
|
||||
# No difference in response lengths
|
||||
return None
|
||||
elif data["req_diff"] == data["resp_diff"]:
|
||||
# Response difference accounted for by difference in request lengths
|
||||
return None
|
||||
elif data["percent_diff"] < CONF.test.length_diff_percent:
|
||||
# Difference not larger than configured percentage
|
||||
return None
|
||||
|
||||
text = (
|
||||
"Validate Length:\n"
|
||||
"\tRequest 1 length: {0}\n"
|
||||
"\tResponse 1 length: {1}\n"
|
||||
"\tRequest 2 length: {2}\n"
|
||||
"\tResponse 2 length: {3}\n"
|
||||
"\tRequest difference: {4}\n"
|
||||
"\tResponse difference: {5}\n"
|
||||
"\tPercent difference: {6}%\n"
|
||||
"\tDifference direction: {7}"
|
||||
"\tConfig percent: {8}\n").format(
|
||||
data["req1_len"], data["resp1_len"], data["req2_len"],
|
||||
data["resp2_len"], data["req_diff"], data["resp_diff"],
|
||||
data["percent_diff"], data["dir"], CONF.test.length_diff_percent)
|
||||
|
||||
slug = "LENGTH_DIFF_{dir}".format(dir=data["dir"])
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text, slug=slug, strength=1.0, data=data, check_name=check_name)
|
||||
|
||||
|
||||
def max_body_length(test):
|
||||
"""Checks if the response body length is more than max size in the config.
|
||||
|
||||
Checks the response body to see if the length is more than the given length
|
||||
in the config. If it is, returns a Signal.
|
||||
|
||||
:returns: SynSignal or None
|
||||
"""
|
||||
check_name = "MAX_LENGTH"
|
||||
if test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
data = {
|
||||
"req": resp.request,
|
||||
"resp": resp,
|
||||
"req_len": len(resp.request.body or ""),
|
||||
"resp_len": len(resp.content or ""),
|
||||
}
|
||||
text = ("Length:\n"
|
||||
"\tRequest length: {0}\n"
|
||||
"\tResponse length: {1}\n".format(data["req_len"],
|
||||
data["resp_len"]))
|
||||
slug = "OVER_MAX_LENGTH"
|
||||
|
||||
if data["resp_len"] > CONF.test.max_length:
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=1.0,
|
||||
data=data,
|
||||
check_name=check_name)
|
@ -1,39 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import re
|
||||
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def https_check(test):
|
||||
"""Checks if the returned response consists of non-secure endpoint URIs
|
||||
|
||||
:returns: syntribos.signal.SynSignal
|
||||
"""
|
||||
check_name = "HTTPS_CHECK"
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
response_text = test.init_resp.text
|
||||
else:
|
||||
response_text = test.test_resp.text
|
||||
target = test.init_req.url
|
||||
domain = urlparse(target).hostname
|
||||
regex = r"\bhttp://{0}".format(domain)
|
||||
|
||||
if re.search(regex, response_text):
|
||||
text = "Non https endpoint URIs present in the response text"
|
||||
slug = "HTTP_LINKS_PRESENT"
|
||||
return syntribos.signal.SynSignal(text=text, slug=slug,
|
||||
strength=1.0, check_name=check_name)
|
@ -1,42 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def stacktrace(test):
|
||||
"""Checks if a stacktrace is returned by the response.
|
||||
|
||||
If a stacktrace is returned, attempts to identify whether it was an
|
||||
application failure or a server failure and return appropriate
|
||||
tags.
|
||||
|
||||
Returns a signal with the stacktrace slug.
|
||||
|
||||
:returns: SynSignal
|
||||
"""
|
||||
error_string = 'Traceback (most recent call last):'
|
||||
strength = 1.0
|
||||
tags = ["APPLICATION_FAIL"]
|
||||
slug = "STACKTRACE_PRESENT"
|
||||
check_name = "STACKTRACE"
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
if error_string in resp.text:
|
||||
text = ("Stacktrace detected: {0}\n".format(
|
||||
resp.text[resp.text.index(error_string):]))
|
||||
return syntribos.signal.SynSignal(text=text, tags=tags,
|
||||
slug=slug, strength=strength,
|
||||
check_name=check_name)
|
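The detection is a plain substring test; the response body below is fabricated to show what would trigger it.

# Fabricated response body, for illustration only.
error_string = 'Traceback (most recent call last):'
body = ('500 Internal Server Error\n'
        'Traceback (most recent call last):\n'
        '  File "app.py", line 42, in handler\n'
        'ZeroDivisionError: division by zero\n')
if error_string in body:
    # Everything from the traceback marker onward ends up in the signal text.
    print(body[body.index(error_string):])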
@ -1,44 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
|
||||
def has_string(test):
|
||||
"""Checks if the response consists of any failure strings
|
||||
|
||||
:returns: syntribos.signal.SynSignal
|
||||
"""
|
||||
|
||||
slug = "FAILURE_KEYS_PRESENT"
|
||||
data = {
|
||||
"req": test.test_resp.request,
|
||||
"resp": test.test_resp,
|
||||
"failed_strings": []
|
||||
}
|
||||
|
||||
failure_keys = test.failure_keys
|
||||
if failure_keys:
|
||||
data["failed_strings"] = [key for key in failure_keys
|
||||
if key in test.test_resp.text]
|
||||
|
||||
if len(data["failed_strings"]) > 0:
|
||||
keys = "\n".join([str(s) for s in data["failed_strings"]])
|
||||
text = "Failed strings present " + keys
|
||||
return syntribos.signal.SynSignal(
|
||||
check_name="has_string",
|
||||
text=text,
|
||||
slug=slug,
|
||||
data=data,
|
||||
strength=1.0)
|
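A short sketch of the failure-string filter above; the keys and response text are sample values (the config default key list is just "syntax error").

failure_keys = ["syntax error", "SQLSTATE"]               # sample keys
response_text = "ERROR 1064: syntax error near 'WHERE'"   # sample body

failed_strings = [key for key in failure_keys if key in response_text]
print(failed_strings)  # ['syntax error'] -> a FAILURE_KEYS_PRESENT signal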
@ -1,102 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos.signal
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def percentage_difference(test):
|
||||
"""Validates time taken for two responses
|
||||
|
||||
Compares the elapsed time of a fuzzed response with a response to the
|
||||
baseline request. If the response takes longer than expected, returns
|
||||
a `SynSignal` with a TIME_DIFF slug.
|
||||
|
||||
:returns: SynSignal or None
|
||||
"""
|
||||
check_name = "TIME_DIFF"
|
||||
data = {
|
||||
"req1": test.init_req,
|
||||
"req2": test.test_req,
|
||||
"resp1": test.init_resp,
|
||||
"resp2": test.test_resp,
|
||||
"resp1_time": test.init_resp.elapsed.total_seconds(),
|
||||
"resp2_time": test.test_resp.elapsed.total_seconds()
|
||||
}
|
||||
data["time_diff"] = data["resp2_time"] - data["resp1_time"]
|
||||
# CCNEILL: This is hacky. Exact match != 100% (due to +1)
|
||||
data["percent_diff"] = abs(
|
||||
float(data["time_diff"]) / (data["resp1_time"] + 1)) * 100
|
||||
data["dir"] = "UNDER"
|
||||
if data["resp1_time"] < data["resp2_time"]:
|
||||
data["dir"] = "OVER"
|
||||
|
||||
if data["percent_diff"] < CONF.test.time_diff_percent:
|
||||
# Difference not larger than configured percentage
|
||||
return None
|
||||
|
||||
text = ("Validate Time Differential:\n"
|
||||
"\tResponse 1 elapsed time: {0}\n"
|
||||
"\tResponse 2 elapsed time: {1}\n"
|
||||
"\tResponse difference: {2}\n"
|
||||
"\tPercent difference: {3}%\n"
|
||||
"\tDifference direction: {4}"
|
||||
"\tConfig percent: {5}\n").format(
|
||||
data["resp1_time"], data["resp2_time"], data["time_diff"],
|
||||
data["percent_diff"], data["dir"], CONF.test.time_diff_percent)
|
||||
|
||||
slug = "TIME_DIFF_{dir}".format(dir=data["dir"])
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text, slug=slug, strength=1.0, data=data, check_name=check_name)
|
||||
|
||||
|
||||
def absolute_time(test):
|
||||
"""Checks response takes less than `config.max_time` seconds
|
||||
|
||||
:returns: SynSignal or None
|
||||
"""
|
||||
check_name = "ABSOLUTE_TIME"
|
||||
|
||||
if not test.init_signals.ran_check(check_name):
|
||||
resp = test.init_resp
|
||||
else:
|
||||
resp = test.test_resp
|
||||
|
||||
data = {
|
||||
"request": resp.request,
|
||||
"response": resp,
|
||||
"elapsed": resp.elapsed.total_seconds(),
|
||||
"max_time": CONF.test.max_time
|
||||
}
|
||||
|
||||
if data["elapsed"] < data["max_time"]:
|
||||
return None
|
||||
|
||||
text = ("Check that response time doesn't exceed test.max_time:\n"
|
||||
"\tMax time: {0}\n"
|
||||
"\tElapsed time: {1}\n").format(data["elapsed"], data["max_time"])
|
||||
|
||||
slug = "TIME_OVER_MAX"
|
||||
tags = ["CONNECTION_TIMEOUT"]
|
||||
|
||||
return syntribos.signal.SynSignal(
|
||||
text=text,
|
||||
slug=slug,
|
||||
strength=1.0,
|
||||
tags=tags,
|
||||
data=data,
|
||||
check_name=check_name)
|
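To make the arithmetic concrete, here is the same percent-difference computation on invented timings; with the default time_diff_percent of 1000.0 this pair would not raise a signal.

resp1_time = 0.20   # baseline elapsed seconds (invented)
resp2_time = 2.60   # fuzzed-request elapsed seconds (invented)

time_diff = resp2_time - resp1_time
percent_diff = abs(float(time_diff) / (resp1_time + 1)) * 100
print(round(percent_diff, 1))  # 200.0 -- below the 1000.0 default, no signal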
@ -1,17 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# flake8: noqa
|
||||
from syntribos.clients.http.parser import RequestCreator as parser
|
||||
from syntribos.clients.http.parser import VariableObject
|
||||
from syntribos.clients.http.client import SynHTTPClient as client
|
@ -1,83 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
import requests
|
||||
from requests.packages import urllib3
|
||||
|
||||
from syntribos.clients.http.debug_logger import log_http_transaction
|
||||
|
||||
urllib3.disable_warnings()
|
||||
|
||||
|
||||
class HTTPClient(object):
|
||||
|
||||
"""Allows clients to inherit requests.request.
|
||||
|
||||
@summary: Redefines request() so that extra parameters are passed
|
||||
through a single named dictionary (requestslib_kwargs) instead of
|
||||
**kwargs. Client methods can accept parameters that overload the
|
||||
request parameters, so a caller can override parts of the request,
|
||||
or the client method logic entirely, with values passed straight
|
||||
through to requests.
|
||||
|
||||
"""
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self):
|
||||
self.default_headers = {}
|
||||
|
||||
@log_http_transaction(log=LOG)
|
||||
def request(self, method, url, headers=None, params=None, data=None,
|
||||
sanitize=False, requestslib_kwargs=None):
|
||||
|
||||
# set requestslib_kwargs to an empty dict if None
|
||||
requestslib_kwargs = requestslib_kwargs if (
|
||||
requestslib_kwargs is not None) else {}
|
||||
|
||||
# Set defaults
|
||||
params = params if params is not None else {}
|
||||
verify = False
|
||||
sanitize = sanitize
|
||||
|
||||
# If headers are provided by both, headers "wins" over default_headers
|
||||
headers = dict(self.default_headers, **(headers or {}))
|
||||
|
||||
# Override url if present in requestslib_kwargs
|
||||
if 'url' in list(requestslib_kwargs.keys()):
|
||||
url = requestslib_kwargs.get('url', None) or url
|
||||
del requestslib_kwargs['url']
|
||||
|
||||
# Override method if present in requestslib_kwargs
|
||||
if 'method' in list(requestslib_kwargs.keys()):
|
||||
method = requestslib_kwargs.get('method', None) or method
|
||||
del requestslib_kwargs['method']
|
||||
|
||||
# The requests lib already removes None key/value pairs, but we force
|
||||
# it here in case that behavior ever changes
|
||||
for key in list(requestslib_kwargs.keys()):
|
||||
if requestslib_kwargs[key] is None:
|
||||
del requestslib_kwargs[key]
|
||||
|
||||
# Create the final parameters for the call to the base request()
|
||||
# Wherever a parameter is provided both by the calling method AND
|
||||
# the requests_lib kwargs dictionary, requestslib_kwargs "wins"
|
||||
requestslib_kwargs = dict(
|
||||
{'headers': headers, 'params': params, 'verify': verify,
|
||||
'data': data, 'allow_redirects': False}, **requestslib_kwargs)
|
||||
|
||||
# Make the request
|
||||
return requests.request(method, url, **requestslib_kwargs)
|
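The merge at the end of request() is a plain dict update in which requestslib_kwargs wins; a small sketch with invented values:

defaults = {'headers': {'Accept': 'application/json'}, 'params': {},
            'verify': False, 'data': None, 'allow_redirects': False}
requestslib_kwargs = {'verify': True, 'timeout': 5}

# Wherever both sides provide a value, requestslib_kwargs takes precedence.
final_kwargs = dict(defaults, **requestslib_kwargs)
print(final_kwargs['verify'], final_kwargs['timeout'])  # True 5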
@ -1,71 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.checks.http as http_checks
|
||||
from syntribos.clients.http.base_http_client import HTTPClient
|
||||
|
||||
|
||||
class SynHTTPClient(HTTPClient):
|
||||
|
||||
"""This is the basic HTTP client used by Syntribos.
|
||||
|
||||
It aliases `send_request` to `request` so logging/exception handling is
|
||||
done in one place, for all requests. Also checks for bad HTTP status codes
|
||||
and adds a signal if one is found.
|
||||
"""
|
||||
|
||||
def request(self, method, url, headers=None, params=None, data=None,
|
||||
sanitize=False, requestslib_kwargs=None):
|
||||
"""Sends a request (passes to `requests.request`)
|
||||
|
||||
:param str method: Request method
|
||||
:param str url: URL to request
|
||||
:param dict headers: Dictionary of headers in name:value format
|
||||
:param dict params: Dictionary of params in name:value format
|
||||
:param dict data: Data to send as part of request body
|
||||
:param dict requestslib_kwargs: Keyword arguments to pass to requests
|
||||
:returns: tuple of (response, signals)
|
||||
"""
|
||||
if not requestslib_kwargs:
|
||||
requestslib_kwargs = {"timeout": 10}
|
||||
elif not requestslib_kwargs.get("timeout", None):
|
||||
requestslib_kwargs["timeout"] = 10
|
||||
|
||||
response, signals = super(SynHTTPClient, self).request(
|
||||
method, url, headers=headers, params=params, data=data,
|
||||
sanitize=sanitize,
|
||||
requestslib_kwargs=requestslib_kwargs)
|
||||
|
||||
if response is not None:
|
||||
signals.register(http_checks.check_status_code(response))
|
||||
signals.register(http_checks.check_content_type(response))
|
||||
|
||||
return (response, signals)
|
||||
|
||||
def send_request(self, request_obj):
|
||||
"""This sends a request based on a RequestObject.
|
||||
|
||||
RequestObjects are generated by a parser (e.g.
|
||||
:class:`syntribos.clients.http.parser.RequestCreator`) from request
|
||||
template files, and passed to this method to send the request.
|
||||
|
||||
:param request_obj: A RequestObject generated by a parser
|
||||
:type request_obj: :class:`syntribos.clients.http.parser.RequestObject`
|
||||
:returns: tuple of (response, signals)
|
||||
"""
|
||||
response, signals = self.request(
|
||||
request_obj.method, request_obj.url,
|
||||
headers=request_obj.headers, params=request_obj.params,
|
||||
data=request_obj.data, sanitize=request_obj.sanitize)
|
||||
|
||||
return (response, signals)
|
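A hypothetical call against a placeholder URL; it assumes syntribos' config options have been registered (as the runner normally does) and that a service is reachable at that address.

from syntribos.clients.http.client import SynHTTPClient

client = SynHTTPClient()
# A 10 second timeout is applied automatically when none is supplied.
resp, signals = client.request("GET", "http://localhost:8080/health")
if resp is not None:
    # Status-code and content-type checks have already been registered.
    print(resp.status_code, len(signals))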
@ -1,176 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Original from OpenCafe (https://github.com/openstack/opencafe)
|
||||
#
|
||||
# Changes copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
import threading
|
||||
from time import time
|
||||
|
||||
import requests
|
||||
import six
|
||||
|
||||
from syntribos._i18n import _
|
||||
import syntribos.checks.http as http_checks
|
||||
import syntribos.signal
|
||||
from syntribos.utils import string_utils
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
def log_http_transaction(log, level=logging.DEBUG):
|
||||
"""Decorator used for logging requests/response in clients.
|
||||
|
||||
Takes a python Logger object and an optional logging level.
|
||||
"""
|
||||
|
||||
def _safe_decode(text, incoming='utf-8', errors='replace'):
|
||||
"""Decodes incoming text/bytes using `incoming` if not already unicode.
|
||||
|
||||
:param incoming: Text's current encoding
|
||||
:param errors: Errors handling policy. See here for valid
|
||||
values http://docs.python.org/2/library/codecs.html
|
||||
|
||||
:returns: text or a unicode `incoming` encoded
|
||||
representation of it.
|
||||
"""
|
||||
|
||||
if isinstance(text, six.text_type):
|
||||
return text
|
||||
|
||||
return text.decode(incoming, errors)
|
||||
|
||||
def _decorator(func):
|
||||
"""Accepts a function and returns wrapped version of that function."""
|
||||
def _wrapper(*args, **kwargs):
|
||||
"""Logging wrapper for any method that returns a requests response.
|
||||
|
||||
Logs requestslib response objects, and the args and kwargs
|
||||
sent to the request() method, to the provided log at the provided
|
||||
log level.
|
||||
"""
|
||||
|
||||
kwargs_copy = deepcopy(kwargs)
|
||||
if kwargs_copy.get("sanitize"):
|
||||
kwargs_copy = string_utils.sanitize_secrets(kwargs_copy)
|
||||
logline_obj = '{0} {1}'.format(args, string_utils.compress(
|
||||
kwargs_copy))
|
||||
|
||||
# Make the request and time its execution
|
||||
response = None
|
||||
no_resp_time = None
|
||||
signals = syntribos.signal.SignalHolder()
|
||||
try:
|
||||
start = time()
|
||||
response = func(*args, **kwargs)
|
||||
except requests.exceptions.RequestException as exc:
|
||||
signals.register(http_checks.check_fail(exc))
|
||||
log.log(level, _("A call to request() failed."))
|
||||
log.exception(exc)
|
||||
log.log(level, "=" * 80)
|
||||
except Exception as exc:
|
||||
log.critical('Call to Requests failed due to exception')
|
||||
log.exception(exc)
|
||||
signals.register(syntribos.signal.from_generic_exception(exc))
|
||||
raise exc
|
||||
|
||||
if len(signals) > 0 and response is None:
|
||||
no_resp_time = time() - start
|
||||
log.log(level,
|
||||
_(
|
||||
'Request failed, elapsed time....: %.6f sec.\n'
|
||||
), no_resp_time)
|
||||
return (response, signals)
|
||||
|
||||
# requests lib 1.0.0 renamed body to data in the request object
|
||||
request_body = ''
|
||||
if 'body' in dir(response.request):
|
||||
request_body = response.request.body
|
||||
elif 'data' in dir(response.request):
|
||||
request_body = response.request.data
|
||||
else:
|
||||
log.info("Unable to log request body, neither a 'data' nor a "
|
||||
"'body' object could be found")
|
||||
|
||||
# requests lib 1.0.4 removed params from response.request
|
||||
request_params = ''
|
||||
request_url = response.request.url
|
||||
if 'params' in dir(response.request):
|
||||
request_params = response.request.params
|
||||
elif '?' in request_url:
|
||||
request_url, request_params = request_url.split('?')
|
||||
|
||||
req_body_len = 0
|
||||
req_header_len = 0
|
||||
if response.request.headers:
|
||||
req_header_len = len(response.request.headers)
|
||||
request_headers = response.request.headers
|
||||
if response.request.body:
|
||||
req_body_len = len(response.request.body)
|
||||
response_content = response.content
|
||||
if kwargs_copy.get("sanitize"):
|
||||
response_content = string_utils.sanitize_secrets(
|
||||
response_content)
|
||||
request_params = string_utils.sanitize_secrets(request_params)
|
||||
request_headers = string_utils.sanitize_secrets(
|
||||
request_headers)
|
||||
request_body = string_utils.sanitize_secrets(request_body)
|
||||
logline_req = ''.join([
|
||||
'\n{0}\nREQUEST SENT\n{0}\n'.format('-' * 12),
|
||||
'request method.......: {0}\n'.format(response.request.method),
|
||||
'request url..........: {0}\n'.format(string_utils.compress(
|
||||
request_url)),
|
||||
'request params.......: {0}\n'.format(string_utils.compress
|
||||
(request_params)),
|
||||
'request headers size.: {0}\n'.format(req_header_len),
|
||||
'request headers......: {0}\n'.format(string_utils.compress(
|
||||
request_headers)),
|
||||
'request body size....: {0}\n'.format(req_body_len),
|
||||
'request body.........: {0}\n'.format(string_utils.compress
|
||||
(request_body))])
|
||||
logline_rsp = ''.join([
|
||||
'\n{0}\nRESPONSE RECEIVED\n{0}\n'.format('-' * 17),
|
||||
'response status..: {0}\n'.format(response),
|
||||
'response headers.: {0}\n'.format(response.headers),
|
||||
'response time....: {0}\n'.format
|
||||
(response.elapsed.total_seconds()),
|
||||
'response size....: {0}\n'.format(len(response.content)),
|
||||
'response body....: {0}\n'.format(response_content),
|
||||
'-' * 79])
|
||||
lock.acquire()
|
||||
try:
|
||||
log.log(level, _safe_decode(logline_req))
|
||||
except Exception as exception:
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.log(level, '\n{0}\nREQUEST INFO\n{0}\n'.format('-' * 12))
|
||||
log.exception(exception)
|
||||
try:
|
||||
log.log(level, _safe_decode(logline_rsp))
|
||||
except Exception as exception:
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.log(level, '\n{0}\nRESPONSE INFO\n{0}\n'.format('-' * 13))
|
||||
log.exception(exception)
|
||||
try:
|
||||
log.debug(_safe_decode(logline_obj))
|
||||
except Exception as exception:
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.info('Exception occurred while logging signature of '
|
||||
'calling method in http client')
|
||||
log.exception(exception)
|
||||
lock.release()
|
||||
return (response, signals)
|
||||
return _wrapper
|
||||
return _decorator
|
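For orientation, this is how the decorator is wired up (the same pattern HTTPClient.request uses above); MyClient is a made-up name.

import logging

import requests

from syntribos.clients.http.debug_logger import log_http_transaction

LOG = logging.getLogger(__name__)


class MyClient(object):
    @log_http_transaction(log=LOG)
    def request(self, method, url, **kwargs):
        return requests.request(method, url, **kwargs)

# Every MyClient().request() call now returns (response, signals) and logs
# the full request/response transaction at DEBUG level.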
@ -1,604 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import copy
|
||||
from functools import reduce
|
||||
import importlib
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
import types
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
from six.moves import html_parser
|
||||
from six.moves.urllib import parse as urlparse
|
||||
import yaml
|
||||
|
||||
from syntribos._i18n import _
|
||||
|
||||
CONF = cfg.CONF
|
||||
_iterators = {}
|
||||
_string_var_objs = {}
|
||||
|
||||
|
||||
class RequestCreator(object):
|
||||
ACTION_FIELD = "ACTION_FIELD:"
|
||||
EXTERNAL = r"CALL_EXTERNAL\|([^:]+?):([^:]+?)(?::([^|]+?))?\|"
|
||||
METAVAR = r"(\|[^\|]*\|)"
|
||||
FUNC_WITH_ARGS = r"([^:]+):([^:]+):(\[.+\])"
|
||||
FUNC_NO_ARGS = r"([^:]+):([^:]+)"
|
||||
|
||||
@classmethod
|
||||
def create_request(cls, string, endpoint, meta_vars=None):
|
||||
"""Parse the HTTP request template into its components
|
||||
|
||||
:param str string: HTTP request template
|
||||
:param str endpoint: URL of the target to be tested
|
||||
:param dict meta_vars: Default None, dict parsed from meta.json
|
||||
:rtype: :class:`syntribos.clients.http.parser.RequestObject`
|
||||
:returns: RequestObject with method, url, params, etc. for use by
|
||||
runner
|
||||
"""
|
||||
cls.meta_vars = meta_vars
|
||||
string = cls.call_external_functions(string)
|
||||
action_field = str(uuid.uuid4()).replace("-", "")
|
||||
string = string.replace(cls.ACTION_FIELD, action_field)
|
||||
lines = string.splitlines()
|
||||
for index, line in enumerate(lines):
|
||||
if line == "":
|
||||
break
|
||||
if lines[index] != "":
|
||||
index = index + 1
|
||||
method, url, params, version = cls._parse_url_line(lines[0], endpoint)
|
||||
headers = cls._parse_headers(lines[1:index])
|
||||
content_type = ''
|
||||
for h in headers:
|
||||
if h.upper() == 'CONTENT-TYPE':
|
||||
content_type = headers[h]
|
||||
break
|
||||
data, data_type = cls._parse_data(lines[index + 1:], content_type)
|
||||
return RequestObject(
|
||||
method=method, url=url, headers=headers, params=params, data=data,
|
||||
action_field=action_field, data_type=data_type)
|
||||
|
||||
@classmethod
|
||||
def _create_var_obj(cls, var, prefix="", suffix=""):
|
||||
"""Given the name of a variable, creates VariableObject
|
||||
|
||||
:param str var: name of the variable in meta.json
|
||||
:rtype: :class:`syntribos.clients.http.parser.VariableObject`
|
||||
:returns: VariableObject holding the attributes defined in the JSON
|
||||
object read in from meta.json
|
||||
"""
|
||||
if not cls.meta_vars:
|
||||
msg = ("Template contains reference to meta variable of the form "
|
||||
"'|{}|', but no valid meta.json file was found in the "
|
||||
"templates directory. Check that your templates reference "
|
||||
"a meta.json file that is correctly formatted.".format(var))
|
||||
raise TemplateParseException(msg)
|
||||
|
||||
if var not in cls.meta_vars:
|
||||
msg = _("Expected to find %s in meta.json, but didn't. "
|
||||
"Check your templates") % var
|
||||
raise TemplateParseException(msg)
|
||||
var_dict = cls.meta_vars[var]
|
||||
if "type" in var_dict:
|
||||
var_dict["var_type"] = var_dict.pop("type")
|
||||
var_obj = VariableObject(var, prefix=prefix, suffix=suffix, **var_dict)
|
||||
return var_obj
|
||||
|
||||
@classmethod
|
||||
def replace_one_variable(cls, var_obj):
|
||||
"""Evaluate a VariableObject according to its type
|
||||
|
||||
A meta variable's type is optional. If a type is given, the parser will
|
||||
interpret the variable in one of 3 ways according to its type, and
|
||||
returns that value.
|
||||
|
||||
* Type config: The parser will attempt to read the config value
|
||||
specified by the "val" attribute and returns that value.
|
||||
* Type function: The parser will call the function named in the "val"
|
||||
attribute with arguments given in the "args" attribute, and returns
|
||||
the value from calling the function. This value is cached, and
|
||||
will be returned on subsequent calls.
|
||||
* Type generator: works the same way as the function type, but its
|
||||
results are not cached and the function will be called every time.
|
||||
|
||||
Otherwise, the parser will interpret the variable as a static variable,
|
||||
and will return whatever is in the "val" attribute.
|
||||
|
||||
:param var_obj: A :class:`syntribos.clients.http.parser.VariableObject`
|
||||
:returns: The evaluated value according to its meta variable type
|
||||
"""
|
||||
if var_obj.var_type == 'config':
|
||||
try:
|
||||
return reduce(getattr, var_obj.val.split("."), CONF)
|
||||
except AttributeError:
|
||||
msg = _("Meta json file contains reference to the config "
|
||||
"option %s, which does not appear to"
|
||||
"exist.") % var_obj.val
|
||||
raise TemplateParseException(msg)
|
||||
|
||||
elif var_obj.var_type == 'function':
|
||||
if var_obj.function_return_value:
|
||||
return var_obj.function_return_value
|
||||
if not var_obj.val:
|
||||
msg = _("The type of variable %s is function, but there is no "
|
||||
"reference to the function.") % var_obj.name
|
||||
raise TemplateParseException(msg)
|
||||
else:
|
||||
var_obj.function_return_value = cls.call_one_external_function(
|
||||
var_obj.val, var_obj.args)
|
||||
return var_obj.function_return_value
|
||||
|
||||
elif var_obj.var_type == 'generator':
|
||||
if not var_obj.val:
|
||||
msg = _("The type of variable %s is generator, but there is no"
|
||||
" reference to the function.") % var_obj.name
|
||||
raise TemplateParseException(msg)
|
||||
|
||||
return cls.call_one_external_function(var_obj.val, var_obj.args)
|
||||
else:
|
||||
return str(var_obj.val)
|
||||
|
||||
@classmethod
|
||||
def _replace_dict_variables(cls, dic):
|
||||
"""Recursively evaluates all meta variables in a given dict."""
|
||||
for (key, value) in dic.items():
|
||||
# Keys don't get fuzzed, so we can handle them here
|
||||
match = re.search(cls.METAVAR, key)
|
||||
if match:
|
||||
replaced_key = match.group(0).strip("|")
|
||||
key_obj = cls._create_var_obj(replaced_key)
|
||||
replaced_key = cls.replace_one_variable(key_obj)
|
||||
new_key = re.sub(cls.METAVAR, replaced_key, key)
|
||||
del dic[key]
|
||||
dic[new_key] = value
|
||||
# Vals are fuzzed so they need to be passed to datagen as an object
|
||||
if isinstance(value, six.string_types):
|
||||
match = re.search(cls.METAVAR, value)
|
||||
if match:
|
||||
start, end = match.span()
|
||||
prefix = value[:start]
|
||||
suffix = value[end:]
|
||||
var_str = match.group(0).strip("|")
|
||||
val_obj = cls._create_var_obj(var_str, prefix, suffix)
|
||||
if key in dic:
|
||||
dic[key] = val_obj
|
||||
elif new_key in dic:
|
||||
dic[new_key] = val_obj
|
||||
elif isinstance(value, dict):
|
||||
cls._replace_dict_variables(value)
|
||||
return dic
|
||||
|
||||
@classmethod
|
||||
def _replace_str_variables(cls, string):
|
||||
"""Replaces all meta variable references in the string
|
||||
|
||||
For every meta variable reference found in the string, it generates
|
||||
a VariableObject. It then associates each VariableObject with a uuid,
|
||||
as a key-value pair, which is stored in the global dict variable
|
||||
`_string_var_objs`. It then replaces all meta variable references in the
|
||||
string with the uuid key to the VariableObject
|
||||
|
||||
:param str string: String to be evaluated
|
||||
:returns: string with all metavariable references replaced
|
||||
"""
|
||||
while True:
|
||||
match = re.search(cls.METAVAR, string)
|
||||
if not match:
|
||||
break
|
||||
obj_ref_uuid = str(uuid.uuid4()).replace("-", "")
|
||||
var_name = match.group(1).strip("|")
|
||||
var_obj = cls._create_var_obj(var_name)
|
||||
_string_var_objs[obj_ref_uuid] = var_obj
|
||||
string = re.sub(cls.METAVAR, obj_ref_uuid, string, count=1)
|
||||
return string
|
||||
|
||||
@classmethod
|
||||
def _parse_url_line(cls, line, endpoint):
|
||||
"""Split first line of an HTTP request into its components
|
||||
|
||||
:param str line: the first line of the HTTP request
|
||||
:param str endpoint: the full URL of the endpoint to test
|
||||
:rtype: tuple
|
||||
:returns: HTTP method, URL, request parameters, HTTP version
|
||||
"""
|
||||
valid_methods = ["GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE",
|
||||
"TRACE", "CONNECT", "PATCH"]
|
||||
params = {}
|
||||
method, url, version = line.split()
|
||||
url = url.split("?", 1)
|
||||
if len(url) == 2:
|
||||
for param in url[1].split("&"):
|
||||
param = param.split("=", 1)
|
||||
if len(param) > 1:
|
||||
params[param[0]] = param[1]
|
||||
else:
|
||||
params[param[0]] = ""
|
||||
url = url[0]
|
||||
url = urlparse.urljoin(endpoint, url)
|
||||
if method not in valid_methods:
|
||||
raise ValueError(_("Invalid HTTP method: %s") % method)
|
||||
return (method, cls._replace_str_variables(url),
|
||||
cls._replace_dict_variables(params), version)
|
||||
|
||||
@classmethod
|
||||
def _parse_headers(cls, lines):
|
||||
"""Find and return headers in HTTP request
|
||||
|
||||
:param str lines: All but the first line of the HTTP request (list)
|
||||
:rtype: dict
|
||||
:returns: headers as key:value pairs
|
||||
"""
|
||||
headers = {}
|
||||
for line in lines:
|
||||
key, value = line.split(":", 1)
|
||||
headers[key] = value.strip()
|
||||
return cls._replace_dict_variables(headers)
|
||||
|
||||
@classmethod
|
||||
def _parse_data(cls, lines, content_type=""):
|
||||
"""Parse the body of the HTTP request (e.g. POST variables)
|
||||
|
||||
:param list lines: lines of the HTTP body
|
||||
:param content_type: Content-type header in template if any
|
||||
|
||||
:returns: object representation of body data (JSON or XML)
|
||||
"""
|
||||
postdat_regex = r"([\w%]+=[\w%]+&?)+"
|
||||
data = "\n".join(lines).strip()
|
||||
data_type = "text"
|
||||
if not data:
|
||||
return '', None
|
||||
|
||||
try:
|
||||
data = json.loads(data)
|
||||
# TODO(cneill): Make this less hacky
|
||||
if isinstance(data, list):
|
||||
data = json.dumps(data)
|
||||
if isinstance(data, dict):
|
||||
return cls._replace_dict_variables(data), 'json'
|
||||
else:
|
||||
return cls._replace_str_variables(data), 'str'
|
||||
except TemplateParseException:
|
||||
raise
|
||||
except (TypeError, ValueError):
|
||||
if 'json' in content_type:
|
||||
msg = ("The Content-Type header in this template is %s but "
|
||||
"syntribos cannot parse the request body as json" %
|
||||
content_type)
|
||||
raise TemplateParseException(msg)
|
||||
try:
|
||||
data = ElementTree.fromstring(data)
|
||||
data_type = 'xml'
|
||||
except Exception:
|
||||
if 'xml' in content_type:
|
||||
msg = ("The Content-Type header in this template is %s "
|
||||
"but syntribos cannot parse the request body as xml"
|
||||
% content_type)
|
||||
raise TemplateParseException(msg)
|
||||
try:
|
||||
data = yaml.safe_load(data)
|
||||
data_type = 'yaml'
|
||||
except yaml.YAMLError:
|
||||
if 'yaml' in content_type:
|
||||
msg = ("The Content-Type header in this template is %s"
|
||||
"but syntribos cannot parse the request body as"
|
||||
"yaml"
|
||||
% content_type)
|
||||
raise TemplateParseException(msg)
|
||||
if not re.match(postdat_regex, data):
|
||||
raise TypeError(_("Make sure that your request body is"
|
||||
"valid JSON, XML, or YAML data - be "
|
||||
"sure to check for typos."))
|
||||
except Exception:
|
||||
raise
|
||||
return data, data_type
|
||||
|
||||
@classmethod
|
||||
def call_external_functions(cls, string):
|
||||
"""Parse external function calls in the body of request templates
|
||||
|
||||
:param str string: full HTTP request template as a string
|
||||
:rtype: str
|
||||
:returns: the request, with EXTERNAL calls filled in with their values
|
||||
or UUIDs
|
||||
"""
|
||||
if not isinstance(string, six.string_types):
|
||||
return string
|
||||
while True:
|
||||
match = re.search(cls.EXTERNAL, string)
|
||||
if not match:
|
||||
break
|
||||
dot_path = match.group(1)
|
||||
func_name = match.group(2)
|
||||
arg_list = match.group(3) or "[]"
|
||||
mod = importlib.import_module(dot_path)
|
||||
func = getattr(mod, func_name)
|
||||
args = json.loads(arg_list)
|
||||
val = func(*args)
|
||||
if isinstance(val, types.GeneratorType):
|
||||
local_uuid = str(uuid.uuid4()).replace("-", "")
|
||||
string = re.sub(cls.EXTERNAL, local_uuid, string, count=1)
|
||||
_iterators[local_uuid] = val
|
||||
else:
|
||||
string = re.sub(cls.EXTERNAL, str(val), string, count=1)
|
||||
return string
|
||||
|
||||
@classmethod
|
||||
def call_one_external_function(cls, string, args):
|
||||
"""Calls one function read in from templates and returns the result."""
|
||||
if not isinstance(string, six.string_types):
|
||||
return string
|
||||
match = re.search(cls.FUNC_NO_ARGS, string)
|
||||
func_string_has_args = False
|
||||
if not match:
|
||||
match = re.search(cls.FUNC_WITH_ARGS, string)
|
||||
func_string_has_args = True
|
||||
|
||||
if match:
|
||||
try:
|
||||
dot_path = match.group(1)
|
||||
func_name = match.group(2)
|
||||
mod = importlib.import_module(dot_path)
|
||||
func = getattr(mod, func_name)
|
||||
|
||||
if func_string_has_args and not args:
|
||||
arg_list = match.group(3)
|
||||
args = json.loads(arg_list)
|
||||
|
||||
val = func(*args)
|
||||
except Exception:
|
||||
raise
|
||||
else:
|
||||
try:
|
||||
func_lst = string.split(":")
|
||||
if len(func_lst) == 2:
|
||||
args = func_lst[1]
|
||||
func_str = func_lst[0]
|
||||
dot_path = ".".join(func_str.split(".")[:-1])
|
||||
func_name = func_str.split(".")[-1]
|
||||
mod = importlib.import_module(dot_path)
|
||||
func = getattr(mod, func_name)
|
||||
val = func(*args)
|
||||
except Exception:
|
||||
msg = _("The reference to the function %s failed to parse "
|
||||
"correctly, please check the documentation to ensure "
|
||||
"your function import string adheres to the proper "
|
||||
"format") % string
|
||||
raise TemplateParseException(msg)
|
||||
|
||||
if isinstance(val, types.GeneratorType):
|
||||
return str(six.next(val))
|
||||
else:
|
||||
return str(val)
|
||||
|
||||
|
||||
class VariableObject(object):
|
||||
VAR_TYPES = ["function", "generator", "config"]
|
||||
FUZZ_TYPES = ["int", "ascii", "url"]
|
||||
|
||||
def __init__(self, name, var_type="", args=[], val="", fuzz=True,
|
||||
fuzz_types=[], min_length=0, max_length=sys.maxsize,
|
||||
url_encode=False, prefix="", suffix="", **kwargs):
|
||||
if var_type and var_type.lower() not in self.VAR_TYPES:
|
||||
msg = _("The meta variable %(name)s has a type of %(var)s which "
|
||||
"syntribos does not"
|
||||
"recognize") % {'name': name, 'var': var_type}
|
||||
raise TemplateParseException(msg)
|
||||
|
||||
self.name = name
|
||||
self.var_type = var_type.lower()
|
||||
self.val = val
|
||||
self.args = args
|
||||
self.fuzz_types = fuzz_types
|
||||
self.fuzz = fuzz
|
||||
self.min_length = min_length
|
||||
self.max_length = max_length
|
||||
self.url_encode = url_encode
|
||||
self.prefix = prefix
|
||||
self.suffix = suffix
|
||||
self.function_return_value = None
|
||||
|
||||
def __repr__(self):
|
||||
return str(vars(self))
|
||||
|
||||
|
||||
class TemplateParseException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class RequestHelperMixin(object):
|
||||
"""Class that helps with fuzzing requests."""
|
||||
|
||||
def __init__(self):
|
||||
self.data = ""
|
||||
self.headers = ""
|
||||
self.params = ""
|
||||
self.data = ""
|
||||
self.url = ""
|
||||
|
||||
@classmethod
|
||||
def _run_iters(cls, data, action_field):
|
||||
"""Recursively fuzz variables in `data` and its children
|
||||
|
||||
:param data: The request data to be modified
|
||||
:param action_field: The name of the field to be replaced
|
||||
:returns: object or string with action_field fuzzed
|
||||
:rtype: `dict` OR `str` OR :class:`ElementTree.Element`
|
||||
"""
|
||||
if isinstance(data, dict):
|
||||
return cls._run_iters_dict(data, action_field)
|
||||
elif isinstance(data, ElementTree.Element):
|
||||
return cls._run_iters_xml(data, action_field)
|
||||
elif isinstance(data, VariableObject):
|
||||
return RequestCreator.replace_one_variable(data)
|
||||
elif isinstance(data, six.string_types):
|
||||
data = data.replace(action_field, "")
|
||||
return cls._replace_iter(data)
|
||||
else:
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def _run_iters_dict(cls, dic, action_field=""):
|
||||
"""Run fuzz iterators for a dict type."""
|
||||
for key, val in dic.items():
|
||||
dic[key] = val = cls._replace_iter(val)
|
||||
if isinstance(key, six.string_types):
|
||||
new_key = cls._replace_iter(key).replace(action_field, "")
|
||||
if new_key != key:
|
||||
del dic[key]
|
||||
dic[new_key] = val
|
||||
if isinstance(val, VariableObject):
|
||||
if key in dic:
|
||||
repl_val = RequestCreator.replace_one_variable(val)
|
||||
dic[key] = val.prefix + repl_val + val.suffix
|
||||
elif new_key in dic:
|
||||
repl_val = RequestCreator.replace_one_variable(val)
|
||||
dic[new_key] = val.prefix + repl_val + val.suffix
|
||||
if isinstance(val, dict):
|
||||
cls._run_iters_dict(val, action_field)
|
||||
elif isinstance(val, list):
|
||||
cls._run_iters_list(val, action_field)
|
||||
return dic
|
||||
|
||||
@classmethod
|
||||
def _run_iters_list(cls, val, action_field=""):
|
||||
"""Run fuzz iterators for a list type."""
|
||||
for i, v in enumerate(val):
|
||||
if isinstance(v, six.string_types):
|
||||
val[i] = v = cls._replace_iter(v).replace(action_field, "")
|
||||
if isinstance(v, VariableObject):
|
||||
val[i] = v = RequestCreator.replace_one_variable(v)
|
||||
elif isinstance(v, dict):
|
||||
val[i] = cls._run_iters_dict(v, action_field)
|
||||
elif isinstance(v, list):
|
||||
cls._run_iters_list(v, action_field)
|
||||
|
||||
@classmethod
|
||||
def _run_iters_xml(cls, ele, action_field=""):
|
||||
"""Run fuzz iterators for an XML element type."""
|
||||
if isinstance(ele.text, six.string_types):
|
||||
ele.text = cls._replace_iter(ele.text).replace(action_field, "")
|
||||
cls._run_iters_dict(ele.attrib, action_field)
|
||||
for i, v in enumerate(list(ele)):
|
||||
ele[i] = cls._run_iters_xml(v, action_field)
|
||||
return ele
|
||||
|
||||
@staticmethod
|
||||
def _string_data(data, data_type):
|
||||
"""Replace various objects types with string representations."""
|
||||
if data_type == 'json':
|
||||
return json.dumps(data)
|
||||
elif data_type == 'xml':
|
||||
if isinstance(data, str):
|
||||
return data
|
||||
str_data = ElementTree.tostring(data)
|
||||
# No way to stop tostring from HTML escaping even if we wanted
|
||||
h = html_parser.HTMLParser()
|
||||
return h.unescape(str_data.decode())
|
||||
elif data_type == 'yaml':
|
||||
return yaml.dump(data)
|
||||
else:
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def _replace_iter(string):
|
||||
"""Replaces action field IDs and meta-variable references."""
|
||||
if not isinstance(string, six.string_types):
|
||||
return string
|
||||
for k, v in list(_iterators.items()):
|
||||
if k in string:
|
||||
string = string.replace(k, six.next(v))
|
||||
for k, v in _string_var_objs.items():
|
||||
if k in string:
|
||||
str_val = str(RequestCreator.replace_one_variable(v))
|
||||
string = string.replace(k, str_val)
|
||||
return string
|
||||
|
||||
@staticmethod
|
||||
def _remove_braces(string):
|
||||
"""Remove braces from strings (in request templates)."""
|
||||
return re.sub(r"{([^}]*)}", "\\1", string)
|
||||
|
||||
@staticmethod
|
||||
def _remove_attr_names(string):
|
||||
"""removes identifiers from string substitution
|
||||
|
||||
If we are fuzzing example.com/{userid:123}, this method removes the
|
||||
identifier name so that the client only sees example.com/{123} when
|
||||
it sends the request
|
||||
"""
|
||||
return re.sub(r"(?!{urn:){[\w]+:", "{", string)
|
||||
|
||||
def prepare_request(self):
|
||||
"""Prepare a request for sending off
|
||||
|
||||
It should be noted this function does not make a request copy,
|
||||
destroying iterators in request. A copy should be made if making
|
||||
multiple requests.
|
||||
"""
|
||||
self.data = self._run_iters(self.data, self.action_field)
|
||||
self.headers = self._run_iters(self.headers, self.action_field)
|
||||
self.params = self._run_iters(self.params, self.action_field)
|
||||
self.data = self._string_data(self.data, self.data_type)
|
||||
self.url = self._run_iters(self.url, self.action_field)
|
||||
self.url = self._remove_braces(self._remove_attr_names(self.url))
|
||||
|
||||
def get_prepared_copy(self):
|
||||
"""Create a copy of `self`, and prepare it for use by a fuzzer
|
||||
|
||||
:returns: Copy of request object that has been prepared for sending
|
||||
:rtype: :class:`RequestHelperMixin`
|
||||
"""
|
||||
local_copy = copy.deepcopy(self)
|
||||
local_copy.prepare_request()
|
||||
return local_copy
|
||||
|
||||
def get_copy(self):
|
||||
return copy.deepcopy(self)
|
||||
|
||||
|
||||
class RequestObject(RequestHelperMixin):
|
||||
"""An object that holds information about an HTTP request.
|
||||
|
||||
:ivar str method: Request method
|
||||
:ivar str url: URL to request
|
||||
:ivar dict action_field: Action Fields
|
||||
:ivar dict headers: Dictionary of headers in name:value format
|
||||
:ivar dict params: Dictionary of params in name:value format
|
||||
:ivar data: Data to send as part of request body
|
||||
:ivar bool sanitize: Boolean variable used to filter secrets
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
method,
|
||||
url,
|
||||
action_field=None,
|
||||
headers=None,
|
||||
params=None,
|
||||
data=None,
|
||||
sanitize=False,
|
||||
data_type=None):
|
||||
self.method = method
|
||||
self.url = url
|
||||
self.action_field = action_field
|
||||
self.headers = headers
|
||||
self.params = params
|
||||
self.data = data
|
||||
self.sanitize = sanitize
|
||||
self.data_type = data_type
|
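To tie the pieces together, a hypothetical template parsed with RequestCreator; the endpoint, header values, and meta variables are invented, and the |token| config lookup is only resolved later, when the request is prepared for sending.

from syntribos.clients.http.parser import RequestCreator

template = (
    "POST /v1/widgets HTTP/1.1\n"
    "Content-Type: application/json\n"
    "X-Auth-Token: |token|\n"
    "\n"
    '{"name": "ACTION_FIELD:widget_name", "size": "|default_size|"}\n'
)
meta_vars = {
    "token": {"type": "config", "val": "user.token"},
    "default_size": {"val": "10"},
}

req = RequestCreator.create_request(template, "http://localhost:8080",
                                    meta_vars=meta_vars)
print(req.method, req.url)  # POST http://localhost:8080/v1/widgets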
@ -1,346 +0,0 @@
|
||||
# Copyright 2015-2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: skip-file
|
||||
import logging
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.utils.file_utils import ContentType
|
||||
from syntribos.utils.file_utils import ExistingDirType
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
OPTS_REGISTERED = False
|
||||
|
||||
|
||||
def handle_config_exception(exc):
|
||||
msg = ""
|
||||
|
||||
if not any(LOG.handlers):
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
if isinstance(exc, cfg.RequiredOptError):
|
||||
msg = "Missing option '{opt}'".format(opt=exc.opt_name)
|
||||
if exc.group:
|
||||
msg += " in group '{}'".format(exc.group)
|
||||
CONF.print_help()
|
||||
|
||||
elif isinstance(exc, cfg.ConfigFilesNotFoundError):
|
||||
if CONF._args[0] == "init":
|
||||
return
|
||||
|
||||
msg = (_("Configuration file specified ('%s') wasn't "
|
||||
"found or was unreadable.") % ",".join(
|
||||
CONF.config_file))
|
||||
|
||||
if msg:
|
||||
LOG.warning(msg)
|
||||
print(syntribos.SEP)
|
||||
else:
|
||||
LOG.exception(exc)
|
||||
|
||||
|
||||
syntribos_group = cfg.OptGroup(name="syntribos", title="Main syntribos Config")
|
||||
user_group = cfg.OptGroup(name="user", title="Identity Config")
|
||||
test_group = cfg.OptGroup(name="test", title="Test Config")
|
||||
logger_group = cfg.OptGroup(name="logging", title="Logger config")
|
||||
remote_group = cfg.OptGroup(name="remote", title="Remote config")
|
||||
|
||||
|
||||
def sub_commands(sub_parser):
|
||||
init_parser = sub_parser.add_parser(
|
||||
"init",
|
||||
help=_("Initialize syntribos environment after "
|
||||
"installation. Should be run before any other "
|
||||
"commands."))
|
||||
init_parser.add_argument(
|
||||
"--force", dest="force", action="store_true",
|
||||
help=_(
|
||||
"Skip prompts for configurable options, force initialization "
|
||||
"even if syntribos believes it has already been initialized. If "
|
||||
"--custom_root isn't specified, we will use the default "
|
||||
"options. WARNING: This is potentially destructive! Use with "
|
||||
"caution."))
|
||||
init_parser.add_argument(
|
||||
"--custom_install_root", dest="custom_install_root",
|
||||
help=_("(DEPRECATED) Skip prompts for configurable options, and "
|
||||
"initialize syntribos in the specified directory. Can be "
|
||||
"combined with --force to overwrite existing files."))
|
||||
init_parser.add_argument(
|
||||
"--no_downloads", dest="no_downloads", action="store_true",
|
||||
help=_("Disable the downloading of payload files as part of the "
|
||||
"initialization process"))
|
||||
|
||||
download_parser = sub_parser.add_parser(
|
||||
"download",
|
||||
help=_(
|
||||
"Download payload and template files. This command is "
|
||||
"configurable according to the remote section of your "
|
||||
"config file"))
|
||||
download_parser.add_argument(
|
||||
"--templates", dest="templates", action="store_true",
|
||||
help=_("Download templates"))
|
||||
download_parser.add_argument(
|
||||
"--payloads", dest="payloads", action="store_true",
|
||||
help=_("Download payloads"))
|
||||
|
||||
sub_parser.add_parser("list_tests",
|
||||
help=_("List all available tests"))
|
||||
sub_parser.add_parser("run",
|
||||
help=_("Run syntribos with given config"
|
||||
"options"))
|
||||
sub_parser.add_parser("dry_run",
|
||||
help=_("Dry run syntribos with given config"
|
||||
"options"))
|
||||
sub_parser.add_parser("root",
|
||||
help=_("Print syntribos root directory"))
|
||||
|
||||
|
||||
def list_opts():
|
||||
results = []
|
||||
results.append((None, list_cli_opts()))
|
||||
results.append((syntribos_group, list_syntribos_opts()))
|
||||
results.append((user_group, list_user_opts()))
|
||||
results.append((test_group, list_test_opts()))
|
||||
results.append((logger_group, list_logger_opts()))
|
||||
results.append((remote_group, list_remote_opts()))
|
||||
return results
|
||||
|
||||
|
||||
def register_opts():
|
||||
global OPTS_REGISTERED
|
||||
if not OPTS_REGISTERED:
|
||||
# CLI options
|
||||
CONF.register_cli_opts(list_cli_opts())
|
||||
# Syntribos options
|
||||
CONF.register_group(syntribos_group)
|
||||
CONF.register_cli_opts(list_syntribos_opts(), group=syntribos_group)
|
||||
# Keystone options
|
||||
CONF.register_group(user_group)
|
||||
CONF.register_opts(list_user_opts(), group=user_group)
|
||||
# Test options
|
||||
CONF.register_group(test_group)
|
||||
CONF.register_opts(list_test_opts(), group=test_group)
|
||||
# Logger options
|
||||
CONF.register_group(logger_group)
|
||||
CONF.register_opts(list_logger_opts(), group=logger_group)
|
||||
# Remote options
|
||||
CONF.register_group(remote_group)
|
||||
CONF.register_opts(list_remote_opts(), group=remote_group)
|
||||
OPTS_REGISTERED = True
|
||||
|
||||
|
||||
def list_payment_system_opts():
|
||||
return [
|
||||
cfg.StrOpt('ran', default='', help='Rackspace Account Number'),
|
||||
cfg.StrOpt('alt_ran', default='', help='Alternate RAN')
|
||||
]
|
||||
|
||||
|
||||
def list_cli_opts():
|
||||
return [
|
||||
cfg.SubCommandOpt(name="sub_command",
|
||||
handler=sub_commands,
|
||||
help=_("Available commands"),
|
||||
title="syntribos Commands"),
|
||||
cfg.MultiStrOpt("test-types", dest="test_types", short="t",
|
||||
default=[""], sample_default=["SQL", "XSS"],
|
||||
help=_(
|
||||
"Test types to run against the target API")),
|
||||
cfg.MultiStrOpt("excluded-types", dest="excluded_types", short="e",
|
||||
default=[""], sample_default=["SQL", "XSS"],
|
||||
help=_("Test types to be excluded from "
|
||||
"current run against the target API")),
|
||||
cfg.BoolOpt("colorize", dest="colorize", short="cl",
|
||||
default=True,
|
||||
help=_("Enable color in syntribos terminal output")),
|
||||
cfg.StrOpt("outfile", short="o",
|
||||
sample_default="out.json", help=_("File to print "
|
||||
"output to")),
|
||||
cfg.StrOpt("format", dest="output_format", short="f", default="json",
|
||||
choices=["json"], ignore_case=True,
|
||||
help=_("The format for outputting results")),
|
||||
cfg.StrOpt("min-severity", dest="min_severity", short="S",
|
||||
default="LOW", choices=syntribos.RANKING,
|
||||
help=_("Select a minimum severity for reported "
|
||||
"defects")),
|
||||
cfg.StrOpt("min-confidence", dest="min_confidence", short="C",
|
||||
default="LOW", choices=syntribos.RANKING,
|
||||
help=_("Select a minimum confidence for reported "
|
||||
"defects")),
|
||||
cfg.BoolOpt("stacktrace", dest="stacktrace", default=True,
|
||||
help=_("Select if Syntribos outputs a stacktrace "
|
||||
" if an exception is raised")),
|
||||
cfg.StrOpt(
|
||||
"custom_root", dest="custom_root",
|
||||
help=_("Filesystem location for syntribos root directory, "
|
||||
"containing logs, templates, payloads, config files. "
|
||||
"Creates directories and skips interactive prompts when "
|
||||
"used with 'syntribos init'"),
|
||||
deprecated_group="init", deprecated_name="custom_install_root")
|
||||
]
|
||||
|
||||
|
||||
def list_syntribos_opts():
|
||||
def wrap_try_except(func):
|
||||
def wrap(*args):
|
||||
try:
|
||||
func(*args)
|
||||
except IOError:
|
||||
msg = _(
|
||||
"\nCan't open a file or directory specified in the "
|
||||
"config file under the section `[syntribos]`; verify "
|
||||
"if the path exists.\nFor more information please refer "
|
||||
"the debug logs.")
|
||||
print(msg)
|
||||
exit(1)
|
||||
return wrap
|
||||
return [
|
||||
cfg.StrOpt("endpoint", default="",
|
||||
sample_default="http://localhost/app",
|
||||
help=_("The target host to be tested")),
|
||||
cfg.IntOpt("threads", default=16,
|
||||
sample_default="16",
|
||||
help=_("Maximum number of threads syntribos spawns "
|
||||
"(experimental)")),
|
||||
cfg.Opt("templates", type=ContentType("r"),
|
||||
default="",
|
||||
sample_default="~/.syntribos/templates",
|
||||
help=_("A directory of template files, or a single "
|
||||
"template file, to test on the target API")),
|
||||
cfg.StrOpt("payloads", default="",
|
||||
sample_default="~/.syntribos/data",
|
||||
help=_(
|
||||
"The location where we can find syntribos'"
|
||||
"payloads")),
|
||||
cfg.MultiStrOpt("exclude_results",
|
||||
default=[""],
|
||||
sample_default=["500_errors", "length_diff"],
|
||||
help=_(
|
||||
"Defect types to exclude from the "
|
||||
"results output")),
|
||||
cfg.Opt("custom_root", type=wrap_try_except(ExistingDirType()),
|
||||
short="c",
|
||||
sample_default="/your/custom/root",
|
||||
help=_(
|
||||
"The root directory where the subfolders that make up"
|
||||
" syntribos' environment (logs, templates, payloads, "
|
||||
"configuration files, etc.)"),
|
||||
deprecated_for_removal=True),
|
||||
cfg.StrOpt("meta_vars", sample_default="/path/to/meta.json",
|
||||
help=_(
|
||||
"The path to a meta variable definitions file, which "
|
||||
"will be used when parsing your templates")),
|
||||
]
|
||||
|
||||
|
||||
def list_user_opts():
|
||||
return [
|
||||
cfg.StrOpt("version", default="v2.0",
|
||||
help=_("keystone version"), choices=["v2.0", "v3"]),
|
||||
cfg.StrOpt("username", default="",
|
||||
help=_("keystone username")),
|
||||
cfg.StrOpt("password", default="",
|
||||
help=_("keystone user password"),
|
||||
secret=True),
|
||||
cfg.StrOpt("user_id", default="",
|
||||
help=_("Keystone user ID"), secret=True),
|
||||
cfg.StrOpt("token", default="", help=_("keystone auth token"),
|
||||
secret=True),
|
||||
cfg.StrOpt("endpoint", default="",
|
||||
help=_("keystone endpoint URI")),
|
||||
cfg.StrOpt("domain_name", default="",
|
||||
help=_("keystone domain name")),
|
||||
cfg.StrOpt("project_id", default="",
|
||||
help=_("keystone project id")),
|
||||
cfg.StrOpt("project_name", default="",
|
||||
help=_("keystone project name")),
|
||||
cfg.StrOpt("domain_id", default="",
|
||||
help=_("keystone domain id")),
|
||||
cfg.StrOpt("tenant_name", default="",
|
||||
help=_("keystone tenant name")),
|
||||
cfg.StrOpt("tenant_id", default="",
|
||||
help=_("keystone tenant id")),
|
||||
cfg.StrOpt("serialize_format", default="json",
|
||||
help=_("Type of request body")),
|
||||
cfg.StrOpt("deserialize_format", default="json",
|
||||
help=_("Type of response body")),
|
||||
cfg.IntOpt("token_ttl", default=1800,
|
||||
help=_("Time to live for token in seconds"))
|
||||
|
||||
]
|
||||
|
||||
|
||||
def list_test_opts():
|
||||
return [
|
||||
cfg.FloatOpt("length_diff_percent", default=1000.0,
|
||||
help=_(
|
||||
"Percentage difference between initial request "
|
||||
"and test request body length to trigger a signal")),
|
||||
cfg.FloatOpt("time_diff_percent", default=1000.0,
|
||||
help=_(
|
||||
"Percentage difference between initial response "
|
||||
"time and test response time to trigger a signal")),
|
||||
cfg.IntOpt("max_time", default=10,
|
||||
help=_(
|
||||
"Maximum absolute time (in seconds) to wait for a "
|
||||
"response before triggering a timeout signal")),
|
||||
cfg.IntOpt("max_length", default=500,
|
||||
help=_(
|
||||
"Maximum length (in characters) of the response text")),
|
||||
cfg.ListOpt("failure_keys", default="[`syntax error`]",
|
||||
help=_(
|
||||
"Comma seperated list of keys for which the test "
|
||||
"would fail."))
|
||||
]
|
||||
|
||||
|
||||
def list_logger_opts():
|
||||
# TODO(unrahul): Add log formatting and verbosity options
|
||||
return [
|
||||
cfg.BoolOpt("http_request_compression", default=True,
|
||||
help=_(
|
||||
"Request content compression to compress fuzz "
|
||||
"strings present in the http request content.")),
|
||||
cfg.StrOpt("log_dir", default="",
|
||||
sample_default="~/.syntribos/logs",
|
||||
help=_(
|
||||
"Where to save debug log files for a syntribos run"
|
||||
))
|
||||
]
|
||||
|
||||
|
||||
def list_remote_opts():
|
||||
"""Method defining remote URIs for payloads and templates."""
|
||||
return [
|
||||
cfg.StrOpt(
|
||||
"cache_dir",
|
||||
default="",
|
||||
help=_("Base directory where cached files can be saved")),
|
||||
cfg.StrOpt(
|
||||
"payloads_uri",
|
||||
default=("https://github.com/openstack/syntribos-payloads/"
|
||||
"archive/master.tar.gz"),
|
||||
help=_("Remote URI to download payloads.")),
|
||||
cfg.StrOpt(
|
||||
"templates_uri",
|
||||
default=("https://github.com/openstack/"
|
||||
"syntribos-openstack-templates/archive/master.tar.gz"),
|
||||
help=_("Remote URI to download templates.")),
|
||||
cfg.BoolOpt("enable_cache", default=True,
|
||||
help=_(
|
||||
"Cache remote template & payload resources locally")),
|
||||
]
|
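The option groups registered above correspond to sections of the syntribos config file; a minimal, hypothetical example (all values are placeholders):

[syntribos]
endpoint=http://localhost:8080
templates=/home/user/.syntribos/templates
payloads=/home/user/.syntribos/data

[user]
endpoint=http://localhost:5000/v3
username=demo
password=secret

[test]
max_time=10
length_diff_percent=1000.0

[logging]
log_dir=/home/user/.syntribos/logs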
@ -1,19 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
SEP = "=" * 126
|
||||
RANKING = ['UNDEFINED', 'LOW', 'MEDIUM', 'HIGH']
|
||||
RANKING_VALUES = {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 2, 'HIGH': 3}
|
||||
for rank in RANKING_VALUES:
|
||||
globals()[rank] = RANKING_VALUES[rank]
|
@ -1,27 +0,0 @@
|
||||
# Copyright 2018 Rackspace
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import base64
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def basic_auth(user_section='user'):
|
||||
password = CONF.get(user_section).password or CONF.user.password
|
||||
username = CONF.get(user_section).username or CONF.user.username
|
||||
encoded_creds = base64.b64encode(
|
||||
"{}:{}".format(username, password).encode())
|
||||
return "Basic %s" % encoded_creds.decode()
|
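A hypothetical usage sketch for ``basic_auth`` above, assuming the syntribos configuration (the ``[user]`` username and password) has already been loaded; the URL is a placeholder::

    import requests

    # Attach the encoded credentials as a standard Basic auth header.
    headers = {"Authorization": basic_auth("user")}
    resp = requests.get("http://example.com/v1/resource", headers=headers)
    print(resp.status_code)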
@ -1,101 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import random
|
||||
import string
|
||||
|
||||
from cinderclient.v2.client import Client
|
||||
from keystoneauth1 import identity
|
||||
from keystoneauth1 import session
|
||||
from oslo_config import cfg
|
||||
|
||||
from syntribos.utils.memoize import memoize
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def _get_client():
|
||||
"""Returns a v2 cinder client object."""
|
||||
auth_url = CONF.user.endpoint
|
||||
if auth_url.endswith("/v3/"):
|
||||
auth_url = auth_url[:-1]
|
||||
elif auth_url.endswith("/v3"):
|
||||
pass
|
||||
else:
|
||||
auth_url = "{}/v3".format(auth_url)
|
||||
auth = identity.v3.Password(auth_url=auth_url,
|
||||
project_name=CONF.user.project_name,
|
||||
project_domain_name=CONF.user.domain_name,
|
||||
user_domain_name=CONF.user.domain_name,
|
||||
username=CONF.user.username,
|
||||
password=CONF.user.password)
|
||||
return Client("2", session=session.Session(auth=auth))
|
||||
|
||||
|
||||
def create_volume(conn):
|
||||
volume = conn.volumes.create(name="sample_vol", size=1)
|
||||
return volume.id
|
||||
|
||||
|
||||
def list_volume_ids(conn):
|
||||
return [volume.id for volume in conn.volumes.list()]
|
||||
|
||||
|
||||
def create_volume_type(conn):
|
||||
vname = "".join(random.choice(string.ascii_lowercase) for _ in range(10))
|
||||
vtype = conn.volume_types.create(vname, "A new type of volume",
|
||||
is_public=True)
|
||||
return vtype.id
|
||||
|
||||
|
||||
def list_volume_type_ids(conn):
|
||||
return [volume.id for volume in conn.volume_types.list()]
|
||||
|
||||
|
||||
def create_snapshot(conn):
|
||||
volume_id = get_volume_id()
|
||||
snap_name = "".join(
|
||||
random.choice(string.ascii_lowercase) for _ in range(10))
|
||||
snapshot = conn.volume_snapshots.create(
|
||||
volume_id, name=snap_name, description="Test snapshot")
|
||||
return snapshot.id
|
||||
|
||||
|
||||
def list_snapshot_ids(conn):
|
||||
return [snapshot.id for snapshot in conn.volume_snapshots.list()]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_volume_id(create=False):
|
||||
cinder_client = _get_client()
|
||||
volume_ids = list_volume_ids(cinder_client)
|
||||
if create or not volume_ids:
|
||||
volume_ids.append(create_volume(cinder_client))
|
||||
return volume_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_volume_type_id(create=False):
|
||||
cinder_client = _get_client()
|
||||
vtype_ids = list_volume_type_ids(cinder_client)
|
||||
if create or not vtype_ids:
|
||||
vtype_ids.append(create_volume_type(cinder_client))
|
||||
return vtype_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_snapshot_id(create=False):
|
||||
cinder_client = _get_client()
|
||||
snapshot_ids = list_snapshot_ids(cinder_client)
|
||||
if create or not snapshot_ids:
|
||||
snapshot_ids.append(create_snapshot(cinder_client))
|
||||
return snapshot_ids[-1]
|
@ -1,88 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import base64
|
||||
import datetime
|
||||
import hashlib
|
||||
import hmac
|
||||
import logging
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def hash_it(data, hash_type="sha256"):
|
||||
"""Returns hashed value of data."""
|
||||
if hash_type == "sha1":
|
||||
hash_obj = hashlib.sha1()
|
||||
elif hash_type == "md5":
|
||||
hash_obj = hashlib.md5()
|
||||
else:
|
||||
hash_obj = hashlib.sha256()
|
||||
try:
|
||||
hash_obj.update(data.encode())
|
||||
return hash_obj.hexdigest()
|
||||
except (TypeError, AttributeError) as e:
|
||||
LOG.error("Couldn't hash the data, exception raised: %s", e)
|
||||
return hash(data)
|
||||
|
||||
|
||||
def hmac_it(data, key, hash_type="sha256"):
|
||||
"""Returns HMAC based on the hash algorithm, data and key."""
|
||||
if hash_type == "md5":
|
||||
hash_obj = hashlib.md5
|
||||
elif hash_type == "sha1":
|
||||
hash_obj = hashlib.sha1
|
||||
else:
|
||||
hash_obj = hashlib.sha256
|
||||
try:
|
||||
h_digest = hmac.new(key.encode(), data.encode(), hash_obj)
|
||||
return h_digest.hexdigest()
|
||||
except (TypeError, AttributeError) as e:
|
||||
LOG.error("Couldn't hash the data, exception raised: %s", e)
|
||||
|
||||
|
||||
def epoch_time(offset=0):
|
||||
"""Returns time since epoch."""
|
||||
try:
|
||||
return time.time() - offset
|
||||
except TypeError as e:
|
||||
LOG.error("Couldn't reduce offset, %s, from epoch time, ex %s.",
|
||||
offset, e)
|
||||
return time.time()
|
||||
|
||||
|
||||
def utc_datetime():
|
||||
"""Returns utc date time."""
|
||||
epoch = epoch_time()
|
||||
ts = datetime.datetime.fromtimestamp(epoch).strftime("%Y-%m-%d %H:%M:%S")
|
||||
return ts
|
||||
|
||||
|
||||
def base64_encode(data):
|
||||
"""Returns base 64 encoded value of data."""
|
||||
try:
|
||||
data = base64.b64encode(data.encode())
|
||||
except TypeError as e:
|
||||
LOG.error("Couldn't encode data to base64: %s", e)
|
||||
return data
|
||||
|
||||
|
||||
def url_encode(url):
|
||||
"""Returns encoded URL."""
|
||||
try:
|
||||
return six.moves.urllib.parse.quote_plus(url)
|
||||
except TypeError as e:
|
||||
LOG.error("Couldn't encode the URL: %s", e)
|
||||
return url
|
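A short usage sketch for the helpers above (the values shown in comments are illustrative)::

    digest = hash_it("payload-body")                        # sha256 hex digest
    mac = hmac_it("payload-body", "secret", hash_type="sha1")
    token = base64_encode("user:pass")                      # base64-encoded bytes
    quoted = url_encode("a b&c=d")                          # "a+b%26c%3Dd"
    stamp = utc_datetime()                                  # "2019-07-01 12:34:56" style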
@ -1,42 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from glanceclient.v2.client import Client
|
||||
from oslo_config import cfg
|
||||
|
||||
from syntribos.extensions.identity import client as id_client
|
||||
from syntribos.utils.memoize import memoize
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def _get_client():
|
||||
token = id_client.get_scoped_token_v3("user")
|
||||
return Client(endpoint=CONF.syntribos.endpoint, token=token)
|
||||
|
||||
|
||||
def create_image(conn):
|
||||
image = conn.images.create(name="sample_image")
|
||||
return image.id
|
||||
|
||||
|
||||
def list_image_ids(conn):
|
||||
return [image.id for image in conn.images.list()]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_image_id():
|
||||
glance_client = _get_client()
|
||||
image_ids = list_image_ids(glance_client)
|
||||
if not image_ids:
|
||||
image_ids.append(create_image(glance_client))
|
||||
return image_ids[-1]
|
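These getters are normally resolved through the syntribos extensions mechanism when referenced from request templates; called directly (assuming the identity and image endpoints in the config are reachable) they behave like this::

    image_id = get_image_id()   # creates a sample image if none exist
    same_id = get_image_id()    # cached by @memoize, no second API call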
@ -1,233 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
from requests import RequestException as RequestException
|
||||
|
||||
from syntribos.clients.http.client import SynHTTPClient
|
||||
import syntribos.extensions.identity.models.v2 as v2
|
||||
import syntribos.extensions.identity.models.v3 as v3
|
||||
from syntribos.utils.memoize import memoize
|
||||
|
||||
logging.basicConfig(level=logging.CRITICAL)
|
||||
LOG = logging.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def authenticate_v2(url,
|
||||
username=None,
|
||||
password=None,
|
||||
tenant_name=None,
|
||||
tenant_id=None,
|
||||
scoped=False,
|
||||
serialize_format="json",
|
||||
deserialize_format="json"):
|
||||
"""Creates auth request body and sends it to the given v2 endpoint.
|
||||
|
||||
:param str username: OpenStack username
|
||||
:param str password: OpenStack password
|
||||
:param str tenant_name: Name of tenant to which the user belongs
|
||||
:param str tenant_id: Id of the tenant
|
||||
:param bool scoped: Flag to retrieve scoped/unscoped tokens
|
||||
:param str serialize_format: Request body format (json/xml)
|
||||
:param str deserialize_format: Response body format (json/xml)
|
||||
"""
|
||||
headers = {}
|
||||
kwargs = {}
|
||||
password_creds = None
|
||||
if url.endswith('/v2.0/'):
|
||||
url = '{0}tokens'.format(url)
|
||||
elif url.endswith('/v2.0'):
|
||||
url = '{0}/tokens'.format(url)
|
||||
else:
|
||||
url = '{0}/v2.0/tokens'.format(url)
|
||||
headers["Content-Type"] = "application/{0}".format(serialize_format)
|
||||
headers["Accept"] = "application/{0}".format(deserialize_format)
|
||||
kwargs["tenant_name"] = tenant_name
|
||||
kwargs["tenant_id"] = tenant_id
|
||||
password_creds = v2.PasswordCredentials(
|
||||
username=username, password=password)
|
||||
if scoped:
|
||||
request_entity = v2.Auth(
|
||||
tenant_name=tenant_name,
|
||||
tenant_id=tenant_id,
|
||||
password_creds=password_creds)
|
||||
else:
|
||||
request_entity = v2.Auth(password_creds=password_creds)
|
||||
data = request_entity.serialize(serialize_format)
|
||||
try:
|
||||
resp, _ = SynHTTPClient().request(
|
||||
"POST", url, headers=headers, data=data, sanitize=True)
|
||||
r = resp.json()
|
||||
except RequestException as e:
|
||||
LOG.debug(e)
|
||||
else:
|
||||
if not r:
|
||||
raise Exception("Failed to authenticate")
|
||||
|
||||
if r['access'] is None:
|
||||
raise Exception("Failed to parse Auth response Body")
|
||||
return r['access']
|
||||
|
||||
|
||||
def authenticate_v2_config(user_section, scoped=False):
|
||||
"""Verifies minimum requirement for v2 auth."""
|
||||
endpoint = CONF.get(user_section).endpoint or CONF.user.endpoint
|
||||
password = CONF.get(user_section).password or CONF.user.password
|
||||
if not endpoint or not password:
|
||||
msg = "Required config parameters not present: {0}".format(
|
||||
[x for x in [endpoint, password] if not x])
|
||||
raise KeyError(msg)
|
||||
|
||||
return authenticate_v2(
|
||||
url=endpoint,
|
||||
username=CONF.get(user_section).username or CONF.user.username,
|
||||
password=password,
|
||||
tenant_name=CONF.get(user_section).tenant_name or
|
||||
CONF.user.tenant_name,
|
||||
tenant_id=CONF.get(user_section).tenant_id or CONF.user.tenant_id,
|
||||
scoped=scoped)
|
||||
|
||||
|
||||
@memoize
|
||||
def get_token_v2(user_section='user'):
|
||||
"""Returns unscoped v2 token."""
|
||||
access_data = authenticate_v2_config(user_section)
|
||||
return access_data['token']['id']
|
||||
|
||||
|
||||
@memoize
|
||||
def get_scoped_token_v2(user_section='user'):
|
||||
"""Returns scoped v2 token."""
|
||||
access_data = authenticate_v2_config(user_section, scoped=True)
|
||||
return access_data['token']['id']
|
||||
|
||||
|
||||
@memoize
|
||||
def get_tenant_id_v2(user_section='user'):
|
||||
"""Returns a tenant ID."""
|
||||
r = authenticate_v2_config(user_section, scoped=True)
|
||||
return r.json()["token"]["tenant"]["id"]
|
||||
|
||||
|
||||
def authenticate_v3(url,
|
||||
username=None,
|
||||
password=None,
|
||||
user_id=None,
|
||||
domain_id=None,
|
||||
domain_name=None,
|
||||
token=None,
|
||||
project_name=None,
|
||||
project_id=None,
|
||||
scoped=False,
|
||||
serialize_format="json",
|
||||
deserialize_format="json"):
|
||||
"""Creates auth request body and sends it to the given v3 endpoint.
|
||||
|
||||
:param str username: OpenStack username
|
||||
:param str password: OpenStack password
|
||||
:param str user_id: Id of the user
|
||||
:param str domain_name: Name of Domain the user belongs to
|
||||
:param str domain_id: Id of the domain
|
||||
:param str token: An auth token
|
||||
:param str project_name: Name of the project user is part of
|
||||
:param str project_id: Id of the project
|
||||
:param bool scoped: Flag to retrieve scoped/unscoped tokens
|
||||
:param str serialize_format: Request body format (json/xml)
|
||||
:param str deserialize_format: Response body format (json/xml)
|
||||
"""
|
||||
headers = {}
|
||||
kwargs = {}
|
||||
if url.endswith('/v3/'):
|
||||
url = '{0}auth/tokens'.format(url)
|
||||
elif url.endswith('/v3'):
|
||||
url = '{0}/auth/tokens'.format(url)
|
||||
else:
|
||||
url = '{0}/v3/auth/tokens'.format(url)
|
||||
headers["Content-Type"] = "application/json"
|
||||
headers["Accept"] = "application/json"
|
||||
if user_id:
|
||||
domain = None
|
||||
username = None
|
||||
else:
|
||||
domain = v3.Domain(name=domain_name, id_=domain_id)
|
||||
password = v3.Password(user=v3.User(
|
||||
name=username, password=password, id_=user_id, domain=domain))
|
||||
if token:
|
||||
kwargs = {"token": v3.Token(id_=token), "methods": ["token"]}
|
||||
else:
|
||||
kwargs = {"password": password, "methods": ["password"]}
|
||||
if scoped:
|
||||
if project_id:
|
||||
project_name = None
|
||||
domain = None
|
||||
elif domain is None:
|
||||
domain = v3.Domain(name=domain_name, id_=domain_id)
|
||||
project = v3.Project(name=project_name, id_=project_id, domain=domain)
|
||||
scope = v3.Scope(project=project, domain=domain)
|
||||
else:
|
||||
scope = None
|
||||
request_entity = v3.Auth(identity=v3.Identity(**kwargs), scope=scope)
|
||||
data = request_entity.serialize(serialize_format)
|
||||
try:
|
||||
r, _ = SynHTTPClient().request(
|
||||
"POST", url, headers=headers, data=data, sanitize=True)
|
||||
except RequestException as e:
|
||||
LOG.critical(e)
|
||||
else:
|
||||
if not r:
|
||||
raise Exception("Failed to authenticate")
|
||||
return r
|
||||
|
||||
|
||||
def authenticate_v3_config(user_section, scoped=False):
|
||||
"""Verifies minimum requirement for v3 auth."""
|
||||
endpoint = CONF.get(user_section).endpoint or CONF.user.endpoint
|
||||
if not endpoint:
|
||||
raise KeyError("Required config parameters not present: endpoint")
|
||||
return authenticate_v3(
|
||||
url=endpoint,
|
||||
username=CONF.get(user_section).username or CONF.user.username,
|
||||
password=CONF.get(user_section).password or CONF.user.password,
|
||||
user_id=CONF.get(user_section).user_id or CONF.user.user_id,
|
||||
domain_id=CONF.get(user_section).domain_id or CONF.user.domain_id,
|
||||
domain_name=CONF.get(user_section).domain_name or
|
||||
CONF.user.domain_name,
|
||||
token=CONF.get(user_section).token or CONF.user.token,
|
||||
project_name=CONF.get(user_section).project_name or
|
||||
CONF.user.project_name,
|
||||
project_id=CONF.get(user_section).project_id or CONF.user.project_id,
|
||||
scoped=scoped)
|
||||
|
||||
|
||||
@memoize
|
||||
def get_token_v3(user_section='user'):
|
||||
"""Returns an unscoped v3 token."""
|
||||
r = authenticate_v3_config(user_section)
|
||||
return r.headers["X-Subject-Token"]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_scoped_token_v3(user_section='user'):
|
||||
"""Returns a scoped v3 token."""
|
||||
r = authenticate_v3_config(user_section, scoped=True)
|
||||
return r.headers["X-Subject-Token"]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_project_id_v3(user_section='user'):
|
||||
"""Returns a project ID."""
|
||||
r = authenticate_v3_config(user_section, scoped=True)
|
||||
return r.json()["token"]["project"]["id"]
|
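A usage sketch, assuming the ``[user]`` section of the syntribos config (endpoint, username, password and domain/project settings) has been populated::

    # Scoped and unscoped Keystone v3 tokens come from the X-Subject-Token
    # header of the auth response.
    token = get_scoped_token_v3("user")
    project_id = get_project_id_v3("user")

    # The token can then be attached to requests against other services.
    headers = {"X-Auth-Token": token}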
@ -1,226 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import json
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
|
||||
class Namespaces(object):
|
||||
XMLNS_XSI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
XMLNS = "http://docs.openstack.org/identity/api/v2.0"
|
||||
|
||||
|
||||
class BaseIdentityModel(object):
|
||||
_namespaces = Namespaces
|
||||
|
||||
def __init__(self, kwargs):
|
||||
super(BaseIdentityModel, self).__init__()
|
||||
self._log = logging.getLogger(__name__)
|
||||
for k, v in kwargs.items():
|
||||
if k != "self" and not k.startswith("_"):
|
||||
setattr(self, k, v)
|
||||
|
||||
def serialize(self, format_type):
|
||||
try:
|
||||
serialize_method = '_obj_to_{0}'.format(format_type)
|
||||
return getattr(self, serialize_method)()
|
||||
except Exception as serialization_exception:
|
||||
self._log.error(
|
||||
'Error occurred during serialization of a data model into '
|
||||
'the "%s: \n%s" format',
|
||||
format_type, serialization_exception)
|
||||
self._log.exception(serialization_exception)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serialized_str, format_type):
|
||||
if serialized_str and len(serialized_str) > 0:
|
||||
try:
|
||||
deserialize_method = '_{0}_to_obj'.format(format_type)
|
||||
return getattr(cls, deserialize_method)(serialized_str)
|
||||
except Exception as deserialization_exception:
|
||||
cls._log.exception(deserialization_exception)
|
||||
cls._log.debug(
|
||||
"Deserialization Error: Attempted to deserialize type"
|
||||
" using type: {0}".format(format_type.decode(
|
||||
encoding='UTF-8', errors='ignore')))
|
||||
cls._log.debug(
|
||||
"Deserialization Error: Unable to deserialize the "
|
||||
"following:\n{0}".format(serialized_str.decode(
|
||||
encoding='UTF-8', errors='ignore')))
|
||||
|
||||
@classmethod
|
||||
def _remove_xml_namespaces(cls, element):
|
||||
"""Prunes namespaces from XML element
|
||||
|
||||
:param element: element to be trimmed
|
||||
:returns: element with namespaces trimmed
|
||||
:rtype: :class:`xml.etree.ElementTree.Element`
|
||||
"""
|
||||
for key, value in vars(cls._namespaces).items():
|
||||
if key.startswith("__"):
|
||||
continue
|
||||
element = cls._remove_xml_etree_namespace(element, value)
|
||||
return element
|
||||
|
||||
@classmethod
|
||||
def _json_to_obj(cls, serialized_str):
|
||||
data_dict = json.loads(serialized_str, strict=False)
|
||||
return cls._dict_to_obj(data_dict)
|
||||
|
||||
@classmethod
|
||||
def _xml_to_obj(cls, serialized_str, encoding="iso-8859-2"):
|
||||
parser = ET.XMLParser(encoding=encoding)
|
||||
element = ET.fromstring(serialized_str, parser=parser)
|
||||
return cls._xml_ele_to_obj(cls._remove_xml_namespaces(element))
|
||||
|
||||
def _obj_to_json(self):
|
||||
return json.dumps(self._obj_to_dict())
|
||||
|
||||
def _obj_to_xml(self):
|
||||
element = self._obj_to_xml_ele()
|
||||
element.attrib["xmlns"] = self._namespaces.XMLNS
|
||||
return ET.tostring(element)
|
||||
|
||||
# These next two functions must be defined by the child classes before
|
||||
# serializing
|
||||
def _obj_to_dict(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def _obj_to_xml_ele(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def _find(element, tag):
|
||||
"""Finds element with tag
|
||||
|
||||
:param element: :class:`xml.etree.ElementTree.Element`, the element
|
||||
through which to start searching
|
||||
:param tag: the tag to search for
|
||||
:returns: The element with tag `tag` if found, or a new element with
|
||||
tag None if not found
|
||||
:rtype: :class:`xml.etree.ElementTree.Element`
|
||||
"""
|
||||
if element is None:
|
||||
return ET.Element(None)
|
||||
new_element = element.find(tag)
|
||||
if new_element is None:
|
||||
return ET.Element(None)
|
||||
return new_element
|
||||
|
||||
@staticmethod
|
||||
def _build_list_model(data, field_name, model):
|
||||
"""Builds list of python objects from XML or json data
|
||||
|
||||
If data type is json, will find all json objects with `field_name` as
|
||||
key, and convert them into python objects of type `model`.
|
||||
If XML, will find all :class:`xml.etree.ElementTree.Element` with
|
||||
`field_name` as tag, and convert them into python objects of type
|
||||
`model`
|
||||
|
||||
:param data: Either json or XML object
|
||||
:param str field_name: json key or XML tag
|
||||
:param model: Class of objects to be returned
|
||||
:returns: list of `model` objects
|
||||
:rtype: `list`
|
||||
"""
|
||||
if data is None:
|
||||
return []
|
||||
if isinstance(data, dict):
|
||||
if data.get(field_name) is None:
|
||||
return []
|
||||
return [model._dict_to_obj(tmp) for tmp in data.get(field_name)]
|
||||
return [model._xml_ele_to_obj(tmp) for tmp in data.findall(field_name)]
|
||||
|
||||
@staticmethod
|
||||
def _build_list(items, element=None):
|
||||
"""Builds json object or xml element from model
|
||||
|
||||
Calls either :func:`item._obj_to_dict` or
|
||||
:func:`item._obj_to_xml_ele` on all objects in `items`, and either
|
||||
returns the dict objects as a list or appends `items` to `element`
|
||||
|
||||
:param items: list of objects for conversion
|
||||
:param element: The element to be appended, or None if json
|
||||
:returns: list of dicts if `element` is None or `element` otherwise.
|
||||
"""
|
||||
if element is None:
|
||||
if items is None:
|
||||
return []
|
||||
return [item._obj_to_dict() for item in items]
|
||||
else:
|
||||
if items is None:
|
||||
return element
|
||||
for item in items:
|
||||
element.append(item._obj_to_xml_ele())
|
||||
return element
|
||||
|
||||
@staticmethod
|
||||
def _create_text_element(name, text):
|
||||
"""Creates element with text data
|
||||
|
||||
:returns: new element with name `name` and text `text`
|
||||
:rtype: :class:`xml.etree.ElementTree.Element`
|
||||
"""
|
||||
element = ET.Element(name)
|
||||
if text is True or text is False:
|
||||
element.text = str(text).lower()
|
||||
elif text is None:
|
||||
return ET.Element(None)
|
||||
else:
|
||||
element.text = str(text)
|
||||
return element
|
||||
|
||||
def __ne__(self, obj):
|
||||
return not self.__eq__(obj)
|
||||
|
||||
@classmethod
|
||||
def _remove_empty_values(cls, data):
|
||||
"""Remove empty values
|
||||
|
||||
Returns a new dictionary based on `data`, minus any keys whose
|
||||
values are empty ([], {}, or None).
|
||||
|
||||
:param dict data: Dictionary to be pruned
|
||||
:returns: dictionary without empty values
|
||||
:rtype: `dict`
|
||||
"""
|
||||
if isinstance(data, dict):
|
||||
return dict(
|
||||
(k, v) for k, v in data.items() if v not in (
|
||||
[], {}, None))
|
||||
elif isinstance(data, ET.Element):
|
||||
if data.attrib:
|
||||
data.attrib = cls._remove_empty_values(data.attrib)
|
||||
data._children = [
|
||||
c for c in data._children if c.tag is not None and (
|
||||
c.attrib or c.text is not None or c._children)]
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def _get_sub_model(model, json=True):
|
||||
"""Converts object to json or XML
|
||||
|
||||
:param model: Object to convert
|
||||
:param boolean json: True if converting to json, false if XML
|
||||
"""
|
||||
if json:
|
||||
if model is not None:
|
||||
return model._obj_to_dict()
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
if model is not None:
|
||||
return model._obj_to_xml_ele()
|
||||
else:
|
||||
return ET.Element(None)
|
@ -1,242 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import json
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
import syntribos.extensions.identity.models.base
|
||||
|
||||
|
||||
class AuthResponse(
|
||||
syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self,
|
||||
token=None,
|
||||
service_catalog=None,
|
||||
user=None,
|
||||
metadata=None):
|
||||
super(AuthResponse, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
return cls(token=Token._dict_to_obj(data.get('token')),
|
||||
metadata=Metadata._dict_to_obj(data.get('metadata')),
|
||||
user=User._dict_to_obj(data.get('user')),
|
||||
service_catalog=cls._build_list_model(
|
||||
data, "serviceCatalog", Service))
|
||||
|
||||
@classmethod
|
||||
def _json_to_obj(cls, serialized_str):
|
||||
data_dict = json.loads(serialized_str)
|
||||
return cls._dict_to_obj(data_dict.get("access"))
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
return cls(
|
||||
service_catalog=cls._build_list_model(
|
||||
cls._find(data, "serviceCatalog"), "service", Service),
|
||||
token=Token._xml_ele_to_obj(cls._find(data, "token")),
|
||||
user=User._xml_ele_to_obj(cls._find(data, "user")))
|
||||
|
||||
def get_service(self, name):
|
||||
for service in self.service_catalog:
|
||||
if service.name == name:
|
||||
return service
|
||||
return None
|
||||
|
||||
|
||||
class Metadata(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
return data.attrib
|
||||
|
||||
|
||||
class Tenant(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, enabled=None, description=None, name=None, id_=None):
|
||||
super(Tenant, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
description = data.findtext('description')
|
||||
return cls(name=data.attrib.get("name"),
|
||||
id_=data.attrib.get("id"),
|
||||
enabled=True
|
||||
if data.attrib.get('enabled') == "true" else False,
|
||||
description=description)
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data_dict):
|
||||
return cls(description=data_dict.get('description'),
|
||||
enabled=data_dict.get('enabled'),
|
||||
id_=data_dict.get('id'),
|
||||
name=data_dict.get('name'))
|
||||
|
||||
|
||||
class Token(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, id_=None, issued_at=None, expires=None, tenant=None):
|
||||
super(Token, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
if data is None:
|
||||
return None
|
||||
return cls(id_=data.get('id'),
|
||||
expires=data.get('expires'),
|
||||
issued_at=data.get('issued_at'),
|
||||
tenant=Tenant._dict_to_obj(data.get('tenant', {})))
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
return cls(id_=data.attrib.get('id'),
|
||||
expires=data.attrib.get('expires'),
|
||||
issued_at=data.attrib.get('issued_at'),
|
||||
tenant=Tenant._xml_ele_to_obj(data.find('tenant')))
|
||||
|
||||
|
||||
class User(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, id_=None, name=None, username=None, roles=None):
|
||||
super(User, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
return cls(id_=data.get('id'),
|
||||
name=data.get('name'),
|
||||
username=data.get('username'),
|
||||
roles=cls._build_list_model(data, "roles", Role))
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
return cls(id_=data.attrib.get('id'),
|
||||
name=data.attrib.get('name'),
|
||||
username=data.attrib.get('username'),
|
||||
roles=cls._build_list_model(
|
||||
cls._find(data, "roles"), "role", Role))
|
||||
|
||||
|
||||
class Service(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, endpoints=None, name=None, type_=None):
|
||||
super(Service, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
return cls(
|
||||
endpoints=cls._build_list_model(data, "endpoints", Endpoint),
|
||||
name=data.get("name"),
|
||||
type_=data.get("type"))
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, data):
|
||||
return cls(endpoints=cls._build_list_model(data, "endpoint", Endpoint),
|
||||
name=data.attrib.get("name"),
|
||||
type_=data.attrib.get("type"))
|
||||
|
||||
|
||||
class Endpoint(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self,
|
||||
region=None,
|
||||
id_=None,
|
||||
public_url=None,
|
||||
admin_url=None,
|
||||
internal_url=None,
|
||||
private_url=None,
|
||||
version_id=None,
|
||||
version_info=None,
|
||||
version_list=None):
|
||||
super(Endpoint, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
return cls(region=data.get('region'),
|
||||
id_=data.get('Id'),
|
||||
public_url=data.get('publicURL'),
|
||||
private_url=data.get('privateURL'),
|
||||
admin_url=data.get('adminURL'),
|
||||
internal_url=data.get('internalURL'),
|
||||
version_id=data.get('versionId'),
|
||||
version_info=data.get('versionInfo'),
|
||||
version_list=data.get('versionList'))
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, ele):
|
||||
return cls(region=ele.attrib.get('region'),
|
||||
id_=ele.attrib.get('Id'),
|
||||
public_url=ele.attrib.get('publicURL'),
|
||||
private_url=ele.attrib.get('privateURL'),
|
||||
admin_url=ele.attrib.get('adminURL'),
|
||||
internal_url=ele.attrib.get('internalURL'),
|
||||
version_id=ele.attrib.get('versionId'),
|
||||
version_info=ele.attrib.get('versionInfo'),
|
||||
version_list=ele.attrib.get('versionList'))
|
||||
|
||||
|
||||
class Role(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self,
|
||||
id_=None,
|
||||
name=None,
|
||||
description=None,
|
||||
tenant_id=None,
|
||||
service_id=None):
|
||||
super(Role, self).__init__(locals())
|
||||
|
||||
@classmethod
|
||||
def _xml_ele_to_obj(cls, element):
|
||||
if element is None:
|
||||
return None
|
||||
return cls(id_=element.attrib.get("id"),
|
||||
name=element.attrib.get("name"),
|
||||
description=element.attrib.get("description"))
|
||||
|
||||
@classmethod
|
||||
def _dict_to_obj(cls, data):
|
||||
if data is None:
|
||||
return None
|
||||
return cls(id_=data.get("id"),
|
||||
name=data.get("name"),
|
||||
description=data.get("description"))
|
||||
|
||||
|
||||
class Auth(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, password_creds=None, tenant_id=None, tenant_name=None):
|
||||
super(Auth, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
dic = {}
|
||||
dic["passwordCredentials"] = self._get_sub_model(self.password_creds)
|
||||
dic["tenantId"] = self.tenant_id
|
||||
dic["tenantName"] = self.tenant_name
|
||||
return {"auth": self._remove_empty_values(dic)}
|
||||
|
||||
def _obj_to_xml_ele(self):
|
||||
ele = ET.Element("auth")
|
||||
ele.append(self._get_sub_model(self.password_creds, False))
|
||||
ele.attrib["tenantId"] = self.tenant_id
|
||||
return self._remove_empty_values(ele)
|
||||
|
||||
|
||||
class PasswordCredentials(
|
||||
syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
def __init__(self, username=None, password=None):
|
||||
super(PasswordCredentials, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
dic = {"username": self.username, "password": self.password}
|
||||
return self._remove_empty_values(dic)
|
||||
|
||||
def _obj_to_xml_ele(self):
|
||||
ele = ET.Element("passwordCredentials")
|
||||
ele.attrib["username"] = self.username
|
||||
ele.attrib["password"] = self.password
|
||||
return self._remove_empty_values(ele)
|
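A sketch of how these v2 models produce an auth request body (key order in the JSON output may vary)::

    import syntribos.extensions.identity.models.v2 as v2

    creds = v2.PasswordCredentials(username="demo", password="secret")
    auth = v2.Auth(password_creds=creds, tenant_name="demo-tenant")

    auth.serialize("json")
    # '{"auth": {"passwordCredentials": {"username": "demo",
    #            "password": "secret"}, "tenantName": "demo-tenant"}}'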
@ -1,103 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos.extensions.identity.models.base
|
||||
|
||||
|
||||
class Auth(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(
|
||||
self, identity=None, scope=None):
|
||||
super(Auth, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return {"auth": self._remove_empty_values({
|
||||
"identity": self._get_sub_model(self.identity),
|
||||
"scope": self._get_sub_model(self.scope)})}
|
||||
|
||||
|
||||
class Identity(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, token=None, password=None, methods=None):
|
||||
super(Identity, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"methods": self.methods or [],
|
||||
"password": self._get_sub_model(self.password),
|
||||
"token": self._get_sub_model(self.token)})
|
||||
|
||||
|
||||
class Password(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, user=None):
|
||||
super(Password, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"user": self._get_sub_model(self.user)})
|
||||
|
||||
|
||||
class User(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, id_=None, password=None, name=None, domain=None):
|
||||
super(User, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"id": self.id_,
|
||||
"password": self.password,
|
||||
"name": self.name,
|
||||
"domain": self._get_sub_model(self.domain)})
|
||||
|
||||
|
||||
class Token(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, id_=None):
|
||||
super(Token, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({"id": self.id_})
|
||||
|
||||
|
||||
class Scope(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, project=None, domain=None):
|
||||
super(Scope, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"project": self._get_sub_model(self.project)})
|
||||
|
||||
|
||||
class Domain(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, name=None, id_=None):
|
||||
super(Domain, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"name": self.name,
|
||||
"id": self.id_})
|
||||
|
||||
|
||||
class Project(syntribos.extensions.identity.models.base.BaseIdentityModel):
|
||||
|
||||
def __init__(self, name=None, id_=None, domain=None):
|
||||
super(Project, self).__init__(locals())
|
||||
|
||||
def _obj_to_dict(self):
|
||||
return self._remove_empty_values({
|
||||
"name": self.name,
|
||||
"id": self.id_,
|
||||
"domain": self._get_sub_model(self.domain)})
|
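The v3 models compose the same way; a sketch of a scoped password-auth body matching what ``authenticate_v3`` builds::

    import syntribos.extensions.identity.models.v3 as v3

    domain = v3.Domain(name="default")
    user = v3.User(name="demo", password="secret", domain=domain)
    identity = v3.Identity(password=v3.Password(user=user),
                           methods=["password"])
    scope = v3.Scope(project=v3.Project(name="demo-project", domain=domain))

    body = v3.Auth(identity=identity, scope=scope).serialize("json")
    # Produces the usual {"auth": {"identity": {...}, "scope": {...}}} document.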
@ -1,145 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from neutronclient.v2_0.client import Client
|
||||
from oslo_config import cfg
|
||||
|
||||
from syntribos.extensions.identity import client as id_client
|
||||
from syntribos.utils.memoize import memoize
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def _get_client():
|
||||
token = id_client.get_scoped_token_v3("user")
|
||||
return Client(endpoint=CONF.syntribos.endpoint, token=token)
|
||||
|
||||
|
||||
def create_network(conn):
|
||||
data = {"name": "sample_network",
|
||||
"admin_state_up": True}
|
||||
return conn.create_network({"network": data})
|
||||
|
||||
|
||||
def list_network_ids(conn):
|
||||
return [network["id"] for network in conn.list_networks()["networks"]]
|
||||
|
||||
|
||||
def create_subnet(conn, network_id):
|
||||
data = {"name": "sample_subnet",
|
||||
"network_id": network_id,
|
||||
"ip_version": 4,
|
||||
"cidr": "11.0.3.0/24"}
|
||||
return conn.create_subnet({"subnet": data})
|
||||
|
||||
|
||||
def list_subnet_ids(conn):
|
||||
subnet_ids = [subnet["id"] for subnet in conn.list_subnets()["subnets"]]
|
||||
return subnet_ids
|
||||
|
||||
|
||||
def create_port(conn, network_id):
|
||||
data = {"network_id": network_id,
|
||||
"name": "sample_port",
|
||||
"admin_state_up": True}
|
||||
return conn.create_port({"port": data})
|
||||
|
||||
|
||||
def list_port_ids(conn):
|
||||
port_ids = [port["id"] for port in conn.list_ports()["ports"]]
|
||||
return port_ids
|
||||
|
||||
|
||||
def create_security_group(conn):
|
||||
data = {"name": "new_servers",
|
||||
"description": "security group for servers"}
|
||||
return conn.create_security_group({"security_group": data})
|
||||
|
||||
|
||||
def list_security_group_ids(conn):
|
||||
sec_gp_ids = [sg["id"] for sg in conn.list_security_groups(
|
||||
)["security_groups"]]
|
||||
return sec_gp_ids
|
||||
|
||||
|
||||
def create_router(conn, network_id, subnet_id):
|
||||
# The network_id should be of an external network
|
||||
data = {
|
||||
"name": "router1",
|
||||
"external_gateway_info": {
|
||||
"network_id": network_id,
|
||||
"enable_snat": True,
|
||||
"external_fixed_ips": [
|
||||
{
|
||||
"ip_address": "172.24.4.6",
|
||||
"subnet_id": subnet_id
|
||||
}
|
||||
]
|
||||
},
|
||||
"admin_state_up": True
|
||||
}
|
||||
return conn.create_router({"router": data})
|
||||
|
||||
|
||||
def list_router_ids(conn):
|
||||
router_ids = [router["id"] for router in conn.list_routers()["routers"]]
|
||||
return router_ids
|
||||
|
||||
|
||||
@memoize
|
||||
def get_port_id():
|
||||
neutron_client = _get_client()
|
||||
port_ids = list_port_ids(neutron_client)
|
||||
if not port_ids:
|
||||
network_id = get_network_id()
|
||||
port_ids.append(create_port(neutron_client, network_id)["id"])
|
||||
return port_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_network_id():
|
||||
neutron_client = _get_client()
|
||||
network_ids = list_network_ids(neutron_client)
|
||||
if len(network_ids) < 3:
|
||||
network_ids.append(create_network(neutron_client)["id"])
|
||||
return network_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_subnet_id():
|
||||
neutron_client = _get_client()
|
||||
subnet_ids = list_subnet_ids(neutron_client)
|
||||
if not subnet_ids:
|
||||
network_id = get_network_id()
|
||||
subnet_ids.append(create_subnet(neutron_client, network_id)["id"])
|
||||
return subnet_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_sec_group_id():
|
||||
neutron_client = _get_client()
|
||||
sg_ids = list_security_group_ids(neutron_client)
|
||||
if not sg_ids:
|
||||
sg_ids.append(create_security_group(neutron_client)["id"])
|
||||
return sg_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_router_id():
|
||||
neutron_client = _get_client()
|
||||
router_ids = list_router_ids(neutron_client)
|
||||
if not router_ids:
|
||||
network_id = get_network_id()
|
||||
subnet_id = get_subnet_id()
|
||||
router_ids.append(
|
||||
create_router(neutron_client, network_id, subnet_id)["id"])
|
||||
return router_ids[-1]
|
@ -1,167 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from glanceclient.v2.client import Client as GC
|
||||
from keystoneauth1.identity import v3
|
||||
from keystoneauth1 import session
|
||||
from novaclient.client import Client
|
||||
from oslo_config import cfg
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from syntribos.extensions.identity import client as id_client
|
||||
from syntribos.utils.memoize import memoize
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def create_connection(auth_url=None,
|
||||
project_name=None,
|
||||
project_domain_name="default",
|
||||
user_domain_name="default",
|
||||
project_domain_id="default",
|
||||
user_domain_id="default",
|
||||
username=None,
|
||||
password=None):
|
||||
"""Method return a glance client."""
|
||||
|
||||
if auth_url.endswith("/v3/"):
|
||||
auth_url = auth_url[:-1]
|
||||
elif auth_url.endswith("/v3"):
|
||||
pass
|
||||
else:
|
||||
auth_url = "{}/v3".format(auth_url)
|
||||
auth = v3.Password(auth_url=auth_url,
|
||||
project_name=project_name,
|
||||
project_domain_name=project_domain_name,
|
||||
user_domain_name=user_domain_name,
|
||||
project_domain_id=project_domain_id,
|
||||
user_domain_id=user_domain_id,
|
||||
username=username,
|
||||
password=password)
|
||||
return Client("2", auth_url=CONF.user.endpoint,
|
||||
session=session.Session(auth=auth))
|
||||
|
||||
|
||||
def _get_client():
|
||||
# A keystone session is required so that the nova client can properly
|
||||
# discover the service URL.
|
||||
nova_client = create_connection(
|
||||
auth_url=CONF.user.endpoint,
|
||||
project_name=CONF.user.project_name,
|
||||
project_domain_name=CONF.user.domain_name,
|
||||
user_domain_name=CONF.user.domain_name,
|
||||
project_domain_id=CONF.user.domain_id,
|
||||
user_domain_id=CONF.user.domain_id,
|
||||
username=CONF.user.username,
|
||||
password=CONF.user.password)
|
||||
|
||||
return nova_client
|
||||
|
||||
|
||||
def list_hypervisor_ids(conn):
|
||||
return [hypervisor.id for hypervisor in conn.hypervisors.list()]
|
||||
|
||||
|
||||
def list_server_ids(conn):
|
||||
return [server.id for server in conn.servers.list()]
|
||||
|
||||
|
||||
def create_server(conn):
|
||||
token = id_client.get_scoped_token_v3("user")
|
||||
_url = urlparse.urlparse(CONF.syntribos.endpoint)
|
||||
endpoint = urlparse.urlunparse(
|
||||
(_url.scheme,
|
||||
_url.hostname + ":9292",
|
||||
_url.path,
|
||||
_url.params,
|
||||
_url.query,
|
||||
_url.fragment))
|
||||
_gc = GC(endpoint=endpoint, token=token)
|
||||
image = _gc.images.get(get_image_id())
|
||||
flavor = conn.flavors.get(get_flavor_id())
|
||||
server = conn.servers.create(
|
||||
name="test", flavor=flavor, image=image)
|
||||
|
||||
return server.id
|
||||
|
||||
|
||||
def list_flavor_ids(conn):
|
||||
return [flavor.id for flavor in conn.flavors.list()]
|
||||
|
||||
|
||||
def create_flavor(conn):
|
||||
flavor = conn.flavors.create(
|
||||
name="test", ram=1, vcpus=1, disk=1)
|
||||
return flavor.id
|
||||
|
||||
|
||||
def list_aggregate_ids(conn):
|
||||
return [aggregate.id for aggregate in conn.aggregates.list()]
|
||||
|
||||
|
||||
def create_aggregate(conn):
|
||||
aggregate = conn.aggregates.create(
|
||||
name="test", availability_zone="test_zone")
|
||||
return aggregate.id
|
||||
|
||||
|
||||
@memoize
|
||||
def get_hypervisor_id():
|
||||
nova_client = _get_client()
|
||||
hypervisor_ids = list_hypervisor_ids(nova_client)
|
||||
return hypervisor_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_image_id():
|
||||
token = id_client.get_scoped_token_v3("user")
|
||||
_url = urlparse.urlparse(CONF.syntribos.endpoint)
|
||||
endpoint = urlparse.urlunparse(
|
||||
(_url.scheme,
|
||||
_url.hostname + ":9292",
|
||||
_url.path,
|
||||
_url.params,
|
||||
_url.query,
|
||||
_url.fragment))
|
||||
_gc = GC(endpoint=endpoint, token=token)
|
||||
image_ids = [image.id for image in _gc.images.list()]
|
||||
if not image_ids:
|
||||
image_ids.append(_gc.images.create(name="test"))
|
||||
|
||||
return image_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_server_id():
|
||||
nova_client = _get_client()
|
||||
server_ids = list_server_ids(nova_client)
|
||||
if not server_ids:
|
||||
server_ids.append(create_server(nova_client))
|
||||
return server_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_flavor_id():
|
||||
nova_client = _get_client()
|
||||
flavor_ids = list_flavor_ids(nova_client)
|
||||
if not flavor_ids:
|
||||
flavor_ids.append(create_flavor(nova_client))
|
||||
return flavor_ids[-1]
|
||||
|
||||
|
||||
@memoize
|
||||
def get_aggregate_id():
|
||||
nova_client = _get_client()
|
||||
aggregate_ids = list_aggregate_ids(nova_client)
|
||||
if not aggregate_ids:
|
||||
aggregate_ids.append(create_aggregate(nova_client))
|
||||
return aggregate_ids[-1]
|
@ -1,95 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import datetime
|
||||
import random
|
||||
import string
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import six
|
||||
|
||||
|
||||
def get_uuid():
|
||||
"""Generates strings to use where random or unique data is required.
|
||||
|
||||
:returns: universally unique identifiers
|
||||
"""
|
||||
while True:
|
||||
random_data = str(uuid.uuid4())
|
||||
yield random_data
|
||||
|
||||
|
||||
def fake_port():
|
||||
return random.randint(0, 65535)
|
||||
|
||||
|
||||
def fake_ip():
|
||||
return "{}:{}:{}:{}".format(random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255))
|
||||
|
||||
|
||||
def fake_mac():
|
||||
return "{:x}:{:x}:{:x}:{:x}:{:x}:{:x}".format(random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255),
|
||||
random.randint(0, 255))
|
||||
|
||||
|
||||
def random_port():
|
||||
while True:
|
||||
yield fake_port()
|
||||
|
||||
|
||||
def random_ip():
|
||||
while True:
|
||||
yield fake_ip()
|
||||
|
||||
|
||||
def random_mac():
|
||||
while True:
|
||||
yield fake_mac()
|
||||
|
||||
|
||||
def random_string(n=10, string_type="lower"):
|
||||
if string_type == "lower":
|
||||
string_type = string.ascii_lowercase
|
||||
elif string_type == "upper":
|
||||
string_type = string.ascii_uppercase
|
||||
else:
|
||||
string_type = string.ascii_letters
|
||||
while True:
|
||||
r = "".join(random.choice(string_type) for _ in range(n))
|
||||
yield r
|
||||
|
||||
|
||||
def random_integer(beg=0, end=1478029570):
|
||||
# The default value of end is a valid epoch time; this is done so that
|
||||
# a random integer can then be used to generate a random epoch as well.
|
||||
while True:
|
||||
yield random.randint(beg, end)
|
||||
|
||||
|
||||
def random_utc_datetime():
|
||||
"""Returns random utc date time."""
|
||||
while True:
|
||||
offset = six.next(random_integer())
|
||||
epoch = time.time() - offset
|
||||
ts = datetime.datetime.fromtimestamp(epoch).strftime(
|
||||
"%Y-%m-%d %H:%M:%S")
|
||||
yield ts
|
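Each helper above that wraps a ``while True: yield`` loop is a generator; one value is drawn with ``six.next`` (or ``next``), for example (values in comments are illustrative)::

    import six

    port = six.next(random_port())            # e.g. 40213
    addr = six.next(random_ip())              # e.g. "10.57.3.121"
    name = six.next(random_string(8))         # e.g. "qwhzkcpa"
    stamp = six.next(random_utc_datetime())   # e.g. "2016-11-01 13:22:05"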
@ -1,34 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: skip-file
|
||||
import json
|
||||
|
||||
|
||||
class JSONFormatter(object):
|
||||
|
||||
def __init__(self, results):
|
||||
self.results = results
|
||||
|
||||
def report(self, output):
|
||||
output = json.dumps(output, sort_keys=True, cls=SetEncoder,
|
||||
indent=2, separators=(',', ': '))
|
||||
|
||||
self.results.stream.write(output)
|
||||
|
||||
|
||||
class SetEncoder(json.JSONEncoder):
|
||||
def default(self, obj):
|
||||
if isinstance(obj, set):
|
||||
return list(obj)
|
||||
return json.JSONEncoder.default(self, obj)
|
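``SetEncoder`` lets ``json.dumps`` handle ``set`` values, which the stock encoder rejects, by converting them to lists::

    import json

    data = {"count": 2, "strings": {"derp"}}
    json.dumps(data, cls=SetEncoder, sort_keys=True)
    # '{"count": 2, "strings": ["derp"]}'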
@ -1,119 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
class Issue(object):
|
||||
|
||||
"""Object that encapsulates a security vulnerability
|
||||
|
||||
This object is designed to hold the metadata associated with
|
||||
a vulnerability.
|
||||
|
||||
:ivar defect_type: The type of vulnerability that Syntribos believes it has
|
||||
found. This may be something like 500 error or DoS, regardless of what
|
||||
the Test Type is.
|
||||
:ivar severity: "Low", "Medium", or "High", depending on the defect
|
||||
:ivar description: Description of the defect
|
||||
:ivar confidence: The confidence of the defect
|
||||
:ivar request: The request object sent that generated this defect
|
||||
:ivar response: The response object returned after sending the request
|
||||
:ivar target: A hostname/IP/etc. to be tested
|
||||
:ivar path: A specific REST API method, i.e. a URL path associated with a
|
||||
Target.
|
||||
:ivar test_type: The type of vulnerability that is being tested for. This
|
||||
is not necessarily the same as the Defect Type, which may be something
|
||||
like 500 error or DoS.
|
||||
:ivar content_type: The content-type of the unmodified request
|
||||
:ivar impacted_parameter: For fuzz tests only, a
|
||||
:class:`syntribos.tests.fuzz.base_fuzz.ImpactedParameter` that holds
|
||||
data about what part of the request was affected by the fuzz test.
|
||||
"""
|
||||
|
||||
def __init__(self, defect_type, severity, description, confidence,
|
||||
request=None, response=None, impacted_parameter=None,
|
||||
init_signals=[], test_signals=[], diff_signals=[]):
|
||||
self.defect_type = defect_type
|
||||
self.severity = severity
|
||||
self.description = description
|
||||
self.confidence = confidence
|
||||
self.request = request
|
||||
self.response = response
|
||||
self.impacted_parameter = impacted_parameter
|
||||
self.init_signals = init_signals
|
||||
self.test_signals = test_signals
|
||||
self.diff_signals = diff_signals
|
||||
|
||||
def as_dict(self):
|
||||
"""Convert the issue to a dict of values for outputting.
|
||||
|
||||
:rtype: `dict`
|
||||
:returns: dictionary of issue data
|
||||
"""
|
||||
out = {
|
||||
'issue_target': self.target,
|
||||
'issue_path': self.path,
|
||||
'issue_defect_type': self.defect_type,
|
||||
'issue_test_type': self.test_type,
|
||||
'issue_severity': self.severity,
|
||||
'issue_description': self.description,
|
||||
'issue_confidence': self.confidence
|
||||
}
|
||||
|
||||
if self.impacted_parameter:
|
||||
out['impacted_parameter'] = self.impacted_parameter.as_dict()
|
||||
|
||||
return out
|
||||
|
||||
def get_details(self):
|
||||
"""Returns the most relevant information needed for output.
|
||||
|
||||
:rtype: `dict`
|
||||
:returns: dictionary of issue details
|
||||
"""
|
||||
return {
|
||||
'description': self.description,
|
||||
'confidence': self.confidence,
|
||||
'severity': self.severity
|
||||
}
|
||||
|
||||
def request_as_dict(self, req):
|
||||
"""Convert the request object to a dict of values for outputting.
|
||||
|
||||
:param req: The request object
|
||||
:rtype: `dict`
|
||||
:returns: dictionary of HTTP request data
|
||||
"""
|
||||
return {
|
||||
'url': req.path_url,
|
||||
'method': req.method,
|
||||
'headers': dict(req.headers),
|
||||
'body': req.body,
|
||||
'cookies': req._cookies.get_dict()
|
||||
}
|
||||
|
||||
def response_as_dict(self, res):
|
||||
"""Convert the response object to a dict of values for outputting.
|
||||
|
||||
:param res: The result object
|
||||
:rtype: `dict`
|
||||
:returns: dictionary of HTTP response data
|
||||
"""
|
||||
return {
|
||||
'status_code': res.status_code,
|
||||
'reason': res.reason,
|
||||
'url': res.url,
|
||||
'headers': dict(res.headers),
|
||||
'cookies': res.cookies.get_dict(),
|
||||
'text': res.text
|
||||
}
|
@ -1,279 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import unittest
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.formatters.json_formatter import JSONFormatter
|
||||
import syntribos.utils.remotes
|
||||
|
||||
CONF = cfg.CONF
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class IssueTestResult(unittest.TextTestResult):
|
||||
"""Custom unnittest results holder class
|
||||
|
||||
This class aggregates :class:`syntribos.issue.Issue` objects from all the
|
||||
tests as they run
|
||||
"""
|
||||
raw_issues = []
|
||||
output = {"failures": {}, "errors": [], "stats": {}}
|
||||
output["stats"]["severity"] = {
|
||||
"UNDEFINED": 0,
|
||||
"LOW": 0,
|
||||
"MEDIUM": 0,
|
||||
"HIGH": 0
|
||||
}
|
||||
stats = {"errors": 0, "unique_failures": 0, "successes": 0}
|
||||
severity_counter_dict = {}
|
||||
testsRunSinceLastPrint = 0
|
||||
failure_id = 0
|
||||
|
||||
def addFailure(self, test, err):
|
||||
"""Adds issues to data structures
|
||||
|
||||
Appends issues to the result's list of failures and updates the
|
||||
stats for the result. Each failure in the list of failures takes the
|
||||
form:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"url": "host.com/blah",
|
||||
"type": "500_error",
|
||||
"description": "500 errors r bad, mkay?",
|
||||
"failure_id": 1234,
|
||||
"instances": [
|
||||
{
|
||||
"confidence": "HIGH",
|
||||
"param": {
|
||||
"location": "headers",
|
||||
"method": "POST",
|
||||
"variables": [
|
||||
"Content-Type"
|
||||
]
|
||||
},
|
||||
"strings": [
|
||||
"derp"
|
||||
],
|
||||
"severity": "LOW",
|
||||
"signals": {
|
||||
"diff_signals": [],
|
||||
"init_signals": [],
|
||||
"test_signals": []
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
:param test: The test that has failed
|
||||
:type test: :class:`syntribos.tests.base.BaseTestCase`
|
||||
:param tuple err: Tuple of format ``(type, value, traceback)``
|
||||
"""
|
||||
lock.acquire()
|
||||
for issue in test.failures:
|
||||
self.raw_issues.append(issue)
|
||||
defect_type = issue.defect_type
|
||||
if any([
|
||||
True for x in CONF.syntribos.exclude_results
|
||||
if x and x in defect_type
|
||||
]):
|
||||
continue
|
||||
|
||||
min_sev = syntribos.RANKING_VALUES[CONF.min_severity]
|
||||
min_conf = syntribos.RANKING_VALUES[CONF.min_confidence]
|
||||
if issue.severity < min_sev or issue.confidence < min_conf:
|
||||
continue
|
||||
|
||||
target = issue.target
|
||||
path = issue.path
|
||||
url = "{0}{1}".format(target, path)
|
||||
description = issue.description
|
||||
failure_obj = None
|
||||
|
||||
for f in self.failures:
|
||||
if (f["url"] == url and f["defect_type"] == defect_type and
|
||||
f["description"] == description):
|
||||
failure_obj = f
|
||||
break
|
||||
if not failure_obj:
|
||||
failure_obj = {
|
||||
"url": url,
|
||||
"defect_type": defect_type,
|
||||
"description": description,
|
||||
"failure_id": self.failure_id,
|
||||
"instances": []
|
||||
}
|
||||
self.failures.append(failure_obj)
|
||||
self.failure_id += 1
|
||||
|
||||
signals = {}
|
||||
if issue.init_signals:
|
||||
signals["init_signals"] = set(
|
||||
[s.slug for s in issue.init_signals])
|
||||
if issue.test_signals:
|
||||
signals["test_signals"] = set(
|
||||
[s.slug for s in issue.test_signals])
|
||||
if issue.diff_signals:
|
||||
signals["diff_signals"] = set(
|
||||
[s.slug for s in issue.diff_signals])
|
||||
sev_rating = syntribos.RANKING[issue.severity]
|
||||
conf_rating = syntribos.RANKING[issue.confidence]
|
||||
|
||||
if issue.impacted_parameter:
|
||||
method = issue.impacted_parameter.method
|
||||
loc = issue.impacted_parameter.location
|
||||
name = issue.impacted_parameter.name
|
||||
content_type = issue.content_type
|
||||
payload_string = issue.impacted_parameter.trunc_fuzz_string
|
||||
|
||||
param = {
|
||||
"method": method,
|
||||
"location": loc,
|
||||
}
|
||||
if loc == "data":
|
||||
param["type"] = content_type
|
||||
|
||||
instance_obj = None
|
||||
for i in failure_obj["instances"]:
|
||||
if (i["confidence"] == conf_rating and
|
||||
i["severity"] == sev_rating and
|
||||
i["param"]["method"] == method and
|
||||
i["param"]["location"] == loc):
|
||||
|
||||
i["param"]["variables"].add(name)
|
||||
for sig_type in signals:
|
||||
if sig_type in i["signals"]:
|
||||
i["signals"][sig_type].update(signals[
|
||||
sig_type])
|
||||
else:
|
||||
i["signals"][sig_type] = signals[sig_type]
|
||||
i["strings"].add(payload_string)
|
||||
instance_obj = i
|
||||
break
|
||||
|
||||
if not instance_obj:
|
||||
param["variables"] = set([name])
|
||||
instance_obj = {
|
||||
"confidence": conf_rating,
|
||||
"severity": sev_rating,
|
||||
"param": param,
|
||||
"strings": set([payload_string]),
|
||||
"signals": signals
|
||||
}
|
||||
failure_obj["instances"].append(instance_obj)
|
||||
self.stats["unique_failures"] += 1
|
||||
self.output["stats"]["severity"][sev_rating] += 1
|
||||
else:
|
||||
instance_obj = None
|
||||
for i in failure_obj["instances"]:
|
||||
if (i["confidence"] == conf_rating and
|
||||
i["severity"] == sev_rating):
|
||||
for sig_type in signals:
|
||||
if sig_type in i["signals"]:
|
||||
i["signals"][sig_type].update(signals[
|
||||
sig_type])
|
||||
else:
|
||||
i["signals"][sig_type] = signals[sig_type]
|
||||
instance_obj = i
|
||||
break
|
||||
if not instance_obj:
|
||||
instance_obj = {
|
||||
"confidence": conf_rating,
|
||||
"severity": sev_rating,
|
||||
"signals": signals
|
||||
}
|
||||
failure_obj["instances"].append(instance_obj)
|
||||
self.stats["unique_failures"] += 1
|
||||
self.output["stats"]["severity"][sev_rating] += 1
|
||||
lock.release()
|
||||
|
||||
def addError(self, test, err):
|
||||
"""Duplicates parent class addError functionality.
|
||||
|
||||
:param test: The test that encountered an error
|
||||
:type test: :class:`syntribos.tests.base.BaseTestCase`
|
||||
:param tuple err: Tuple of format ``(type, value, traceback)``
|
||||
"""
|
||||
with lock:
|
||||
err_str = "{}: {}".format(err[0].__name__, str(err[1]))
|
||||
for e in self.errors:
|
||||
if e['error'] == err_str:
|
||||
if self.getDescription(test) in e['test']:
|
||||
return
|
||||
e['test'].append(self.getDescription(test))
|
||||
self.stats["errors"] += 1
|
||||
return
|
||||
stacktrace = traceback.format_exception(*err, limit=0)
|
||||
_e = {
|
||||
"test": [self.getDescription(test)],
|
||||
"error": err_str
|
||||
}
|
||||
if CONF.stacktrace:
|
||||
_e["stacktrace"] = [x.strip() for x in stacktrace]
|
||||
self.errors.append(_e)
|
||||
self.stats["errors"] += 1
|
||||
|
||||
def addSuccess(self, test):
|
||||
"""Duplicates parent class addSuccess functionality.
|
||||
|
||||
:param test: The test that was run
|
||||
:type test: :class:`syntribos.tests.base.BaseTestCase`
|
||||
"""
|
||||
with lock:
|
||||
self.stats["successes"] += 1
|
||||
|
||||
def printErrors(self, output_format):
|
||||
"""Print out each :class:`syntribos.issue.Issue` that was encountered
|
||||
|
||||
:param str output_format: "json"
|
||||
"""
|
||||
self.output["errors"] = self.errors
|
||||
self.output["failures"] = self.failures
|
||||
formatter_types = {"json": JSONFormatter(self)}
|
||||
formatter = formatter_types[output_format.lower()]
|
||||
formatter.report(self.output)
|
||||
|
||||
def print_result(self, start_time, log_path=None):
|
||||
"""Prints test summary/stats (e.g. # failures) to stdout."""
|
||||
self.printErrors(CONF.output_format)
|
||||
self.print_log_path_and_stats(start_time, log_path)
|
||||
|
||||
def print_log_path_and_stats(self, start_time, log_path):
|
||||
"""Print the path to the log folder for this run."""
|
||||
run_time = time.time() - start_time
|
||||
num_fail = self.stats["unique_failures"]
|
||||
num_err = self.stats["errors"]
|
||||
print("\n{sep}\nTotal: Ran {num} test{suff} in {time:.3f}s".format(
|
||||
sep=syntribos.SEP,
|
||||
num=self.testsRun,
|
||||
suff="s" * bool(self.testsRun - 1),
|
||||
time=run_time))
|
||||
print("Total: {f} unique failure{fsuff} "
|
||||
"and {e} unique error{esuff}".format(
|
||||
f=num_fail,
|
||||
e=num_err,
|
||||
fsuff="s" * bool(num_fail - 1),
|
||||
esuff="s" * bool(num_err - 1)))
|
||||
if log_path:
|
||||
print(syntribos.SEP)
|
||||
print(_("LOG PATH...: %s") % log_path)
|
||||
print(syntribos.SEP)
|
@ -1,513 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import unittest
|
||||
from multiprocessing.dummy import Pool as ThreadPool
|
||||
|
||||
from oslo_config import cfg
|
||||
from six.moves import input
|
||||
|
||||
import syntribos.config
|
||||
import syntribos.result
|
||||
import syntribos.tests as tests
|
||||
import syntribos.tests.base
|
||||
from syntribos._i18n import _
|
||||
from syntribos.formatters.json_formatter import JSONFormatter
|
||||
from syntribos.utils import cleanup
|
||||
from syntribos.utils import cli as cli
|
||||
from syntribos.utils import env as ENV
|
||||
from syntribos.utils import remotes
|
||||
from syntribos.utils.file_utils import ContentType
|
||||
|
||||
result = None
|
||||
user_base_dir = None
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class Runner(object):
|
||||
"""The core engine of syntribos.
|
||||
|
||||
This class is composed of a set of class methods that form the core of
syntribos. These include methods to list tests, load test modules,
perform dry runs, and run tests.
|
||||
"""
|
||||
|
||||
log_path = ""
|
||||
current_test_id = 1000
|
||||
|
||||
@classmethod
|
||||
def list_tests(cls):
|
||||
"""Print out the list of available tests types that can be run."""
|
||||
print(_("List of available tests...:\n"))
|
||||
print("{:<50}{}\n".format(_("[Test Name]"),
|
||||
_("[Description]")))
|
||||
testdict = {name: clss.__doc__ for name, clss in cls.get_tests()}
|
||||
for test in sorted(testdict):
|
||||
if testdict[test] is None:
|
||||
raise Exception(
|
||||
_("No test description provided"
|
||||
" as doc string for the test: %s") % test)
|
||||
else:
|
||||
test_description = testdict[test].split(".")[0]
|
||||
print("{test:<50}{desc}\r".format(
|
||||
test=test, desc=test_description))
|
||||
print("\n")
|
||||
|
||||
@classmethod
|
||||
def load_modules(cls, package):
|
||||
"""Imports all tests (:mod:`syntribos.tests`)
|
||||
|
||||
:param package: a package of tests for pkgutil to load
|
||||
"""
|
||||
for i, modname, k in pkgutil.walk_packages(
|
||||
path=package.__path__,
|
||||
prefix=package.__name__ + '.',
|
||||
onerror=lambda x: None):
|
||||
__import__(modname, fromlist=[])
|
||||
|
||||
@classmethod
|
||||
def get_tests(cls, test_types=None, excluded_types=None, dry_run=False):
|
||||
"""Yields relevant tests based on test type
|
||||
|
||||
:param list test_types: Test types to be run
|
||||
|
||||
:rtype: tuple
|
||||
:returns: (test type (str), ``syntribos.tests.base.TestType``)
|
||||
"""
|
||||
|
||||
cls.load_modules(tests)
|
||||
test_types = test_types or [""]
|
||||
excluded_types = excluded_types or [""]
|
||||
items = sorted((syntribos.tests.base.test_table).items())
|
||||
# If it's a dry run, only return the debug test
|
||||
if dry_run:
|
||||
return (x for x in items if "DEBUG" in x[0])
|
||||
# Otherwise, don't run the debug test at all
|
||||
else:
|
||||
excluded_types.append("DEBUG")
|
||||
included = []
|
||||
# Only include tests allowed by value in -t params
|
||||
for t in test_types:
|
||||
included += [x for x in items if t in x[0]]
|
||||
# Exclude any tests that meet the above but are excluded by -e params
|
||||
for e in excluded_types:
|
||||
if e:
|
||||
included = [x for x in included if e not in x[0]]
|
||||
return (i for i in included)
|
||||
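# Filtering sketch (illustrative; the names below are made up, not real
# syntribos test types): ``-t``/``-e`` values are matched as substrings of
# the registered test names, so given
#   items = [("ANY_FUZZ_HEADERS", A), ("ANY_FUZZ_BODY", B), ("XSS_BODY", C)]
# calling get_tests(test_types=["FUZZ"], excluded_types=["HEADERS"])
# would keep only ("ANY_FUZZ_BODY", B): "FUZZ" selects the first two and
# "HEADERS" then removes the first.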
|
||||
@classmethod
|
||||
def get_logger(cls, template_name):
|
||||
"""Updates the logger handler for LOG."""
|
||||
template_name = template_name.replace(os.path.sep, "::")
|
||||
template_name = template_name.replace(".", "_")
|
||||
log_file = "{0}.log".format(template_name)
|
||||
if not cls.log_path:
|
||||
cls.log_path = ENV.get_log_dir_name()
|
||||
log_file = os.path.join(cls.log_path, log_file)
|
||||
log_handle = logging.FileHandler(log_file, 'w')
|
||||
LOG = logging.getLogger()
|
||||
LOG.handlers = [log_handle]
|
||||
LOG.setLevel(logging.DEBUG)
|
||||
logging.getLogger("urllib3").setLevel(logging.WARNING)
|
||||
return LOG
|
||||
|
||||
@classmethod
|
||||
def setup_config(cls, use_file=False, argv=None):
|
||||
"""Register CLI options & parse config file."""
|
||||
if argv is None:
|
||||
argv = sys.argv[1:]
|
||||
try:
|
||||
syntribos.config.register_opts()
|
||||
if use_file:
|
||||
# Parsing the args first in case a custom_install_root
|
||||
# was specified.
|
||||
CONF(argv, default_config_files=[])
|
||||
CONF(argv, default_config_files=[ENV.get_default_conf_file()])
|
||||
else:
|
||||
CONF(argv, default_config_files=[])
|
||||
except Exception as exc:
|
||||
syntribos.config.handle_config_exception(exc)
|
||||
if cls.worker:
|
||||
raise exc
|
||||
else:
|
||||
sys.exit(1)
|
||||
|
||||
@classmethod
|
||||
def setup_runtime_env(cls):
|
||||
"""Sets up the environment for a current test run.
|
||||
|
||||
This includes registering / parsing config options, creating the
|
||||
timestamped log directory and the results log file, if specified
|
||||
"""
|
||||
# Setup logging
|
||||
cls.log_path = ENV.get_log_dir_name()
|
||||
if not os.path.isdir(cls.log_path):
|
||||
os.makedirs(cls.log_path)
|
||||
|
||||
# Create results file if any, otherwise use sys.stdout
|
||||
if CONF.outfile:
|
||||
cls.output = open(CONF.outfile, "w")
|
||||
else:
|
||||
cls.output = sys.stdout
|
||||
|
||||
@classmethod
|
||||
def get_meta_vars(cls, file_path):
|
||||
"""Creates the appropriate meta_var dict for the given file path
|
||||
|
||||
Meta variables are inherited according to directory. This function
|
||||
builds a meta variable dict from the top down.
|
||||
|
||||
:param file_path: the path of the current template
|
||||
:returns: `dict` of meta variables
|
||||
"""
|
||||
meta_vars = {}
|
||||
if CONF.syntribos.meta_vars:
|
||||
with open(CONF.syntribos.meta_vars, "r") as f:
|
||||
conf_meta_vars = json.loads(f.read())
|
||||
for k, v in conf_meta_vars.items():
|
||||
meta_vars[k] = v
|
||||
return meta_vars
|
||||
|
||||
path_segments = [""] + os.path.dirname(file_path).split(os.sep)
|
||||
current_path = ""
|
||||
for seg in path_segments:
|
||||
current_path = os.path.join(current_path, seg)
|
||||
if current_path in cls.meta_dir_dict:
|
||||
for k, v in cls.meta_dir_dict[current_path].items():
|
||||
meta_vars[k] = v
|
||||
return meta_vars
|
||||
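# Inheritance sketch (illustrative paths, assuming no meta_vars file is set
# in the config): with
#   cls.meta_dir_dict = {"": {"base": "1"},
#                        "api": {"base": "2", "token": "abc"}}
# get_meta_vars("api/create.template") walks "" and then "api", so the
# deeper directory wins and the result is {"base": "2", "token": "abc"}.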
|
||||
@classmethod
|
||||
def run(cls, argv=sys.argv[1:], worker=False):
|
||||
"""Method sets up logger and decides on Syntribos control flow
|
||||
|
||||
This is the method where control flow of Syntribos is decided
|
||||
based on the commands entered. Depending upon commands such
|
||||
as ```list_tests``` or ```run``` the respective method is called.
|
||||
"""
|
||||
global result
|
||||
cls.worker = worker
|
||||
# If we are initializing, don't look for a default config file
|
||||
if "init" in sys.argv:
|
||||
cls.setup_config()
|
||||
else:
|
||||
cls.setup_config(use_file=True, argv=argv)
|
||||
try:
|
||||
if CONF.sub_command.name == "init":
|
||||
cli.print_symbol()
|
||||
ENV.initialize_syntribos_env()
|
||||
exit(0)
|
||||
|
||||
elif CONF.sub_command.name == "list_tests":
|
||||
cli.print_symbol()
|
||||
cls.list_tests()
|
||||
exit(0)
|
||||
|
||||
elif CONF.sub_command.name == "download":
|
||||
cli.print_symbol()
|
||||
ENV.download_wrapper()
|
||||
exit(0)
|
||||
|
||||
elif CONF.sub_command.name == "root":
|
||||
print(ENV.get_syntribos_root())
|
||||
exit(0)
|
||||
|
||||
except AttributeError:
|
||||
print(
|
||||
_(
|
||||
"Not able to run the requested sub command, please check "
|
||||
"the debug logs for more information, exiting..."))
|
||||
exit(1)
|
||||
|
||||
if not ENV.is_syntribos_initialized():
|
||||
print(_("Syntribos was not initialized. Please run the 'init'"
|
||||
" command or set it up manually. See the README for"
|
||||
" more information about the installation process."))
|
||||
exit(1)
|
||||
|
||||
cls.setup_runtime_env()
|
||||
|
||||
decorator = unittest.runner._WritelnDecorator(cls.output)
|
||||
result = syntribos.result.IssueTestResult(decorator, True, verbosity=1)
|
||||
|
||||
cls.start_time = time.time()
|
||||
if CONF.sub_command.name == "run":
|
||||
list_of_tests = list(
|
||||
cls.get_tests(CONF.test_types, CONF.excluded_types))
|
||||
elif CONF.sub_command.name == "dry_run":
|
||||
dry_run_output = {"failures": [], "successes": []}
|
||||
list_of_tests = list(cls.get_tests(dry_run=True))
|
||||
|
||||
print(_("\nRunning Tests...:"))
|
||||
templates_dir = CONF.syntribos.templates
|
||||
if templates_dir is None:
|
||||
if cls.worker:
|
||||
raise Exception("No templates directory was found in the "
|
||||
"config file.")
|
||||
else:
|
||||
print(_("Attempting to download templates from {}").format(
|
||||
CONF.remote.templates_uri))
|
||||
templates_path = remotes.get(CONF.remote.templates_uri)
|
||||
try:
|
||||
templates_dir = ContentType("r")(templates_path)
|
||||
except IOError:
|
||||
print(_("Not able to open `%s`; please verify path, "
|
||||
"exiting...") % templates_path)
|
||||
exit(1)
|
||||
|
||||
print(_("\nPress Ctrl-C to pause or exit...\n"))
|
||||
meta_vars = None
|
||||
templates_dir = list(templates_dir)
|
||||
cls.meta_dir_dict = {}
|
||||
for file_path, file_content in templates_dir:
|
||||
if os.path.basename(file_path) == "meta.json":
|
||||
meta_path = os.path.dirname(file_path)
|
||||
try:
|
||||
cls.meta_dir_dict[meta_path] = json.loads(file_content)
|
||||
except json.decoder.JSONDecodeError:
|
||||
_full_path = os.path.abspath(file_path)
|
||||
print(syntribos.SEP)
|
||||
print(
|
||||
"\n"
|
||||
"*** The JSON parser raised an exception when parsing "
|
||||
"{}. Check that the file contains "
|
||||
"correctly formatted JSON data. ***\n".format(
|
||||
_full_path)
|
||||
)
|
||||
for file_path, req_str in templates_dir:
|
||||
if "meta.json" in file_path:
|
||||
continue
|
||||
meta_vars = cls.get_meta_vars(file_path)
|
||||
LOG = cls.get_logger(file_path)
|
||||
CONF.log_opt_values(LOG, logging.DEBUG)
|
||||
if not file_path.endswith(".template"):
|
||||
LOG.warning('file.....:%s (SKIPPED - not a .template file)',
|
||||
file_path)
|
||||
continue
|
||||
|
||||
test_names = [t for (t, i) in list_of_tests] # noqa
|
||||
log_string = ''.join([
|
||||
'\n{0}\nTEMPLATE FILE\n{0}\n'.format('-' * 12),
|
||||
'file.......: {0}\n'.format(file_path),
|
||||
'tests......: {0}\n'.format(test_names)
|
||||
])
|
||||
LOG.debug(log_string)
|
||||
print(syntribos.SEP)
|
||||
print("Template File...: {}".format(file_path))
|
||||
print(syntribos.SEP)
|
||||
|
||||
if CONF.sub_command.name == "run":
|
||||
cls.run_given_tests(list_of_tests, file_path,
|
||||
req_str, meta_vars)
|
||||
elif CONF.sub_command.name == "dry_run":
|
||||
cls.dry_run(list_of_tests, file_path,
|
||||
req_str, dry_run_output, meta_vars)
|
||||
|
||||
if CONF.sub_command.name == "run":
|
||||
result.print_result(cls.start_time, cls.log_path)
|
||||
cls.result = result
|
||||
cleanup.delete_temps()
|
||||
elif CONF.sub_command.name == "dry_run":
|
||||
cls.dry_run_report(dry_run_output)
|
||||
|
||||
@classmethod
|
||||
def dry_run(cls, list_of_tests, file_path, req_str, output,
|
||||
meta_vars=None):
|
||||
"""Runs debug test to check all steps leading up to executing a test
|
||||
|
||||
This method does not run any checks, but does parse the template files
|
||||
and config options. It then runs a debug test which sends no requests
|
||||
of its own.
|
||||
|
||||
Note: if any external calls referenced inside the template file do make
|
||||
requests, the parser will still make those requests even for a dry run
|
||||
|
||||
:param str file_path: Path of the template file
|
||||
:param str req_str: Request string of each template
|
||||
|
||||
:return: None
|
||||
"""
|
||||
for k, test_class in list_of_tests: # noqa
|
||||
try:
|
||||
print("\nParsing template file...\n")
|
||||
test_class.create_init_request(file_path, req_str, meta_vars)
|
||||
except Exception as e:
|
||||
print("\nError in parsing template:\n \t{0}\n".format(
|
||||
traceback.format_exc()))
|
||||
LOG.error("Error in parsing template:")
|
||||
output["failures"].append({
|
||||
"file": file_path,
|
||||
"error": e.__str__()
|
||||
})
|
||||
else:
|
||||
print(_("\nRequest sucessfully generated!\n"))
|
||||
output["successes"].append(file_path)
|
||||
|
||||
test_cases = list(
|
||||
test_class.get_test_cases(file_path, req_str, meta_vars)
|
||||
)
|
||||
if len(test_cases) > 0:
|
||||
for test in test_cases:
|
||||
if test:
|
||||
cls.run_test(test)
|
||||
|
||||
@classmethod
|
||||
def dry_run_report(cls, output):
|
||||
"""Reports the dry run through a formatter."""
|
||||
formatter_types = {
|
||||
"json": JSONFormatter(result),
|
||||
}
|
||||
formatter = formatter_types[CONF.output_format]
|
||||
formatter.report(output)
|
||||
|
||||
test_log = cls.log_path
|
||||
print(syntribos.SEP)
|
||||
print(_("LOG PATH...: {path}").format(path=test_log))
|
||||
print(syntribos.SEP)
|
||||
|
||||
@classmethod
|
||||
def run_given_tests(cls, list_of_tests, file_path, req_str,
|
||||
meta_vars=None):
|
||||
"""Loads all the templates and runs all the given tests
|
||||
|
||||
This method calls run_test method to run each of the tests one
|
||||
by one.
|
||||
|
||||
:param list list_of_tests: A list of all the loaded tests
|
||||
:param str file_path: Path of the template file
|
||||
:param str req_str: Request string of each template
|
||||
|
||||
:return: None
|
||||
"""
|
||||
pool = ThreadPool(CONF.syntribos.threads)
|
||||
try:
|
||||
template_start_time = time.time()
|
||||
failures = 0
|
||||
errors = 0
|
||||
print("\n ID \t\tTest Name \t\t\t\t\t\t Progress")
|
||||
for test_name, test_class in list_of_tests:
|
||||
test_class.test_id = cls.current_test_id
|
||||
cls.current_test_id += 5
|
||||
|
||||
result_string = "[{test_id}] : {name}".format(
|
||||
test_id=cli.colorize(
|
||||
test_class.test_id, color="green"),
|
||||
name=test_name.replace("_", " ").capitalize())
|
||||
if not CONF.colorize:
|
||||
result_string = result_string.ljust(55)
|
||||
else:
|
||||
result_string = result_string.ljust(60)
|
||||
try:
|
||||
test_class.create_init_request(file_path, req_str,
|
||||
meta_vars)
|
||||
except Exception:
|
||||
print(_(
|
||||
"Error in parsing template:\n %s\n"
|
||||
) % traceback.format_exc())
|
||||
LOG.error("Error in parsing template:")
|
||||
break
|
||||
test_cases = list(
|
||||
test_class.get_test_cases(file_path, req_str, meta_vars))
|
||||
total_tests = len(test_cases)
|
||||
if total_tests > 0:
|
||||
log_string = "[{test_id}] : {name}".format(
|
||||
test_id=test_class.test_id, name=test_name)
|
||||
LOG.debug(log_string)
|
||||
last_failures = result.stats['unique_failures']
|
||||
last_errors = result.stats['errors']
|
||||
p_bar = cli.ProgressBar(
|
||||
message=result_string, total_len=total_tests)
|
||||
test_class.send_init_request(file_path, req_str, meta_vars)
|
||||
|
||||
# This line runs the tests
|
||||
pool.map(lambda t: cls.run_test(t, p_bar), test_cases)
|
||||
|
||||
failures = result.stats['unique_failures'] - last_failures
|
||||
errors = result.stats['errors'] - last_errors
|
||||
failures_str = cli.colorize_by_percent(
|
||||
failures, total_tests)
|
||||
|
||||
if errors:
|
||||
errors_str = cli.colorize(errors, "red")
|
||||
print(_(
|
||||
" : %(fail)s Failure(s), %(err)s Error(s)\r") % {
|
||||
"fail": failures_str, "err": errors_str})
|
||||
else:
|
||||
print(_(
|
||||
" : %s Failure(s), 0 Error(s)\r") % failures_str)
|
||||
|
||||
run_time = time.time() - template_start_time
|
||||
LOG.info(_("Run time: %s sec."), run_time)
|
||||
if hasattr(result, "testsRun"):
|
||||
num_tests = result.testsRun - result.testsRunSinceLastPrint
|
||||
print(_("\nRan %(num)s test(s) in %(time).3f s\n") %
|
||||
{"num": num_tests, "time": run_time})
|
||||
result.testsRunSinceLastPrint = result.testsRun
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print(_(
|
||||
'\n\nPausing...Hit ENTER to continue, type quit to exit.'))
|
||||
try:
|
||||
response = input()
|
||||
if response.lower() == "quit":
|
||||
result.print_result(cls.start_time)
|
||||
cleanup.delete_temps()
|
||||
print(_("Exiting..."))
|
||||
pool.close()
|
||||
pool.join()
|
||||
exit(0)
|
||||
print(_('Resuming...'))
|
||||
except KeyboardInterrupt:
|
||||
result.print_result(cls.start_time)
|
||||
cleanup.delete_temps()
|
||||
print(_("Exiting..."))
|
||||
pool.close()
|
||||
pool.join()
|
||||
exit(0)
|
||||
|
||||
@classmethod
|
||||
def run_test(cls, test, p_bar=None):
|
||||
"""Create a new test suite, add a test, and run it
|
||||
|
||||
:param test: The test to add to the suite
|
||||
:param result: The result object to append to
|
||||
:type result: :class:`syntribos.result.IssueTestResult`
|
||||
"""
|
||||
if test:
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(test("run_test_case"))
|
||||
suite.run(result)
|
||||
if p_bar:
|
||||
with lock:
|
||||
p_bar.increment(1)
|
||||
p_bar.print_bar()
|
||||
|
||||
|
||||
def entry_point():
|
||||
"""Start runner. Need this so we can point to it in ``setup.cfg``."""
|
||||
Runner.run()
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
entry_point()
|
@ -1,265 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import six
|
||||
|
||||
from syntribos._i18n import _
|
||||
|
||||
|
||||
class SignalHolder(object):
|
||||
"""SignalHolder represents a 'set' of SynSignals.
|
||||
|
||||
:ivar list signals: Collection of :class:`SynSignal`
|
||||
:ivar list all_slugs: Collection of slugs in `signals` for fast search
|
||||
"""
|
||||
|
||||
def __init__(self, signals=None):
|
||||
"""The SignalHolder can be initialized with a set of signals
|
||||
|
||||
:param signals: Collection of signals (added with `self.register()`)
|
||||
:type signals: :class:`SynSignal` OR :class:`SignalHolder` OR `list`
|
||||
"""
|
||||
self.signals = []
|
||||
self.all_slugs = []
|
||||
|
||||
if signals is not None:
|
||||
self.register(signals)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.signals[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if not isinstance(value, SynSignal):
|
||||
raise TypeError()
|
||||
|
||||
if value.strength == 0:
|
||||
return
|
||||
|
||||
if value.slug not in self.all_slugs:
|
||||
self.signals[key] = value
|
||||
self.all_slugs[key] = value.slug
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self.signals[key]
|
||||
# Indices for self.signals/self.all_slugs should be the same
|
||||
del self.all_slugs[key]
|
||||
|
||||
def __repr__(self):
|
||||
return '["' + '", "'.join([sig.slug for sig in self.signals]) + '"]'
|
||||
|
||||
def __len__(self):
|
||||
return len(self.signals)
|
||||
|
||||
def __eq__(self, other):
|
||||
if len(self) != len(other):
|
||||
return False
|
||||
s1_has_s2 = all([sig in self.signals for sig in other.signals])
|
||||
s2_has_s1 = all([sig in other.signals for sig in self.signals])
|
||||
return s1_has_s2 and s2_has_s1
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __contains__(self, item):
|
||||
"""This is used to search for signals in the 'if __ in __' pattern."""
|
||||
if not isinstance(item, SynSignal) and not isinstance(
|
||||
item, six.string_types):
|
||||
raise TypeError()
|
||||
|
||||
if isinstance(item, six.string_types):
|
||||
# We are searching for either a tag or a slug
|
||||
for signal in self.signals:
|
||||
if signal.matches_slug(item):
|
||||
return True
|
||||
if signal.matches_tag(item):
|
||||
return True
|
||||
return False
|
||||
else:
|
||||
# We are searching for a signal by its slug (unique ID)
|
||||
return item.slug in self.all_slugs
|
||||
|
||||
def register(self, signals):
|
||||
"""Add a signal/list of signals to the SignalHolder
|
||||
|
||||
Maintains a set (won't add signal if its slug is in `self.all_slugs`)
|
||||
|
||||
:param signals: A single SynSignal, or a collection of them
|
||||
:type signals: :class:`SynSignal` OR list OR :class:`SignalHolder`
|
||||
"""
|
||||
if signals is None:
|
||||
return
|
||||
|
||||
if isinstance(signals, SynSignal):
|
||||
if self._is_dead(signals):
|
||||
return
|
||||
elif self._is_duplicate(signals):
|
||||
return
|
||||
self.signals.append(signals)
|
||||
self.all_slugs.append(signals.slug)
|
||||
|
||||
elif isinstance(signals, list) or isinstance(signals, SignalHolder):
|
||||
for signal in signals:
|
||||
self.register(signal)
|
||||
|
||||
else:
|
||||
raise TypeError()
|
||||
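# Usage sketch (illustrative slugs): register() keeps set semantics, so
# dead (strength == 0) and duplicate signals are silently dropped.
#
#   sh = SignalHolder()
#   sh.register(SynSignal(slug="HTTP_STATUS_CODE_2XX", strength=1.0))
#   sh.register(SynSignal(slug="HTTP_STATUS_CODE_2XX", strength=1.0))  # dup
#   sh.register(SynSignal(slug="SOME_CHECK", strength=0))              # dead
#   len(sh)  # -> 1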
|
||||
def find(self, slugs=None, tags=None):
|
||||
"""Get the signals that are matched by `slugs` and/or `tags`
|
||||
|
||||
:param list slugs: A `list` of slugs to search for
|
||||
:param list tags: A `list` of tags to search for
|
||||
:rtype: :class:`SignalHolder`
|
||||
:returns: A :class:`SignalHolder` of matched :class:`SynSignal`
|
||||
"""
|
||||
bad_signals = SignalHolder()
|
||||
|
||||
if slugs:
|
||||
for bad_slug in slugs:
|
||||
bad_signals.register([
|
||||
sig for sig in self.signals if sig.matches_slug(bad_slug)
|
||||
])
|
||||
if tags:
|
||||
for bad_tag in tags:
|
||||
bad_signals.register(
|
||||
[sig for sig in self.signals if sig.matches_tag(bad_tag)])
|
||||
|
||||
return bad_signals
|
||||
|
||||
def _is_dead(self, signal):
|
||||
return signal is None or signal.strength == 0
|
||||
|
||||
def _is_duplicate(self, signal):
|
||||
return signal.slug in self.all_slugs
|
||||
|
||||
def ran_check(self, check_name):
|
||||
for signal in self.signals:
|
||||
if signal.check_name == check_name:
|
||||
return True
|
||||
|
||||
def compare(self, other):
|
||||
"""Returns a dict with details of diff between 2 SignalHolders.
|
||||
|
||||
:param: signal_holder1
|
||||
:ptype: :class: Syntribos.signal.SignalHolder
|
||||
:param: signal_holder2
|
||||
:ptype: :class: Syntribos.signal.SignalHolder
|
||||
:returns: data
|
||||
:rtype: :dict:
|
||||
"""
|
||||
data = {
|
||||
"is_diff": False,
|
||||
"sh1_len": len(self),
|
||||
"sh2_len": len(other),
|
||||
"sh1_not_in_sh2": SignalHolder(),
|
||||
"sh2_not_in_sh1": SignalHolder()
|
||||
}
|
||||
if self == other:
|
||||
return data
|
||||
for signal in self.signals:
|
||||
if signal not in other:
|
||||
data["is_diff"] = True
|
||||
data["sh1_not_in_sh2"].register(signal)
|
||||
for signal in other.signals:
|
||||
if signal not in self:
|
||||
data["is_diff"] = True
|
||||
data["sh2_not_in_sh1"].register(signal)
|
||||
return data
|
||||
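# Comparison sketch (illustrative slugs):
#   a = SignalHolder(SynSignal(slug="HTTP_STATUS_CODE_2XX", strength=1.0))
#   b = SignalHolder(SynSignal(slug="HTTP_STATUS_CODE_5XX", strength=1.0))
#   diff = a.compare(b)
#   diff["is_diff"]          # -> True
#   diff["sh1_not_in_sh2"]   # -> ["HTTP_STATUS_CODE_2XX"]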
|
||||
|
||||
class SynSignal(object):
|
||||
"""SynSignal represents a piece of information raised by a 'check'
|
||||
|
||||
:ivar str text: A message describing the signal
|
||||
:ivar str slug: A unique slug that identifies the signal
|
||||
:ivar float strength: A number from 0 to 1 representing confidence
|
||||
:ivar list tags: Collection of tags associated with the signal
|
||||
:ivar dict data: Information about the results of the check
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
text="",
|
||||
slug="",
|
||||
strength=0.0,
|
||||
tags=None,
|
||||
data=None,
|
||||
check_name=None):
|
||||
self.text = text if text else ""
|
||||
self.slug = slug if slug else ""
|
||||
self.check_name = check_name if check_name else ""
|
||||
|
||||
if self.__dict__.get("strength", None):
|
||||
self.strength = self.strength
|
||||
else:
|
||||
self.strength = strength
|
||||
self.tags = tags if tags else []
|
||||
self.data = data if data else {}
|
||||
|
||||
def __repr__(self):
|
||||
return self.slug
|
||||
|
||||
def __eq__(self, other):
|
||||
same_tags = self.tags == other.tags
|
||||
same_slug = self.slug == other.slug
|
||||
same_check_name = self.check_name == other.check_name
|
||||
return same_tags and same_slug and same_check_name
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def matches_tag(self, tag):
|
||||
"""Checks if a Signal has a given tag
|
||||
|
||||
:param str tag: Tag to search for
|
||||
:rtype: bool
|
||||
:returns: True if any of this signal's tags contains `tag`, else False
|
||||
"""
|
||||
for t in self.tags:
|
||||
if tag in t:
|
||||
return True
|
||||
return False
|
||||
|
||||
def matches_slug(self, slug):
|
||||
"""Checks if a Signal has a given slug
|
||||
|
||||
:param str slug: Slug to search for
|
||||
:rtype: bool
|
||||
:returns: True if this signal's slug contains `slug`, else False
|
||||
"""
|
||||
slug = slug.upper()
|
||||
return slug in self.slug
|
||||
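# Matching sketch (illustrative values): both checks are simple substring
# matches, with matches_slug() upper-casing its argument first.
#
#   sig = SynSignal(slug="HTTP_STATUS_CODE_404", strength=1.0,
#                   tags=["CLIENT_FAIL"])
#   sig.matches_slug("status_code")  # -> True ("STATUS_CODE" in the slug)
#   sig.matches_tag("FAIL")          # -> True ("FAIL" in "CLIENT_FAIL")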
|
||||
|
||||
def from_generic_exception(exception):
|
||||
"""Return a SynSignal from a generic Exception
|
||||
|
||||
:param exception: A generic Exception that can't be identified
|
||||
:type exception: Exception
|
||||
:rtype: :class:`SynSignal`
|
||||
:returns: A signal describing the exception
|
||||
"""
|
||||
if not isinstance(exception, Exception):
|
||||
raise Exception(_("This function accepts only Exception objects"))
|
||||
|
||||
exc_text = str(exception)
|
||||
text = _("This request raised an exception: '%s'") % exc_text
|
||||
data = {
|
||||
_("exception_name"): exception.__class__.__name__,
|
||||
_("exception_text"): exc_text,
|
||||
_("exception"): exception
|
||||
}
|
||||
slug = "GENERIC_EXCEPTION_{name}".format(
|
||||
name=data["exception_name"].upper())
|
||||
tags = ["EXCEPTION_RAISED"]
|
||||
|
||||
return SynSignal(text=text, slug=slug, strength=1.0, tags=tags, data=data)
|
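# Usage sketch (illustrative, assuming no translation is active so the _()
# data keys pass through unchanged): wrapping an unrecognized exception
# yields a signal whose slug embeds the exception class name.
#
#   sig = from_generic_exception(ValueError("bad input"))
#   sig.slug      # -> "GENERIC_EXCEPTION_VALUEERROR"
#   sig.strength  # -> 1.0
#   sig.tags      # -> ["EXCEPTION_RAISED"]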
@ -1,89 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos
|
||||
import syntribos.config
|
||||
import syntribos.extensions.identity.client
|
||||
from syntribos.tests import base
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class AuthTestCase(base.BaseTestCase):
|
||||
"""Test for possible token misuse in keystone."""
|
||||
test_name = "AUTH"
|
||||
parameter_location = "headers"
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(AuthTestCase, cls).setUpClass()
|
||||
version = CONF.user.version
|
||||
|
||||
if not version or version == 'v2.0':
|
||||
alt_token = syntribos.extensions.identity.client.get_token_v2(
|
||||
'alt_user')
|
||||
else:
|
||||
alt_token = syntribos.extensions.identity.client.get_token_v3(
|
||||
'alt_user')
|
||||
|
||||
cls.request.headers['x-auth-token'] = alt_token
|
||||
|
||||
cls.test_resp, cls.test_signals = cls.client.request(
|
||||
method=cls.request.method, url=cls.request.url,
|
||||
headers=cls.request.headers, params=cls.request.params,
|
||||
data=cls.request.data)
|
||||
|
||||
@classmethod
|
||||
def send_init_request(cls, filename, file_content, meta_vars):
|
||||
super(AuthTestCase, cls).send_init_request(filename,
|
||||
file_content, meta_vars)
|
||||
cls.request = cls.init_req.get_prepared_copy()
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
super(AuthTestCase, cls).tearDownClass()
|
||||
|
||||
def test_case(self):
|
||||
if 'HTTP_STATUS_CODE_2XX' in self.test_signals:
|
||||
description = (
|
||||
"This request did not fail with 404 (User not found),"
|
||||
" therefore it indicates that authentication with"
|
||||
" another user's token was successful.")
|
||||
self.register_issue(
|
||||
defect_type="alt_user_token",
|
||||
severity=syntribos.HIGH,
|
||||
confidence=syntribos.HIGH,
|
||||
description=description
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_test_cases(cls, filename, file_content, meta_vars):
|
||||
"""Generates the test cases
|
||||
|
||||
For this particular test, only a single test
|
||||
is created (in addition to the base case, that is)
|
||||
"""
|
||||
alt_user_group = cfg.OptGroup(name="alt_user",
|
||||
title="Alt Keystone User Config")
|
||||
CONF.register_group(alt_user_group)
|
||||
CONF.register_opts(syntribos.config.list_user_opts(),
|
||||
group=alt_user_group)
|
||||
|
||||
alt_user_id = CONF.alt_user.user_id
|
||||
alt_user_username = CONF.alt_user.username
|
||||
if not alt_user_id or not alt_user_username:
|
||||
return
|
||||
|
||||
yield cls
|
@ -1,278 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
import string as t_string
|
||||
import unittest
|
||||
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
import syntribos
|
||||
from syntribos.clients.http import client
|
||||
from syntribos.clients.http import parser
|
||||
from syntribos.signal import SignalHolder
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
ALLOWED_CHARS = "().-_{0}{1}".format(t_string.ascii_letters, t_string.digits)
|
||||
|
||||
"""test_table is the master list of tests to be run by the runner"""
|
||||
CONF = cfg.CONF
|
||||
test_table = {}
|
||||
|
||||
|
||||
def replace_invalid_characters(string, new_char="_"):
|
||||
"""Replace invalid characters in test names
|
||||
|
||||
This function corrects `string` so the following is true.
|
||||
|
||||
Identifiers (also referred to as names) are described by the
|
||||
following lexical definitions:
|
||||
|
||||
| ``identifier ::= (letter|"_") (letter | digit | "_")*``
|
||||
| ``letter ::= lowercase | uppercase``
|
||||
| ``lowercase ::= "a"..."z"``
|
||||
| ``uppercase ::= "A"..."Z"``
|
||||
| ``digit ::= "0"..."9"``
|
||||
|
||||
:param str string: Test name
|
||||
:param str new_char: The character to replace invalid characters with
|
||||
:returns: The test name, with invalid characters replaced with `new_char`
|
||||
:rtype: str
|
||||
"""
|
||||
if not string:
|
||||
return string
|
||||
for char in set(string) - set(ALLOWED_CHARS):
|
||||
string = string.replace(char, new_char)
|
||||
if string[0] in t_string.digits:
|
||||
string = string.replace(string[0], new_char, 1)
|
||||
return string
|
||||
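# Usage sketch: characters outside ALLOWED_CHARS are replaced, and a
# leading digit is also replaced so the result can be used as a test name.
#
#   replace_invalid_characters("1 test/name")  # -> "__test_name"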
|
||||
|
||||
class TestType(type):
|
||||
|
||||
"""This is the metaclass for each class extending :class:`BaseTestCase`."""
|
||||
|
||||
def __new__(cls, cls_name, cls_parents, cls_attr):
|
||||
new_class = super(TestType, cls).__new__(
|
||||
cls, cls_name, cls_parents, cls_attr)
|
||||
test_name = getattr(new_class, "test_name", None)
|
||||
if test_name is not None:
|
||||
if test_name not in test_table:
|
||||
test_table[test_name] = new_class
|
||||
return new_class
|
||||
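# Registration sketch (hypothetical test class): any BaseTestCase subclass
# that sets a test_name is added to test_table as a side effect of class
# creation, which is how the runner discovers tests.
#
#   class MyFuzzTestCase(BaseTestCase):      # BaseTestCase is defined below
#       test_name = "MY_FUZZ"
#
#   "MY_FUZZ" in test_table  # -> True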
|
||||
|
||||
@six.add_metaclass(TestType)
|
||||
class BaseTestCase(unittest.TestCase):
|
||||
|
||||
"""Base class for building new tests
|
||||
|
||||
:attribute str test_name: A name like ``XML_EXTERNAL_ENTITY_BODY``,
|
||||
containing the test type and the portion of the request template being
|
||||
tested
|
||||
:attribute list failures: A collection of "failures" raised by tests
|
||||
:attribute bool dead: Flip this if one of the requests doesn't return a
|
||||
response object
|
||||
:attribute client: HTTP client to be used by the test
|
||||
:attribute init_req: Initial request (loaded from request template)
|
||||
:attribute init_resp: Response to the initial request
|
||||
:attribute test_req: Request sent by the test for analysis
|
||||
:attribute test_resp: Response to the test request
|
||||
:attribute init_signals: Holder for signals on `init_req`
|
||||
:attribute test_signals: Holder for signals on `test_req`
|
||||
:attribute diff_signals: Holder for signals between `init_req` and
|
||||
`test_req`
|
||||
"""
|
||||
|
||||
test_name = None
|
||||
failures = []
|
||||
errors = []
|
||||
dead = False
|
||||
client = client()
|
||||
|
||||
init_req = None
|
||||
init_resp = None
|
||||
test_req = None
|
||||
test_resp = None
|
||||
|
||||
init_signals = SignalHolder()
|
||||
test_signals = SignalHolder()
|
||||
diff_signals = SignalHolder()
|
||||
|
||||
@classmethod
|
||||
def register_opts(cls):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def get_test_cases(cls, filename, file_content, meta_vars):
|
||||
"""Returns tests for given TestCase class (overwritten by children)."""
|
||||
yield cls
|
||||
|
||||
@classmethod
|
||||
def create_init_request(cls, filename, file_content, meta_vars):
|
||||
"""Parses template and creates init request object
|
||||
|
||||
This method does not send the initial request; instead, it only creates
|
||||
the object for use in the debug test
|
||||
|
||||
:param str filename: name of template file
|
||||
:param str file_content: content of template file as string
|
||||
"""
|
||||
request_obj = parser.create_request(
|
||||
file_content, CONF.syntribos.endpoint, meta_vars)
|
||||
cls.init_req = request_obj
|
||||
cls.init_resp = None
|
||||
cls.init_signals = None
|
||||
cls.template_path = filename
|
||||
|
||||
@classmethod
|
||||
def send_init_request(cls, filename, file_content, meta_vars):
|
||||
"""Parses template, creates init request object, and sends init request
|
||||
|
||||
This method sends the initial request, which is the request created
|
||||
after parsing the template file. This request will not be modified
|
||||
any further by the test cases themselves.
|
||||
|
||||
:param str filename: name of template file
|
||||
:param str file_content: content of template file as string
|
||||
"""
|
||||
if not cls.init_req:
|
||||
cls.init_req = parser.create_request(
|
||||
file_content, CONF.syntribos.endpoint, meta_vars)
|
||||
prepared_copy = cls.init_req.get_prepared_copy()
|
||||
cls.prepared_init_req = prepared_copy
|
||||
cls.init_resp, cls.init_signals = cls.client.send_request(
|
||||
prepared_copy)
|
||||
if cls.init_resp is not None:
|
||||
# Get the computed body and add it to our RequestObject
|
||||
# TODO(cneill): Figure out a better way to handle this discrepancy
|
||||
cls.init_req.body = cls.init_resp.request.body
|
||||
else:
|
||||
cls.dead = True
|
||||
|
||||
@classmethod
|
||||
def extend_class(cls, new_name, kwargs):
|
||||
"""Creates an extension for the class
|
||||
|
||||
Each TestCase class created is added to the `test_table`, which is then
|
||||
read in by the test runner as the master list of tests to be run.
|
||||
|
||||
:param str new_name: Name of new class to be created
|
||||
:param dict kwargs: Keyword arguments to pass to the new class
|
||||
:rtype: class
|
||||
:returns: A TestCase class extending :class:`BaseTestCase`
|
||||
"""
|
||||
|
||||
new_name = replace_invalid_characters(new_name)
|
||||
if not isinstance(kwargs, dict):
|
||||
raise Exception("kwargs must be a dictionary")
|
||||
new_cls = type(new_name, (cls, ), kwargs)
|
||||
new_cls.__module__ = cls.__module__
|
||||
return new_cls
|
||||
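# Extension sketch (SomeTestCase and fuzzed_request are hypothetical names):
# extend_class() builds a new subclass with type(), so the new class is also
# picked up by TestType and lands in test_table under its sanitized name.
#
#   new_cls = SomeTestCase.extend_class("create.template MY_FUZZ body/id",
#                                       {"request": fuzzed_request})
#   new_cls.__name__   # -> "create.template_MY_FUZZ_body_id"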
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
super(BaseTestCase, cls).tearDownClass()
|
||||
if not cls.failures:
|
||||
if "EXCEPTION_RAISED" in cls.test_signals:
|
||||
sig = cls.test_signals.find(
|
||||
tags="EXCEPTION_RAISED")[0]
|
||||
exc_name = type(sig.data["exception"]).__name__
|
||||
if ("CONNECTION_FAIL" in sig.tags):
|
||||
six.raise_from(FatalHTTPError(
|
||||
"The remote target has forcibly closed the connection "
|
||||
"with Syntribos and resulted in exception '{}'. This "
|
||||
"could potentially mean that a fatal error was "
|
||||
"encountered within the target application or server"
|
||||
" itself.".format(exc_name)), sig.data["exception"])
|
||||
else:
|
||||
raise sig.data["exception"]
|
||||
|
||||
@classmethod
|
||||
def tearDown(cls):
|
||||
get_slugs = [sig.slug for sig in cls.test_signals]
|
||||
get_checks = [sig.check_name for sig in cls.test_signals]
|
||||
test_signals_used = "Signals: " + str(get_slugs)
|
||||
LOG.debug(test_signals_used)
|
||||
test_checks_used = "Checks used: " + str(get_checks)
|
||||
LOG.debug(test_checks_used)
|
||||
|
||||
def run_test_case(self):
|
||||
"""This kicks off the test(s) for a given TestCase class
|
||||
|
||||
After running the tests, an `AssertionError` is raised if any issues
were added to self.failures.
|
||||
|
||||
:raises: :exc:`AssertionError`
|
||||
"""
|
||||
if not self.dead:
|
||||
try:
|
||||
self.test_case()
|
||||
except Exception as e:
|
||||
self.errors.append(e)
|
||||
raise
|
||||
if self.failures:
|
||||
raise AssertionError
|
||||
|
||||
def test_case(self):
|
||||
"""This method is overwritten by individual TestCase classes
|
||||
|
||||
It represents the actual test that is called in :func:`run_test_case`,
|
||||
and handles populating `self.failures`
|
||||
"""
|
||||
pass
|
||||
|
||||
def register_issue(self, defect_type, severity, confidence, description):
|
||||
"""Adds an issue to the test's list of issues
|
||||
|
||||
Creates a :class:`syntribos.issue.Issue` object, with given function
|
||||
parameters as instances variables, and registers the issue as a
|
||||
failure and associates the test's metadata to it.
|
||||
|
||||
:param defect_type: The type of vulnerability that Syntribos believes
|
||||
it has found. This may be something like 500 error or DoS, regardless
|
||||
of what the Test Type is.
|
||||
:param severity: "Low", "Medium", or "High", depending on the defect
|
||||
:param description: Description of the defect
|
||||
:param confidence: The confidence of the defect
|
||||
:returns: new issue object with metadata associated
|
||||
:rtype: Issue
|
||||
"""
|
||||
|
||||
issue = syntribos.Issue(defect_type=defect_type,
|
||||
severity=severity,
|
||||
confidence=confidence,
|
||||
description=description)
|
||||
|
||||
issue.request = self.test_req
|
||||
issue.response = self.test_resp
|
||||
issue.template_path = self.template_path
|
||||
issue.parameter_location = self.parameter_location
|
||||
issue.test_type = self.test_name
|
||||
url_components = urlparse(self.init_resp.url)
|
||||
issue.target = url_components.netloc
|
||||
issue.path = url_components.path
|
||||
issue.init_signals = self.init_signals
|
||||
issue.test_signals = self.test_signals
|
||||
issue.diff_signals = self.diff_signals
|
||||
|
||||
self.failures.append(issue)
|
||||
|
||||
return issue
|
||||
|
||||
|
||||
class FatalHTTPError(Exception):
|
||||
pass
|
@ -1,25 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from syntribos.tests import base
|
||||
|
||||
|
||||
class DryRunTestCase(base.BaseTestCase):
|
||||
|
||||
"""Debug dry run test to run no logic and return no results."""
|
||||
|
||||
test_name = "DEBUG_DRY_RUN"
|
||||
parameter_location = "debug"
|
||||
|
||||
def test_case(self):
|
||||
pass
|
@ -1,252 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: skip-file
|
||||
import logging
|
||||
import os
|
||||
|
||||
from oslo_config import cfg
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
import syntribos
|
||||
from syntribos.checks import length_diff as length_diff
|
||||
from syntribos.tests import base
|
||||
import syntribos.tests.fuzz.datagen
|
||||
from syntribos.utils.file_utils import ContentType
|
||||
from syntribos.utils import remotes
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class BaseFuzzTestCase(base.BaseTestCase):
|
||||
failure_keys = None
|
||||
success_keys = None
|
||||
|
||||
@classmethod
|
||||
def _get_strings(cls, file_name=None):
|
||||
payloads = CONF.syntribos.payloads
|
||||
if not payloads:
|
||||
payloads = remotes.get(CONF.remote.payloads_uri)
|
||||
content = ContentType('r')(payloads)
|
||||
for file_path, _ in content:
|
||||
if file_path.endswith(".txt"):
|
||||
file_dir = os.path.split(file_path)[0]
|
||||
payloads = os.path.join(payloads, file_dir)
|
||||
break
|
||||
try:
|
||||
if os.path.isfile(cls.data_key):
|
||||
path = cls.data_key
|
||||
else:
|
||||
path = os.path.join(payloads, file_name or cls.data_key)
|
||||
with open(path, "r") as fp:
|
||||
return fp.read().splitlines()
|
||||
except (IOError, AttributeError, TypeError) as e:
|
||||
LOG.error("Exception raised: {}".format(e))
|
||||
print("\nPayload file for test '{}' not readable, "
|
||||
"exiting...".format(cls.test_name))
|
||||
exit(1)
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
"""being used as a setup test not."""
|
||||
super(BaseFuzzTestCase, cls).setUpClass()
|
||||
cls.test_resp, cls.test_signals = cls.client.request(
|
||||
method=cls.request.method,
|
||||
url=cls.request.url,
|
||||
headers=cls.request.headers,
|
||||
params=cls.request.params,
|
||||
data=cls.request.data)
|
||||
|
||||
if not hasattr(cls.request, 'body'):
|
||||
cls.request.body = cls.request.data
|
||||
cls.test_req = cls.request
|
||||
|
||||
if cls.test_resp is None or "EXCEPTION_RAISED" in cls.test_signals:
|
||||
cls.dead = True
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
super(BaseFuzzTestCase, cls).tearDownClass()
|
||||
|
||||
def run_default_checks(self):
|
||||
"""Tests for some default issues
|
||||
|
||||
These issues are not specific to any test type, and can be raised as a
|
||||
result of many different types of attacks. Therefore, they're defined
|
||||
separately from the test_case method so that they are not overwritten
|
||||
by test cases that inherit from BaseFuzzTestCase.
|
||||
|
||||
Any extension to this class should call
|
||||
self.run_default_checks() in order to test for the Issues
|
||||
defined here
|
||||
"""
|
||||
if "HTTP_STATUS_CODE_5XX" in self.test_signals:
|
||||
self.register_issue(
|
||||
defect_type="500_errors",
|
||||
severity=syntribos.LOW,
|
||||
confidence=syntribos.HIGH,
|
||||
description=("This request returns an error with status code "
|
||||
"{0}, which might indicate some server-side "
|
||||
"fault that may lead to further vulnerabilities"
|
||||
).format(self.test_resp.status_code))
|
||||
self.diff_signals.register(length_diff(self))
|
||||
if "LENGTH_DIFF_OVER" in self.diff_signals:
|
||||
if self.init_resp.status_code == self.test_resp.status_code:
|
||||
description = ("The difference in length between the response "
|
||||
"to the baseline request and the request "
|
||||
"returned when sending an attack string "
|
||||
"exceeds {0} percent, which could indicate a "
|
||||
"vulnerability to injection attacks"
|
||||
).format(CONF.test.length_diff_percent)
|
||||
self.register_issue(
|
||||
defect_type="length_diff",
|
||||
severity=syntribos.LOW,
|
||||
confidence=syntribos.LOW,
|
||||
description=description)
|
||||
|
||||
def test_case(self):
|
||||
"""Performs the test
|
||||
|
||||
The test runner will call test_case on every TestCase class, and will
|
||||
report any AssertionError raised by this method to the results.
|
||||
"""
|
||||
self.run_default_checks()
|
||||
|
||||
@classmethod
|
||||
def get_test_cases(cls, filename, file_content, meta_vars):
|
||||
"""Generates new TestCases for each fuzz string
|
||||
|
||||
For each string returned by cls._get_strings(), yield a TestCase class
|
||||
for the string as an extension to the current TestCase class. Every
|
||||
string used as a fuzz test payload entails the generation of a new
|
||||
subclass for each parameter fuzzed. See :func:`base.extend_class`.
|
||||
"""
|
||||
cls.failures = []
|
||||
if hasattr(cls, 'data_key'):
|
||||
prefix_name = "{filename}_{test_name}_{fuzz_file}_".format(
|
||||
filename=filename,
|
||||
test_name=cls.test_name,
|
||||
fuzz_file=cls.data_key)
|
||||
else:
|
||||
prefix_name = "{filename}_{test_name}_".format(
|
||||
filename=filename, test_name=cls.test_name)
|
||||
|
||||
fr = syntribos.tests.fuzz.datagen.fuzz_request(
|
||||
cls.init_req, cls._get_strings(), cls.parameter_location,
|
||||
prefix_name)
|
||||
for fuzz_name, request, fuzz_string, param_path in fr:
|
||||
yield cls.extend_class(fuzz_name, fuzz_string, param_path,
|
||||
{"request": request})
|
||||
|
||||
    @classmethod
    def extend_class(cls, new_name, fuzz_string, param_path, kwargs):
        """Creates an extension for the class

        Each TestCase class created is added to the `test_table`, which is then
        read in by the test runner as the master list of tests to be run.

        :param str new_name: Name of new class to be created
        :param str fuzz_string: Fuzz string to insert
        :param str param_path: String tracing location of the ImpactedParameter
        :param dict kwargs: Keyword arguments to pass to the new class
        :rtype: class
        :returns: A TestCase class extending :class:`BaseTestCase`
        """

        new_cls = super(BaseFuzzTestCase, cls).extend_class(new_name, kwargs)
        new_cls.fuzz_string = fuzz_string
        new_cls.param_path = param_path
        return new_cls

    def register_issue(self, defect_type, severity, confidence, description):
        """Adds an issue to the test's list of issues

        Creates a :class:`syntribos.issue.Issue` object, with given function
        parameters as instance variables, registers the Issue as a
        failure, and associates the test's metadata to it, including the
        :class:`syntribos.tests.fuzz.base_fuzz.ImpactedParameter` object that
        encapsulates the details of the fuzz test.

        :param defect_type: The type of vulnerability that Syntribos believes
            it has found. This may be something like 500 error or DoS,
            regardless of what the Test Type is.
        :param severity: "Low", "Medium", or "High", depending on the defect
        :param description: Description of the defect
        :param confidence: The confidence in the validity of the defect
        :returns: new issue object with metadata associated
        :rtype: :class:`syntribos.issue.Issue`
        """

        issue = syntribos.Issue(
            defect_type=defect_type,
            severity=severity,
            confidence=confidence,
            description=description)

        issue.request = self.test_req
        issue.response = self.test_resp
        issue.template_path = self.template_path

        issue.test_type = self.test_name
        url_components = urlparse(self.prepared_init_req.url)
        issue.target = url_components.netloc
        issue.path = url_components.path
        issue.init_signals = self.init_signals
        issue.test_signals = self.test_signals
        issue.diff_signals = self.diff_signals
        if 'content-type' in self.init_req.headers:
            issue.content_type = self.init_req.headers['content-type']
        else:
            issue.content_type = None

        issue.impacted_parameter = ImpactedParameter(
            method=issue.request.method,
            location=self.parameter_location,
            name=self.param_path,
            value=self.fuzz_string)

        self.failures.append(issue)

        return issue


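# Illustrative sketch (not from the original module): get_test_cases() and
# extend_class() above generate one TestCase subclass per fuzz string and
# parameter. The underlying mechanism is ordinary dynamic class creation; a
# standalone equivalent with hypothetical names:

class _ExampleBase(object):
    fuzz_string = None
    param_path = None

def _make_variant(name, fuzz_string, param_path):
    """Build a subclass of _ExampleBase that carries a single payload."""
    return type(name, (_ExampleBase,),
                {"fuzz_string": fuzz_string, "param_path": param_path})

variants = [_make_variant("EXAMPLE_str{0}".format(i), s, "user/name")
            for i, s in enumerate(["'", "<script>", "A" * 1024], 1)]
# Each element of `variants` is a distinct class, analogous to the classes
# that extend_class() adds to the test_table for the runner to execute.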
class ImpactedParameter(object):
    """Object that encapsulates the details about what caused the defect

    :ivar method: The HTTP method used in the test
    :ivar location: The location of the impacted parameter
    :ivar name: The parameter (e.g. HTTP header, GET var) that was modified by
        a given test case
    :ivar value: The "fuzz" string that was supplied in a given test case
    :ivar request_body_format: The type of a body (POST/PATCH/etc.) variable.
    """

    def __init__(self, method, location, name, value):
        self.method = method
        self.location = location
        if len(value) >= 128:
            self.trunc_fuzz_string = "{0}...({1} chars)...{2}".format(
                value[:64], len(value), value[-64:])
        else:
            self.trunc_fuzz_string = value
        self.fuzz_string = value
        self.name = name

    def as_dict(self):
        return {
            "method": self.method,
            "location": self.location,
            "name": self.name,
            "value": self.trunc_fuzz_string
        }
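# Illustrative sketch (not from the original module): ImpactedParameter
# truncates long payloads for reporting. Any value of 128 characters or more
# is rendered as its first 64 characters, its total length, and its last 64
# characters, which is what as_dict() exposes under the "value" key:

value = "A" * 300
if len(value) >= 128:
    shown = "{0}...({1} chars)...{2}".format(value[:64], len(value),
                                             value[-64:])
else:
    shown = value
# shown is 64 "A"s, then "...(300 chars)...", then another 64 "A"s.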
@ -1,88 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class BufferOverflowBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for buffer overflow vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "BUFFER_OVERFLOW_BODY"
|
||||
parameter_location = "data"
|
||||
failure_keys = [
|
||||
'*** stack smashing detected ***:',
|
||||
'Backtrace:',
|
||||
'Memory map:',
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def _get_strings(cls, file_name=None):
|
||||
return [
|
||||
"A" * (2 ** 16 + 1),
|
||||
"a" * 10 ** 5,
|
||||
'\x00' * (2 ** 16 + 1),
|
||||
"%%s" * 513,
|
||||
]
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="bof_strings",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.MEDIUM,
|
||||
description=("The string(s): '{0}', known to be commonly "
|
||||
"returned after a successful buffer overflow "
|
||||
"attack, have been found in the response. This "
|
||||
"could indicate a vulnerability to buffer "
|
||||
"overflow attacks.").format(failed_strings))
|
||||
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="bof_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=(_("The time it took to resolve a request with a "
|
||||
"long string was too long compared to the "
|
||||
"baseline request. This could indicate a "
|
||||
"vulnerability to buffer overflow attacks")))
|
||||
|
||||
|
||||
class BufferOverflowParams(BufferOverflowBody):
|
||||
"""Test for buffer overflow vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "BUFFER_OVERFLOW_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class BufferOverflowHeaders(BufferOverflowBody):
|
||||
"""Test for buffer overflow vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "BUFFER_OVERFLOW_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class BufferOverflowURL(BufferOverflowBody):
|
||||
"""Test for buffer overflow vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "BUFFER_OVERFLOW_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,81 +0,0 @@
|
||||
# Copyright 2016 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class CommandInjectionBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for command injection vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "COMMAND_INJECTION_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "command_injection.txt"
|
||||
failure_keys = [
|
||||
'uid=',
|
||||
'root:',
|
||||
'default=',
|
||||
'[boot loader]']
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="command_injection",
|
||||
severity=syntribos.HIGH,
|
||||
confidence=syntribos.MEDIUM,
|
||||
description=("A string known to be commonly returned after a "
|
||||
"successful command injection attack was "
|
||||
"included in the response. This could indicate "
|
||||
"a vulnerability to command injection "
|
||||
"attacks.").format(failed_strings))
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="command_injection",
|
||||
severity=syntribos.HIGH,
|
||||
confidence=syntribos.MEDIUM,
|
||||
description=(_("The time elapsed between the sending of "
|
||||
"the request and the arrival of the res"
|
||||
"ponse exceeds the expected amount of time, "
|
||||
"suggesting a vulnerability to command "
|
||||
"injection attacks.")))
|
||||
|
||||
|
||||
class CommandInjectionParams(CommandInjectionBody):
|
||||
"""Test for command injection vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "COMMAND_INJECTION_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class CommandInjectionHeaders(CommandInjectionBody):
|
||||
"""Test for command injection vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "COMMAND_INJECTION_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class CommandInjectionURL(CommandInjectionBody):
|
||||
"""Test for command injection vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "COMMAND_INJECTION_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,260 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import copy
|
||||
import re
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import six
|
||||
|
||||
from syntribos.clients.http.parser import _string_var_objs
|
||||
from syntribos.clients.http.parser import RequestCreator
|
||||
from syntribos.clients.http import VariableObject
|
||||
|
||||
|
||||
def fuzz_request(req, strings, fuzz_type, name_prefix):
|
||||
"""Creates the fuzzed RequestObject
|
||||
|
||||
Gets the name and the fuzzed request model from _fuzz_data, and
|
||||
creates a RequestObject from the parameters of the model.
|
||||
|
||||
:param req: The RequestObject to be fuzzed
|
||||
:type req: :class:`syntribos.clients.http.parser.RequestObject`
|
||||
:param list strings: List of strings to fuzz with
|
||||
:param str fuzz_type: What attribute of the RequestObject to fuzz
|
||||
:param name_prefix: (Used for ImpactedParameter)
|
||||
:returns: Generator of tuples:
|
||||
(name, request, fuzzstring, ImpactedParameter name)
|
||||
:rtype: `tuple`
|
||||
"""
|
||||
for name, data, stri, param_path in _fuzz_data(
|
||||
strings, getattr(req, fuzz_type), req.action_field, name_prefix):
|
||||
request_copy = req.get_copy()
|
||||
setattr(request_copy, fuzz_type, data)
|
||||
request_copy.prepare_request()
|
||||
yield name, request_copy, stri, param_path
|
||||
|
||||
|
||||
def _fuzz_data(strings, data, skip_var, name_prefix):
|
||||
"""Iterates through model fields and places fuzz string in each field
|
||||
|
||||
For each attribute in the model object, call the _build_X_combinations
|
||||
method corresponding to the type of the data parameter, which replaces
|
||||
the value with the fuzz string.
|
||||
|
||||
:param list strings: List of strings to fuzz with
|
||||
:param data: Can be a dict, XML Element, or string
|
||||
:param str skip_var: String representing ACTION_FIELDs
|
||||
:param str name_prefix: (Used for ImpactedParameter)
|
||||
:returns: Generator of tuples:
|
||||
(name, model, string, ImpactedParameter name)
|
||||
"""
|
||||
param_path = ""
|
||||
for str_num, stri in enumerate(strings, 1):
|
||||
if isinstance(data, dict):
|
||||
model_iter = _build_dict_combinations(stri, data, skip_var)
|
||||
elif isinstance(data, ElementTree.Element):
|
||||
model_iter = _build_xml_combinations(stri, data, skip_var)
|
||||
elif isinstance(data, six.string_types):
|
||||
model_iter = _build_str_combinations(stri, data)
|
||||
else:
|
||||
raise TypeError("Format not recognized!")
|
||||
for model_num, (model, param_path) in enumerate(model_iter, 1):
|
||||
name = "{0}str{1}_model{2}".format(name_prefix, str_num, model_num)
|
||||
yield (name, model, stri, param_path)
|
||||
|
||||
|
||||
def _build_str_combinations(fuzz_string, data):
|
||||
"""Places `fuzz_string` in fuzz location for string data.
|
||||
|
||||
:param str fuzz_string: Value to place in fuzz location
|
||||
:param str data: Lines from the request template
|
||||
"""
|
||||
# Match either "{identifier:value}" or "{value}"
|
||||
var_regex = r"{([\w]*):?([^}]*)}"
|
||||
for match in re.finditer(var_regex, data):
|
||||
start, stop = match.span()
|
||||
model = "{0}{1}{2}".format(data[:start], fuzz_string, data[stop:])
|
||||
|
||||
if match.group(1):
|
||||
# The string is of the format "{identifier:value}", so we just
|
||||
# want the identifier as the param_path
|
||||
param = match.group(1)
|
||||
else:
|
||||
param = match.group(0)
|
||||
|
||||
if param in _string_var_objs:
|
||||
var_obj = _string_var_objs[param]
|
||||
if not _check_var_obj_limits(var_obj, fuzz_string):
|
||||
continue
|
||||
param = RequestCreator.replace_one_variable(var_obj)
|
||||
yield model, param
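# Illustrative sketch (not from the original module): _build_str_combinations
# locates template variables with the regex r"{([\w]*):?([^}]*)}", which
# matches both "{identifier:value}" and "{value}" placeholders. What the two
# capture groups contain:

import re

var_regex = r"{([\w]*):?([^}]*)}"
m1 = re.search(var_regex, "POST /users/{0:user_id} HTTP/1.1")
m2 = re.search(var_regex, "X-Auth-Token: {token}")
assert (m1.group(1), m1.group(2)) == ("0", "user_id")
assert (m2.group(1), m2.group(2)) == ("token", "")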
|
||||
|
||||
|
||||
def _build_dict_combinations(fuzz_string, dic, skip_var):
|
||||
"""Places fuzz string in fuzz location for object data.
|
||||
|
||||
:param str fuzz_string: Value to place in fuzz location
|
||||
:param dic: A dictionary to fuzz
|
||||
:param skip_var: ACTION_FIELD UUID value to skip
|
||||
"""
|
||||
for key, val in dic.items():
|
||||
if skip_var in key:
|
||||
continue
|
||||
elif isinstance(val, VariableObject):
|
||||
if not _check_var_obj_limits(val, fuzz_string):
|
||||
continue
|
||||
else:
|
||||
yield _merge_dictionaries(dic, {key: fuzz_string}), key
|
||||
elif isinstance(val, dict):
|
||||
for ret, param_path in _build_dict_combinations(fuzz_string, val,
|
||||
skip_var):
|
||||
yield (_merge_dictionaries(dic, {
|
||||
key: ret
|
||||
}), "{0}/{1}".format(key, param_path))
|
||||
elif isinstance(val, list):
|
||||
for i, v in enumerate(val):
|
||||
list_ = [_ for _ in val]
|
||||
if isinstance(v, dict):
|
||||
for ret, param_path in _build_dict_combinations(
|
||||
fuzz_string, v, skip_var):
|
||||
list_[i] = copy.copy(ret)
|
||||
yield (_merge_dictionaries(dic, {
|
||||
key: ret
|
||||
}), "{0}[{1}]/{2}".format(key, i, param_path))
|
||||
elif isinstance(v, VariableObject):
|
||||
if not _check_var_obj_limits(v, fuzz_string):
|
||||
continue
|
||||
else:
|
||||
list_[i] = fuzz_string
|
||||
yield (_merge_dictionaries(dic, {
|
||||
key: list_
|
||||
}), "{0}[{1}]".format(key, i))
|
||||
else:
|
||||
yield _merge_dictionaries(dic, {key: fuzz_string}), key
|
||||
|
||||
|
||||
def _merge_dictionaries(x, y):
|
||||
"""Merge `dicts` together
|
||||
|
||||
Create a copy of `x`, and update that with elements of `y`, to prevent
|
||||
squashing of passed in dicts.
|
||||
|
||||
:param dict x: Dictionary 1
|
||||
:param dict y: Dictionary 2
|
||||
:returns: Merged dictionary
|
||||
:rtype: `dict`
|
||||
"""
|
||||
|
||||
z = x.copy()
|
||||
z.update(y)
|
||||
return z
|
||||
|
||||
|
||||
def _build_xml_combinations(stri, ele, skip_var):
|
||||
"""Places fuzz string in fuzz location for XML data."""
|
||||
if skip_var not in ele.tag:
|
||||
if ele.text and skip_var not in ele.text:
|
||||
yield _update_xml_ele_text(ele, stri), ele.tag
|
||||
for attr, param_path in _build_dict_combinations(stri, ele.attrib,
|
||||
skip_var):
|
||||
yield (_update_xml_ele_attribs(ele, attr),
|
||||
"{0}/{1}".format(ele.tag, param_path))
|
||||
for i, element in enumerate(list(ele)):
|
||||
for ret, param_path in _build_xml_combinations(stri, element,
|
||||
skip_var):
|
||||
list_ = list(ele)
|
||||
list_[i] = copy.copy(ret)
|
||||
yield (_update_inner_xml_ele(ele, list_),
|
||||
"{0}/{1}".format(ele.tag, param_path))
|
||||
|
||||
|
||||
def _update_xml_ele_text(ele, text):
|
||||
"""Copies an XML element, updates its text attribute with `text`
|
||||
|
||||
:param ele: XML element to be copied, modified
|
||||
:type ele: :class:`xml.ElementTree.Element`
|
||||
:param str text: Text to populate `ele`'s text attribute with
|
||||
:returns: XML element with "text" attribute set to `text`
|
||||
:rtype: :class:`xml.ElementTree.Element`
|
||||
"""
|
||||
ret = copy.copy(ele)
|
||||
ret.text = text
|
||||
return ret
|
||||
|
||||
|
||||
def _update_xml_ele_attribs(ele, attribs):
|
||||
"""Copies an XML element, populates attributes from `attribs`
|
||||
|
||||
:param ele: XML element to be copied, modified
|
||||
:type ele: :class:`xml.ElementTree.Element`
|
||||
:param dict attribs: Source of new attribute values for `ele`
|
||||
:returns: XML element with all attributes overwritten by `attribs`
|
||||
:rtype: :class:`xml.ElementTree.Element`
|
||||
"""
|
||||
ret = copy.copy(ele)
|
||||
ret.attrib = attribs
|
||||
return ret
|
||||
|
||||
|
||||
def _update_inner_xml_ele(ele, list_):
|
||||
"""Copies an XML element, populates sub-elements from `list_`
|
||||
|
||||
Returns a copy of the element with the subelements given via list_
|
||||
:param ele: XML element to be copied, modified
|
||||
:type ele: :class:`xml.ElementTree.Element`
|
||||
:param list list_: List of subelements to append to `ele`
|
||||
:returns: XML element with new subelements from `list_`
|
||||
:rtype: :class:`xml.ElementTree.Element`
|
||||
"""
|
||||
ret = copy.copy(ele)
|
||||
for i, v in enumerate(list_):
|
||||
ret[i] = v
|
||||
return ret
|
||||
|
||||
|
||||
def _check_var_obj_limits(var_obj, fuzz_string):
|
||||
if not var_obj.fuzz:
|
||||
return False
|
||||
if var_obj.fuzz_types:
|
||||
ret = False
|
||||
if "int" in var_obj.fuzz_types:
|
||||
try:
|
||||
int(fuzz_string)
|
||||
ret = True
|
||||
except ValueError:
|
||||
pass
|
||||
if "ascii" in var_obj.fuzz_types:
|
||||
try:
|
||||
fuzz_string.encode('ascii')
|
||||
ret = True
|
||||
except UnicodeEncodeError:
|
||||
pass
|
||||
if "url" in var_obj.fuzz_types:
|
||||
url_re = r"^[A-Za-z0-9\-\._~:\/\?#[\]@!\$&'()*\+,;=%]+$"
|
||||
if re.match(url_re, fuzz_string):
|
||||
ret = True
|
||||
if "str" in var_obj.fuzz_types:
|
||||
try:
|
||||
str(fuzz_string)
|
||||
ret = True
|
||||
except ValueError:
|
||||
pass
|
||||
if not ret:
|
||||
return ret
|
||||
|
||||
if len(fuzz_string) > var_obj.max_length:
|
||||
return False
|
||||
if len(fuzz_string) < var_obj.min_length:
|
||||
return False
|
||||
return True
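# Illustrative sketch (not from the original module): _check_var_obj_limits
# above filters fuzz strings per template variable. The variable must allow
# fuzzing, the string must satisfy at least one declared fuzz_type ("int",
# "ascii", "url", "str"), and it must fit within min_length/max_length. The
# simplified stand-in below only handles the "int" case and uses a
# hypothetical substitute for VariableObject:

import collections

FakeVar = collections.namedtuple("FakeVar",
                                 "fuzz fuzz_types min_length max_length")

def _passes(var, fuzz_string):
    if not var.fuzz:
        return False
    if var.fuzz_types and "int" in var.fuzz_types:
        try:
            int(fuzz_string)
        except ValueError:
            return False
    return var.min_length <= len(fuzz_string) <= var.max_length

int_only = FakeVar(fuzz=True, fuzz_types=["int"], min_length=0, max_length=64)
assert _passes(int_only, "12345")
assert not _passes(int_only, "' OR 1=1 --")  # non-integer payloads are skipped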
|
@ -1,59 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class IntOverflowBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for integer overflow vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "INTEGER_OVERFLOW_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "integer-overflow.txt"
|
||||
|
||||
def test_case(self):
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="int_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=(_("The time it took to resolve a request with an "
|
||||
"invalid integer was too long compared to the "
|
||||
"baseline request. This could indicate a "
|
||||
"vulnerability to buffer overflow attacks")))
|
||||
|
||||
|
||||
class IntOverflowParams(IntOverflowBody):
|
||||
"""Test for integer overflow vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "INTEGER_OVERFLOW_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class IntOverflowHeaders(IntOverflowBody):
|
||||
"""Test for integer overflow vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "INTEGER_OVERFLOW_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class IntOverflowURL(IntOverflowBody):
|
||||
"""Test for integer overflow vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "INTEGER_OVERFLOW_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,63 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class JSONDepthOverflowBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for json depth overflow in HTTP body."""
|
||||
|
||||
test_name = "JSON_DEPTH_OVERFLOW_BODY"
|
||||
parameter_location = "data"
|
||||
failure_keys = [
|
||||
"maximum recursion depth exceeded",
|
||||
"RuntimeError",
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def _get_strings(cls, file_name=None):
|
||||
return [
|
||||
'{"id":' * 1000 + '42' + '}' * 1000,
|
||||
'{"id":' * 10000 + '4242' + '}' * 10000
|
||||
]
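# Illustrative sketch (not from the original module): the payloads above
# build deeply nested JSON by repeating the '{"id":' prefix and the matching
# '}' suffix. Whether decoding such a document fails depends on the parser
# and its recursion limit, hence the hedged except clause:

import json

depth = 1000
payload = '{"id":' * depth + '42' + '}' * depth
# len(payload) == 7002 for a nesting depth of 1000

try:
    json.loads(payload)
except RuntimeError:
    # Parsers that recurse once per nesting level can fail here (in Python 3
    # as a RecursionError, a RuntimeError subclass), which is the
    # denial-of-service condition this test probes for.
    pass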
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="json_depth_limit_strings",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.HIGH,
|
||||
description=(
|
||||
"The string(s): '{0}', is known to be commonly "
|
||||
"returned after a successful overflow of the json"
|
||||
" parsers depth limit. This could possibly "
|
||||
"result in a dos vulnerability.").format(failed_strings))
|
||||
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="json_depth_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=(_("The time it took to resolve a request "
|
||||
"was too long compared to the "
|
||||
"baseline request. This could indicate a "
|
||||
"vulnerability to denial of service attacks.")))
|
@ -1,44 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class LDAPInjectionBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for LDAP injection vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "LDAP_INJECTION_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "ldap.txt"
|
||||
|
||||
|
||||
class LDAPInjectionParams(LDAPInjectionBody):
|
||||
"""Test for LDAP injection vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "LDAP_INJECTION_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class LDAPInjectionHeaders(LDAPInjectionBody):
|
||||
"""Test for LDAP injection vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "LDAP_INJECTION_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class LDAPInjectionURL(LDAPInjectionBody):
|
||||
"""Test for LDAP injection vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "LDAP_INJECTION_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,59 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class ReDosBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for Regex DoS vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "REDOS_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "redos.txt"
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="redos_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=("A response to one of our payload requests has "
|
||||
"taken too long compared to the baseline "
|
||||
"request. This could indicate a vulnerability "
|
||||
"to time-based Regex DoS attacks"))
|
||||
|
||||
|
||||
class ReDosParams(ReDosBody):
|
||||
"""Test for Regex DoS vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "REDOS_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class ReDosHeaders(ReDosBody):
|
||||
"""Test for Regex DoS vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "REDOS_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class ReDosURL(ReDosBody):
|
||||
"""Test for Regex DoS vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "REDOS_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,83 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class SQLInjectionBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for SQL injection vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "SQL_INJECTION_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "sql-injection.txt"
|
||||
failure_keys = [
|
||||
"SQL syntax", "mysql", "MySqlException (0x", "valid MySQL result",
|
||||
"check the manual that corresponds to your MySQL server version",
|
||||
"MySqlClient.", "com.mysql.jdbc.exceptions", "SQLite/JDBCDriver",
|
||||
"SQLite.Exception", "System.Data.SQLite.SQLiteException", "sqlite_.",
|
||||
"SQLite3::", "[SQLITE_ERROR]", "Unknown column", "where clause",
|
||||
"SqlServer", "syntax error"
|
||||
]
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="sql_strings",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=("The string(s): '{0}', known to be commonly "
|
||||
"returned after a successful SQL injection attack"
|
||||
", have been found in the response. This could "
|
||||
"indicate a vulnerability to SQL injection "
|
||||
"attacks.").format(failed_strings))
|
||||
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="sql_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=(_("A response to one of our payload requests has "
|
||||
"taken too long compared to the baseline "
|
||||
"request. This could indicate a vulnerability "
|
||||
"to time-based SQL injection attacks")))
|
||||
|
||||
|
||||
class SQLInjectionParams(SQLInjectionBody):
|
||||
"""Test for SQL injection vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "SQL_INJECTION_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class SQLInjectionHeaders(SQLInjectionBody):
|
||||
"""Test for SQL injection vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "SQL_INJECTION_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class SQLInjectionURL(SQLInjectionBody):
|
||||
"""Test for SQL injection vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "SQL_INJECTION_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,45 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
|
||||
class StringValidationBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for string validation vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "STRING_VALIDATION_BODY"
|
||||
parameter_location = "data"
|
||||
data_key = "string_validation.txt"
|
||||
|
||||
|
||||
class StringValidationParams(StringValidationBody):
|
||||
"""Test for string validation vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "STRING_VALIDATION_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class StringValidationHeaders(StringValidationBody):
|
||||
"""Test for string validation vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "STRING_VALIDATION_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class StringValidationURL(StringValidationBody):
|
||||
"""Test for string validation vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "STRING_VALIDATION_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,114 +0,0 @@
|
||||
# Copyright 2016 Intel
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import os
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos
|
||||
from syntribos._i18n import _
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def user_defined_config():
|
||||
"""Create config options for user defined test."""
|
||||
user_defined_group = cfg.OptGroup(
|
||||
name="user_defined", title="Data for user defined test")
|
||||
CONF.register_group(user_defined_group)
|
||||
options = [
|
||||
cfg.StrOpt(
|
||||
"payload", help="Path to a payload data file."), cfg.StrOpt(
|
||||
"failure_keys", help="Possible failure keys")
|
||||
]
|
||||
CONF.register_opts(options, group=user_defined_group)
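# Illustrative sketch (not from the original module): user_defined_config()
# registers a [user_defined] group with "payload" and "failure_keys" options.
# In a syntribos config file they would be supplied roughly as (hypothetical
# values):
#
#     [user_defined]
#     payload = /path/to/my_payloads.txt
#     failure_keys = Traceback
#
# The same options can be exercised standalone with oslo.config overrides:

from oslo_config import cfg

conf = cfg.ConfigOpts()  # a private ConfigOpts rather than the global CONF
group = cfg.OptGroup(name="user_defined", title="Data for user defined test")
conf.register_group(group)
conf.register_opts([cfg.StrOpt("payload"), cfg.StrOpt("failure_keys")],
                   group=group)
conf([])  # parse an empty command line so option values become readable
conf.set_override("payload", "/path/to/my_payloads.txt", group="user_defined")
assert conf.user_defined.payload == "/path/to/my_payloads.txt"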
|
||||
|
||||
|
||||
class UserDefinedVulnBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for user defined vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "USER_DEFINED_VULN_BODY"
|
||||
parameter_location = "data"
|
||||
user_defined_config()
|
||||
data_key = CONF.user_defined.payload
|
||||
failure_keys = CONF.user_defined.failure_keys
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="user_defined_strings",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=("The string(s): '{0}', is in the list of "
|
||||
"possible vulnerable keys. This may "
|
||||
"indicate a vulnerability to this form of "
|
||||
"user defined attack.").format(failed_strings))
|
||||
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="user_defined_string_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=(_("A response to one of the payload requests has "
|
||||
"taken too long compared to the baseline "
|
||||
"request. This could indicate a vulnerability "
|
||||
"to time-based injection attacks using the user"
|
||||
" provided strings.")))
|
||||
|
||||
@classmethod
|
||||
def get_test_cases(cls, filename, file_content, meta_vars):
|
||||
"""Generates test cases if a payload file is provided."""
|
||||
conf_var = CONF.user_defined.payload
|
||||
if conf_var is None or not os.path.isfile(conf_var):
|
||||
return
|
||||
cls.failures = []
|
||||
prefix_name = "{filename}_{test_name}_{fuzz_file}_".format(
|
||||
filename=filename,
|
||||
test_name=cls.test_name,
|
||||
fuzz_file=cls.data_key)
|
||||
fr = syntribos.tests.fuzz.datagen.fuzz_request(
|
||||
cls.init_req, cls._get_strings(), cls.parameter_location,
|
||||
prefix_name)
|
||||
for fuzz_name, request, fuzz_string, param_path in fr:
|
||||
yield cls.extend_class(fuzz_name, fuzz_string, param_path,
|
||||
{"request": request})
|
||||
|
||||
|
||||
class UserDefinedVulnParams(UserDefinedVulnBody):
|
||||
"""Test for user defined vulnerabilities in HTTP params."""
|
||||
|
||||
test_name = "USER_DEFINED_VULN_PARAMS"
|
||||
parameter_location = "params"
|
||||
|
||||
|
||||
class UserDefinedVulnHeaders(UserDefinedVulnBody):
|
||||
"""Test for user defined vulnerabilities in HTTP header."""
|
||||
|
||||
test_name = "USER_DEFINED_VULN_HEADERS"
|
||||
parameter_location = "headers"
|
||||
|
||||
|
||||
class UserDefinedVulnURL(UserDefinedVulnBody):
|
||||
"""Test for user defined vulnerabilities in HTTP URL."""
|
||||
|
||||
test_name = "USER_DEFINED_VULN_URL"
|
||||
parameter_location = "url"
|
||||
url_var = "FUZZ"
|
@ -1,110 +0,0 @@
|
||||
# Copyright 2015 Rackspace
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from oslo_config import cfg
|
||||
|
||||
import syntribos
|
||||
from syntribos.checks import has_string as has_string
|
||||
from syntribos.checks import time_diff as time_diff
|
||||
from syntribos.clients.http import parser
|
||||
from syntribos.tests.fuzz import base_fuzz
|
||||
import syntribos.tests.fuzz.datagen
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class XMLExternalEntityBody(base_fuzz.BaseFuzzTestCase):
|
||||
"""Test for XML-external-entity injection vulnerabilities in HTTP body."""
|
||||
|
||||
test_name = "XML_EXTERNAL_ENTITY_BODY"
|
||||
parameter_location = "data"
|
||||
dtds_data_key = "xml-external.txt"
|
||||
failure_keys = [
|
||||
'root:',
|
||||
'root@',
|
||||
'daemon:',
|
||||
'sys:',
|
||||
'[boot loader]',
|
||||
'[operating systems]',
|
||||
'multi(0)',
|
||||
'disk(0)',
|
||||
'partition']
|
||||
|
||||
@classmethod
|
||||
def get_test_cases(cls, filename, file_content, meta_vars):
|
||||
"""Makes sure API call supports XML
|
||||
|
||||
Overrides parent fuzz test generation, if API method does not support
|
||||
XML, do not generate tests.
|
||||
"""
|
||||
# Send request for different content-types
|
||||
request_obj = parser.create_request(
|
||||
file_content, CONF.syntribos.endpoint, meta_vars)
|
||||
|
||||
prepared_copy = request_obj.get_prepared_copy()
|
||||
prepared_copy.headers['content-type'] = "application/json"
|
||||
prepared_copy_xml = prepared_copy.get_prepared_copy()
|
||||
prepared_copy_xml.headers['content-type'] = "application/xml"
|
||||
|
||||
init_response, init_signals = cls.client.send_request(prepared_copy)
|
||||
_, xml_signals = cls.client.send_request(
|
||||
prepared_copy_xml)
|
||||
|
||||
cls.init_resp = init_response
|
||||
cls.init_signals = init_signals
|
||||
|
||||
if ("HTTP_CONTENT_TYPE_XML" not in init_signals and
|
||||
"HTTP_CONTENT_TYPE_XML" not in xml_signals):
|
||||
return
|
||||
|
||||
# iterate through permutations of doctype declarations and fuzz fields
|
||||
dtds = cls._get_strings(cls.dtds_data_key)
|
||||
for d_num, dtd in enumerate(dtds):
|
||||
prefix_name = "{filename}_{test_name}_{fuzz_file}{d_index}_"
|
||||
prefix_name = prefix_name.format(
|
||||
filename=filename, test_name=cls.test_name,
|
||||
fuzz_file=cls.dtds_data_key, d_index=d_num)
|
||||
fr = syntribos.tests.fuzz.datagen.fuzz_request(
|
||||
request_obj, ["&xxe;"], cls.parameter_location, prefix_name)
|
||||
for fuzz_name, request, fuzz_string, param_path in fr:
|
||||
request.data = "{0}\n{1}".format(dtd, request.data)
|
||||
yield cls.extend_class(fuzz_name, fuzz_string, param_path,
|
||||
{"request": request})
|
||||
|
||||
def test_case(self):
|
||||
self.run_default_checks()
|
||||
self.test_signals.register(has_string(self))
|
||||
if "FAILURE_KEYS_PRESENT" in self.test_signals:
|
||||
failed_strings = self.test_signals.find(
|
||||
slugs="FAILURE_KEYS_PRESENT")[0].data["failed_strings"]
|
||||
self.register_issue(
|
||||
defect_type="xml_strings",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=("The string(s): '{0}', known to be commonly "
|
||||
"returned after a successful XML external entity "
|
||||
"attack, have been found in the response. This "
|
||||
"could indicate a vulnerability to XML external "
|
||||
"entity attacks.").format(failed_strings))
|
||||
|
||||
self.diff_signals.register(time_diff(self))
|
||||
if "TIME_DIFF_OVER" in self.diff_signals:
|
||||
self.register_issue(
|
||||
defect_type="xml_timing",
|
||||
severity=syntribos.MEDIUM,
|
||||
confidence=syntribos.LOW,
|
||||
description=("The time it took to resolve a request with an "
|
||||
"invalid URL in the DTD takes too long compared "
|
||||
"to the baseline request. This could reflect a "
|
||||
"vulnerability to an XML external entity attack.")
|
||||
)
|