opendev: Remove content and leave an URL to the GitHub repository
Change-Id: I82a3238b6a8c21e6bb8056aca22ef71af9ea2538
This commit is contained in:
@@ -1,3 +0,0 @@
|
||||
[tool.black]
|
||||
line-length = 120
|
||||
exclude = 'migrations'
|
||||
@@ -1,17 +0,0 @@
|
||||
# editorconfig.org
|
||||
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
indent_size = 2
|
||||
indent_style = space
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.py]
|
||||
indent_size = 4
|
||||
@@ -1,21 +0,0 @@
|
||||
---
|
||||
name: "\U0001F31F Feature or improvement opportunities"
|
||||
about: Suggest an idea that would improve the project.
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!-- Thank you for taking the time to create this issue. Your feedback is appreciated ! -->
|
||||
<!-- Consider reading the documentation on https://ara.readthedocs.io/en/latest/ and joining us on Slack or IRC: https://ara.recordsansible.org/community/ -->
|
||||
|
||||
|
||||
## What is the idea ?
|
||||
<!--
|
||||
Include relevant information to help the community help you. Some examples:
|
||||
- the component that you are creating this issue about (api server, api client, web ui, ansible plugins, etc.)
|
||||
- linux distribution, version of python, version of ara and ansible
|
||||
- how is ara installed (from source, pypi, in a container, etc.) and how you are running it (database backend, wsgi server, etc.)
|
||||
-->
|
||||
|
||||
26
.github/ISSUE_TEMPLATE/issues_bugs_problems.md
vendored
26
.github/ISSUE_TEMPLATE/issues_bugs_problems.md
vendored
@@ -1,26 +0,0 @@
|
||||
---
|
||||
name: "\U0001F41E Issues, bugs and problems"
|
||||
about: Contribute a report and describe what should have happened.
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!-- Thank you for taking the time to create this issue. Your feedback is appreciated ! -->
|
||||
<!-- Consider reading the documentation on https://ara.readthedocs.io/en/latest/ and joining us on Slack or IRC: https://ara.recordsansible.org/community/ -->
|
||||
|
||||
|
||||
## What is the issue ?
|
||||
<!--
|
||||
Include relevant information to help the community help you. Some examples:
|
||||
- the component that you are creating this issue about (api server, api client, web ui, ansible plugins, etc.)
|
||||
- linux distribution, version of python, version of ara and ansible
|
||||
- how is ara installed (from source, pypi, in a container, etc.) and how you are running it (database backend, wsgi server, etc.)
|
||||
- debugging logs by setting ARA_DEBUG to True and ARA_LOG_LEVEL to DEBUG
|
||||
- instructions on how to reproduce the issue
|
||||
-->
|
||||
|
||||
|
||||
## What should be happening ?
|
||||
|
||||
103
.gitignore
vendored
103
.gitignore
vendored
@@ -1,103 +0,0 @@
|
||||
# Created by .ignore support plugin (hsz.mobi)
|
||||
### Python template
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
pip-wheel-metadata/
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
reports/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# IPython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# vscode stuff
|
||||
.vscode
|
||||
|
||||
# virtualenv
|
||||
.venv/
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
db.sqlite3
|
||||
www/
|
||||
data/
|
||||
# Failed playbook integration test files
|
||||
*.retry
|
||||
674
LICENSE
674
LICENSE
@@ -1,674 +0,0 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
{one line to give the program's name and a brief idea of what it does.}
|
||||
Copyright (C) {year} {name of author}
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
{project} Copyright (C) {year} {fullname}
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
205
README.rst
205
README.rst
@@ -1,204 +1,5 @@
|
||||
ARA Records Ansible
|
||||
===================
|
||||
This project has been moved
|
||||
---------------------------
|
||||
|
||||
ARA Records Ansible and makes it easier to understand and troubleshoot.
|
||||
This project's code and code review is now on GitHub: https://github.com/ansible-community/ara
|
||||
|
||||
It's another recursive acronym.
|
||||
|
||||
.. image:: doc/source/_static/ara-with-icon.png
|
||||
|
||||
What it does
|
||||
============
|
||||
|
||||
Simple to install and get started, ara provides reporting by saving detailed and granular results of ``ansible`` and ``ansible-playbook`` commands wherever you run them:
|
||||
|
||||
- by hand or from a script
|
||||
- from a laptop, a desktop, a container or a server
|
||||
- for development, CI or production
|
||||
- from a linux distribution or even on OS X (as long as you have ``python >= 3.5``)
|
||||
- from tools such as AWX or Tower, Jenkins, GitLab CI, Rundeck, Zuul, Molecule, ansible-pull, ansible-test or ansible-runner
|
||||
|
||||
By default, ara's Ansible callback plugin will record data to a local sqlite database without requiring you to run a server or a service:
|
||||
|
||||
.. image:: doc/source/_static/ara-quickstart-default.gif
|
||||
|
||||
ara can also provide a single pane of glass when recording data from multiple locations by pointing the callback plugin to a running API server:
|
||||
|
||||
.. image:: doc/source/_static/ara-quickstart-server.gif
|
||||
|
||||
The data is then made available for browsing, searching and querying over the included reporting interface, a CLI client as well as a REST API.
|
||||
|
||||
How it works
|
||||
============
|
||||
|
||||
ARA Records Ansible execution results to sqlite, mysql or postgresql databases by
|
||||
using an `Ansible callback plugin <https://docs.ansible.com/ansible/latest/plugins/callback.html>`_.
|
||||
|
||||
This callback plugin leverages built-in python API clients to send data to a REST API server:
|
||||
|
||||
.. image:: doc/source/_static/graphs/recording-workflow.png
|
||||
|
||||
What it looks like
|
||||
==================
|
||||
|
||||
API browser
|
||||
-----------
|
||||
|
||||
Included by the API server with django-rest-framework, the API browser allows
|
||||
users to navigate the different API endpoints and query recorded data.
|
||||
|
||||
.. image:: doc/source/_static/ui-api-browser.png
|
||||
|
||||
Reporting interface
|
||||
-------------------
|
||||
|
||||
A simple reporting interface built-in to the API server without any extra
|
||||
dependencies.
|
||||
|
||||
.. image:: doc/source/_static/ui-playbook-details.png
|
||||
|
||||
ara CLI
|
||||
-------
|
||||
|
||||
A built-in CLI client for querying and managing playbooks and their recorded data.
|
||||
|
||||
.. image:: doc/source/_static/cli-playbook-list.png
|
||||
|
||||
The full list of commands, their arguments as well as examples can be found in
|
||||
the `CLI documentation <https://ara.readthedocs.io/en/latest/cli.html#cli-ara-api-client>`_.
|
||||
|
||||
Getting started
|
||||
===============
|
||||
|
||||
Requirements
|
||||
------------
|
||||
|
||||
- Any recent Linux distribution or Mac OS with python >=3.5 available
|
||||
- The ara Ansible plugins must be installed for the same python interpreter as Ansible itself
|
||||
|
||||
For RHEL 7 and CentOS 7 it is recommended to run the API server in a container due to missing or outdated dependencies.
|
||||
See this `issue <https://github.com/ansible-community/ara/issues/99>`_ for more information.
|
||||
|
||||
Recording playbooks without an API server
|
||||
-----------------------------------------
|
||||
|
||||
With defaults and using a local sqlite database:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Install Ansible and ARA (with API server dependencies) for the current user
|
||||
python3 -m pip install --user ansible "ara[server]"
|
||||
|
||||
# Configure Ansible to use the ARA callback plugin
|
||||
export ANSIBLE_CALLBACK_PLUGINS="$(python3 -m ara.setup.callback_plugins)"
|
||||
|
||||
# Run an Ansible playbook
|
||||
ansible-playbook playbook.yaml
|
||||
|
||||
# Use the CLI to see recorded playbooks
|
||||
ara playbook list
|
||||
|
||||
# Start the built-in development server to browse recorded results
|
||||
ara-manage runserver
|
||||
|
||||
Recording playbooks with an API server
|
||||
--------------------------------------
|
||||
|
||||
You can get an API server deployed using the `ara Ansible collection <https://github.com/ansible-community/ara-collection>`_
|
||||
or get started quickly using the container images from `DockerHub <https://hub.docker.com/r/recordsansible/ara-api>`_ and
|
||||
`quay.io <https://quay.io/repository/recordsansible/ara-api>`_:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Create a directory for a volume to store settings and a sqlite database
|
||||
mkdir -p ~/.ara/server
|
||||
|
||||
# Start an API server with podman from the image on DockerHub:
|
||||
podman run --name api-server --detach --tty \
|
||||
--volume ~/.ara/server:/opt/ara:z -p 8000:8000 \
|
||||
docker.io/recordsansible/ara-api:latest
|
||||
|
||||
# or with docker from the image on quay.io:
|
||||
docker run --name api-server --detach --tty \
|
||||
--volume ~/.ara/server:/opt/ara:z -p 8000:8000 \
|
||||
quay.io/recordsansible/ara-api:latest
|
||||
|
||||
Once the server is running, ara's Ansible callback plugin must be installed and configured to send data to it:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Install Ansible and ARA (without API server dependencies) for the current user
|
||||
python3 -m pip install --user ansible ara
|
||||
|
||||
# Configure Ansible to use the ARA callback plugin
|
||||
export ANSIBLE_CALLBACK_PLUGINS="$(python3 -m ara.setup.callback_plugins)"
|
||||
|
||||
# Set up the ARA callback to know where the API server is located
|
||||
export ARA_API_CLIENT="http"
|
||||
export ARA_API_SERVER="http://127.0.0.1:8000"
|
||||
|
||||
# Run an Ansible playbook
|
||||
ansible-playbook playbook.yaml
|
||||
|
||||
# Use the CLI to see recorded playbooks
|
||||
ara playbook list
|
||||
|
||||
Data will be available on the API server in real time as the playbook progresses and completes.
|
||||
|
||||
You can read more about how container images are built and how to run them in the `documentation <https://ara.readthedocs.io/en/latest/container-images.html>`_.
|
||||
|
||||
Live demo
|
||||
=========
|
||||
|
||||
A live demo is deployed with the ara Ansible collection from `Ansible galaxy <https://galaxy.ansible.com/recordsansible/ara>`_.
|
||||
|
||||
It is available at https://demo.recordsansible.org.
|
||||
|
||||
Documentation
|
||||
=============
|
||||
|
||||
Documentation for installing, configuring, running and using ARA is
|
||||
available on `readthedocs.io <https://ara.readthedocs.io>`_.
|
||||
|
||||
Community and getting help
|
||||
==========================
|
||||
|
||||
- Bugs, issues and enhancements: https://github.com/ansible-community/ara/issues
|
||||
- IRC: #ara on `Freenode <https://webchat.freenode.net/?channels=#ara>`_
|
||||
- Slack: https://arecordsansible.slack.com (`invitation link <https://join.slack.com/t/arecordsansible/shared_invite/enQtMjMxNzI4ODAxMDQxLTU2NTU3YjMwYzRlYmRkZTVjZTFiOWIxNjE5NGRhMDQ3ZTgzZmQyZTY2NzY5YmZmNDA5ZWY4YTY1Y2Y1ODBmNzc>`_)
|
||||
|
||||
- Website and blog: https://ara.recordsansible.org
|
||||
- Twitter: https://twitter.com/recordsansible
|
||||
|
||||
Contributing
|
||||
============
|
||||
|
||||
Contributions to the project are welcome and appreciated !
|
||||
|
||||
Get started with the `contributor's documentation <https://ara.readthedocs.io/en/latest/contributing.html>`_.
|
||||
|
||||
Authors
|
||||
=======
|
||||
|
||||
Contributors to the project can be viewed on
|
||||
`GitHub <https://github.com/ansible-community/ara/graphs/contributors>`_.
|
||||
|
||||
Copyright
|
||||
=========
|
||||
|
||||
::
|
||||
|
||||
Copyright (c) 2021 The ARA Records Ansible authors
|
||||
|
||||
ARA Records Ansible is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
ARA Records Ansible is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with ARA Records Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.contrib import admin
|
||||
from django.contrib.auth.models import Group
|
||||
|
||||
from ara.api import models
|
||||
|
||||
|
||||
class RecordAdmin(admin.ModelAdmin):
|
||||
list_display = ("id", "key", "value", "type")
|
||||
search_fields = ("key", "value", "type")
|
||||
ordering = ("key",)
|
||||
|
||||
|
||||
admin.site.register(models.Record, RecordAdmin)
|
||||
admin.site.unregister(Group)
|
||||
@@ -1,22 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
name = "ara.api"
|
||||
@@ -1,26 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework import permissions
|
||||
|
||||
|
||||
class APIAccessPermission(permissions.BasePermission):
|
||||
def has_permission(self, request, view):
|
||||
if request.method in permissions.SAFE_METHODS:
|
||||
return request.user.is_authenticated if settings.READ_LOGIN_REQUIRED else True
|
||||
return request.user.is_authenticated if settings.WRITE_LOGIN_REQUIRED else True
|
||||
@@ -1,116 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA Records Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA Records Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA Records Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import collections
|
||||
import hashlib
|
||||
import json
|
||||
import zlib
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from ara.api import models
|
||||
|
||||
# Constants used for defaults which rely on compression so we don't need to
|
||||
# reproduce this code elsewhere.
|
||||
EMPTY_DICT = zlib.compress(json.dumps({}).encode("utf8"))
|
||||
EMPTY_LIST = zlib.compress(json.dumps([]).encode("utf8"))
|
||||
EMPTY_STRING = zlib.compress(json.dumps("").encode("utf8"))
|
||||
|
||||
|
||||
class CompressedTextField(serializers.CharField):
|
||||
"""
|
||||
Compresses text before storing it in the database.
|
||||
Decompresses text from the database before serving it.
|
||||
"""
|
||||
|
||||
def to_representation(self, obj):
|
||||
return zlib.decompress(obj).decode("utf8")
|
||||
|
||||
def to_internal_value(self, data):
|
||||
return zlib.compress(data.encode("utf8"))
|
||||
|
||||
|
||||
class CompressedObjectField(serializers.JSONField):
|
||||
"""
|
||||
Serializes/compresses an object (i.e, list, dict) before storing it in the
|
||||
database.
|
||||
Decompresses/deserializes an object before serving it.
|
||||
"""
|
||||
|
||||
def to_representation(self, obj):
|
||||
return json.loads(zlib.decompress(obj).decode("utf8"))
|
||||
|
||||
def to_internal_value(self, data):
|
||||
return zlib.compress(json.dumps(data).encode("utf8"))
|
||||
|
||||
|
||||
class FileContentField(serializers.CharField):
|
||||
"""
|
||||
Compresses text before storing it in the database.
|
||||
Decompresses text from the database before serving it.
|
||||
"""
|
||||
|
||||
def to_representation(self, obj):
|
||||
return zlib.decompress(obj.contents).decode("utf8")
|
||||
|
||||
def to_internal_value(self, data):
|
||||
contents = data.encode("utf8")
|
||||
sha1 = hashlib.sha1(contents).hexdigest()
|
||||
content_file, created = models.FileContent.objects.get_or_create(
|
||||
sha1=sha1, defaults={"sha1": sha1, "contents": zlib.compress(contents)}
|
||||
)
|
||||
return content_file
|
||||
|
||||
|
||||
class CreatableSlugRelatedField(serializers.SlugRelatedField):
|
||||
"""
|
||||
A SlugRelatedField that supports get_or_create.
|
||||
Used for creating or retrieving labels by name.
|
||||
"""
|
||||
|
||||
def to_representation(self, obj):
|
||||
return {"id": obj.id, "name": obj.name}
|
||||
|
||||
# Overriding RelatedField.to_representation causes error in Browseable API
|
||||
# https://github.com/encode/django-rest-framework/issues/5141
|
||||
def get_choices(self, cutoff=None):
|
||||
queryset = self.get_queryset()
|
||||
if queryset is None:
|
||||
# Ensure that field.choices returns something sensible
|
||||
# even when accessed with a read-only field.
|
||||
return {}
|
||||
|
||||
if cutoff is not None:
|
||||
queryset = queryset[:cutoff]
|
||||
|
||||
return collections.OrderedDict(
|
||||
[
|
||||
(
|
||||
# This is the only line that differs
|
||||
# from the RelatedField's implementation
|
||||
item.pk,
|
||||
self.display_value(item),
|
||||
)
|
||||
for item in queryset
|
||||
]
|
||||
)
|
||||
|
||||
def to_internal_value(self, data):
|
||||
try:
|
||||
return self.get_queryset().get_or_create(**{self.slug_field: data})[0]
|
||||
except (TypeError, ValueError):
|
||||
self.fail("invalid")
|
||||
@@ -1,215 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import django_filters
|
||||
from django.db import models as django_models
|
||||
|
||||
from ara.api import models as ara_models
|
||||
|
||||
|
||||
class BaseFilter(django_filters.rest_framework.FilterSet):
|
||||
created_before = django_filters.IsoDateTimeFilter(field_name="created", lookup_expr="lte")
|
||||
created_after = django_filters.IsoDateTimeFilter(field_name="created", lookup_expr="gte")
|
||||
updated_before = django_filters.IsoDateTimeFilter(field_name="updated", lookup_expr="lte")
|
||||
updated_after = django_filters.IsoDateTimeFilter(field_name="updated", lookup_expr="gte")
|
||||
|
||||
# fmt: off
|
||||
filter_overrides = {
|
||||
django_models.DateTimeField: {
|
||||
'filter_class': django_filters.IsoDateTimeFilter
|
||||
},
|
||||
}
|
||||
# fmt: on
|
||||
|
||||
|
||||
class DateFilter(BaseFilter):
|
||||
started_before = django_filters.IsoDateTimeFilter(field_name="started", lookup_expr="lte")
|
||||
started_after = django_filters.IsoDateTimeFilter(field_name="started", lookup_expr="gte")
|
||||
ended_before = django_filters.IsoDateTimeFilter(field_name="ended", lookup_expr="lte")
|
||||
ended_after = django_filters.IsoDateTimeFilter(field_name="ended", lookup_expr="gte")
|
||||
|
||||
|
||||
class LabelFilter(BaseFilter):
|
||||
# fmt: off
|
||||
order = django_filters.OrderingFilter(
|
||||
fields=(
|
||||
("id", "id"),
|
||||
("created", "created"),
|
||||
("updated", "updated")
|
||||
)
|
||||
)
|
||||
# fmt: on
|
||||
|
||||
|
||||
class PlaybookFilter(DateFilter):
|
||||
ansible_version = django_filters.CharFilter(field_name="ansible_version", lookup_expr="icontains")
|
||||
controller = django_filters.CharFilter(field_name="controller", lookup_expr="icontains")
|
||||
name = django_filters.CharFilter(field_name="name", lookup_expr="icontains")
|
||||
path = django_filters.CharFilter(field_name="path", lookup_expr="icontains")
|
||||
status = django_filters.MultipleChoiceFilter(
|
||||
field_name="status", choices=ara_models.Playbook.STATUS, lookup_expr="iexact"
|
||||
)
|
||||
label = django_filters.CharFilter(field_name="labels", lookup_expr="name__iexact")
|
||||
|
||||
# fmt: off
|
||||
order = django_filters.OrderingFilter(
|
||||
fields=(
|
||||
("id", "id"),
|
||||
("created", "created"),
|
||||
("updated", "updated"),
|
||||
("started", "started"),
|
||||
("ended", "ended"),
|
||||
("duration", "duration"),
|
||||
)
|
||||
)
|
||||
# fmt: on
|
||||
|
||||
|
||||
class PlayFilter(DateFilter):
    """Filtering and ordering for play API queries."""

    # Scope results to a single playbook by id
    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    uuid = django_filters.UUIDFilter(field_name="uuid", lookup_expr="exact")
    status = django_filters.MultipleChoiceFilter(
        field_name="status", choices=ara_models.Play.STATUS, lookup_expr="iexact"
    )
    name = django_filters.CharFilter(field_name="name", lookup_expr="icontains")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("started", "started"),
            ("ended", "ended"),
            ("duration", "duration"),
        )
    )
    # fmt: on
|
||||
|
||||
|
||||
class TaskFilter(DateFilter):
    """Filtering and ordering for task API queries."""

    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    status = django_filters.MultipleChoiceFilter(
        field_name="status", choices=ara_models.Task.STATUS, lookup_expr="iexact"
    )
    name = django_filters.CharFilter(field_name="name", lookup_expr="icontains")
    action = django_filters.CharFilter(field_name="action", lookup_expr="iexact")
    # Matches on the path of the file the task came from
    path = django_filters.CharFilter(field_name="file__path", lookup_expr="icontains")
    handler = django_filters.BooleanFilter(field_name="handler", lookup_expr="exact")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("started", "started"),
            ("ended", "ended"),
            ("duration", "duration"),
        )
    )
    # fmt: on
|
||||
|
||||
|
||||
class HostFilter(BaseFilter):
    """Filtering and ordering for host API queries."""

    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    name = django_filters.CharFilter(field_name="name", lookup_expr="icontains")

    # For example: /api/v1/hosts/failed__gt=0 to return hosts with 1 failure or more
    changed__gt = django_filters.NumberFilter(field_name="changed", lookup_expr="gt")
    changed__lt = django_filters.NumberFilter(field_name="changed", lookup_expr="lt")
    failed__gt = django_filters.NumberFilter(field_name="failed", lookup_expr="gt")
    failed__lt = django_filters.NumberFilter(field_name="failed", lookup_expr="lt")
    ok__gt = django_filters.NumberFilter(field_name="ok", lookup_expr="gt")
    ok__lt = django_filters.NumberFilter(field_name="ok", lookup_expr="lt")
    skipped__gt = django_filters.NumberFilter(field_name="skipped", lookup_expr="gt")
    skipped__lt = django_filters.NumberFilter(field_name="skipped", lookup_expr="lt")
    unreachable__gt = django_filters.NumberFilter(field_name="unreachable", lookup_expr="gt")
    unreachable__lt = django_filters.NumberFilter(field_name="unreachable", lookup_expr="lt")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("name", "name"),
            ("changed", "changed"),
            ("failed", "failed"),
            ("ok", "ok"),
            ("skipped", "skipped"),
            ("unreachable", "unreachable"),
        )
    )
    # fmt: on
|
||||
|
||||
|
||||
class ResultFilter(DateFilter):
    """Filtering and ordering for result API queries."""

    # Results can be narrowed down to any of their parent objects by id
    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    task = django_filters.NumberFilter(field_name="task__id", lookup_expr="exact")
    play = django_filters.NumberFilter(field_name="play__id", lookup_expr="exact")
    host = django_filters.NumberFilter(field_name="host__id", lookup_expr="exact")
    changed = django_filters.BooleanFilter(field_name="changed", lookup_expr="exact")
    status = django_filters.MultipleChoiceFilter(
        field_name="status", choices=ara_models.Result.STATUS, lookup_expr="iexact"
    )
    ignore_errors = django_filters.BooleanFilter(field_name="ignore_errors", lookup_expr="exact")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("started", "started"),
            ("ended", "ended"),
            ("duration", "duration"),
        )
    )
    # fmt: on
|
||||
|
||||
|
||||
class FileFilter(BaseFilter):
    """Filtering and ordering for file API queries."""

    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    path = django_filters.CharFilter(field_name="path", lookup_expr="icontains")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("path", "path")
        )
    )
    # fmt: on
|
||||
|
||||
|
||||
class RecordFilter(BaseFilter):
    """Filtering and ordering for record (key/value) API queries."""

    playbook = django_filters.NumberFilter(field_name="playbook__id", lookup_expr="exact")
    # Record keys are matched exactly (they are unique per playbook)
    key = django_filters.CharFilter(field_name="key", lookup_expr="exact")

    # fmt: off
    order = django_filters.OrderingFilter(
        fields=(
            ("id", "id"),
            ("created", "created"),
            ("updated", "updated"),
            ("key", "key")
        )
    )
    # fmt: on
|
||||
@@ -1,99 +0,0 @@
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """
    Deprecated management command that deletes playbooks older than a given
    number of days through an ARA API client.

    Replaced by 'ara playbook prune' in 1.5 (scheduled for removal in 1.6);
    it logs a deprecation warning every time it runs.
    """

    help = "Deletes playbooks from the database based on their age"
    # Running count of deleted playbooks, reported once handle() completes
    deleted = 0

    def add_arguments(self, parser):
        """Register the CLI flags controlling client selection and pruning."""
        parser.add_argument(
            "--client",
            type=str,
            default="offline",
            help="API client to use for the query: 'offline' or 'http' (default: 'offline')",
        )
        parser.add_argument(
            "--endpoint",
            type=str,
            default="http://127.0.0.1:8000",
            help="API endpoint to use for the query (default: 'http://127.0.0.1:8000')",
        )
        parser.add_argument(
            "--username", type=str, default=None, help="API username to use for the query (default: None)"
        )
        parser.add_argument(
            "--password", type=str, default=None, help="API password to use for the query (default: None)"
        )
        parser.add_argument("--insecure", action="store_true", help="Disables SSL certificate validation")
        parser.add_argument("--timeout", type=int, default=10, help="Timeout for API queries (default: 10)")
        parser.add_argument(
            "--days", type=int, default=31, help="Delete playbooks started this many days ago (default: 31)"
        )
        parser.add_argument(
            "--confirm",
            action="store_true",
            help="Confirm deletion of playbooks, otherwise runs without deleting any playbook",
        )

    def handle(self, *args, **options):
        """
        Query playbooks older than --days and delete them (dry-run unless
        --confirm is passed). Exits with status 1 if the API query fails.
        """
        # logger.warn() is deprecated (and removed in Python 3.13): use warning()
        logger.warning("This command has been replaced by 'ara playbook prune' in 1.5. It will be removed in 1.6.")

        client = options.get("client")
        endpoint = options.get("endpoint")
        username = options.get("username")
        password = options.get("password")
        insecure = options.get("insecure")
        timeout = options.get("timeout")
        days = options.get("days")
        confirm = options.get("confirm")

        # Get an instance of either an offline or http client with the specified parameters.
        # When using the offline client, don't run SQL migrations.
        api_client = get_client(
            client=client,
            endpoint=endpoint,
            username=username,
            password=password,
            verify=not insecure,
            timeout=timeout,
            run_sql_migrations=False,
        )

        if not confirm:
            logger.info("--confirm was not specified, no playbooks will be deleted")

        # generate a timestamp from n days ago in a format we can query the API with
        # ex: 2019-11-21T00:57:41.702229
        limit_date = (datetime.now() - timedelta(days=days)).isoformat()

        logger.info("Querying %s/api/v1/playbooks/?started_before=%s" % (endpoint, limit_date))
        playbooks = api_client.get("/api/v1/playbooks", started_before=limit_date)

        # TODO: Improve client validation and exception handling
        if "count" not in playbooks:
            # If we didn't get an answer we can parse, it's probably due to an error 500, 403, 401, etc.
            # The client would have logged the error.
            logger.error("Client failed to retrieve results, see logs for ara.clients.offline or ara.clients.http.")
            sys.exit(1)

        logger.info("Found %s playbooks matching query" % playbooks["count"])

        for playbook in playbooks["results"]:
            if not confirm:
                msg = "Dry-run: playbook {id} ({path}) would have been deleted, start date: {started}"
                logger.info(msg.format(id=playbook["id"], path=playbook["path"], started=playbook["started"]))
            else:
                msg = "Deleting playbook {id} ({path}), start date: {started}"
                logger.info(msg.format(id=playbook["id"], path=playbook["path"], started=playbook["started"]))
                api_client.delete("/api/v1/playbooks/%s" % playbook["id"])
                self.deleted += 1

        logger.info("%s playbooks deleted" % self.deleted)
|
||||
@@ -1,192 +0,0 @@
|
||||
# Generated by Django 2.2.1 on 2019-05-17 10:13
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial schema for the ARA API: files, hosts, labels, plays, playbooks,
    tasks, results and records, with their foreign keys and unique constraints."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='File',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('path', models.CharField(max_length=255)),
            ],
            options={
                'db_table': 'files',
            },
        ),
        migrations.CreateModel(
            name='FileContent',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                # sha1 deduplicates identical file contents across playbooks
                ('sha1', models.CharField(max_length=40, unique=True)),
                ('contents', models.BinaryField(max_length=4294967295)),
            ],
            options={
                'db_table': 'file_contents',
            },
        ),
        migrations.CreateModel(
            name='Host',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=255)),
                ('facts', models.BinaryField(max_length=4294967295)),
                ('alias', models.CharField(max_length=255, null=True)),
                # Per-host result counters
                ('changed', models.IntegerField(default=0)),
                ('failed', models.IntegerField(default=0)),
                ('ok', models.IntegerField(default=0)),
                ('skipped', models.IntegerField(default=0)),
                ('unreachable', models.IntegerField(default=0)),
            ],
            options={
                'db_table': 'hosts',
            },
        ),
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'db_table': 'labels',
            },
        ),
        migrations.CreateModel(
            name='Play',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('started', models.DateTimeField(default=django.utils.timezone.now)),
                ('ended', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('uuid', models.UUIDField()),
                ('status', models.CharField(choices=[('unknown', 'unknown'), ('running', 'running'), ('completed', 'completed')], default='unknown', max_length=25)),
            ],
            options={
                'db_table': 'plays',
            },
        ),
        migrations.CreateModel(
            name='Playbook',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('started', models.DateTimeField(default=django.utils.timezone.now)),
                ('ended', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=255, null=True)),
                ('ansible_version', models.CharField(max_length=255)),
                ('status', models.CharField(choices=[('unknown', 'unknown'), ('running', 'running'), ('completed', 'completed'), ('failed', 'failed')], default='unknown', max_length=25)),
                ('arguments', models.BinaryField(max_length=4294967295)),
                ('path', models.CharField(max_length=255)),
                ('labels', models.ManyToManyField(to='api.Label')),
            ],
            options={
                'db_table': 'playbooks',
            },
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('started', models.DateTimeField(default=django.utils.timezone.now)),
                ('ended', models.DateTimeField(blank=True, null=True)),
                ('name', models.TextField(blank=True, null=True)),
                ('action', models.TextField()),
                ('lineno', models.IntegerField()),
                ('tags', models.BinaryField(max_length=4294967295)),
                ('handler', models.BooleanField()),
                ('status', models.CharField(choices=[('unknown', 'unknown'), ('running', 'running'), ('completed', 'completed')], default='unknown', max_length=25)),
                ('file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='api.File')),
                ('play', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='api.Play')),
                ('playbook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='api.Playbook')),
            ],
            options={
                'db_table': 'tasks',
            },
        ),
        migrations.CreateModel(
            name='Result',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('started', models.DateTimeField(default=django.utils.timezone.now)),
                ('ended', models.DateTimeField(blank=True, null=True)),
                ('status', models.CharField(choices=[('ok', 'ok'), ('failed', 'failed'), ('skipped', 'skipped'), ('unreachable', 'unreachable'), ('changed', 'changed'), ('ignored', 'ignored'), ('unknown', 'unknown')], default='unknown', max_length=25)),
                ('content', models.BinaryField(max_length=4294967295)),
                ('host', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='api.Host')),
                ('play', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='api.Play')),
                ('playbook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='api.Playbook')),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='api.Task')),
            ],
            options={
                'db_table': 'results',
            },
        ),
        # Foreign keys added after CreateModel to break circular creation order
        migrations.AddField(
            model_name='play',
            name='playbook',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='plays', to='api.Playbook'),
        ),
        migrations.AddField(
            model_name='host',
            name='playbook',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='api.Playbook'),
        ),
        migrations.AddField(
            model_name='file',
            name='content',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='api.FileContent'),
        ),
        migrations.AddField(
            model_name='file',
            name='playbook',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='api.Playbook'),
        ),
        migrations.CreateModel(
            name='Record',
            fields=[
                ('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('key', models.CharField(max_length=255)),
                ('value', models.BinaryField(max_length=4294967295)),
                ('type', models.CharField(max_length=255)),
                ('playbook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='records', to='api.Playbook')),
            ],
            options={
                'db_table': 'records',
                'unique_together': {('key', 'playbook')},
            },
        ),
        migrations.AlterUniqueTogether(
            name='host',
            unique_together={('name', 'playbook')},
        ),
        migrations.AlterUniqueTogether(
            name='file',
            unique_together={('path', 'playbook')},
        ),
    ]
|
||||
@@ -1,17 +0,0 @@
|
||||
# Generated by Django 2.2.1 on 2019-05-23 17:34
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Drops the unused 'alias' column from the hosts table."""

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='host',
            name='alias',
        ),
    ]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 2.2.1 on 2019-05-30 16:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Adds the 'changed' and 'ignore_errors' boolean flags to results."""

    dependencies = [
        ('api', '0002_remove_host_alias'),
    ]

    operations = [
        migrations.AddField(
            model_name='result',
            name='changed',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='result',
            name='ignore_errors',
            field=models.BooleanField(default=False),
        ),
    ]
|
||||
@@ -1,50 +0,0 @@
|
||||
# Generated by Django 2.2.7 on 2019-11-08 16:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def move_to_duration(apps, schema_editor):
    """
    Data migration: backfill the new 'duration' column for existing rows of
    Playbook, Play, Task and Result that don't have one yet.
    """
    # We can't import the model directly as it may be a newer
    # version than this migration expects. We use the historical version.
    duration_models = ['Playbook', 'Play', 'Task', 'Result']
    for duration_model in duration_models:
        model = apps.get_model('api', duration_model)
        for obj in model.objects.all():
            # Already has a duration: nothing to backfill
            if obj.duration is not None:
                continue
            if obj.ended is not None:
                obj.duration = obj.ended - obj.started
            else:
                # Never finished (or end wasn't recorded): approximate with
                # the last time the row was updated
                obj.duration = obj.updated - obj.started
            obj.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Adds a nullable 'duration' column to playbooks, plays, tasks and
    results, then backfills it from existing timestamps."""

    dependencies = [
        ('api', '0003_add_missing_result_properties'),
    ]

    operations = [
        migrations.AddField(
            model_name='play',
            name='duration',
            field=models.DurationField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='playbook',
            name='duration',
            field=models.DurationField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='result',
            name='duration',
            field=models.DurationField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='task',
            name='duration',
            field=models.DurationField(blank=True, null=True),
        ),
        # Backfill existing rows once the columns exist
        migrations.RunPython(move_to_duration)
    ]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.2.9 on 2020-02-03 17:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Makes label names unique."""

    dependencies = [
        ('api', '0004_duration_in_database'),
    ]

    operations = [
        migrations.AlterField(
            model_name='label',
            name='name',
            field=models.CharField(max_length=255, unique=True),
        ),
    ]
|
||||
@@ -1,20 +0,0 @@
|
||||
# Generated by Django 2.2.16 on 2020-09-06 18:51
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Removes the unused 'ignored' and 'changed' result status choices."""

    dependencies = [
        ('api', '0005_unique_label_names'),
    ]

    # Previously, choices included "ignored" and "changed" but these were never used
    # See: https://github.com/ansible-community/ara/issues/150
    operations = [
        migrations.AlterField(
            model_name='result',
            name='status',
            field=models.CharField(choices=[('ok', 'ok'), ('failed', 'failed'), ('skipped', 'skipped'), ('unreachable', 'unreachable'), ('unknown', 'unknown')], default='unknown', max_length=25),
        ),
    ]
|
||||
@@ -1,28 +0,0 @@
|
||||
# Generated by Django 2.2.16 on 2020-09-17 12:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Adds the 'expired' status choice to plays, playbooks and tasks."""

    dependencies = [
        ('api', '0006_remove_result_statuses'),
    ]

    operations = [
        migrations.AlterField(
            model_name='play',
            name='status',
            field=models.CharField(choices=[('unknown', 'unknown'), ('running', 'running'), ('completed', 'completed'), ('expired', 'expired')], default='unknown', max_length=25),
        ),
        migrations.AlterField(
            model_name='playbook',
            name='status',
            field=models.CharField(choices=[('unknown', 'unknown'), ('expired', 'expired'), ('running', 'running'), ('completed', 'completed'), ('failed', 'failed')], default='unknown', max_length=25),
        ),
        migrations.AlterField(
            model_name='task',
            name='status',
            field=models.CharField(choices=[('unknown', 'unknown'), ('running', 'running'), ('completed', 'completed'), ('expired', 'expired')], default='unknown', max_length=25),
        ),
    ]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.2.17 on 2020-12-04 04:38
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Adds the 'controller' column (hostname that ran the playbook) to
    playbooks, defaulting existing rows to 'localhost'."""

    dependencies = [
        ('api', '0007_add_expired_status'),
    ]

    operations = [
        migrations.AddField(
            model_name='playbook',
            name='controller',
            field=models.CharField(default='localhost', max_length=255),
        ),
    ]
|
||||
@@ -1,282 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
|
||||
class Base(models.Model):
    """
    Abstract base model part of every model
    """

    class Meta:
        abstract = True

    id = models.BigAutoField(primary_key=True, editable=False)
    # created is set once on insert; updated is refreshed on every save
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
|
||||
|
||||
|
||||
class Duration(Base):
    """
    Abstract model for models with a concept of duration
    """

    class Meta:
        abstract = True

    started = models.DateTimeField(default=timezone.now)
    ended = models.DateTimeField(blank=True, null=True)
    duration = models.DurationField(blank=True, null=True)

    def save(self, *args, **kwargs):
        """Persist the object, deriving 'duration' from the timestamps."""
        # Compute duration based on available timestamps
        if self.ended is not None:
            self.duration = self.ended - self.started
        return super(Duration, self).save(*args, **kwargs)
|
||||
|
||||
|
||||
class Label(Base):
    """
    A label is a generic container meant to group or correlate different
    playbooks. It could be a single playbook run. It could be a "group" of
    playbooks.
    It could represent phases or dynamic logical grouping and tagging of
    playbook runs.
    You could have a label named "failures" and make it so failed playbooks
    are added to this report, for example.
    The main purpose of this is to make the labels customizable by the user.
    """

    class Meta:
        db_table = "labels"

    # Label names are unique across the whole database
    name = models.CharField(max_length=255, unique=True)

    def __str__(self):
        return "<Label %s: %s>" % (self.id, self.name)
|
||||
|
||||
|
||||
class Playbook(Duration):
    """
    An entry in the 'playbooks' table represents a single execution of the
    ansible or ansible-playbook commands. All the data for that execution
    is tied back to this one playbook.
    """

    class Meta:
        db_table = "playbooks"

    # A playbook in ARA can be running (in progress), completed (succeeded) or failed.
    UNKNOWN = "unknown"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    EXPIRED = "expired"
    STATUS = (
        (UNKNOWN, "unknown"),
        (EXPIRED, "expired"),
        (RUNNING, "running"),
        (COMPLETED, "completed"),
        (FAILED, "failed"),
    )

    name = models.CharField(max_length=255, null=True)
    ansible_version = models.CharField(max_length=255)
    status = models.CharField(max_length=25, choices=STATUS, default=UNKNOWN)
    # Serialized CLI arguments of the run; 4 GiB - 1 to match MySQL LONGBLOB
    arguments = models.BinaryField(max_length=(2 ** 32) - 1)
    path = models.CharField(max_length=255)
    labels = models.ManyToManyField(Label)
    # Hostname of the machine the playbook ran from
    controller = models.CharField(max_length=255, default="localhost")

    def __str__(self):
        return "<Playbook %s>" % self.id
|
||||
|
||||
|
||||
class FileContent(Base):
    """
    Contents of a uniquely stored and compressed file.
    Running the same playbook twice will yield two playbook files but just
    one file contents.
    """

    class Meta:
        db_table = "file_contents"

    # Contents are deduplicated by their sha1 digest
    sha1 = models.CharField(max_length=40, unique=True)
    contents = models.BinaryField(max_length=(2 ** 32) - 1)

    def __str__(self):
        return "<FileContent %s:%s>" % (self.id, self.sha1)
|
||||
|
||||
|
||||
class File(Base):
    """
    Data about Ansible files (playbooks, tasks, role files, var files, etc).
    Multiple files can reference the same FileContent record.
    """

    class Meta:
        db_table = "files"
        # A given path is recorded at most once per playbook
        unique_together = ("path", "playbook")

    path = models.CharField(max_length=255)
    content = models.ForeignKey(FileContent, on_delete=models.CASCADE, related_name="files")
    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="files")

    def __str__(self):
        return "<File %s:%s>" % (self.id, self.path)
|
||||
|
||||
|
||||
class Record(Base):
    """
    A rudimentary key/value table to associate arbitrary data to a playbook.
    Used with the ara_record and ara_read Ansible modules.
    """

    class Meta:
        db_table = "records"
        # Keys are unique within a playbook
        unique_together = ("key", "playbook")

    key = models.CharField(max_length=255)
    value = models.BinaryField(max_length=(2 ** 32) - 1)
    type = models.CharField(max_length=255)
    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="records")

    def __str__(self):
        return "<Record %s:%s>" % (self.id, self.key)
|
||||
|
||||
|
||||
class Play(Duration):
    """
    Data about Ansible plays.
    Hosts, tasks and results are childrens of an Ansible play.
    """

    class Meta:
        db_table = "plays"

    # A play in ARA can be running (in progress) or completed (regardless of success or failure)
    UNKNOWN = "unknown"
    RUNNING = "running"
    COMPLETED = "completed"
    EXPIRED = "expired"
    STATUS = ((UNKNOWN, "unknown"), (RUNNING, "running"), (COMPLETED, "completed"), (EXPIRED, "expired"))

    name = models.CharField(max_length=255, blank=True, null=True)
    # UUID assigned to the play by Ansible
    uuid = models.UUIDField()
    status = models.CharField(max_length=25, choices=STATUS, default=UNKNOWN)
    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="plays")

    def __str__(self):
        return "<Play %s:%s>" % (self.id, self.name)
|
||||
|
||||
|
||||
class Task(Duration):
    """Data about Ansible tasks."""

    class Meta:
        db_table = "tasks"

    # A task in ARA can be running (in progress) or completed (regardless of success or failure)
    # Actual task statuses (such as failed, skipped, etc.) are actually in the Results table.
    UNKNOWN = "unknown"
    RUNNING = "running"
    COMPLETED = "completed"
    EXPIRED = "expired"
    STATUS = ((UNKNOWN, "unknown"), (RUNNING, "running"), (COMPLETED, "completed"), (EXPIRED, "expired"))

    name = models.TextField(blank=True, null=True)
    action = models.TextField()
    # Line number of the task within its source file
    lineno = models.IntegerField()
    tags = models.BinaryField(max_length=(2 ** 32) - 1)
    # True when the task is an Ansible handler
    handler = models.BooleanField()
    status = models.CharField(max_length=25, choices=STATUS, default=UNKNOWN)

    play = models.ForeignKey(Play, on_delete=models.CASCADE, related_name="tasks")
    file = models.ForeignKey(File, on_delete=models.CASCADE, related_name="tasks")
    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="tasks")

    def __str__(self):
        return "<Task %s:%s>" % (self.name, self.id)
|
||||
|
||||
|
||||
class Host(Base):
    """
    Data about Ansible hosts.
    """

    class Meta:
        db_table = "hosts"
        # A host name appears at most once per playbook
        unique_together = ("name", "playbook")

    name = models.CharField(max_length=255)
    facts = models.BinaryField(max_length=(2 ** 32) - 1)

    # Per-host counters of result statuses for the playbook
    changed = models.IntegerField(default=0)
    failed = models.IntegerField(default=0)
    ok = models.IntegerField(default=0)
    skipped = models.IntegerField(default=0)
    unreachable = models.IntegerField(default=0)

    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="hosts")

    def __str__(self):
        return "<Host %s:%s>" % (self.id, self.name)
|
||||
|
||||
|
||||
class Result(Duration):
    """
    Data about Ansible results.
    A task can have many results if the task is run on multiple hosts.
    """

    class Meta:
        db_table = "results"

    # Ansible statuses
    OK = "ok"
    FAILED = "failed"
    SKIPPED = "skipped"
    UNREACHABLE = "unreachable"
    # ARA specific status, it's the default when not specified
    UNKNOWN = "unknown"

    # fmt:off
    STATUS = (
        (OK, "ok"),
        (FAILED, "failed"),
        (SKIPPED, "skipped"),
        (UNREACHABLE, "unreachable"),
        (UNKNOWN, "unknown"),
    )
    # fmt:on

    status = models.CharField(max_length=25, choices=STATUS, default=UNKNOWN)
    changed = models.BooleanField(default=False)
    ignore_errors = models.BooleanField(default=False)

    # TODO: use a single Content table
    content = models.BinaryField(max_length=(2 ** 32) - 1)
    host = models.ForeignKey(Host, on_delete=models.CASCADE, related_name="results")
    task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name="results")
    play = models.ForeignKey(Play, on_delete=models.CASCADE, related_name="results")
    playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE, related_name="results")

    def __str__(self):
        return "<Result %s, %s>" % (self.id, self.status)
|
||||
@@ -1,374 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from ara.api import fields as ara_fields, models
|
||||
|
||||
|
||||
class ResultStatusSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
status = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_status(obj):
|
||||
if obj.status == "ok" and obj.changed:
|
||||
return "changed"
|
||||
elif obj.status == "failed" and obj.ignore_errors:
|
||||
return "ignored"
|
||||
else:
|
||||
return obj.status
|
||||
|
||||
|
||||
class TaskPathSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
path = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_path(obj):
|
||||
return obj.file.path
|
||||
|
||||
|
||||
class ItemCountSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
# For counting relationships to other objects
|
||||
items = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_items(obj):
|
||||
types = ["plays", "tasks", "results", "hosts", "files", "records"]
|
||||
items = {item: getattr(obj, item).count() for item in types if hasattr(obj, item)}
|
||||
return items
|
||||
|
||||
|
||||
class FileSha1Serializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
# For retrieving the sha1 of a file's contents
|
||||
sha1 = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_sha1(obj):
|
||||
return obj.content.sha1
|
||||
|
||||
|
||||
#######
|
||||
# Simple serializers provide lightweight representations of objects suitable for inclusion in other objects
|
||||
#######
|
||||
|
||||
|
||||
class SimpleLabelSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Label
|
||||
exclude = ("created", "updated")
|
||||
|
||||
|
||||
class SimplePlaybookSerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Playbook
|
||||
exclude = ("arguments", "created", "updated")
|
||||
|
||||
labels = SimpleLabelSerializer(many=True, read_only=True, default=[])
|
||||
|
||||
|
||||
class SimplePlaySerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Play
|
||||
exclude = ("playbook", "uuid", "created", "updated")
|
||||
|
||||
|
||||
class SimpleTaskSerializer(ItemCountSerializer, TaskPathSerializer):
|
||||
class Meta:
|
||||
model = models.Task
|
||||
exclude = ("playbook", "play", "created", "updated")
|
||||
|
||||
tags = ara_fields.CompressedObjectField(read_only=True)
|
||||
|
||||
|
||||
class SimpleHostSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Host
|
||||
exclude = ("playbook", "facts", "created", "updated")
|
||||
|
||||
|
||||
class SimpleFileSerializer(FileSha1Serializer):
|
||||
class Meta:
|
||||
model = models.File
|
||||
exclude = ("playbook", "content", "created", "updated")
|
||||
|
||||
|
||||
#######
|
||||
# Detailed serializers returns every field of an object as well as a simple
|
||||
# representation of relationships to other objects.
|
||||
#######
|
||||
|
||||
|
||||
class DetailedLabelSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Label
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class DetailedPlaybookSerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Playbook
|
||||
fields = "__all__"
|
||||
|
||||
arguments = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_DICT, read_only=True)
|
||||
labels = SimpleLabelSerializer(many=True, read_only=True, default=[])
|
||||
|
||||
|
||||
class DetailedPlaySerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Play
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
|
||||
|
||||
class DetailedTaskSerializer(ItemCountSerializer, TaskPathSerializer):
|
||||
class Meta:
|
||||
model = models.Task
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
play = SimplePlaySerializer(read_only=True)
|
||||
file = SimpleFileSerializer(read_only=True)
|
||||
tags = ara_fields.CompressedObjectField(read_only=True)
|
||||
|
||||
|
||||
class DetailedHostSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Host
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
facts = ara_fields.CompressedObjectField(read_only=True)
|
||||
|
||||
|
||||
class DetailedResultSerializer(ResultStatusSerializer):
|
||||
class Meta:
|
||||
model = models.Result
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
play = SimplePlaySerializer(read_only=True)
|
||||
task = SimpleTaskSerializer(read_only=True)
|
||||
host = SimpleHostSerializer(read_only=True)
|
||||
content = ara_fields.CompressedObjectField(read_only=True)
|
||||
|
||||
|
||||
class DetailedFileSerializer(FileSha1Serializer):
|
||||
class Meta:
|
||||
model = models.File
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
content = ara_fields.FileContentField(read_only=True)
|
||||
|
||||
|
||||
class DetailedRecordSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Record
|
||||
fields = "__all__"
|
||||
|
||||
playbook = SimplePlaybookSerializer(read_only=True)
|
||||
value = ara_fields.CompressedObjectField(read_only=True)
|
||||
|
||||
|
||||
#######
|
||||
# List serializers returns lightweight fields about objects.
|
||||
# Relationships are represented by numerical IDs.
|
||||
#######
|
||||
|
||||
|
||||
class ListLabelSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Label
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ListPlaybookSerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Playbook
|
||||
fields = "__all__"
|
||||
|
||||
arguments = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_DICT, read_only=True)
|
||||
labels = SimpleLabelSerializer(many=True, read_only=True, default=[])
|
||||
|
||||
|
||||
class ListPlaySerializer(ItemCountSerializer):
|
||||
class Meta:
|
||||
model = models.Play
|
||||
fields = "__all__"
|
||||
|
||||
playbook = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
class ListTaskSerializer(ItemCountSerializer, TaskPathSerializer):
|
||||
class Meta:
|
||||
model = models.Task
|
||||
fields = "__all__"
|
||||
|
||||
tags = ara_fields.CompressedObjectField(read_only=True)
|
||||
play = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
class ListHostSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Host
|
||||
exclude = ("facts",)
|
||||
|
||||
playbook = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
class ListResultSerializer(ResultStatusSerializer):
|
||||
class Meta:
|
||||
model = models.Result
|
||||
exclude = ("content",)
|
||||
|
||||
playbook = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
play = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
task = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
host = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
class ListFileSerializer(FileSha1Serializer):
|
||||
class Meta:
|
||||
model = models.File
|
||||
exclude = ("content",)
|
||||
|
||||
playbook = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
class ListRecordSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Record
|
||||
exclude = ("value",)
|
||||
|
||||
playbook = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
|
||||
#######
|
||||
# Default serializers represents objects as they are modelized in the database.
|
||||
# They are used for creating/updating/destroying objects.
|
||||
#######
|
||||
|
||||
|
||||
class LabelSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Label
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class PlaybookSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Playbook
|
||||
fields = "__all__"
|
||||
|
||||
arguments = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_DICT)
|
||||
labels = ara_fields.CreatableSlugRelatedField(
|
||||
many=True, slug_field="name", queryset=models.Label.objects.all(), required=False
|
||||
)
|
||||
|
||||
|
||||
class PlaySerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Play
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TaskSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Task
|
||||
fields = "__all__"
|
||||
|
||||
tags = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_LIST, help_text="A list containing Ansible tags")
|
||||
|
||||
|
||||
class HostSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Host
|
||||
fields = "__all__"
|
||||
|
||||
facts = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_DICT)
|
||||
|
||||
def get_unique_together_validators(self):
|
||||
"""
|
||||
Hosts have a "unique together" constraint for host.name and play.id.
|
||||
We want to have a "get_or_create" facility and in order to do that, we
|
||||
must manage the validation during the creation, not before.
|
||||
Overriding this method effectively disables this validator.
|
||||
"""
|
||||
return []
|
||||
|
||||
def create(self, validated_data):
|
||||
host, created = models.Host.objects.get_or_create(
|
||||
name=validated_data["name"], playbook=validated_data["playbook"], defaults=validated_data
|
||||
)
|
||||
return host
|
||||
|
||||
|
||||
class ResultSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Result
|
||||
fields = "__all__"
|
||||
|
||||
content = ara_fields.CompressedObjectField(default=ara_fields.EMPTY_DICT)
|
||||
|
||||
|
||||
class FileSerializer(FileSha1Serializer):
|
||||
class Meta:
|
||||
model = models.File
|
||||
fields = "__all__"
|
||||
|
||||
content = ara_fields.FileContentField()
|
||||
|
||||
def get_unique_together_validators(self):
|
||||
"""
|
||||
Files have a "unique together" constraint for file.path and playbook.id.
|
||||
We want to have a "get_or_create" facility and in order to do that, we
|
||||
must manage the validation during the creation, not before.
|
||||
Overriding this method effectively disables this validator.
|
||||
"""
|
||||
return []
|
||||
|
||||
def create(self, validated_data):
|
||||
file_, created = models.File.objects.get_or_create(
|
||||
path=validated_data["path"],
|
||||
content=validated_data["content"],
|
||||
playbook=validated_data["playbook"],
|
||||
defaults=validated_data,
|
||||
)
|
||||
return file_
|
||||
|
||||
|
||||
class RecordSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Record
|
||||
fields = "__all__"
|
||||
|
||||
value = ara_fields.CompressedObjectField(
|
||||
default=ara_fields.EMPTY_STRING, help_text="A string, list, dict, json or other formatted data"
|
||||
)
|
||||
@@ -1,139 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
|
||||
import factory
|
||||
|
||||
try:
|
||||
from factory import DjangoModelFactory
|
||||
except ImportError:
|
||||
# >3.0 moved the location of DjangoModelFactory
|
||||
from factory.django import DjangoModelFactory
|
||||
|
||||
from ara.api import models
|
||||
from ara.api.tests import utils
|
||||
|
||||
logging.getLogger("factory").setLevel(logging.INFO)
|
||||
|
||||
# constants for things like compressed byte strings or objects
|
||||
FILE_CONTENTS = "---\n# Example file"
|
||||
HOST_FACTS = {"ansible_fqdn": "hostname", "ansible_distribution": "CentOS"}
|
||||
PLAYBOOK_ARGUMENTS = {"ansible_version": "2.5.5", "inventory": "/etc/ansible/hosts"}
|
||||
RESULT_CONTENTS = {"results": [{"msg": "something happened"}]}
|
||||
TASK_TAGS = ["always", "never"]
|
||||
RECORD_LIST = ["one", "two", "three"]
|
||||
|
||||
|
||||
class PlaybookFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Playbook
|
||||
|
||||
controller = "localhost"
|
||||
name = "test-playbook"
|
||||
ansible_version = "2.4.0"
|
||||
status = "running"
|
||||
arguments = utils.compressed_obj(PLAYBOOK_ARGUMENTS)
|
||||
path = "/path/playbook.yml"
|
||||
|
||||
|
||||
class FileContentFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.FileContent
|
||||
django_get_or_create = ("sha1",)
|
||||
|
||||
sha1 = utils.sha1(FILE_CONTENTS)
|
||||
contents = utils.compressed_str(FILE_CONTENTS)
|
||||
|
||||
|
||||
class FileFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.File
|
||||
|
||||
path = "/path/playbook.yml"
|
||||
content = factory.SubFactory(FileContentFactory)
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
|
||||
|
||||
class LabelFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Label
|
||||
|
||||
name = "test label"
|
||||
|
||||
|
||||
class PlayFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Play
|
||||
|
||||
name = "test play"
|
||||
status = "running"
|
||||
uuid = "5c5f67b9-e63c-6297-80da-000000000005"
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
|
||||
|
||||
class TaskFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Task
|
||||
|
||||
name = "test task"
|
||||
status = "running"
|
||||
action = "setup"
|
||||
lineno = 2
|
||||
handler = False
|
||||
tags = utils.compressed_obj(TASK_TAGS)
|
||||
play = factory.SubFactory(PlayFactory)
|
||||
file = factory.SubFactory(FileFactory)
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
|
||||
|
||||
class HostFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Host
|
||||
|
||||
facts = utils.compressed_obj(HOST_FACTS)
|
||||
name = "hostname"
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
changed = 0
|
||||
failed = 0
|
||||
ok = 0
|
||||
skipped = 0
|
||||
unreachable = 0
|
||||
|
||||
|
||||
class ResultFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Result
|
||||
|
||||
content = utils.compressed_obj(RESULT_CONTENTS)
|
||||
status = "ok"
|
||||
host = factory.SubFactory(HostFactory)
|
||||
task = factory.SubFactory(TaskFactory)
|
||||
play = factory.SubFactory(PlayFactory)
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
changed = False
|
||||
ignore_errors = False
|
||||
|
||||
|
||||
class RecordFactory(DjangoModelFactory):
|
||||
class Meta:
|
||||
model = models.Record
|
||||
|
||||
key = "record-key"
|
||||
value = utils.compressed_obj(RECORD_LIST)
|
||||
type = "list"
|
||||
playbook = factory.SubFactory(PlaybookFactory)
|
||||
@@ -1,74 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.conf import settings
|
||||
from django.test import RequestFactory, TestCase, override_settings
|
||||
|
||||
from ara.api.auth import APIAccessPermission
|
||||
|
||||
|
||||
class User:
|
||||
is_authenticated = True
|
||||
|
||||
|
||||
class AnonymousUser(User):
|
||||
is_authenticated = False
|
||||
|
||||
|
||||
class PermissionBackendTestCase(TestCase):
|
||||
def setUp(self):
|
||||
factory = RequestFactory()
|
||||
self.anon_get_request = factory.get("/")
|
||||
self.anon_get_request.user = AnonymousUser()
|
||||
self.anon_post_request = factory.post("/")
|
||||
self.anon_post_request.user = AnonymousUser()
|
||||
|
||||
self.authed_get_request = factory.get("/")
|
||||
self.authed_get_request.user = User()
|
||||
self.authed_post_request = factory.post("/")
|
||||
self.authed_post_request.user = User()
|
||||
|
||||
@override_settings(READ_LOGIN_REQUIRED=False, WRITE_LOGIN_REQUIRED=True)
|
||||
def test_anonymous_read_access(self):
|
||||
backend = APIAccessPermission()
|
||||
|
||||
# Writes are blocked (just to show it has no affect on read)
|
||||
self.assertFalse(backend.has_permission(self.anon_post_request, None))
|
||||
|
||||
# Reads are allowed based on READ_LOGIN_REQUIRED
|
||||
self.assertTrue(backend.has_permission(self.anon_get_request, None))
|
||||
settings.READ_LOGIN_REQUIRED = True
|
||||
self.assertFalse(backend.has_permission(self.anon_get_request, None))
|
||||
|
||||
@override_settings(READ_LOGIN_REQUIRED=True, WRITE_LOGIN_REQUIRED=False)
|
||||
def test_anonymous_write_access(self):
|
||||
backend = APIAccessPermission()
|
||||
|
||||
# Reads are blocked (just to show it has no affect on write)
|
||||
self.assertFalse(backend.has_permission(self.anon_get_request, None))
|
||||
|
||||
# Writes are allowed based on WRITE_LOGIN_REQUIRED
|
||||
self.assertTrue(backend.has_permission(self.anon_post_request, None))
|
||||
settings.WRITE_LOGIN_REQUIRED = True
|
||||
self.assertFalse(backend.has_permission(self.anon_post_request, None))
|
||||
|
||||
@override_settings(READ_LOGIN_REQUIRED=True, WRITE_LOGIN_REQUIRED=True)
|
||||
def test_auth_access(self):
|
||||
backend = APIAccessPermission()
|
||||
|
||||
self.assertTrue(backend.has_permission(self.authed_get_request, None))
|
||||
self.assertTrue(backend.has_permission(self.authed_post_request, None))
|
||||
@@ -1,180 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class FileTestCase(APITestCase):
|
||||
def test_file_factory(self):
|
||||
file_content = factories.FileContentFactory()
|
||||
file = factories.FileFactory(path="/path/playbook.yml", content=file_content)
|
||||
self.assertEqual(file.path, "/path/playbook.yml")
|
||||
self.assertEqual(file.content.sha1, file_content.sha1)
|
||||
|
||||
def test_file_serializer(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
serializer = serializers.FileSerializer(
|
||||
data={"path": "/path/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
serializer.is_valid()
|
||||
file = serializer.save()
|
||||
file.refresh_from_db()
|
||||
self.assertEqual(file.content.sha1, utils.sha1(factories.FILE_CONTENTS))
|
||||
|
||||
def test_create_file_with_same_content_create_only_one_file_content(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
serializer = serializers.FileSerializer(
|
||||
data={"path": "/path/1/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
serializer.is_valid()
|
||||
file_content = serializer.save()
|
||||
file_content.refresh_from_db()
|
||||
|
||||
serializer2 = serializers.FileSerializer(
|
||||
data={"path": "/path/2/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
serializer2.is_valid()
|
||||
file_content = serializer2.save()
|
||||
file_content.refresh_from_db()
|
||||
|
||||
self.assertEqual(2, models.File.objects.all().count())
|
||||
self.assertEqual(1, models.FileContent.objects.all().count())
|
||||
|
||||
def test_create_file(self):
|
||||
self.assertEqual(0, models.File.objects.count())
|
||||
playbook = factories.PlaybookFactory()
|
||||
request = self.client.post(
|
||||
"/api/v1/files", {"path": "/path/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
self.assertEqual(201, request.status_code)
|
||||
self.assertEqual(1, models.File.objects.count())
|
||||
|
||||
def test_post_same_file_for_a_playbook(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
self.assertEqual(0, models.File.objects.count())
|
||||
request = self.client.post(
|
||||
"/api/v1/files", {"path": "/path/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
self.assertEqual(201, request.status_code)
|
||||
self.assertEqual(1, models.File.objects.count())
|
||||
|
||||
request = self.client.post(
|
||||
"/api/v1/files", {"path": "/path/playbook.yml", "content": factories.FILE_CONTENTS, "playbook": playbook.id}
|
||||
)
|
||||
self.assertEqual(201, request.status_code)
|
||||
self.assertEqual(1, models.File.objects.count())
|
||||
|
||||
def test_get_no_files(self):
|
||||
request = self.client.get("/api/v1/files")
|
||||
self.assertEqual(0, len(request.data["results"]))
|
||||
|
||||
def test_get_files(self):
|
||||
file = factories.FileFactory()
|
||||
request = self.client.get("/api/v1/files")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(file.path, request.data["results"][0]["path"])
|
||||
|
||||
def test_get_file(self):
|
||||
file = factories.FileFactory()
|
||||
request = self.client.get("/api/v1/files/%s" % file.id)
|
||||
self.assertEqual(file.path, request.data["path"])
|
||||
self.assertEqual(file.content.sha1, request.data["sha1"])
|
||||
|
||||
def test_update_file(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
file = factories.FileFactory(playbook=playbook)
|
||||
old_sha1 = file.content.sha1
|
||||
self.assertNotEqual("/path/new_playbook.yml", file.path)
|
||||
request = self.client.put(
|
||||
"/api/v1/files/%s" % file.id,
|
||||
{"path": "/path/new_playbook.yml", "content": "# playbook", "playbook": playbook.id},
|
||||
)
|
||||
self.assertEqual(200, request.status_code)
|
||||
file_updated = models.File.objects.get(id=file.id)
|
||||
self.assertEqual("/path/new_playbook.yml", file_updated.path)
|
||||
self.assertNotEqual(old_sha1, file_updated.content.sha1)
|
||||
|
||||
def test_partial_update_file(self):
|
||||
file = factories.FileFactory()
|
||||
self.assertNotEqual("/path/new_playbook.yml", file.path)
|
||||
request = self.client.patch("/api/v1/files/%s" % file.id, {"path": "/path/new_playbook.yml"})
|
||||
self.assertEqual(200, request.status_code)
|
||||
file_updated = models.File.objects.get(id=file.id)
|
||||
self.assertEqual("/path/new_playbook.yml", file_updated.path)
|
||||
|
||||
def test_delete_file(self):
|
||||
file = factories.FileFactory()
|
||||
self.assertEqual(1, models.File.objects.all().count())
|
||||
request = self.client.delete("/api/v1/files/%s" % file.id)
|
||||
self.assertEqual(204, request.status_code)
|
||||
self.assertEqual(0, models.File.objects.all().count())
|
||||
|
||||
def test_get_file_by_date(self):
|
||||
file = factories.FileFactory()
|
||||
|
||||
past = datetime.datetime.now() - datetime.timedelta(hours=12)
|
||||
negative_date_fields = ["created_before", "updated_before"]
|
||||
positive_date_fields = ["created_after", "updated_after"]
|
||||
|
||||
# Expect no file when searching before it was created
|
||||
for field in negative_date_fields:
|
||||
request = self.client.get("/api/v1/files?%s=%s" % (field, past.isoformat()))
|
||||
self.assertEqual(request.data["count"], 0)
|
||||
|
||||
# Expect a file when searching after it was created
|
||||
for field in positive_date_fields:
|
||||
request = self.client.get("/api/v1/files?%s=%s" % (field, past.isoformat()))
|
||||
self.assertEqual(request.data["count"], 1)
|
||||
self.assertEqual(request.data["results"][0]["id"], file.id)
|
||||
|
||||
def test_get_file_order(self):
|
||||
first_file = factories.FileFactory(path="/root/file.yaml")
|
||||
second_file = factories.FileFactory(path="/root/some/path/file.yaml")
|
||||
|
||||
# Ensure we have two objects
|
||||
request = self.client.get("/api/v1/files")
|
||||
self.assertEqual(2, len(request.data["results"]))
|
||||
|
||||
order_fields = ["id", "created", "updated", "path"]
|
||||
# Ascending order
|
||||
for field in order_fields:
|
||||
request = self.client.get("/api/v1/files?order=%s" % field)
|
||||
self.assertEqual(request.data["results"][0]["id"], first_file.id)
|
||||
|
||||
# Descending order
|
||||
for field in order_fields:
|
||||
request = self.client.get("/api/v1/files?order=-%s" % field)
|
||||
self.assertEqual(request.data["results"][0]["id"], second_file.id)
|
||||
|
||||
def test_get_file_by_path(self):
|
||||
# Create two files with similar paths
|
||||
first_file = factories.FileFactory(path="/root/file.yaml")
|
||||
factories.FileFactory(path="/root/some/path/file.yaml")
|
||||
|
||||
# Exact search should match one
|
||||
request = self.client.get("/api/v1/files?path=/root/file.yaml")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(first_file.path, request.data["results"][0]["path"])
|
||||
|
||||
# Partial match should match both files
|
||||
request = self.client.get("/api/v1/files?path=file.yaml")
|
||||
self.assertEqual(2, len(request.data["results"]))
|
||||
@@ -1,26 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api.tests import factories
|
||||
|
||||
|
||||
class FileContentTestCase(APITestCase):
|
||||
def test_file_content_factory(self):
|
||||
file_content = factories.FileContentFactory(sha1="413a2f16b8689267b7d0c2e10cdd19bf3e54208d")
|
||||
self.assertEqual(file_content.sha1, "413a2f16b8689267b7d0c2e10cdd19bf3e54208d")
|
||||
@@ -1,184 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class HostTestCase(APITestCase):
    """API and serializer tests for Host objects."""

    def test_host_factory(self):
        """The factory stores the name it was given."""
        host = factories.HostFactory(name="testhost")
        self.assertEqual(host.name, "testhost")

    def test_host_serializer(self):
        """Serializing a host persists its name and playbook relation."""
        playbook = factories.PlaybookFactory()
        serializer = serializers.HostSerializer(data={"name": "serializer", "playbook": playbook.id})
        serializer.is_valid()
        host = serializer.save()
        host.refresh_from_db()
        self.assertEqual(host.name, "serializer")
        self.assertEqual(host.playbook.id, playbook.id)

    def test_host_serializer_compress_facts(self):
        """Facts are stored compressed in the database."""
        playbook = factories.PlaybookFactory()
        serializer = serializers.HostSerializer(
            data={"name": "compress", "facts": factories.HOST_FACTS, "playbook": playbook.id}
        )
        serializer.is_valid()
        host = serializer.save()
        host.refresh_from_db()
        self.assertEqual(host.facts, utils.compressed_obj(factories.HOST_FACTS))

    def test_host_serializer_decompress_facts(self):
        """Facts read back through the serializer are decompressed."""
        host = factories.HostFactory(facts=utils.compressed_obj(factories.HOST_FACTS))
        serializer = serializers.HostSerializer(instance=host)
        self.assertEqual(serializer.data["facts"], factories.HOST_FACTS)

    def test_get_no_hosts(self):
        """An empty database yields an empty result list."""
        response = self.client.get("/api/v1/hosts")
        self.assertEqual(0, len(response.data["results"]))

    def test_get_hosts(self):
        """A created host shows up in the list endpoint."""
        host = factories.HostFactory()
        response = self.client.get("/api/v1/hosts")
        self.assertEqual(1, len(response.data["results"]))
        self.assertEqual(host.name, response.data["results"][0]["name"])

    def test_delete_host(self):
        """DELETE removes the host and returns 204."""
        host = factories.HostFactory()
        self.assertEqual(1, models.Host.objects.all().count())
        response = self.client.delete("/api/v1/hosts/%s" % host.id)
        self.assertEqual(204, response.status_code)
        self.assertEqual(0, models.Host.objects.all().count())

    def test_create_host(self):
        """POST creates a host and returns 201."""
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Host.objects.count())
        response = self.client.post("/api/v1/hosts", {"name": "create", "playbook": playbook.id})
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Host.objects.count())

    def test_post_same_host_for_a_playbook(self):
        """Posting the same host name twice for one playbook does not duplicate it."""
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Host.objects.count())
        response = self.client.post("/api/v1/hosts", {"name": "create", "playbook": playbook.id})
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Host.objects.count())

        response = self.client.post("/api/v1/hosts", {"name": "create", "playbook": playbook.id})
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Host.objects.count())

    def test_partial_update_host(self):
        """PATCH updates the host name."""
        host = factories.HostFactory()
        self.assertNotEqual("foo", host.name)
        response = self.client.patch("/api/v1/hosts/%s" % host.id, {"name": "foo"})
        self.assertEqual(200, response.status_code)
        host_updated = models.Host.objects.get(id=host.id)
        self.assertEqual("foo", host_updated.name)

    def test_get_host(self):
        """GET on the detail endpoint returns the host."""
        host = factories.HostFactory()
        response = self.client.get("/api/v1/hosts/%s" % host.id)
        self.assertEqual(host.name, response.data["name"])

    def test_get_hosts_by_playbook(self):
        """Hosts can be filtered by their playbook."""
        playbook = factories.PlaybookFactory()
        host = factories.HostFactory(name="host1", playbook=playbook)
        factories.HostFactory(name="host2", playbook=playbook)
        response = self.client.get("/api/v1/hosts?playbook=%s" % playbook.id)
        self.assertEqual(2, len(response.data["results"]))
        self.assertEqual(host.name, response.data["results"][0]["name"])
        self.assertEqual("host2", response.data["results"][1]["name"])

    def test_get_hosts_by_name(self):
        """Hosts can be filtered by name."""
        # Create a playbook and two hosts
        playbook = factories.PlaybookFactory()
        host = factories.HostFactory(name="host1", playbook=playbook)
        factories.HostFactory(name="host2", playbook=playbook)

        # Query for the first host name and expect one result
        response = self.client.get("/api/v1/hosts?name=%s" % host.name)
        self.assertEqual(1, len(response.data["results"]))
        self.assertEqual(host.name, response.data["results"][0]["name"])

    def test_get_hosts_by_stats(self):
        """Hosts can be filtered with __gt/__lt lookups on their stat counters."""
        # Create two hosts with different stats
        first_host = factories.HostFactory(name="first_host", changed=2, failed=2, ok=2, skipped=2, unreachable=2)
        second_host = factories.HostFactory(name="second_host", changed=0, failed=0, ok=0, skipped=0, unreachable=0)

        # There must be two distinct hosts
        response = self.client.get("/api/v1/hosts")
        self.assertEqual(2, response.data["count"])
        self.assertEqual(2, len(response.data["results"]))

        statuses = ["changed", "failed", "ok", "skipped", "unreachable"]

        # Searching for > should only return the first host
        for status in statuses:
            response = self.client.get("/api/v1/hosts?%s__gt=1" % status)
            self.assertEqual(1, response.data["count"])
            self.assertEqual(1, len(response.data["results"]))
            self.assertEqual(first_host.id, response.data["results"][0]["id"])

        # Searching for < should only return the second host
        for status in statuses:
            response = self.client.get("/api/v1/hosts?%s__lt=1" % status)
            self.assertEqual(1, response.data["count"])
            self.assertEqual(1, len(response.data["results"]))
            self.assertEqual(second_host.id, response.data["results"][0]["id"])

    def test_get_host_by_date(self):
        """Hosts can be filtered with created/updated before/after lookups."""
        host = factories.HostFactory()

        past = datetime.datetime.now() - datetime.timedelta(hours=12)
        negative_date_fields = ["created_before", "updated_before"]
        positive_date_fields = ["created_after", "updated_after"]

        # Expect no host when searching before it was created
        for field in negative_date_fields:
            response = self.client.get("/api/v1/hosts?%s=%s" % (field, past.isoformat()))
            self.assertEqual(response.data["count"], 0)

        # Expect a host when searching after it was created
        for field in positive_date_fields:
            response = self.client.get("/api/v1/hosts?%s=%s" % (field, past.isoformat()))
            self.assertEqual(response.data["count"], 1)
            self.assertEqual(response.data["results"][0]["id"], host.id)

    def test_get_host_order(self):
        """Hosts can be ordered ascending and descending on every order field."""
        first_host = factories.HostFactory(name="alpha")
        second_host = factories.HostFactory(name="beta", changed=10, failed=10, ok=10, skipped=10, unreachable=10)

        # Ensure we have two objects
        response = self.client.get("/api/v1/hosts")
        self.assertEqual(2, len(response.data["results"]))

        order_fields = ["id", "created", "updated", "name", "changed", "failed", "ok", "skipped", "unreachable"]
        # Ascending order
        for field in order_fields:
            response = self.client.get("/api/v1/hosts?order=%s" % field)
            self.assertEqual(response.data["results"][0]["id"], first_host.id)

        # Descending order
        for field in order_fields:
            response = self.client.get("/api/v1/hosts?order=-%s" % field)
            self.assertEqual(response.data["results"][0]["id"], second_host.id)
|
||||
@@ -1,117 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from django.db.utils import IntegrityError
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories
|
||||
|
||||
|
||||
class LabelTestCase(APITestCase):
    """API and serializer tests for Label objects."""

    def test_label_factory(self):
        """The factory stores the name it was given."""
        label = factories.LabelFactory(name="factory")
        self.assertEqual(label.name, "factory")

    def test_label_serializer(self):
        """Serializing a label persists its name."""
        serializer = serializers.LabelSerializer(data={"name": "serializer"})
        serializer.is_valid()
        label = serializer.save()
        label.refresh_from_db()
        self.assertEqual(label.name, "serializer")

    def test_create_label(self):
        """POST creates a label and returns 201."""
        self.assertEqual(0, models.Label.objects.count())
        response = self.client.post("/api/v1/labels", {"name": "compress"})
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Label.objects.count())

    def test_get_no_labels(self):
        """An empty database yields an empty result list."""
        response = self.client.get("/api/v1/labels")
        self.assertEqual(0, len(response.data["results"]))

    def test_get_labels(self):
        """A created label shows up in the list endpoint."""
        label = factories.LabelFactory()
        response = self.client.get("/api/v1/labels")
        self.assertEqual(1, len(response.data["results"]))
        self.assertEqual(label.name, response.data["results"][0]["name"])

    def test_get_label(self):
        """GET on the detail endpoint returns the label."""
        label = factories.LabelFactory()
        response = self.client.get("/api/v1/labels/%s" % label.id)
        self.assertEqual(label.name, response.data["name"])

    def test_partial_update_label(self):
        """PATCH updates the label name."""
        label = factories.LabelFactory()
        self.assertNotEqual("updated", label.name)
        response = self.client.patch("/api/v1/labels/%s" % label.id, {"name": "updated"})
        self.assertEqual(200, response.status_code)
        label_updated = models.Label.objects.get(id=label.id)
        self.assertEqual("updated", label_updated.name)

    def test_delete_label(self):
        """DELETE removes the label and returns 204."""
        label = factories.LabelFactory()
        self.assertEqual(1, models.Label.objects.all().count())
        response = self.client.delete("/api/v1/labels/%s" % label.id)
        self.assertEqual(204, response.status_code)
        self.assertEqual(0, models.Label.objects.all().count())

    def test_get_label_by_date(self):
        """Labels can be filtered with created/updated before/after lookups."""
        label = factories.LabelFactory()

        past = datetime.datetime.now() - datetime.timedelta(hours=12)
        negative_date_fields = ["created_before", "updated_before"]
        positive_date_fields = ["created_after", "updated_after"]

        # Expect no label when searching before it was created
        for field in negative_date_fields:
            response = self.client.get("/api/v1/labels?%s=%s" % (field, past.isoformat()))
            self.assertEqual(response.data["count"], 0)

        # Expect a label when searching after it was created
        for field in positive_date_fields:
            response = self.client.get("/api/v1/labels?%s=%s" % (field, past.isoformat()))
            self.assertEqual(response.data["count"], 1)
            self.assertEqual(response.data["results"][0]["id"], label.id)

    def test_get_label_order(self):
        """Labels can be ordered ascending and descending on every order field."""
        first_label = factories.LabelFactory(name="first")
        second_label = factories.LabelFactory(name="second")

        # Ensure we have two objects
        response = self.client.get("/api/v1/labels")
        self.assertEqual(2, len(response.data["results"]))

        order_fields = ["id", "created", "updated"]
        # Ascending order
        for field in order_fields:
            response = self.client.get("/api/v1/labels?order=%s" % field)
            self.assertEqual(response.data["results"][0]["name"], first_label.name)

        # Descending order
        for field in order_fields:
            response = self.client.get("/api/v1/labels?order=-%s" % field)
            self.assertEqual(response.data["results"][0]["name"], second_label.name)

    def test_unique_label_names(self):
        """Label names are unique at the database level."""
        # Create a first label
        factories.LabelFactory(name="label")
        with self.assertRaises(IntegrityError):
            # Creating a second label with the same name should yield an exception
            factories.LabelFactory(name="label")
|
||||
@@ -1,200 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_duration
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories
|
||||
|
||||
|
||||
class PlayTestCase(APITestCase):
    """API and serializer tests for Play objects.

    Fixes over the previous revision:
    - test_get_plays_by_name used factories.TaskFactory for the second object,
      so the name filter was never actually exercised against a second play.
    - test_get_play_by_playbook hardcoded ``playbook=1`` instead of using the
      created play's playbook id, making the test depend on auto-increment state.
    """

    def test_play_factory(self):
        """The factory stores the name it was given."""
        play = factories.PlayFactory(name="play factory")
        self.assertEqual(play.name, "play factory")

    def test_play_serializer(self):
        """Serializing a play persists its name and status."""
        playbook = factories.PlaybookFactory()
        serializer = serializers.PlaySerializer(
            data={
                "name": "serializer",
                "status": "completed",
                "uuid": "5c5f67b9-e63c-6297-80da-000000000005",
                "playbook": playbook.id,
            }
        )
        serializer.is_valid()
        play = serializer.save()
        play.refresh_from_db()
        self.assertEqual(play.name, "serializer")
        self.assertEqual(play.status, "completed")

    def test_get_no_plays(self):
        """An empty database yields an empty result list."""
        request = self.client.get("/api/v1/plays")
        self.assertEqual(0, len(request.data["results"]))

    def test_get_plays(self):
        """A created play shows up in the list endpoint."""
        play = factories.PlayFactory()
        request = self.client.get("/api/v1/plays")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(play.name, request.data["results"][0]["name"])

    def test_delete_play(self):
        """DELETE removes the play and returns 204."""
        play = factories.PlayFactory()
        self.assertEqual(1, models.Play.objects.all().count())
        request = self.client.delete("/api/v1/plays/%s" % play.id)
        self.assertEqual(204, request.status_code)
        self.assertEqual(0, models.Play.objects.all().count())

    def test_create_play(self):
        """POST creates a play and returns 201."""
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Play.objects.count())
        request = self.client.post(
            "/api/v1/plays",
            {
                "name": "create",
                "status": "running",
                "uuid": "5c5f67b9-e63c-6297-80da-000000000005",
                "playbook": playbook.id,
            },
        )
        self.assertEqual(201, request.status_code)
        self.assertEqual(1, models.Play.objects.count())

    def test_partial_update_play(self):
        """PATCH updates the play name."""
        play = factories.PlayFactory()
        self.assertNotEqual("update", play.name)
        request = self.client.patch("/api/v1/plays/%s" % play.id, {"name": "update"})
        self.assertEqual(200, request.status_code)
        play_updated = models.Play.objects.get(id=play.id)
        self.assertEqual("update", play_updated.name)

    def test_expired_play(self):
        """A running play can be transitioned to the expired status."""
        play = factories.PlayFactory(status="running")
        self.assertEqual("running", play.status)

        request = self.client.patch("/api/v1/plays/%s" % play.id, {"status": "expired"})
        self.assertEqual(200, request.status_code)
        play_updated = models.Play.objects.get(id=play.id)
        self.assertEqual("expired", play_updated.status)

    def test_get_play(self):
        """GET on the detail endpoint returns the play."""
        play = factories.PlayFactory()
        request = self.client.get("/api/v1/plays/%s" % play.id)
        self.assertEqual(play.name, request.data["name"])

    def test_get_play_by_playbook(self):
        """Plays can be filtered by their playbook."""
        play = factories.PlayFactory(name="play1")
        factories.PlayFactory(name="play2")
        # Filter on the actual playbook id rather than a hardcoded "1" so the
        # test does not depend on database auto-increment state.
        request = self.client.get("/api/v1/plays?playbook=%s" % play.playbook.id)
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(play.name, request.data["results"][0]["name"])

    def test_get_plays_by_name(self):
        """Plays can be filtered by name."""
        # Create a playbook and two plays
        playbook = factories.PlaybookFactory()
        play = factories.PlayFactory(name="first_play", playbook=playbook)
        # Previously this created a Task by mistake; a second Play is required
        # for the name filter to be meaningfully exercised.
        factories.PlayFactory(name="second_play", playbook=playbook)

        # Query for the first play name and expect one result
        request = self.client.get("/api/v1/plays?name=%s" % play.name)
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(play.name, request.data["results"][0]["name"])

    def test_get_play_by_uuid(self):
        """Plays can be filtered by uuid."""
        play = factories.PlayFactory(name="play1", uuid="6b838b6f-cfc7-4e11-a264-73df8683ee0e")
        factories.PlayFactory(name="play2")
        request = self.client.get("/api/v1/plays?uuid=6b838b6f-cfc7-4e11-a264-73df8683ee0e")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(play.name, request.data["results"][0]["name"])

    def test_get_play_duration(self):
        """The API exposes the duration as ended - started."""
        started = timezone.now()
        ended = started + datetime.timedelta(hours=1)
        play = factories.PlayFactory(started=started, ended=ended)
        request = self.client.get("/api/v1/plays/%s" % play.id)
        self.assertEqual(parse_duration(request.data["duration"]), ended - started)

    def test_get_play_by_date(self):
        """Plays can be filtered with created/started/updated before/after lookups."""
        play = factories.PlayFactory()

        past = datetime.datetime.now() - datetime.timedelta(hours=12)
        negative_date_fields = ["created_before", "started_before", "updated_before"]
        positive_date_fields = ["created_after", "started_after", "updated_after"]

        # Expect no play when searching before it was created
        for field in negative_date_fields:
            request = self.client.get("/api/v1/plays?%s=%s" % (field, past.isoformat()))
            self.assertEqual(request.data["count"], 0)

        # Expect a play when searching after it was created
        for field in positive_date_fields:
            request = self.client.get("/api/v1/plays?%s=%s" % (field, past.isoformat()))
            self.assertEqual(request.data["count"], 1)
            self.assertEqual(request.data["results"][0]["id"], play.id)

    def test_get_play_order(self):
        """Plays can be ordered ascending and descending on every order field."""
        old_started = timezone.now() - datetime.timedelta(hours=12)
        old_ended = old_started + datetime.timedelta(minutes=30)
        old_play = factories.PlayFactory(started=old_started, ended=old_ended)
        new_started = timezone.now() - datetime.timedelta(hours=6)
        new_ended = new_started + datetime.timedelta(hours=1)
        new_play = factories.PlayFactory(started=new_started, ended=new_ended)

        # Ensure we have two objects
        request = self.client.get("/api/v1/plays")
        self.assertEqual(2, len(request.data["results"]))

        order_fields = ["id", "created", "updated", "started", "ended", "duration"]
        # Ascending order
        for field in order_fields:
            request = self.client.get("/api/v1/plays?order=%s" % field)
            self.assertEqual(request.data["results"][0]["id"], old_play.id)

        # Descending order
        for field in order_fields:
            request = self.client.get("/api/v1/plays?order=-%s" % field)
            self.assertEqual(request.data["results"][0]["id"], new_play.id)

    def test_update_wrong_play_status(self):
        """PATCHing an invalid status is rejected with 400 and does not persist."""
        play = factories.PlayFactory()
        self.assertNotEqual("wrong", play.status)
        request = self.client.patch("/api/v1/plays/%s" % play.id, {"status": "wrong"})
        self.assertEqual(400, request.status_code)
        play_updated = models.Play.objects.get(id=play.id)
        self.assertNotEqual("wrong", play_updated.status)

    def test_get_play_by_status(self):
        """Plays can be filtered by one or several statuses."""
        play = factories.PlayFactory(status="running")
        factories.PlayFactory(status="completed")
        factories.PlayFactory(status="unknown")

        # Confirm we have three objects
        request = self.client.get("/api/v1/plays")
        self.assertEqual(3, len(request.data["results"]))

        # Test single status
        request = self.client.get("/api/v1/plays?status=running")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(play.status, request.data["results"][0]["status"])

        # Test multiple status
        request = self.client.get("/api/v1/plays?status=running&status=completed")
        self.assertEqual(2, len(request.data["results"]))
|
||||
@@ -1,285 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_duration
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class PlaybookTestCase(APITestCase):
|
||||
def test_playbook_factory(self):
|
||||
playbook = factories.PlaybookFactory(ansible_version="2.4.0")
|
||||
self.assertEqual(playbook.ansible_version, "2.4.0")
|
||||
|
||||
def test_playbook_serializer(self):
|
||||
serializer = serializers.PlaybookSerializer(
|
||||
data={
|
||||
"controller": "serializer",
|
||||
"name": "serializer-playbook",
|
||||
"ansible_version": "2.4.0",
|
||||
"path": "/path/playbook.yml",
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
playbook = serializer.save()
|
||||
playbook.refresh_from_db()
|
||||
self.assertEqual(playbook.controller, "serializer")
|
||||
self.assertEqual(playbook.name, "serializer-playbook")
|
||||
self.assertEqual(playbook.ansible_version, "2.4.0")
|
||||
self.assertEqual(playbook.status, "unknown")
|
||||
|
||||
def test_playbook_serializer_compress_arguments(self):
|
||||
serializer = serializers.PlaybookSerializer(
|
||||
data={"ansible_version": "2.4.0", "path": "/path/playbook.yml", "arguments": factories.PLAYBOOK_ARGUMENTS}
|
||||
)
|
||||
serializer.is_valid()
|
||||
playbook = serializer.save()
|
||||
playbook.refresh_from_db()
|
||||
self.assertEqual(playbook.arguments, utils.compressed_obj(factories.PLAYBOOK_ARGUMENTS))
|
||||
|
||||
def test_playbook_serializer_decompress_arguments(self):
|
||||
playbook = factories.PlaybookFactory(arguments=utils.compressed_obj(factories.PLAYBOOK_ARGUMENTS))
|
||||
serializer = serializers.PlaybookSerializer(instance=playbook)
|
||||
self.assertEqual(serializer.data["arguments"], factories.PLAYBOOK_ARGUMENTS)
|
||||
|
||||
def test_get_no_playbooks(self):
|
||||
request = self.client.get("/api/v1/playbooks")
|
||||
self.assertEqual(0, len(request.data["results"]))
|
||||
|
||||
def test_get_playbooks(self):
|
||||
expected_playbook = factories.PlaybookFactory()
|
||||
request = self.client.get("/api/v1/playbooks")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(1, request.data["count"])
|
||||
playbook = request.data["results"][0]
|
||||
self.assertEqual(playbook["ansible_version"], expected_playbook.ansible_version)
|
||||
|
||||
def test_delete_playbook(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
self.assertEqual(1, models.Playbook.objects.all().count())
|
||||
request = self.client.delete("/api/v1/playbooks/%s" % playbook.id)
|
||||
self.assertEqual(204, request.status_code)
|
||||
self.assertEqual(0, models.Playbook.objects.all().count())
|
||||
|
||||
def test_create_playbook(self):
|
||||
self.assertEqual(0, models.Playbook.objects.count())
|
||||
request = self.client.post(
|
||||
"/api/v1/playbooks", {"ansible_version": "2.4.0", "status": "running", "path": "/path/playbook.yml"}
|
||||
)
|
||||
self.assertEqual(201, request.status_code)
|
||||
self.assertEqual(1, models.Playbook.objects.count())
|
||||
self.assertEqual(request.data["status"], "running")
|
||||
|
||||
def test_create_playbook_with_labels(self):
|
||||
self.assertEqual(0, models.Playbook.objects.count())
|
||||
labels = ["test-label", "another-test-label"]
|
||||
request = self.client.post(
|
||||
"/api/v1/playbooks",
|
||||
{"ansible_version": "2.4.0", "status": "running", "path": "/path/playbook.yml", "labels": labels},
|
||||
)
|
||||
self.assertEqual(201, request.status_code)
|
||||
self.assertEqual(1, models.Playbook.objects.count())
|
||||
self.assertEqual(request.data["status"], "running")
|
||||
self.assertEqual(sorted([label["name"] for label in request.data["labels"]]), sorted(labels))
|
||||
|
||||
def test_partial_update_playbook(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
self.assertNotEqual("completed", playbook.status)
|
||||
request = self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"status": "completed"})
|
||||
self.assertEqual(200, request.status_code)
|
||||
playbook_updated = models.Playbook.objects.get(id=playbook.id)
|
||||
self.assertEqual("completed", playbook_updated.status)
|
||||
|
||||
def test_update_wrong_playbook_status(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
self.assertNotEqual("wrong", playbook.status)
|
||||
request = self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"status": "wrong"})
|
||||
self.assertEqual(400, request.status_code)
|
||||
playbook_updated = models.Playbook.objects.get(id=playbook.id)
|
||||
self.assertNotEqual("wrong", playbook_updated.status)
|
||||
|
||||
def test_expired_playbook(self):
|
||||
playbook = factories.PlaybookFactory(status="running")
|
||||
self.assertEqual("running", playbook.status)
|
||||
|
||||
request = self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"status": "expired"})
|
||||
self.assertEqual(200, request.status_code)
|
||||
playbook_updated = models.Playbook.objects.get(id=playbook.id)
|
||||
self.assertEqual("expired", playbook_updated.status)
|
||||
|
||||
def test_get_playbook(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
request = self.client.get("/api/v1/playbooks/%s" % playbook.id)
|
||||
self.assertEqual(playbook.ansible_version, request.data["ansible_version"])
|
||||
|
||||
def test_get_playbook_by_controller(self):
|
||||
playbook = factories.PlaybookFactory(name="playbook1", controller="controller-one")
|
||||
factories.PlaybookFactory(name="playbook2", controller="controller-two")
|
||||
|
||||
# Test exact match
|
||||
request = self.client.get("/api/v1/playbooks?controller=controller-one")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.name, request.data["results"][0]["name"])
|
||||
self.assertEqual(playbook.controller, request.data["results"][0]["controller"])
|
||||
|
||||
# Test partial match
|
||||
request = self.client.get("/api/v1/playbooks?controller=controller")
|
||||
self.assertEqual(len(request.data["results"]), 2)
|
||||
|
||||
def test_get_playbook_by_name(self):
|
||||
playbook = factories.PlaybookFactory(name="playbook1")
|
||||
factories.PlaybookFactory(name="playbook2")
|
||||
|
||||
# Test exact match
|
||||
request = self.client.get("/api/v1/playbooks?name=playbook1")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.name, request.data["results"][0]["name"])
|
||||
|
||||
# Test partial match
|
||||
request = self.client.get("/api/v1/playbooks?name=playbook")
|
||||
self.assertEqual(len(request.data["results"]), 2)
|
||||
|
||||
def test_get_playbook_by_ansible_version(self):
|
||||
playbook = factories.PlaybookFactory(name="playbook1", ansible_version="2.9.1")
|
||||
factories.PlaybookFactory(name="playbook2", ansible_version="2.8.2")
|
||||
|
||||
# Test exact match
|
||||
request = self.client.get("/api/v1/playbooks?ansible_version=2.9.1")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.name, request.data["results"][0]["name"])
|
||||
|
||||
# Test partial match
|
||||
request = self.client.get("/api/v1/playbooks?ansible_version=2.9")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.name, request.data["results"][0]["name"])
|
||||
|
||||
def test_get_playbook_by_path(self):
|
||||
playbook = factories.PlaybookFactory(path="/root/playbook.yml")
|
||||
factories.PlaybookFactory(path="/home/playbook.yml")
|
||||
|
||||
# Test exact match
|
||||
request = self.client.get("/api/v1/playbooks?path=/root/playbook.yml")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.path, request.data["results"][0]["path"])
|
||||
|
||||
# Test partial match
|
||||
request = self.client.get("/api/v1/playbooks?path=playbook.yml")
|
||||
self.assertEqual(len(request.data["results"]), 2)
|
||||
|
||||
def test_patch_playbook_name(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
new_name = "foo"
|
||||
self.assertNotEqual(playbook.name, new_name)
|
||||
request = self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"name": new_name})
|
||||
self.assertEqual(200, request.status_code)
|
||||
playbook_updated = models.Playbook.objects.get(id=playbook.id)
|
||||
self.assertEqual(playbook_updated.name, new_name)
|
||||
|
||||
def test_get_playbook_by_status(self):
|
||||
playbook = factories.PlaybookFactory(status="failed")
|
||||
factories.PlaybookFactory(status="completed")
|
||||
factories.PlaybookFactory(status="running")
|
||||
|
||||
# Confirm we have three objects
|
||||
request = self.client.get("/api/v1/playbooks")
|
||||
self.assertEqual(3, len(request.data["results"]))
|
||||
|
||||
# Test single status
|
||||
request = self.client.get("/api/v1/playbooks?status=failed")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(playbook.status, request.data["results"][0]["status"])
|
||||
|
||||
# Test multiple status
|
||||
request = self.client.get("/api/v1/playbooks?status=failed&status=completed")
|
||||
self.assertEqual(2, len(request.data["results"]))
|
||||
|
||||
def test_get_playbook_duration(self):
|
||||
started = timezone.now()
|
||||
ended = started + datetime.timedelta(hours=1)
|
||||
playbook = factories.PlaybookFactory(started=started, ended=ended)
|
||||
request = self.client.get("/api/v1/playbooks/%s" % playbook.id)
|
||||
self.assertEqual(parse_duration(request.data["duration"]), ended - started)
|
||||
|
||||
def test_get_playbook_by_date(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
|
||||
past = datetime.datetime.now() - datetime.timedelta(hours=12)
|
||||
negative_date_fields = ["created_before", "started_before", "updated_before"]
|
||||
positive_date_fields = ["created_after", "started_after", "updated_after"]
|
||||
|
||||
# Expect no playbook when searching before it was created
|
||||
for field in negative_date_fields:
|
||||
request = self.client.get("/api/v1/playbooks?%s=%s" % (field, past.isoformat()))
|
||||
self.assertEqual(request.data["count"], 0)
|
||||
|
||||
# Expect a playbook when searching after it was created
|
||||
for field in positive_date_fields:
|
||||
request = self.client.get("/api/v1/playbooks?%s=%s" % (field, past.isoformat()))
|
||||
self.assertEqual(request.data["count"], 1)
|
||||
self.assertEqual(request.data["results"][0]["id"], playbook.id)
|
||||
|
||||
def test_get_playbook_order(self):
|
||||
old_started = timezone.now() - datetime.timedelta(hours=12)
|
||||
old_ended = old_started + datetime.timedelta(minutes=30)
|
||||
old_playbook = factories.PlaybookFactory(started=old_started, ended=old_ended)
|
||||
new_started = timezone.now() - datetime.timedelta(hours=6)
|
||||
new_ended = new_started + datetime.timedelta(hours=1)
|
||||
new_playbook = factories.PlaybookFactory(started=new_started, ended=new_ended)
|
||||
|
||||
# Ensure we have two objects
|
||||
request = self.client.get("/api/v1/playbooks")
|
||||
self.assertEqual(2, len(request.data["results"]))
|
||||
|
||||
order_fields = ["id", "created", "updated", "started", "ended", "duration"]
|
||||
# Ascending order
|
||||
for field in order_fields:
|
||||
request = self.client.get("/api/v1/playbooks?order=%s" % field)
|
||||
self.assertEqual(request.data["results"][0]["id"], old_playbook.id)
|
||||
|
||||
# Descending order
|
||||
for field in order_fields:
|
||||
request = self.client.get("/api/v1/playbooks?order=-%s" % field)
|
||||
self.assertEqual(request.data["results"][0]["id"], new_playbook.id)
|
||||
|
||||
def test_patch_playbook_labels(self):
|
||||
playbook = factories.PlaybookFactory()
|
||||
labels = ["test-label", "another-test-label"]
|
||||
self.assertNotEqual(playbook.labels, labels)
|
||||
request = self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"labels": labels})
|
||||
self.assertEqual(200, request.status_code)
|
||||
playbook_updated = models.Playbook.objects.get(id=playbook.id)
|
||||
self.assertEqual([label.name for label in playbook_updated.labels.all()], labels)
|
||||
|
||||
def test_get_playbook_by_label(self):
|
||||
# Create two playbooks, one with labels and one without
|
||||
playbook = factories.PlaybookFactory()
|
||||
self.client.patch("/api/v1/playbooks/%s" % playbook.id, {"labels": ["test-label"]})
|
||||
factories.PlaybookFactory()
|
||||
|
||||
# Ensure we have two objects when searching without labels
|
||||
request = self.client.get("/api/v1/playbooks")
|
||||
self.assertEqual(2, len(request.data["results"]))
|
||||
|
||||
# Search with label and ensure we have the right one
|
||||
request = self.client.get("/api/v1/playbooks?label=%s" % "test-label")
|
||||
self.assertEqual(1, len(request.data["results"]))
|
||||
self.assertEqual(request.data["results"][0]["labels"][0]["name"], "test-label")
|
||||
@@ -1,153 +0,0 @@
|
||||
import datetime
|
||||
from unittest import skip
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import call_command
|
||||
from django.test import LiveServerTestCase, TestCase, override_settings
|
||||
|
||||
from ara.api import models
|
||||
from ara.api.tests import factories
|
||||
|
||||
|
||||
class LogCheckerMixin(object):
    """Provides a helper that runs the prune command while capturing its logs."""

    def run_prune_command(self, *args, **opts):
        # The command reports through the logging module instead of print(),
        # so assertLogs is required to capture and inspect its output.
        logger_name = "ara.api.management.commands.prune"
        with self.assertLogs(logger_name, "INFO") as captured:
            call_command("prune", *args, **opts)
        return captured.output
|
||||
|
||||
|
||||
class PruneTestCase(TestCase, LogCheckerMixin):
    """Exercises the prune management command through the internal client."""

    # Every log line from the command starts with this logger prefix
    prefix = "INFO:ara.api.management.commands.prune:"

    @skip("TODO: Why aren't logs captured properly for this test ?")
    def test_prune_without_playbooks_and_confirm(self):
        # Without --confirm the command only reports what it would do
        logged = self.run_prune_command()
        self.assertIn(self.prefix + "--confirm was not specified, no playbooks will be deleted", logged)
        self.assertIn(self.prefix + "Found 0 playbooks matching query", logged)
        self.assertIn(self.prefix + "0 playbooks deleted", logged)

    @skip("TODO: Why aren't logs captured properly for this test ?")
    def test_prune_without_playbooks(self):
        # With --confirm the dry-run warning must not appear
        logged = self.run_prune_command("--confirm")
        self.assertNotIn(self.prefix + "--confirm was not specified, no playbooks will be deleted", logged)
        self.assertIn(self.prefix + "Found 0 playbooks matching query", logged)
        self.assertIn(self.prefix + "0 playbooks deleted", logged)
|
||||
|
||||
|
||||
class PruneCmdTestCase(LiveServerTestCase, LogCheckerMixin):
    """Exercises the prune management command against a live HTTP API server."""

    # Every log line from the command starts with this logger prefix
    prefix = "INFO:ara.api.management.commands.prune:"

    def _expect_prune_counts(self, output, found, deleted):
        # Helper: both counters surface as INFO messages from the command's logger
        self.assertIn("%sFound %s playbooks matching query" % (self.prefix, found), output)
        self.assertIn("%s%s playbooks deleted" % (self.prefix, deleted), output)

    @skip("TODO: Why aren't logs captured properly for this test ?")
    def test_prune_with_no_matching_playbook(self):
        # A playbook that just started is too recent to match the age filter
        factories.PlaybookFactory()
        self.assertEqual(1, models.Playbook.objects.all().count())

        output = self.run_prune_command("--confirm")
        self._expect_prune_counts(output, 0, 0)
        self.assertEqual(1, models.Playbook.objects.all().count())

    @skip("TODO: Why aren't logs captured properly for this test ?")
    def test_prune_with_matching_playbook(self):
        # A playbook that started two months ago is old enough to be pruned
        started = datetime.datetime.now() - datetime.timedelta(days=60)
        factories.PlaybookFactory(started=started)
        self.assertEqual(1, models.Playbook.objects.all().count())

        output = self.run_prune_command("--confirm")
        self._expect_prune_counts(output, 1, 1)
        self.assertEqual(0, models.Playbook.objects.all().count())

    def test_prune_with_no_matching_playbook_with_http_client(self):
        # A recent playbook must also survive pruning over the HTTP client
        factories.PlaybookFactory()
        self.assertEqual(1, models.Playbook.objects.all().count())

        output = self.run_prune_command(
            "--confirm", "--client", "http", "--endpoint", self.live_server_url
        )
        self._expect_prune_counts(output, 0, 0)
        self.assertEqual(1, models.Playbook.objects.all().count())

    def test_prune_with_matching_playbook_with_http_client(self):
        # An old playbook must be pruned over the HTTP client too
        started = datetime.datetime.now() - datetime.timedelta(days=60)
        factories.PlaybookFactory(started=started)
        self.assertEqual(1, models.Playbook.objects.all().count())

        output = self.run_prune_command(
            "--confirm", "--client", "http", "--endpoint", self.live_server_url
        )
        self._expect_prune_counts(output, 1, 1)
        self.assertEqual(0, models.Playbook.objects.all().count())

    @override_settings(READ_LOGIN_REQUIRED=True, WRITE_LOGIN_REQUIRED=True)
    def test_prune_without_authenticated_http_client(self):
        # Without credentials the command must abort instead of deleting anything
        with self.assertRaises(SystemExit):
            self.run_prune_command(
                "--confirm", "--client", "http", "--endpoint", self.live_server_url
            )

    @override_settings(READ_LOGIN_REQUIRED=True, WRITE_LOGIN_REQUIRED=True)
    def test_prune_with_authenticated_http_client(self):
        self.user = User.objects.create_superuser("prune", "prune@example.org", "password")

        # An old playbook that should be pruned once authenticated
        started = datetime.datetime.now() - datetime.timedelta(days=60)
        factories.PlaybookFactory(started=started)
        self.assertEqual(1, models.Playbook.objects.all().count())

        output = self.run_prune_command(
            "--confirm",
            "--client",
            "http",
            "--endpoint",
            self.live_server_url,
            "--username",
            "prune",
            "--password",
            "password",
        )
        self._expect_prune_counts(output, 1, 1)
        self.assertEqual(0, models.Playbook.objects.all().count())

    @override_settings(READ_LOGIN_REQUIRED=True, WRITE_LOGIN_REQUIRED=True)
    def test_prune_with_bad_authentication_http_client(self):
        self.user = User.objects.create_superuser("prune", "prune@example.org", "password")

        # An old playbook that would be pruned if authentication succeeded
        started = datetime.datetime.now() - datetime.timedelta(days=60)
        factories.PlaybookFactory(started=started)
        self.assertEqual(1, models.Playbook.objects.all().count())

        # A wrong password must make the command exit without deleting anything
        with self.assertRaises(SystemExit):
            self.run_prune_command(
                "--confirm",
                "--client",
                "http",
                "--endpoint",
                self.live_server_url,
                "--username",
                "prune",
                "--password",
                "somethingelse",
            )
        # TODO: the assertRaises prevents us from looking at the output
        # output = run_prune_command(*args)
        # self.assertIn("Client failed to retrieve results, see logs for ara.clients.offline or ara.clients.http.", output)  # noqa

        # Nothing should have been deleted because the command failed
        self.assertEqual(1, models.Playbook.objects.all().count())
|
||||
@@ -1,187 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class RecordTestCase(APITestCase):
    """API and serializer coverage for the Record model."""

    def test_record_factory(self):
        self.assertEqual(factories.RecordFactory(key="test").key, "test")

    def test_record_serializer(self):
        playbook = factories.PlaybookFactory()
        serializer = serializers.RecordSerializer(
            data={"key": "test", "value": factories.RECORD_LIST, "type": "list", "playbook": playbook.id}
        )
        serializer.is_valid()
        saved = serializer.save()
        saved.refresh_from_db()
        # The value is stored compressed; key and type are stored verbatim
        self.assertEqual(saved.key, "test")
        self.assertEqual(saved.value, utils.compressed_obj(factories.RECORD_LIST))
        self.assertEqual(saved.type, "list")

    def test_get_no_records(self):
        response = self.client.get("/api/v1/records")
        self.assertEqual(0, len(response.data["results"]))

    def test_get_record(self):
        record = factories.RecordFactory()
        response = self.client.get("/api/v1/records")
        self.assertEqual(1, len(response.data["results"]))
        self.assertEqual(record.key, response.data["results"][0]["key"])

    def test_delete_record(self):
        record = factories.RecordFactory()
        self.assertEqual(1, models.Record.objects.all().count())
        response = self.client.delete("/api/v1/records/%s" % record.id)
        self.assertEqual(204, response.status_code)
        self.assertEqual(0, models.Record.objects.all().count())

    def test_create_text_record(self):
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Record.objects.count())
        payload = {"key": "test", "value": "value", "type": "text", "playbook": playbook.id}
        response = self.client.post("/api/v1/records", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Record.objects.count())

    def test_create_list_record(self):
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Record.objects.count())
        value = factories.RECORD_LIST
        payload = {"key": "listrecord", "value": value, "type": "list", "playbook": playbook.id}
        response = self.client.post("/api/v1/records", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Record.objects.count())
        # The API hands back the decompressed value
        self.assertEqual(value, response.data["value"])

    def test_create_dict_record(self):
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Record.objects.count())
        value = {"a": "dictionary"}
        payload = {"key": "dictrecord", "value": value, "type": "dict", "playbook": playbook.id}
        response = self.client.post("/api/v1/records", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Record.objects.count())
        self.assertEqual(value, response.data["value"])

    def test_create_json_record(self):
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Record.objects.count())
        value = '{"a": "dictionary"}'
        payload = {"key": "dictrecord", "value": value, "type": "json", "playbook": playbook.id}
        response = self.client.post("/api/v1/records", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Record.objects.count())
        self.assertEqual(value, response.data["value"])

    def test_create_url_record(self):
        playbook = factories.PlaybookFactory()
        self.assertEqual(0, models.Record.objects.count())
        value = "https://ara.recordsansible.org"
        payload = {"key": "dictrecord", "value": value, "type": "url", "playbook": playbook.id}
        response = self.client.post("/api/v1/records", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, models.Record.objects.count())
        self.assertEqual(value, response.data["value"])

    def test_partial_update_record(self):
        record = factories.RecordFactory()
        self.assertNotEqual("update", record.key)
        response = self.client.patch("/api/v1/records/%s" % record.id, {"key": "update"})
        self.assertEqual(200, response.status_code)
        refreshed = models.Record.objects.get(id=record.id)
        self.assertEqual("update", refreshed.key)

    def test_get_records_by_playbook(self):
        playbook = factories.PlaybookFactory()
        expected = factories.RecordFactory(playbook=playbook, key="by_playbook")
        factories.RecordFactory(key="another_record")
        response = self.client.get("/api/v1/records?playbook=%s" % playbook.id)
        # Two records exist but only one matches the playbook filter
        self.assertEqual(2, models.Record.objects.all().count())
        matches = response.data["results"]
        self.assertEqual(1, len(matches))
        self.assertEqual(expected.key, matches[0]["key"])
        self.assertEqual(expected.playbook.id, matches[0]["playbook"])

    def test_get_records_by_key(self):
        playbook = factories.PlaybookFactory()
        expected = factories.RecordFactory(playbook=playbook, key="by_key")
        factories.RecordFactory(key="another_record")
        response = self.client.get("/api/v1/records?key=%s" % expected.key)
        # Two records exist but only one matches the key filter
        self.assertEqual(2, models.Record.objects.all().count())
        matches = response.data["results"]
        self.assertEqual(1, len(matches))
        self.assertEqual(expected.key, matches[0]["key"])
        self.assertEqual(expected.playbook.id, matches[0]["playbook"])

    def test_get_records_by_playbook_and_key(self):
        playbook = factories.PlaybookFactory()
        expected = factories.RecordFactory(playbook=playbook, key="by_playbook_and_key")
        factories.RecordFactory(playbook=playbook, key="another_record_in_playbook")
        factories.RecordFactory(key="another_record_in_another_playbook")
        response = self.client.get("/api/v1/records?playbook=%s&key=%s" % (playbook.id, expected.key))
        # Three records exist but only one matches both filters at once
        self.assertEqual(3, models.Record.objects.all().count())
        matches = response.data["results"]
        self.assertEqual(1, len(matches))
        self.assertEqual(expected.key, matches[0]["key"])
        self.assertEqual(expected.playbook.id, matches[0]["playbook"])

    def test_get_record_by_date(self):
        record = factories.RecordFactory()
        twelve_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=12)

        # Nothing was created/updated before twelve hours ago
        for field in ("created_before", "updated_before"):
            response = self.client.get("/api/v1/records?%s=%s" % (field, twelve_hours_ago.isoformat()))
            self.assertEqual(response.data["count"], 0)

        # The record was created/updated after twelve hours ago
        for field in ("created_after", "updated_after"):
            response = self.client.get("/api/v1/records?%s=%s" % (field, twelve_hours_ago.isoformat()))
            self.assertEqual(response.data["count"], 1)
            self.assertEqual(response.data["results"][0]["id"], record.id)

    def test_get_record_order(self):
        alpha = factories.RecordFactory(key="alpha")
        beta = factories.RecordFactory(key="beta")

        # Two records must exist before ordering can be exercised
        response = self.client.get("/api/v1/records")
        self.assertEqual(2, len(response.data["results"]))

        sortable = ("id", "created", "updated", "key")

        # Ascending: the first record created comes first
        for field in sortable:
            response = self.client.get("/api/v1/records?order=%s" % field)
            self.assertEqual(response.data["results"][0]["id"], alpha.id)

        # Descending: the second record created comes first
        for field in sortable:
            response = self.client.get("/api/v1/records?order=-%s" % field)
            self.assertEqual(response.data["results"][0]["id"], beta.id)
|
||||
@@ -1,286 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_duration
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class ResultTestCase(APITestCase):
    """API and serializer coverage for the Result model."""

    def test_result_factory(self):
        self.assertEqual(factories.ResultFactory(status="failed").status, "failed")

    def test_result_serializer(self):
        host = factories.HostFactory()
        task = factories.TaskFactory()
        serializer = serializers.ResultSerializer(
            data={
                "status": "skipped",
                "host": host.id,
                "task": task.id,
                "play": task.play.id,
                "playbook": task.playbook.id,
                "changed": False,
                "ignore_errors": False,
            }
        )
        serializer.is_valid()
        saved = serializer.save()
        saved.refresh_from_db()
        self.assertEqual(saved.status, "skipped")
        self.assertEqual(saved.changed, False)
        self.assertEqual(saved.ignore_errors, False)
        self.assertEqual(saved.host.id, host.id)
        self.assertEqual(saved.task.id, task.id)

    def test_result_serializer_compress_content(self):
        host = factories.HostFactory()
        task = factories.TaskFactory()
        serializer = serializers.ResultSerializer(
            data={
                "content": factories.RESULT_CONTENTS,
                "status": "ok",
                "host": host.id,
                "task": task.id,
                "play": task.play.id,
                "playbook": task.playbook.id,
            }
        )
        serializer.is_valid()
        saved = serializer.save()
        saved.refresh_from_db()
        # Content must be stored compressed in the database
        self.assertEqual(saved.content, utils.compressed_obj(factories.RESULT_CONTENTS))

    def test_result_serializer_decompress_content(self):
        # Serializing decompresses the stored content back into its object form
        compressed = utils.compressed_obj(factories.RESULT_CONTENTS)
        result = factories.ResultFactory(content=compressed)
        serializer = serializers.ResultSerializer(instance=result)
        self.assertEqual(serializer.data["content"], factories.RESULT_CONTENTS)

    def test_get_no_results(self):
        response = self.client.get("/api/v1/results")
        self.assertEqual(0, len(response.data["results"]))

    def test_get_results(self):
        result = factories.ResultFactory()
        response = self.client.get("/api/v1/results")
        self.assertEqual(1, len(response.data["results"]))
        self.assertEqual(result.status, response.data["results"][0]["status"])

    def test_delete_result(self):
        result = factories.ResultFactory()
        self.assertEqual(1, models.Result.objects.all().count())
        response = self.client.delete("/api/v1/results/%s" % result.id)
        self.assertEqual(204, response.status_code)
        self.assertEqual(0, models.Result.objects.all().count())

    def test_create_result(self):
        host = factories.HostFactory()
        task = factories.TaskFactory()
        self.assertEqual(0, models.Result.objects.count())
        payload = {
            "content": factories.RESULT_CONTENTS,
            "status": "ok",
            "host": host.id,
            "task": task.id,
            "play": task.play.id,
            "playbook": task.playbook.id,
            "changed": True,
            "ignore_errors": False,
        }
        response = self.client.post("/api/v1/results", payload)
        self.assertEqual(201, response.status_code)
        self.assertEqual(response.data["changed"], True)
        self.assertEqual(response.data["ignore_errors"], False)
        self.assertEqual(1, models.Result.objects.count())

    def test_partial_update_result(self):
        result = factories.ResultFactory()
        self.assertNotEqual("unreachable", result.status)
        response = self.client.patch("/api/v1/results/%s" % result.id, {"status": "unreachable"})
        self.assertEqual(200, response.status_code)
        refreshed = models.Result.objects.get(id=result.id)
        self.assertEqual("unreachable", refreshed.status)

    def test_get_result(self):
        result = factories.ResultFactory()
        response = self.client.get("/api/v1/results/%s" % result.id)
        self.assertEqual(result.status, response.data["status"])

    def test_get_result_by_association(self):
        # Two results necessarily live in two different playbooks with
        # different children: playbook -> play -> task -> result <- host
        first = factories.ResultFactory()
        second = factories.ResultFactory()

        # the fields holding the association ids
        associations = ("playbook", "play", "task", "host")

        # Sanity check: no association id is shared between the two results
        for name in associations:
            self.assertNotEqual(getattr(first, name).id, getattr(second, name).id)

        # In other words, there must be two distinct results
        response = self.client.get("/api/v1/results")
        self.assertEqual(2, response.data["count"])
        self.assertEqual(2, len(response.data["results"]))

        # Filtering on any of the first result's associations yields only it
        for name in associations:
            wanted = getattr(first, name).id
            response = self.client.get("/api/v1/results?%s=%s" % (name, wanted))
            self.assertEqual(1, response.data["count"])
            self.assertEqual(1, len(response.data["results"]))
            self.assertEqual(wanted, response.data["results"][0][name])

    def test_get_result_by_statuses(self):
        failed = factories.ResultFactory(status="failed")
        skipped = factories.ResultFactory(status="skipped")
        factories.ResultFactory(status="ok")
        listed = self.client.get("/api/v1/results").data["results"]
        self.assertEqual(3, len(listed))

        listed = self.client.get("/api/v1/results?status=failed").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(failed.status, listed[0]["status"])

        listed = self.client.get("/api/v1/results?status=skipped").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(skipped.status, listed[0]["status"])

        # Repeating the status parameter combines the filters
        listed = self.client.get("/api/v1/results?status=failed&status=skipped").data["results"]
        self.assertEqual(2, len(listed))
        self.assertEqual(failed.status, listed[1]["status"])
        self.assertEqual(skipped.status, listed[0]["status"])

    def test_result_status_serializer(self):
        # Each (factory kwargs, serialized status) pair: "changed" and
        # "ignored" are derived from the changed/ignore_errors flags.
        cases = [
            (dict(status="ok"), "ok"),
            (dict(status="ok", changed=True), "changed"),
            (dict(status="failed"), "failed"),
            (dict(status="failed", ignore_errors=True), "ignored"),
            (dict(status="skipped"), "skipped"),
            (dict(status="unreachable"), "unreachable"),
        ]
        for kwargs, expected in cases:
            result = factories.ResultFactory(**kwargs)
            response = self.client.get("/api/v1/results/%s" % result.id)
            self.assertEqual(response.data["status"], expected)

    def test_get_result_with_ignore_errors(self):
        failed = factories.ResultFactory(status="failed", ignore_errors=False)
        ignored = factories.ResultFactory(status="failed", ignore_errors=True)

        # Searching for failed should return both
        listed = self.client.get("/api/v1/results?status=failed").data["results"]
        self.assertEqual(2, len(listed))

        # ignore_errors=true narrows it down to the ignored result
        listed = self.client.get("/api/v1/results?status=failed&ignore_errors=true").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(ignored.id, listed[0]["id"])

        # ignore_errors=false narrows it down to the failed result
        listed = self.client.get("/api/v1/results?status=failed&ignore_errors=false").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(failed.id, listed[0]["id"])

    def test_get_result_duration(self):
        begin = timezone.now()
        finish = begin + datetime.timedelta(hours=1)
        result = factories.ResultFactory(started=begin, ended=finish)
        response = self.client.get("/api/v1/results/%s" % result.id)
        self.assertEqual(parse_duration(response.data["duration"]), finish - begin)

    def test_get_result_by_date(self):
        result = factories.ResultFactory()
        twelve_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=12)

        # Nothing was created/started/updated before twelve hours ago
        for field in ("created_before", "started_before", "updated_before"):
            response = self.client.get("/api/v1/results?%s=%s" % (field, twelve_hours_ago.isoformat()))
            self.assertEqual(response.data["count"], 0)

        # The result was created/started/updated after twelve hours ago
        for field in ("created_after", "started_after", "updated_after"):
            response = self.client.get("/api/v1/results?%s=%s" % (field, twelve_hours_ago.isoformat()))
            self.assertEqual(response.data["count"], 1)
            self.assertEqual(response.data["results"][0]["id"], result.id)

    def test_get_result_order(self):
        earlier = timezone.now() - datetime.timedelta(hours=12)
        older = factories.ResultFactory(started=earlier, ended=earlier + datetime.timedelta(minutes=30))
        later = timezone.now() - datetime.timedelta(hours=6)
        newer = factories.ResultFactory(started=later, ended=later + datetime.timedelta(hours=1))

        # Two results must exist before ordering can be exercised
        response = self.client.get("/api/v1/results")
        self.assertEqual(2, len(response.data["results"]))

        sortable = ("id", "created", "updated", "started", "ended", "duration")

        # Ascending: the older result comes first
        for field in sortable:
            response = self.client.get("/api/v1/results?order=%s" % field)
            self.assertEqual(response.data["results"][0]["id"], older.id)

        # Descending: the newer result comes first
        for field in sortable:
            response = self.client.get("/api/v1/results?order=-%s" % field)
            self.assertEqual(response.data["results"][0]["id"], newer.id)

    def test_get_changed_results(self):
        changed = factories.ResultFactory(changed=True)
        unchanged = factories.ResultFactory(changed=False)

        # Assert two results
        listed = self.client.get("/api/v1/results").data["results"]
        self.assertEqual(2, len(listed))

        # Assert one changed
        listed = self.client.get("/api/v1/results?changed=true").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(listed[0]["id"], changed.id)

        # Assert one unchanged
        listed = self.client.get("/api/v1/results?changed=false").data["results"]
        self.assertEqual(1, len(listed))
        self.assertEqual(listed[0]["id"], unchanged.id)
|
||||
@@ -1,263 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_duration
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from ara.api import models, serializers
|
||||
from ara.api.tests import factories, utils
|
||||
|
||||
|
||||
class TaskTestCase(APITestCase):
    """Serializer and REST API tests for the Task model (/api/v1/tasks)."""

    def test_task_factory(self):
        """The factory builds a task carrying the provided name."""
        task = factories.TaskFactory(name="factory")
        self.assertEqual(task.name, "factory")

    def test_task_serializer(self):
        """TaskSerializer creates a task from a valid payload."""
        play = factories.PlayFactory()
        file = factories.FileFactory()
        serializer = serializers.TaskSerializer(
            data={
                "name": "serializer",
                "action": "test",
                "lineno": 2,
                "status": "completed",
                "handler": False,
                "play": play.id,
                "file": file.id,
                "playbook": play.playbook.id,
            }
        )
        serializer.is_valid()
        task = serializer.save()
        task.refresh_from_db()
        self.assertEqual(task.name, "serializer")
        self.assertEqual(task.status, "completed")

    def test_task_serializer_compress_tags(self):
        """Tags are stored zlib-compressed in the database on save."""
        play = factories.PlayFactory()
        file = factories.FileFactory()
        serializer = serializers.TaskSerializer(
            data={
                "name": "compress",
                "action": "test",
                "lineno": 2,
                "status": "running",
                "handler": False,
                "play": play.id,
                "file": file.id,
                "tags": factories.TASK_TAGS,
                "playbook": play.playbook.id,
            }
        )
        serializer.is_valid()
        task = serializer.save()
        task.refresh_from_db()
        self.assertEqual(task.tags, utils.compressed_obj(factories.TASK_TAGS))

    def test_task_serializer_decompress_tags(self):
        """Compressed tags are transparently decompressed when serialized back out."""
        task = factories.TaskFactory(tags=utils.compressed_obj(factories.TASK_TAGS))
        serializer = serializers.TaskSerializer(instance=task)
        self.assertEqual(serializer.data["tags"], factories.TASK_TAGS)

    def test_get_no_tasks(self):
        """An empty database yields an empty result list."""
        request = self.client.get("/api/v1/tasks")
        self.assertEqual(0, len(request.data["results"]))

    def test_get_tasks(self):
        """The list endpoint returns existing tasks."""
        task = factories.TaskFactory()
        request = self.client.get("/api/v1/tasks")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(task.name, request.data["results"][0]["name"])

    def test_delete_task(self):
        """DELETE removes the task and returns HTTP 204."""
        task = factories.TaskFactory()
        self.assertEqual(1, models.Task.objects.all().count())
        request = self.client.delete("/api/v1/tasks/%s" % task.id)
        self.assertEqual(204, request.status_code)
        self.assertEqual(0, models.Task.objects.all().count())

    def test_create_task(self):
        """POST creates a task and returns HTTP 201."""
        play = factories.PlayFactory()
        file = factories.FileFactory()
        self.assertEqual(0, models.Task.objects.count())
        request = self.client.post(
            "/api/v1/tasks",
            {
                "name": "create",
                "action": "test",
                "lineno": 2,
                "handler": False,
                "status": "running",
                "play": play.id,
                "file": file.id,
                "playbook": play.playbook.id,
            },
        )
        self.assertEqual(201, request.status_code)
        self.assertEqual(1, models.Task.objects.count())

    def test_partial_update_task(self):
        """PATCH updates a single field and persists it."""
        task = factories.TaskFactory()
        self.assertNotEqual("update", task.name)
        request = self.client.patch("/api/v1/tasks/%s" % task.id, {"name": "update"})
        self.assertEqual(200, request.status_code)
        task_updated = models.Task.objects.get(id=task.id)
        self.assertEqual("update", task_updated.name)

    def test_expired_task(self):
        """A running task can be flagged as expired through PATCH."""
        task = factories.TaskFactory(status="running")
        self.assertEqual("running", task.status)

        request = self.client.patch("/api/v1/tasks/%s" % task.id, {"status": "expired"})
        self.assertEqual(200, request.status_code)
        task_updated = models.Task.objects.get(id=task.id)
        self.assertEqual("expired", task_updated.status)

    def test_get_task(self):
        """GET on a single task returns its details."""
        task = factories.TaskFactory()
        request = self.client.get("/api/v1/tasks/%s" % task.id)
        self.assertEqual(task.name, request.data["name"])

    def test_get_tasks_by_playbook(self):
        """Tasks can be filtered by playbook id; results come back newest first."""
        playbook = factories.PlaybookFactory()
        task = factories.TaskFactory(name="task1", playbook=playbook)
        factories.TaskFactory(name="task2", playbook=playbook)
        request = self.client.get("/api/v1/tasks?playbook=%s" % playbook.id)
        self.assertEqual(2, len(request.data["results"]))
        self.assertEqual(task.name, request.data["results"][1]["name"])
        self.assertEqual("task2", request.data["results"][0]["name"])

    def test_get_tasks_by_name(self):
        """Tasks can be filtered by name."""
        # Create a playbook and two tasks
        playbook = factories.PlaybookFactory()
        task = factories.TaskFactory(name="task1", playbook=playbook)
        factories.TaskFactory(name="task2", playbook=playbook)

        # Query for the first task name and expect one result
        request = self.client.get("/api/v1/tasks?name=%s" % task.name)
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(task.name, request.data["results"][0]["name"])

    def test_get_task_duration(self):
        """The API reports duration equal to ended - started."""
        started = timezone.now()
        ended = started + datetime.timedelta(hours=1)
        task = factories.TaskFactory(started=started, ended=ended)
        request = self.client.get("/api/v1/tasks/%s" % task.id)
        self.assertEqual(parse_duration(request.data["duration"]), ended - started)

    def test_get_task_by_date(self):
        """Tasks can be filtered by *_before and *_after date query parameters."""
        task = factories.TaskFactory()

        # NOTE(review): datetime.datetime.now() is naive while the model timestamps
        # above use django.utils.timezone -- presumably tolerated by the filter
        # backend, TODO confirm.
        past = datetime.datetime.now() - datetime.timedelta(hours=12)
        negative_date_fields = ["created_before", "started_before", "updated_before"]
        positive_date_fields = ["created_after", "started_after", "updated_after"]

        # Expect no task when searching before it was created
        for field in negative_date_fields:
            request = self.client.get("/api/v1/tasks?%s=%s" % (field, past.isoformat()))
            self.assertEqual(request.data["count"], 0)

        # Expect a task when searching after it was created
        for field in positive_date_fields:
            request = self.client.get("/api/v1/tasks?%s=%s" % (field, past.isoformat()))
            self.assertEqual(request.data["count"], 1)
            self.assertEqual(request.data["results"][0]["id"], task.id)

    def test_get_task_order(self):
        """Listing can be ordered ascending or descending by supported fields."""
        old_started = timezone.now() - datetime.timedelta(hours=12)
        old_ended = old_started + datetime.timedelta(minutes=30)
        old_task = factories.TaskFactory(started=old_started, ended=old_ended)
        new_started = timezone.now() - datetime.timedelta(hours=6)
        new_ended = new_started + datetime.timedelta(hours=1)
        new_task = factories.TaskFactory(started=new_started, ended=new_ended)

        # Ensure we have two objects
        request = self.client.get("/api/v1/tasks")
        self.assertEqual(2, len(request.data["results"]))

        order_fields = ["id", "created", "updated", "started", "ended", "duration"]
        # Ascending order
        for field in order_fields:
            request = self.client.get("/api/v1/tasks?order=%s" % field)
            self.assertEqual(request.data["results"][0]["id"], old_task.id)

        # Descending order
        for field in order_fields:
            request = self.client.get("/api/v1/tasks?order=-%s" % field)
            self.assertEqual(request.data["results"][0]["id"], new_task.id)

    def test_update_wrong_task_status(self):
        """An invalid status value is rejected with HTTP 400 and not persisted."""
        task = factories.TaskFactory()
        self.assertNotEqual("wrong", task.status)
        request = self.client.patch("/api/v1/tasks/%s" % task.id, {"status": "wrong"})
        self.assertEqual(400, request.status_code)
        task_updated = models.Task.objects.get(id=task.id)
        self.assertNotEqual("wrong", task_updated.status)

    def test_get_task_by_status(self):
        """Tasks can be filtered by one or several statuses."""
        task = factories.TaskFactory(status="running")
        factories.TaskFactory(status="completed")
        factories.TaskFactory(status="unknown")

        # Confirm we have three objects
        request = self.client.get("/api/v1/tasks")
        self.assertEqual(3, len(request.data["results"]))

        # Test single status
        request = self.client.get("/api/v1/tasks?status=running")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(task.status, request.data["results"][0]["status"])

        # Test multiple status
        request = self.client.get("/api/v1/tasks?status=running&status=completed")
        self.assertEqual(2, len(request.data["results"]))

    def test_get_task_by_action(self):
        """Tasks can be filtered by module action."""
        task = factories.TaskFactory(action="debug")
        factories.TaskFactory(action="setup")

        # Confirm we have two objects
        request = self.client.get("/api/v1/tasks")
        self.assertEqual(2, len(request.data["results"]))

        # Expect the correct single result when searching
        request = self.client.get("/api/v1/tasks?action=debug")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(task.id, request.data["results"][0]["id"])
        self.assertEqual(task.action, request.data["results"][0]["action"])

    def test_get_task_by_path(self):
        """Tasks can be filtered by full or partial file path."""
        # Create two files with different paths
        first_file = factories.FileFactory(path="/root/roles/foo/tasks/main.yml")
        second_file = factories.FileFactory(path="/root/roles/bar/tasks/main.yml")

        # Create two tasks using these files
        task = factories.TaskFactory(file=first_file)
        factories.TaskFactory(file=second_file)

        # Test exact match
        request = self.client.get("/api/v1/tasks?path=/root/roles/foo/tasks/main.yml")
        self.assertEqual(1, len(request.data["results"]))
        self.assertEqual(task.file.path, request.data["results"][0]["path"])

        # Test partial match
        request = self.client.get("/api/v1/tasks?path=main.yml")
        self.assertEqual(len(request.data["results"]), 2)
|
||||
@@ -1,29 +0,0 @@
|
||||
# Copyright (c) 2019 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import pkg_resources
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
|
||||
class RootTestCase(APITestCase):
    """Tests for the API root endpoint (/api/)."""

    def test_root_endpoint(self):
        """The root endpoint advertises the kind, version and available API versions."""
        result = self.client.get("/api/")
        self.assertEqual(set(result.data.keys()), set(["kind", "version", "api"]))
        self.assertEqual(result.data["kind"], "ara")
        self.assertEqual(result.data["version"], pkg_resources.get_distribution("ara").version)
        # Bug fix: assertTrue(len(...), 1) treated 1 as the "msg" argument and only
        # asserted truthiness; use assertEqual to actually check the count.
        self.assertEqual(1, len(result.data["api"]))
        self.assertTrue(result.data["api"][0].endswith("/api/v1/"))
|
||||
@@ -1,41 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import zlib
|
||||
|
||||
|
||||
def compressed_obj(obj):
    """
    Serialize *obj* to JSON and return the zlib-compressed bytes.
    """
    serialized = json.dumps(obj)
    return zlib.compress(serialized.encode("utf-8"))
|
||||
|
||||
|
||||
def compressed_str(obj):
    """
    Return the zlib-compressed bytes of a (utf-8 encoded) string.
    """
    encoded = obj.encode("utf-8")
    return zlib.compress(encoded)
|
||||
|
||||
|
||||
def sha1(obj):
    """
    Return the hex sha1 digest of a string or of bytes.

    The original implementation always called obj.encode("utf8") and therefore
    crashed with AttributeError on bytes input, even though the docstring
    advertised hashing "a compressed string" (compressed_obj/compressed_str
    return bytes). Bytes are now hashed directly; strings keep the exact
    previous behavior.
    """
    data = obj if isinstance(obj, bytes) else obj.encode("utf8")
    return hashlib.sha1(data).hexdigest()
|
||||
@@ -1,32 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from ara.api import views
|
||||
|
||||
# v1 API router: trailing_slash=False serves endpoints at /api/v1/<prefix>
# without a trailing slash (e.g. /api/v1/playbooks).
router = DefaultRouter(trailing_slash=False)
# Each registration wires a viewset under its URL prefix; the basename is used
# to generate the reverse()-able route names (e.g. "playbook-list").
router.register("labels", views.LabelViewSet, basename="label")
router.register("playbooks", views.PlaybookViewSet, basename="playbook")
router.register("plays", views.PlayViewSet, basename="play")
router.register("tasks", views.TaskViewSet, basename="task")
router.register("hosts", views.HostViewSet, basename="host")
router.register("results", views.ResultViewSet, basename="result")
router.register("files", views.FileViewSet, basename="file")
router.register("records", views.RecordViewSet, basename="record")

# URL patterns consumed by Django's URL dispatcher.
urlpatterns = router.urls
|
||||
152
ara/api/views.py
152
ara/api/views.py
@@ -1,152 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of ARA Records Ansible.
|
||||
#
|
||||
# ARA is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# ARA is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from rest_framework import viewsets
|
||||
|
||||
from ara.api import filters, models, serializers
|
||||
|
||||
|
||||
class LabelViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for labels."""

    queryset = models.Label.objects.all()
    filterset_class = filters.LabelFilter

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListLabelSerializer,
            "retrieve": serializers.DetailedLabelSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.LabelSerializer)
|
||||
|
||||
|
||||
class PlaybookViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for playbooks."""

    filterset_class = filters.PlaybookFilter

    def get_queryset(self):
        """Return playbooks newest first, optionally narrowed by ?status=..."""
        queryset = models.Playbook.objects.all()
        statuses = self.request.GET.getlist("status")
        if statuses:
            queryset = queryset.filter(status__in=statuses)
        return queryset.order_by("-id")

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListPlaybookSerializer,
            "retrieve": serializers.DetailedPlaybookSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.PlaybookSerializer)
|
||||
|
||||
|
||||
class PlayViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for plays."""

    filterset_class = filters.PlayFilter

    def get_queryset(self):
        """Return plays newest first, optionally narrowed by ?status=..."""
        queryset = models.Play.objects.all()
        statuses = self.request.GET.getlist("status")
        if statuses:
            queryset = queryset.filter(status__in=statuses)
        return queryset.order_by("-id")

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListPlaySerializer,
            "retrieve": serializers.DetailedPlaySerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.PlaySerializer)
|
||||
|
||||
|
||||
class TaskViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for tasks."""

    filterset_class = filters.TaskFilter

    def get_queryset(self):
        """Return tasks newest first, optionally narrowed by ?status=..."""
        queryset = models.Task.objects.all()
        statuses = self.request.GET.getlist("status")
        if statuses:
            queryset = queryset.filter(status__in=statuses)
        return queryset.order_by("-id")

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListTaskSerializer,
            "retrieve": serializers.DetailedTaskSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.TaskSerializer)
|
||||
|
||||
|
||||
class HostViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for hosts."""

    queryset = models.Host.objects.all()
    filterset_class = filters.HostFilter

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListHostSerializer,
            "retrieve": serializers.DetailedHostSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.HostSerializer)
|
||||
|
||||
|
||||
class ResultViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for results."""

    filterset_class = filters.ResultFilter

    def get_queryset(self):
        """Return results newest first, optionally narrowed by ?status=..."""
        queryset = models.Result.objects.all()
        statuses = self.request.GET.getlist("status")
        if statuses:
            queryset = queryset.filter(status__in=statuses)
        return queryset.order_by("-id")

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListResultSerializer,
            "retrieve": serializers.DetailedResultSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.ResultSerializer)
|
||||
|
||||
|
||||
class FileViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for files."""

    queryset = models.File.objects.all()
    filterset_class = filters.FileFilter

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListFileSerializer,
            "retrieve": serializers.DetailedFileSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.FileSerializer)
|
||||
|
||||
|
||||
class RecordViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for records."""

    queryset = models.Record.objects.all()
    filterset_class = filters.RecordFilter

    def get_serializer_class(self):
        """Pick the serializer matching the current action."""
        serializer_for_action = {
            "list": serializers.ListRecordSerializer,
            "retrieve": serializers.DetailedRecordSerializer,
        }
        # create/update/destroy (and any other action) use the default serializer
        return serializer_for_action.get(self.action, serializers.RecordSerializer)
|
||||
@@ -1,89 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pbr.version
|
||||
from cliff.app import App
|
||||
from cliff.commandmanager import CommandManager
|
||||
|
||||
CLIENT_VERSION = pbr.version.VersionInfo("ara").release_string()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def global_arguments(parser):
    """Attach the API client/server arguments shared by every ara CLI command.

    :param parser: an argparse-compatible parser
    :returns: the same parser with the global arguments added

    Each default can be overridden through its ARA_API_* environment variable.
    """
    # fmt: off
    parser.add_argument(
        "--client",
        metavar="<client>",
        default=os.environ.get("ARA_API_CLIENT", "offline"),
        help=("API client to use, defaults to ARA_API_CLIENT or 'offline'"),
    )
    parser.add_argument(
        "--server",
        metavar="<url>",
        default=os.environ.get("ARA_API_SERVER", "http://127.0.0.1:8000"),
        help=("API server endpoint if using http client, defaults to ARA_API_SERVER or 'http://127.0.0.1:8000'"),
    )
    parser.add_argument(
        "--timeout",
        metavar="<seconds>",
        # type=int ensures args.timeout is always an int: argparse also applies
        # the type callable to string defaults, so ARA_API_TIMEOUT and CLI values
        # are converted instead of remaining strings.
        type=int,
        default=os.environ.get("ARA_API_TIMEOUT", 30),
        help=("Timeout for requests to API server, defaults to ARA_API_TIMEOUT or 30"),
    )
    parser.add_argument(
        "--username",
        metavar="<username>",
        default=os.environ.get("ARA_API_USERNAME", None),
        help=("API server username for authentication, defaults to ARA_API_USERNAME or None"),
    )
    parser.add_argument(
        "--password",
        metavar="<password>",
        default=os.environ.get("ARA_API_PASSWORD", None),
        help=("API server password for authentication, defaults to ARA_API_PASSWORD or None"),
    )
    parser.add_argument(
        "--insecure",
        action="store_true",
        # NOTE(review): any non-empty ARA_API_INSECURE value (even "false") is
        # truthy here -- preserved as-is, confirm before changing.
        default=os.environ.get("ARA_API_INSECURE", False),
        help=("Ignore SSL certificate validation, defaults to ARA_API_INSECURE or False"),
    )
    # fmt: on
    return parser
|
||||
|
||||
|
||||
class AraCli(App):
    """Entry point application for the 'ara' command line client."""

    def __init__(self):
        super().__init__(
            description="A CLI client to query ARA API servers",
            version=CLIENT_VERSION,
            command_manager=CommandManager("ara.cli"),
            deferred_help=True,
        )

    def build_option_parser(self, description, version):
        """Return the base option parser; hook point for global options."""
        return super().build_option_parser(description, version)

    def initialize_app(self, argv):
        log.debug("initialize_app")

    def prepare_to_run_command(self, cmd):
        log.debug("prepare_to_run_command: %s", cmd.__class__.__name__)

    def clean_up(self, cmd, result, err):
        log.debug("clean_up %s", cmd.__class__.__name__)
        if err:
            log.debug("got an error: %s", err)
|
||||
|
||||
|
||||
def main(argv=sys.argv[1:]):
    """Run the ara CLI application with the given arguments."""
    return AraCli().run(argv)


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
|
||||
@@ -1,101 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# See https://github.com/ansible-community/ara/issues/26 for rationale on expiring
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from cliff.command import Command
|
||||
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class ExpireObjects(Command):
    """ Expires objects that have been in the running state for too long """

    log = logging.getLogger(__name__)
    # Running tally of objects patched to "expired" across all endpoints
    expired = 0

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara expire'."""
        parser = super(ExpireObjects, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "--hours",
            type=int,
            default=24,
            help="Expires objects that have been running state for this many hours (default: 24)"
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="started",
            help=(
                "Orders objects by a field ('id', 'created', 'updated', 'started', 'ended')\n"
                "Defaults to 'started' descending so the oldest objects would be expired first.\n"
                "The order can be reversed by using '-': ara expire --order=-started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 200),
            help=("Only expire the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 200.")
        )
        parser.add_argument(
            "--confirm",
            action="store_true",
            help="Confirm expiration of objects, otherwise runs without expiring any objects",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Find playbooks, plays and tasks stuck in 'running' and expire them.

        Without --confirm this is a dry run that only logs what would happen.
        Exits with status 1 if the API client returns an unparseable response.
        """
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=False if args.insecure else True,
            run_sql_migrations=False,
        )

        if not args.confirm:
            self.log.info("--confirm was not specified, no objects will be expired")

        query = dict(status="running")
        # generate a timestamp from n days ago in a format we can query the API with
        # ex: 2019-11-21T00:57:41.702229
        query["updated_before"] = (datetime.now() - timedelta(hours=args.hours)).isoformat()
        query["order"] = args.order
        query["limit"] = args.limit

        endpoints = ["/api/v1/playbooks", "/api/v1/plays", "/api/v1/tasks"]
        for endpoint in endpoints:
            objects = client.get(endpoint, **query)
            # TODO: Improve client validation and exception handling
            # Bug fix: validate the response *before* reading objects["count"];
            # the original logged the count first and raised KeyError on error
            # responses instead of reaching this friendly error path.
            if "count" not in objects:
                # If we didn't get an answer we can parse, it's probably due to an error 500, 403, 401, etc.
                # The client would have logged the error.
                self.log.error(
                    "Client failed to retrieve results, see logs for ara.clients.offline or ara.clients.http."
                )
                sys.exit(1)
            self.log.info("Found %s objects matching query on %s" % (objects["count"], endpoint))

            for obj in objects["results"]:
                link = "%s/%s" % (endpoint, obj["id"])
                if not args.confirm:
                    self.log.info(
                        "Dry-run: %s would have been expired, status is running since %s" % (link, obj["updated"])
                    )
                else:
                    self.log.info("Expiring %s, status is running since %s" % (link, obj["updated"]))
                    client.patch(link, status="expired")
                    self.expired += 1

        self.log.info("%s objects expired" % self.expired)
|
||||
428
ara/cli/host.py
428
ara/cli/host.py
@@ -1,428 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class HostList(Lister):
    """ Returns a list of hosts based on search queries """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara host list'."""
        parser = super(HostList, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        # Host search arguments and ordering as per ara.api.filters.HostFilter
        # TODO: non-exhaustive (searching for failed, ok, unreachable, etc.)
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help=("List hosts matching the provided name (full or partial)"),
        )
        parser.add_argument(
            "--playbook",
            metavar="<playbook_id>",
            default=None,
            help=("List hosts for a specified playbook id"),
        )

        # --with-changed / --without-changed are mutually exclusive; both
        # defaulting to False means "no filter on changed results".
        changed = parser.add_mutually_exclusive_group()
        changed.add_argument(
            "--with-changed",
            action="store_true",
            default=False,
            help=("Return hosts with changed results")
        )
        changed.add_argument(
            "--without-changed",
            action="store_true",
            default=False,
            help=("Don't return hosts with changed results")
        )

        failed = parser.add_mutually_exclusive_group()
        failed.add_argument(
            "--with-failed",
            action="store_true",
            default=False,
            help=("Return hosts with failed results")
        )
        failed.add_argument(
            "--without-failed",
            action="store_true",
            default=False,
            help=("Don't return hosts with failed results")
        )

        unreachable = parser.add_mutually_exclusive_group()
        unreachable.add_argument(
            "--with-unreachable",
            action="store_true",
            default=False,
            help=("Return hosts with unreachable results")
        )
        unreachable.add_argument(
            "--without-unreachable",
            action="store_true",
            default=False,
            help=("Don't return hosts with unreachable results")
        )
        parser.add_argument(
            "--resolve",
            action="store_true",
            default=os.environ.get("ARA_CLI_RESOLVE", False),
            help=("Resolve IDs to identifiers (such as path or names). Defaults to ARA_CLI_RESOLVE or False")
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help=("Don't truncate paths")
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-updated",
            help=(
                "Orders hosts by a field ('id', 'created', 'updated', 'name')\n"
                "Defaults to '-updated' descending so the most recent host is at the top.\n"
                "The order can be reversed by omitting the '-': ara host list --order=updated"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 50),
            help=("Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.")
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Query the API for hosts and return (columns, rows) for cliff's Lister."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=False if args.insecure else True,
            run_sql_migrations=False,
        )
        # Translate CLI flags into API query parameters.
        query = {}
        if args.name is not None:
            query["name"] = args.name

        if args.playbook is not None:
            query["playbook"] = args.playbook

        # The with/without flags map onto numeric comparisons against the
        # per-host result counters: __gt=0 means "at least one", __lt=1 means "none".
        if args.with_changed:
            query["changed__gt"] = 0
        if args.without_changed:
            query["changed__lt"] = 1
        if args.with_failed:
            query["failed__gt"] = 0
        if args.without_failed:
            query["failed__lt"] = 1
        if args.with_unreachable:
            query["unreachable__gt"] = 0
        if args.without_unreachable:
            query["unreachable__lt"] = 1

        query["order"] = args.order
        query["limit"] = args.limit

        hosts = client.get("/api/v1/hosts", **query)

        # Optionally replace the playbook id column with "(id) path".
        if args.resolve:
            for host in hosts["results"]:
                playbook = cli_utils.get_playbook(client, host["playbook"])
                # Paths can easily take up too much width real estate
                if not args.long:
                    host["playbook"] = "(%s) %s" % (playbook["id"], cli_utils.truncatepath(playbook["path"], 50))
                else:
                    host["playbook"] = "(%s) %s" % (playbook["id"], playbook["path"])

        columns = ("id", "name", "playbook", "changed", "failed", "ok", "skipped", "unreachable", "updated")
        # fmt: off
        return (
            columns, (
                [host[column] for column in columns]
                for host in hosts["results"]
            )
        )
        # fmt: on
|
||||
|
||||
|
||||
class HostShow(ShowOne):
    """ Returns a detailed view of a specified host """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser: a positional host id plus an optional flag to include facts."""
        parser = super(HostShow, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "host_id",
            metavar="<host-id>",
            help="Host to show",
        )
        parser.add_argument(
            "--with-facts",
            action="store_true",
            help="Also include host facts in the response (use with '-f json' or '-f yaml')"
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Fetch a single host from the API and return (columns, values) for the ShowOne formatter.

        Exits with status 1 when the host id does not exist on the server.
        """
        # TODO: Render json properly in pretty tables
        if args.with_facts and args.formatter == "table":
            # Logger.warn is a deprecated alias of Logger.warning
            self.log.warning("Rendering using default table formatter, use '-f yaml' or '-f json' for improved display.")

        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=False if args.insecure else True,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        host = client.get("/api/v1/hosts/%s" % args.host_id)
        if "detail" in host and host["detail"] == "Not found.":
            self.log.error("Host not found: %s" % args.host_id)
            sys.exit(1)

        # Link back to the host's playbook report on the API server
        host["report"] = "%s/playbooks/%s.html" % (args.server, host["playbook"]["id"])

        columns = ["id", "report", "name", "changed", "failed", "ok", "skipped", "unreachable", "updated"]
        if args.with_facts:
            # Facts slot in just before the trailing "updated" column
            columns.insert(-1, "facts")
        return (tuple(columns), [host[column] for column in columns])
|
||||
|
||||
|
||||
class HostDelete(Command):
    """ Deletes the specified host and associated resources """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser with a single positional argument: the id of the host to delete."""
        parser = global_arguments(super(HostDelete, self).get_parser(prog_name))
        parser.add_argument(
            "host_id",
            metavar="<host-id>",
            help="Host to delete",
        )
        return parser

    def take_action(self, args):
        """Issue a DELETE request against the API for the requested host."""
        # TODO: Improve client to be better at handling exceptions
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )
        client.delete("/api/v1/hosts/%s" % args.host_id)
|
||||
|
||||
|
||||
class HostMetrics(Lister):
    """ Provides metrics about hosts """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser: host filters, result-status flag pairs, ordering and a limit."""
        parser = super(HostMetrics, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # Host search arguments and ordering as per ara.api.filters.HostFilter
        # TODO: non-exhaustive (searching for failed, ok, unreachable, etc.)
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help="Filter for hosts matching the provided name (full or partial)",
        )
        parser.add_argument(
            "--playbook",
            metavar="<playbook_id>",
            default=None,
            help="Filter for hosts for a specified playbook id",
        )

        # One mutually exclusive with/without flag pair per result status
        for status in ("changed", "failed", "unreachable"):
            group = parser.add_mutually_exclusive_group()
            group.add_argument(
                "--with-%s" % status,
                action="store_true",
                default=False,
                help="Filter for hosts with %s results" % status,
            )
            group.add_argument(
                "--without-%s" % status,
                action="store_true",
                default=False,
                help="Filter out hosts without %s results" % status,
            )

        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-updated",
            help=(
                "Orders hosts by a field ('id', 'created', 'updated', 'name')\n"
                "Defaults to '-updated' descending so the most recent host is at the top.\n"
                "The order can be reversed by omitting the '-': ara host list --order=updated\n"
                "This influences the API request, not the ordering of the metrics."
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 1000),
            help="Return metrics for the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 1000.",
        )
        return parser

    def take_action(self, args):
        """Aggregate host result counts by host name and return one row per distinct name."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        query = {}
        if args.name is not None:
            query["name"] = args.name
        if args.playbook is not None:
            query["playbook"] = args.playbook

        # Presence flags become greater-than lookups, absence flags less-than lookups
        for status in ("changed", "failed", "unreachable"):
            if getattr(args, "with_%s" % status):
                query["%s__gt" % status] = 0
            if getattr(args, "without_%s" % status):
                query["%s__lt" % status] = 1

        query["order"] = args.order
        query["limit"] = args.limit

        response = client.get("/api/v1/hosts", **query)

        # Group hosts by name
        grouped = {}
        for host in response["results"]:
            grouped.setdefault(host["name"], []).append(host)

        # Sum each result status across every occurrence of the same host name
        statuses = ("changed", "failed", "ok", "skipped", "unreachable")
        data = {}
        for name, results in grouped.items():
            summary = {"name": name, "count": len(results)}
            for status in statuses:
                summary[status] = sum(host[status] for host in results)
            data[name] = summary

        columns = ("name", "count", "changed", "failed", "ok", "skipped", "unreachable")
        return (
            columns,
            ([data[name][column] for column in columns] for name in sorted(data)),
        )
|
||||
216
ara/cli/play.py
216
ara/cli/play.py
@@ -1,216 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class PlayList(Lister):
    """ Returns a list of plays based on search queries """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser: play filters, display toggles, ordering and a result limit."""
        parser = super(PlayList, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # Play search arguments and ordering as per ara.api.filters.PlayFilter
        parser.add_argument(
            "--playbook",
            metavar="<playbook_id>",
            default=None,
            help="List plays for the specified playbook",
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help="List plays matching the provided name (full or partial)",
        )
        parser.add_argument(
            "--uuid",
            metavar="<uuid>",
            default=None,
            help="List plays matching the provided uuid (full or partial)",
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            default=None,
            help="List plays matching a specific status ('completed', 'running', 'failed')",
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help="Don't truncate paths",
        )
        parser.add_argument(
            "--resolve",
            action="store_true",
            default=os.environ.get("ARA_CLI_RESOLVE", False),
            help="Resolve IDs to identifiers (such as path or names). Defaults to ARA_CLI_RESOLVE or False",
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-started",
            help=(
                "Orders plays by a field ('id', 'created', 'updated', 'started', 'ended', 'duration')\n"
                "Defaults to '-started' descending so the most recent playbook is at the top.\n"
                "The order can be reversed by omitting the '-': ara play list --order=started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 50),
            help="Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.",
        )
        return parser

    def take_action(self, args):
        """Query the API for plays and return rows for the lister, optionally resolving playbook ids."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # Only forward the filters that were provided on the command line
        query = {}
        for field in ("playbook", "name", "uuid", "status"):
            value = getattr(args, field)
            if value is not None:
                query[field] = value
        query["order"] = args.order
        query["limit"] = args.limit

        plays = client.get("/api/v1/plays", **query)
        for play in plays["results"]:
            # Send items to columns
            play["tasks"] = play["items"]["tasks"]
            play["results"] = play["items"]["results"]

            if args.resolve:
                playbook = cli_utils.get_playbook(client, play["playbook"])
                # Paths can easily take up too much width real estate
                if args.long:
                    play["playbook"] = "(%s) %s" % (playbook["id"], playbook["path"])
                else:
                    play["playbook"] = "(%s) %s" % (playbook["id"], cli_utils.truncatepath(playbook["path"], 50))

        columns = ("id", "status", "name", "playbook", "tasks", "results", "started", "duration")
        return columns, ([play[column] for column in columns] for play in plays["results"])
|
||||
|
||||
|
||||
class PlayShow(ShowOne):
    """ Returns a detailed view of a specified play """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser with a single positional argument: the id of the play to show."""
        parser = global_arguments(super(PlayShow, self).get_parser(prog_name))
        parser.add_argument(
            "play_id",
            metavar="<play-id>",
            help="Play to show",
        )
        return parser

    def take_action(self, args):
        """Fetch one play from the API and return (columns, values); exits 1 when not found."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        play = client.get("/api/v1/plays/%s" % args.play_id)
        if "detail" in play and play["detail"] == "Not found.":
            self.log.error("Play not found: %s" % args.play_id)
            sys.exit(1)

        # Replace the nested playbook object with a readable "(id) name-or-path" string
        # and link back to the report on the API server.
        playbook = "(%s) %s" % (play["playbook"]["id"], play["playbook"]["name"] or play["playbook"]["path"])
        play["report"] = "%s/playbooks/%s.html" % (args.server, play["playbook"]["id"])
        play["playbook"] = playbook

        columns = ("id", "report", "status", "name", "playbook", "started", "ended", "duration", "items")
        return (columns, [play[column] for column in columns])
|
||||
|
||||
|
||||
class PlayDelete(Command):
    """ Deletes the specified play and associated resources """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser with a single positional argument: the id of the play to delete."""
        parser = global_arguments(super(PlayDelete, self).get_parser(prog_name))
        parser.add_argument(
            "play_id",
            metavar="<play-id>",
            help="Play to delete",
        )
        return parser

    def take_action(self, args):
        """Issue a DELETE request against the API for the requested play."""
        # TODO: Improve client to be better at handling exceptions
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )
        client.delete("/api/v1/plays/%s" % args.play_id)
|
||||
@@ -1,594 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class PlaybookList(Lister):
    """ Returns a list of playbooks based on search queries """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser: playbook filters, display toggles, ordering and a result limit."""
        parser = super(PlaybookList, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # Playbook search arguments and ordering as per ara.api.filters.PlaybookFilter
        parser.add_argument(
            "--label",
            metavar="<label>",
            default=None,
            help="List playbooks matching the provided label",
        )
        parser.add_argument(
            "--ansible_version",
            metavar="<ansible_version>",
            default=None,
            help="List playbooks that ran with the specified Ansible version (full or partial)",
        )
        parser.add_argument(
            "--controller",
            metavar="<controller>",
            default=None,
            help="List playbooks that ran from the provided controller (full or partial)",
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help="List playbooks matching the provided name (full or partial)",
        )
        parser.add_argument(
            "--path",
            metavar="<path>",
            default=None,
            help="List playbooks matching the provided path (full or partial)",
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            default=None,
            help="List playbooks matching a specific status ('completed', 'running', 'failed')",
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help="Don't truncate paths and include additional fields: name, plays, files, records",
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-started",
            help=(
                "Orders playbooks by a field ('id', 'created', 'updated', 'started', 'ended', 'duration')\n"
                "Defaults to '-started' descending so the most recent playbook is at the top.\n"
                "The order can be reversed by omitting the '-': ara playbook list --order=started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 50),
            help="Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.",
        )
        return parser

    def take_action(self, args):
        """Query the API for playbooks and return rows for the lister."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # Only forward the filters that were provided on the command line
        query = {}
        for field in ("label", "ansible_version", "controller", "name", "path", "status"):
            value = getattr(args, field)
            if value is not None:
                query[field] = value
        query["order"] = args.order
        query["limit"] = args.limit

        playbooks = client.get("/api/v1/playbooks", **query)
        for playbook in playbooks["results"]:
            # Send items to columns
            for item in ("plays", "tasks", "results", "hosts", "files", "records"):
                playbook[item] = playbook["items"][item]
            # Paths can easily take up too much width real estate
            if not args.long:
                playbook["path"] = cli_utils.truncatepath(playbook["path"], 50)

        # The long view shows name, plays, files and records in addition to the defaults
        if args.long:
            columns = (
                "id",
                "status",
                "controller",
                "ansible_version",
                "name",
                "path",
                "plays",
                "tasks",
                "results",
                "hosts",
                "files",
                "records",
                "started",
                "duration",
            )
        else:
            columns = (
                "id",
                "status",
                "controller",
                "ansible_version",
                "path",
                "tasks",
                "results",
                "hosts",
                "started",
                "duration",
            )
        return columns, ([playbook[column] for column in columns] for playbook in playbooks["results"])
|
||||
|
||||
|
||||
class PlaybookShow(ShowOne):
    """ Returns a detailed view of a specified playbook """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser with a single positional argument: the id of the playbook to show."""
        parser = super(PlaybookShow, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "playbook_id",
            metavar="<playbook-id>",
            help="Playbook to show",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Fetch one playbook from the API and return (columns, values) for the ShowOne formatter.

        Exits with status 1 when the playbook id does not exist on the server.
        """
        # TODO: Render json properly in pretty tables
        if args.formatter == "table":
            # Logger.warn is a deprecated alias of Logger.warning
            self.log.warning("Rendering using default table formatter, use '-f yaml' or '-f json' for improved display.")

        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=False if args.insecure else True,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        playbook = client.get("/api/v1/playbooks/%s" % args.playbook_id)
        if "detail" in playbook and playbook["detail"] == "Not found.":
            self.log.error("Playbook not found: %s" % args.playbook_id)
            sys.exit(1)

        # Link back to the playbook report on the API server
        playbook["report"] = "%s/playbooks/%s.html" % (args.server, args.playbook_id)
        columns = (
            "id",
            "report",
            "controller",
            "ansible_version",
            "status",
            "path",
            "started",
            "ended",
            "duration",
            "items",
            "labels",
            "arguments",
        )
        return (columns, ([playbook[column] for column in columns]))
|
||||
|
||||
|
||||
class PlaybookDelete(Command):
    """ Deletes the specified playbook and associated resources """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser with a single positional argument: the id of the playbook to delete."""
        parser = global_arguments(super(PlaybookDelete, self).get_parser(prog_name))
        parser.add_argument(
            "playbook_id",
            metavar="<playbook-id>",
            help="Playbook to delete",
        )
        return parser

    def take_action(self, args):
        """Issue a DELETE request against the API for the requested playbook."""
        # TODO: Improve client to be better at handling exceptions
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )
        client.delete("/api/v1/playbooks/%s" % args.playbook_id)
|
||||
|
||||
|
||||
class PlaybookPrune(Command):
    """ Deletes playbooks beyond a specified age in days """

    log = logging.getLogger(__name__)
    # Running total of playbooks deleted by take_action
    deleted = 0

    def get_parser(self, prog_name):
        """Build the parser: age threshold, playbook filters, ordering, limit and a confirm flag."""
        parser = super(PlaybookPrune, self).get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "--days", type=int, default=31, help="Delete playbooks started this many days ago (default: 31)"
        )
        # Playbook search arguments like 'ara playbook list'
        parser.add_argument(
            "--label",
            metavar="<label>",
            default=None,
            help=("Only delete playbooks matching the provided label"),
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help=("Only delete playbooks matching the provided name (full or partial)"),
        )
        parser.add_argument(
            "--ansible_version",
            metavar="<ansible_version>",
            default=None,
            help=("Only delete playbooks that ran with the specified Ansible version (full or partial)"),
        )
        parser.add_argument(
            "--controller",
            metavar="<controller>",
            default=None,
            help=("Only delete playbooks that ran from the provided controller (full or partial)"),
        )
        parser.add_argument(
            "--path",
            metavar="<path>",
            default=None,
            # Fixed duplicated word: was "Only delete only playbooks matching..."
            help=("Only delete playbooks matching the provided path (full or partial)"),
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            default=None,
            help=("Only delete playbooks matching a specific status ('completed', 'running', 'failed')"),
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="started",
            help=(
                "Orders playbooks by a field ('id', 'created', 'updated', 'started', 'ended', 'duration')\n"
                # Fixed wording: the default 'started' (no '-') is ascending, not descending
                "Defaults to 'started' ascending so the oldest playbook would be deleted first.\n"
                "The order can be reversed by using '-': ara playbook list --order=-started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 200),
            help=("Only delete the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 200.")
        )
        parser.add_argument(
            "--confirm",
            action="store_true",
            help="Confirm deletion of playbooks, otherwise runs without deleting any playbook",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Delete (or dry-run) playbooks started before the age threshold that match the filters.

        Exits with status 1 when the API response cannot be parsed.
        """
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=False if args.insecure else True,
            run_sql_migrations=False,
        )

        if not args.confirm:
            self.log.info("--confirm was not specified, no playbooks will be deleted")

        # Only forward the filters that were provided on the command line
        query = {}
        for field in ("label", "ansible_version", "controller", "name", "path", "status"):
            value = getattr(args, field)
            if value is not None:
                query[field] = value

        # generate a timestamp from n days ago in a format we can query the API with
        # ex: 2019-11-21T00:57:41.702229
        query["started_before"] = (datetime.now() - timedelta(days=args.days)).isoformat()
        query["order"] = args.order
        query["limit"] = args.limit

        playbooks = client.get("/api/v1/playbooks", **query)

        # TODO: Improve client validation and exception handling
        if "count" not in playbooks:
            # If we didn't get an answer we can parse, it's probably due to an error 500, 403, 401, etc.
            # The client would have logged the error.
            self.log.error("Client failed to retrieve results, see logs for ara.clients.offline or ara.clients.http.")
            sys.exit(1)

        self.log.info("Found %s playbooks matching query" % playbooks["count"])
        for playbook in playbooks["results"]:
            if not args.confirm:
                msg = "Dry-run: playbook {id} ({path}) would have been deleted, start date: {started}"
                self.log.info(msg.format(id=playbook["id"], path=playbook["path"], started=playbook["started"]))
            else:
                msg = "Deleting playbook {id} ({path}), start date: {started}"
                self.log.info(msg.format(id=playbook["id"], path=playbook["path"], started=playbook["started"]))
                client.delete("/api/v1/playbooks/%s" % playbook["id"])
                self.deleted += 1

        self.log.info("%s playbooks deleted" % self.deleted)
|
||||
|
||||
|
||||
class PlaybookMetrics(Lister):
    """ Provides metrics about playbooks """

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the parser: an aggregation key, playbook filters, ordering and a result limit."""
        parser = super(PlaybookMetrics, self).get_parser(prog_name)
        parser = global_arguments(parser)
        parser.add_argument(
            "--aggregate",
            choices=["name", "path", "ansible_version", "controller"],
            default="path",
            help="Aggregate playbooks by path, name, ansible version or controller. Defaults to path.",
        )
        # Playbook search arguments and ordering as per ara.api.filters.PlaybookFilter
        parser.add_argument(
            "--label",
            metavar="<label>",
            default=None,
            help="List playbooks matching the provided label",
        )
        parser.add_argument(
            "--ansible_version",
            metavar="<ansible_version>",
            default=None,
            help="List playbooks that ran with the specified Ansible version (full or partial)",
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            default=None,
            help="List playbooks matching the provided name (full or partial)",
        )
        parser.add_argument(
            "--controller",
            metavar="<controller>",
            default=None,
            help="List playbooks that ran from the provided controller (full or partial)",
        )
        parser.add_argument(
            "--path",
            metavar="<path>",
            default=None,
            help="List playbooks matching the provided path (full or partial)",
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            default=None,
            help="List playbooks matching a specific status ('completed', 'running', 'failed')",
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help="Don't truncate paths and include additional fields: name, plays, files, records",
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-started",
            help=(
                "Orders playbooks by a field ('id', 'created', 'updated', 'started', 'ended', 'duration')\n"
                "Defaults to '-started' descending so the most recent playbook is at the top.\n"
                "The order can be reversed by omitting the '-': ara playbook list --order=started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 1000),
            help="Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 1000.",
        )
        return parser

    def take_action(self, args):
        """Group matching playbooks by the chosen aggregate key and return summary rows."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # Only forward the filters that were provided on the command line
        query = {}
        for field in ("label", "ansible_version", "name", "controller", "path", "status"):
            value = getattr(args, field)
            if value is not None:
                query[field] = value
        query["order"] = args.order
        query["limit"] = args.limit

        playbooks = client.get("/api/v1/playbooks", **query)

        # TODO: This could probably be made more efficient without needing to iterate a second time
        # Group playbooks by aggregate
        aggregate = {}
        for playbook in playbooks["results"]:
            aggregate.setdefault(playbook[args.aggregate], []).append(playbook)

        data = {}
        for item, group in aggregate.items():
            entry = {
                "count": len(group),
                "hosts": 0,
                "plays": 0,
                "tasks": 0,
                "results": 0,
                "files": 0,
                "records": 0,
                "expired": 0,
                "failed": 0,
                "running": 0,
                "completed": 0,
                "unknown": 0,
                "duration_total": "00:00:00.000000",
            }
            data[item] = entry

            # Paths can take too much width; truncate unless --long was requested
            if args.aggregate == "path" and not args.long:
                entry["aggregate"] = cli_utils.truncatepath(item, 50)
            else:
                entry["aggregate"] = item

            for playbook in group:
                # Count known statuses; anything else is ignored (as before)
                if playbook["status"] in ("completed", "expired", "failed", "running", "unknown"):
                    entry[playbook["status"]] += 1

                for obj in ("files", "hosts", "plays", "tasks", "records", "results"):
                    entry[obj] += playbook["items"][obj]

                if playbook["duration"] is not None:
                    entry["duration_total"] = cli_utils.sum_timedelta(playbook["duration"], entry["duration_total"])

            entry["duration_avg"] = cli_utils.avg_timedelta(entry["duration_total"], entry["count"])

        if args.long:
            columns = (
                "aggregate",
                "count",
                "duration_total",
                "duration_avg",
                "plays",
                "tasks",
                "results",
                "hosts",
                "files",
                "records",
                "completed",
                "expired",
                "failed",
                "running",
                "unknown",
            )
        else:
            columns = (
                "aggregate",
                "count",
                "duration_total",
                "duration_avg",
                "tasks",
                "results",
                "hosts",
                "completed",
                "failed",
                "running",
            )
        return (
            columns,
            ([data[item][column] for column in columns] for item in sorted(data)),
        )
|
||||
@@ -1,196 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class RecordList(Lister):
    """Returns a list of records based on search queries"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara record list'.

        Search arguments and ordering mirror ara.api.filters.RecordFilter.
        """
        parser = super().get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        # Record search arguments and ordering as per ara.api.filters.RecordFilter
        parser.add_argument(
            "--playbook",
            metavar="<playbook_id>",
            default=None,
            help=("List records for the specified playbook"),
        )
        parser.add_argument(
            "--key",
            metavar="<key>",
            default=None,
            help=("List records matching the specified key"),
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help=("Don't truncate paths")
        )
        parser.add_argument(
            "--resolve",
            action="store_true",
            default=os.environ.get("ARA_CLI_RESOLVE", False),
            help=("Resolve IDs to identifiers (such as path or names). Defaults to ARA_CLI_RESOLVE or False")
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-updated",
            help=(
                "Orders records by a field ('id', 'created', 'updated', 'key')\n"
                "Defaults to '-updated' descending so the most recent record is at the top.\n"
                "The order can be reversed by omitting the '-': ara record list --order=updated"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 50),
            help=("Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.")
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Query the API for records and return (columns, rows) for cliff."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            # --insecure disables TLS certificate verification
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # Only send the search filters that were explicitly provided
        query = {}
        if args.playbook is not None:
            query["playbook"] = args.playbook

        if args.key is not None:
            query["key"] = args.key

        query["order"] = args.order
        query["limit"] = args.limit

        records = client.get("/api/v1/records", **query)

        if args.resolve:
            # Replace the playbook id by a human-readable "(id) path" string
            for record in records["results"]:
                playbook = cli_utils.get_playbook(client, record["playbook"])
                # Paths can easily take up too much width real estate
                if not args.long:
                    record["playbook"] = "(%s) %s" % (playbook["id"], cli_utils.truncatepath(playbook["path"], 50))
                else:
                    record["playbook"] = "(%s) %s" % (playbook["id"], playbook["path"])

        columns = ("id", "key", "type", "playbook", "updated")
        # fmt: off
        return (
            columns, (
                [record[column] for column in columns]
                for record in records["results"]
            )
        )
        # fmt: on
|
||||
|
||||
|
||||
class RecordShow(ShowOne):
    """Returns a detailed view of a specified record"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara record show'."""
        parser = super().get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "record_id",
            metavar="<record-id>",
            help="Record to show",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Fetch a single record and return (columns, values) for cliff.

        Exits with status 1 if the record does not exist.
        """
        # TODO: Render json properly in pretty tables
        if args.formatter == "table":
            # Logger.warn is a deprecated alias of Logger.warning
            self.log.warning("Rendering using default table formatter, use '-f yaml' or '-f json' for improved display.")

        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            # --insecure disables TLS certificate verification
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        record = client.get("/api/v1/records/%s" % args.record_id)
        if "detail" in record and record["detail"] == "Not found.":
            self.log.error("Record not found: %s" % args.record_id)
            sys.exit(1)

        # Flatten the nested playbook into "(id) name-or-path" and add a report URL
        playbook = "(%s) %s" % (record["playbook"]["id"], record["playbook"]["name"] or record["playbook"]["path"])
        record["report"] = "%s/playbooks/%s.html" % (args.server, record["playbook"]["id"])
        record["playbook"] = playbook

        # fmt: off
        columns = (
            "id",
            "report",
            "playbook",
            "key",
            "value",
            "created",
            "updated",
        )
        # fmt: on
        return (columns, ([record[column] for column in columns]))
|
||||
|
||||
|
||||
class RecordDelete(Command):
    """Deletes the specified record and associated resources"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara record delete'."""
        parser = global_arguments(super(RecordDelete, self).get_parser(prog_name))
        # fmt: off
        parser.add_argument(
            "record_id",
            metavar="<record-id>",
            help="Record to delete",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Issue a DELETE against the API for the requested record."""
        # One-off API client configured from the global CLI arguments
        api_client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        api_client.delete("/api/v1/records/%s" % args.record_id)
|
||||
@@ -1,309 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class ResultList(Lister):
    """Returns a list of results based on search queries"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara result list'.

        Search arguments and ordering mirror ara.api.filters.ResultFilter.
        """
        parser = super().get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        # Result search arguments and ordering as per ara.api.filters.ResultFilter
        # TODO: non-exhaustive (searching for failed, ok, unreachable, etc.)
        parser.add_argument(
            "--playbook",
            metavar="<playbook_id>",
            default=None,
            help=("List results for the specified playbook"),
        )
        parser.add_argument(
            "--play",
            metavar="<play_id>",
            default=None,
            help=("List results for the specified play"),
        )
        parser.add_argument(
            "--task",
            metavar="<task_id>",
            default=None,
            help=("List results for the specified task"),
        )
        parser.add_argument(
            "--host",
            metavar="<host_id>",
            default=None,
            help=("List results for the specified host"),
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            default=None,
            help=(
                "List results matching a specific status:\n"
                "ok, failed, skipped, unreachable, changed, ignored, unknown"
            )
        )
        parser.add_argument(
            "--ignore-errors",
            action="store_true",
            default=False,
            help=("Return only results with 'ignore_errors: true', defaults to false")
        )
        parser.add_argument(
            "--changed",
            action="store_true",
            default=False,
            help=("Return only changed results, defaults to false")
        )
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help=("Don't truncate paths and include additional fields: changed, ignore_errors, play")
        )
        parser.add_argument(
            "--resolve",
            action="store_true",
            default=os.environ.get("ARA_CLI_RESOLVE", False),
            help=("Resolve IDs to identifiers (such as path or names). Defaults to ARA_CLI_RESOLVE or False")
        )
        parser.add_argument(
            "--order",
            metavar="<order>",
            default="-started",
            help=(
                "Orders results by a field ('id', 'started', 'updated', 'ended', 'duration')\n"
                "Defaults to '-started' descending so the most recent result is at the top.\n"
                "The order can be reversed by omitting the '-': ara result list --order=started"
            ),
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            default=os.environ.get("ARA_CLI_LIMIT", 50),
            help=("Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.")
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Query the API for results and return (columns, rows) for cliff."""
        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            # --insecure disables TLS certificate verification
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # Only send the search filters that were explicitly provided
        query = {}
        if args.playbook is not None:
            query["playbook"] = args.playbook
        if args.play is not None:
            query["play"] = args.play
        if args.task is not None:
            query["task"] = args.task
        if args.host is not None:
            query["host"] = args.host

        if args.status is not None:
            query["status"] = args.status

        if args.changed:
            query["changed"] = args.changed

        query["ignore_errors"] = args.ignore_errors
        query["order"] = args.order
        query["limit"] = args.limit

        results = client.get("/api/v1/results", **query)

        if args.resolve:
            # Replace related ids by human-readable "(id) name" strings
            for result in results["results"]:
                playbook = cli_utils.get_playbook(client, result["playbook"])
                # Paths can easily take up too much width real estate
                if not args.long:
                    result["playbook"] = "(%s) %s" % (playbook["id"], cli_utils.truncatepath(playbook["path"], 50))
                else:
                    result["playbook"] = "(%s) %s" % (playbook["id"], playbook["path"])

                task = cli_utils.get_task(client, result["task"])
                result["task"] = "(%s) %s" % (task["id"], task["name"])

                host = cli_utils.get_host(client, result["host"])
                result["host"] = "(%s) %s" % (host["id"], host["name"])

                if args.long:
                    # The play column is only displayed with --long
                    play = cli_utils.get_play(client, result["play"])
                    result["play"] = "(%s) %s" % (play["id"], play["name"])

        # fmt: off
        if args.long:
            columns = (
                "id",
                "status",
                "changed",
                "ignore_errors",
                "playbook",
                "play",
                "task",
                "host",
                "started",
                "duration",
            )
        else:
            columns = (
                "id",
                "status",
                "playbook",
                "task",
                "host",
                "started",
                "duration",
            )

        return (
            columns, (
                [result[column] for column in columns]
                for result in results["results"]
            )
        )
        # fmt: on
|
||||
|
||||
|
||||
class ResultShow(ShowOne):
    """Returns a detailed view of a specified result"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara result show'."""
        parser = super().get_parser(prog_name)
        parser = global_arguments(parser)
        # fmt: off
        parser.add_argument(
            "result_id",
            metavar="<result-id>",
            help="Result to show",
        )
        parser.add_argument(
            "--with-content",
            action="store_true",
            help="Also include the result content in the response (use with '-f json' or '-f yaml')"
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Fetch a single result and return (columns, values) for cliff.

        Exits with status 1 if the result does not exist.
        """
        # TODO: Render json properly in pretty tables
        if args.with_content and args.formatter == "table":
            # Logger.warn is a deprecated alias of Logger.warning
            self.log.warning("Rendering using default table formatter, use '-f yaml' or '-f json' for improved display.")

        client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            # --insecure disables TLS certificate verification
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        result = client.get("/api/v1/results/%s" % args.result_id)
        if "detail" in result and result["detail"] == "Not found.":
            self.log.error("Result not found: %s" % args.result_id)
            sys.exit(1)

        # Parse data from playbook and format it for display
        result["ansible_version"] = result["playbook"]["ansible_version"]
        playbook = "(%s) %s" % (result["playbook"]["id"], result["playbook"]["name"] or result["playbook"]["path"])
        result["report"] = "%s/playbooks/%s.html" % (args.server, result["playbook"]["id"])
        result["playbook"] = playbook

        # Parse data from play and format it for display
        play = "(%s) %s" % (result["play"]["id"], result["play"]["name"])
        result["play"] = play

        # Parse data from task and format it for display
        task = "(%s) %s" % (result["task"]["id"], result["task"]["name"])
        path = "(%s) %s:%s" % (result["task"]["file"], result["task"]["path"], result["task"]["lineno"])
        result["task"] = task
        result["path"] = path

        # The two variants differ only by the trailing "content" column,
        # so build the common tuple once and extend it when requested.
        columns = (
            "id",
            "report",
            "status",
            "playbook",
            "play",
            "task",
            "path",
            "started",
            "ended",
            "duration",
            "ansible_version",
        )
        if args.with_content:
            columns += ("content",)
        return (columns, ([result[column] for column in columns]))
|
||||
|
||||
|
||||
class ResultDelete(Command):
    """Deletes the specified result and associated resources"""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """Build the argument parser for 'ara result delete'."""
        parser = global_arguments(super(ResultDelete, self).get_parser(prog_name))
        # fmt: off
        parser.add_argument(
            "result_id",
            metavar="<result-id>",
            help="Result to delete",
        )
        # fmt: on
        return parser

    def take_action(self, args):
        """Issue a DELETE against the API for the requested result."""
        # One-off API client configured from the global CLI arguments
        api_client = get_client(
            client=args.client,
            endpoint=args.server,
            timeout=args.timeout,
            username=args.username,
            password=args.password,
            verify=not args.insecure,
            run_sql_migrations=False,
        )

        # TODO: Improve client to be better at handling exceptions
        api_client.delete("/api/v1/results/%s" % args.result_id)
|
||||
426
ara/cli/task.py
426
ara/cli/task.py
@@ -1,426 +0,0 @@
|
||||
# Copyright (c) 2020 The ARA Records Ansible authors
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from cliff.command import Command
|
||||
from cliff.lister import Lister
|
||||
from cliff.show import ShowOne
|
||||
|
||||
import ara.cli.utils as cli_utils
|
||||
from ara.cli.base import global_arguments
|
||||
from ara.clients.utils import get_client
|
||||
|
||||
|
||||
class TaskList(Lister):
|
||||
""" Returns a list of tasks based on search queries """
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(TaskList, self).get_parser(prog_name)
|
||||
parser = global_arguments(parser)
|
||||
# fmt: off
|
||||
# Task search arguments and ordering as per ara.api.filters.TaskFilter
|
||||
parser.add_argument(
|
||||
"--playbook",
|
||||
metavar="<playbook_id>",
|
||||
default=None,
|
||||
help=("List tasks for a specified playbook id"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--status",
|
||||
metavar="<status>",
|
||||
default=None,
|
||||
help=("List tasks matching a specific status ('completed', 'running' or 'unknown')")
|
||||
)
|
||||
parser.add_argument(
|
||||
"--name",
|
||||
metavar="<name>",
|
||||
default=None,
|
||||
help=("List tasks matching the provided name (full or partial)"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--path",
|
||||
metavar="<path>",
|
||||
default=None,
|
||||
help=("List tasks matching the provided path (full or partial)"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--action",
|
||||
metavar="<action>",
|
||||
default=None,
|
||||
help=("List tasks matching a specific action/ansible module (ex: 'debug', 'package', 'set_fact')"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--long",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=("Don't truncate paths and include additional fields: path, lineno, handler, play")
|
||||
)
|
||||
parser.add_argument(
|
||||
"--resolve",
|
||||
action="store_true",
|
||||
default=os.environ.get("ARA_CLI_RESOLVE", False),
|
||||
help=("Resolve IDs to identifiers (such as path or names). Defaults to ARA_CLI_RESOLVE or False")
|
||||
)
|
||||
parser.add_argument(
|
||||
"--order",
|
||||
metavar="<order>",
|
||||
default="-started",
|
||||
help=(
|
||||
"Orders tasks by a field ('id', 'created', 'updated', 'started', 'ended', 'duration')\n"
|
||||
"Defaults to '-started' descending so the most recent task is at the top.\n"
|
||||
"The order can be reversed by omitting the '-': ara task list --order=started"
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--limit",
|
||||
metavar="<limit>",
|
||||
default=os.environ.get("ARA_CLI_LIMIT", 50),
|
||||
help=("Returns the first <limit> determined by the ordering. Defaults to ARA_CLI_LIMIT or 50.")
|
||||
)
|
||||
# fmt: on
|
||||
return parser
|
||||
|
||||
def take_action(self, args):
|
||||
client = get_client(
|
||||
client=args.client,
|
||||
endpoint=args.server,
|
||||
timeout=args.timeout,
|
||||
username=args.username,
|
||||
password=args.password,
|
||||
verify=False if args.insecure else True,
|
||||
run_sql_migrations=False,
|
||||
)
|
||||
query = {}
|
||||
if args.playbook is not None:
|
||||
query["playbook"] = args.playbook
|
||||
|
||||
if args.status is not None:
|
||||
query["status"] = args.status
|
||||
|
||||
if args.name is not None:
|
||||
query["name"] = args.name
|
||||
|
||||
if args.path is not None:
|
||||
query["path"] = args.path
|
||||
|
||||
if args.action is not None:
|
||||
query["action"] = args.action
|
||||
|
||||
query["order"] = args.order
|
||||