From e472826889d0b7ae73790e9307cd9fbffab39bbf Mon Sep 17 00:00:00 2001
From: Ghanshyam Mann
Date: Fri, 10 May 2024 17:25:33 -0700
Subject: [PATCH] Retire Sahara: remove repo content

The Sahara project is retiring:
https://review.opendev.org/c/openstack/governance/+/919374

This commit removes the content of this project repo.

Depends-On: https://review.opendev.org/c/openstack/project-config/+/919376
Change-Id: I983432972f2f5dd52015eb914d65dd5611d28c83
---
 .coveragerc | 13 -
 .gitignore | 31 -
 .stestr.conf | 3 -
 .zuul.yaml | 155 --
 CONTRIBUTING.rst | 19 -
 HACKING.rst | 45 -
 LICENSE | 175 --
 README.rst | 40 +-
 api-ref/source/conf.py | 207 --
 api-ref/source/index.rst | 13 -
 api-ref/source/v1.1/cluster-templates.inc | 253 ---
 api-ref/source/v1.1/clusters.inc | 335 ---
 api-ref/source/v1.1/data-sources.inc | 212 --
 api-ref/source/v1.1/event-log.inc | 42 -
 api-ref/source/v1.1/image-registry.inc | 249 ---
 api-ref/source/v1.1/index.rst | 20 -
 api-ref/source/v1.1/job-binaries.inc | 266 ---
 api-ref/source/v1.1/job-binary-internals.inc | 258 ---
 api-ref/source/v1.1/job-executions.inc | 325 ---
 api-ref/source/v1.1/job-types.inc | 61 -
 api-ref/source/v1.1/jobs.inc | 265 ---
 api-ref/source/v1.1/node-group-templates.inc | 269 ---
 api-ref/source/v1.1/parameters.yaml | 1159 ----------
 api-ref/source/v1.1/plugins.inc | 187 --
 .../cluster-template-create-request.json | 17 -
 .../cluster-template-create-response.json | 82 -
 .../cluster-template-show-response.json | 82 -
 .../cluster-template-update-request.json | 11 -
 .../cluster-template-update-response.json | 67 -
 .../cluster-templates-list-response.json | 140 --
 .../clusters/cluster-create-request.json | 9 -
 .../clusters/cluster-create-response.json | 128 --
 .../clusters/cluster-scale-request.json | 15 -
 .../clusters/cluster-scale-response.json | 370 ----
 .../clusters/cluster-show-response.json | 128 --
 .../clusters/cluster-update-request.json | 4 -
 .../clusters/cluster-update-response.json | 128 --
 .../clusters/clusters-list-response.json | 327 ---
 .../multiple-clusters-create-request.json | 11 -
 .../multiple-clusters-create-response.json | 6 -
 .../data-source-register-hdfs-request.json | 6 -
 .../data-source-register-hdfs-response.json | 14 -
 .../data-source-register-swift-request.json | 10 -
 .../data-source-register-swift-response.json | 14 -
 .../data-source-show-response.json | 14 -
 .../data-source-update-request.json | 4 -
 .../data-source-update-response.json | 14 -
 .../data-sources-list-response.json | 28 -
 .../event-log/cluster-progress-response.json | 72 -
 .../image-register-request.json | 4 -
 .../image-register-response.json | 25 -
 .../image-registry/image-show-response.json | 24 -
 .../image-tags-add-request.json | 7 -
 .../image-tags-add-response.json | 27 -
 .../image-tags-delete-request.json | 5 -
 .../image-tags-delete-response.json | 25 -
 .../image-registry/images-list-response.json | 48 -
 .../samples/job-binaries/create-request.json | 9 -
 .../samples/job-binaries/create-response.json | 13 -
 .../samples/job-binaries/list-response.json | 37 -
 .../samples/job-binaries/show-data-response | 3 -
 .../samples/job-binaries/show-response.json | 13 -
 .../samples/job-binaries/update-request.json | 5 -
 .../samples/job-binaries/update-response.json | 13 -
 .../job-binary-internals/create-response.json | 12 -
 .../job-binary-internals/list-response.json | 24 -
 .../job-binary-internals/show-data-response | 3 -
 .../job-binary-internals/show-response.json | 12 -
 .../job-binary-internals/update-request.json | 4 -
 .../job-binary-internals/update-response.json | 12 -
.../job-executions/cancel-response.json | 120 - .../job-executions/job-ex-response.json | 120 - .../job-executions/job-ex-update-request.json | 3 - .../job-ex-update-response.json | 120 - .../samples/job-executions/list-response.json | 122 - .../job-types/job-types-list-response.json | 209 -- .../v1.1/samples/jobs/job-create-request.json | 11 - .../samples/jobs/job-create-response.json | 35 - .../samples/jobs/job-execute-request.json | 19 - .../samples/jobs/job-execute-response.json | 30 - .../v1.1/samples/jobs/job-show-response.json | 26 - .../v1.1/samples/jobs/job-update-request.json | 4 - .../samples/jobs/job-update-response.json | 26 - .../v1.1/samples/jobs/jobs-list-response.json | 66 - .../node-group-template-create-request.json | 13 - .../node-group-template-create-response.json | 33 - .../node-group-template-show-response.json | 37 - .../node-group-template-update-request.json | 10 - .../node-group-template-update-response.json | 30 - .../node-group-templates-list-response.json | 76 - .../samples/plugins/plugin-show-response.json | 12 - .../plugins/plugin-update-request.json | 7 - .../plugins/plugin-update-response.json | 32 - .../plugins/plugin-version-show-response.json | 92 - .../plugins/plugins-list-response.json | 41 - api-ref/source/v2/cluster-templates.inc | 241 -- api-ref/source/v2/clusters.inc | 293 --- api-ref/source/v2/data-sources.inc | 202 -- api-ref/source/v2/event-log.inc | 42 - api-ref/source/v2/image-registry.inc | 244 -- api-ref/source/v2/index.rst | 20 - api-ref/source/v2/job-binaries.inc | 256 --- api-ref/source/v2/job-templates.inc | 257 --- api-ref/source/v2/job-types.inc | 61 - api-ref/source/v2/jobs.inc | 262 --- api-ref/source/v2/node-group-templates.inc | 289 --- api-ref/source/v2/parameters.yaml | 1183 ---------- api-ref/source/v2/plugins.inc | 179 -- .../cluster-template-create-request.json | 17 - .../cluster-template-create-response.json | 82 - .../cluster-template-show-response.json | 82 - .../cluster-template-update-request.json | 11 - .../cluster-template-update-response.json | 67 - .../cluster-templates-list-response.json | 140 -- .../clusters/cluster-create-request.json | 9 - .../clusters/cluster-create-response.json | 128 -- .../clusters/cluster-scale-request.json | 15 - .../clusters/cluster-scale-response.json | 370 ---- .../clusters/cluster-show-response.json | 128 -- .../clusters/cluster-update-request.json | 4 - .../clusters/cluster-update-response.json | 128 -- .../clusters/clusters-list-response.json | 327 --- .../multiple-clusters-create-request.json | 11 - .../multiple-clusters-create-response.json | 6 - .../data-source-register-hdfs-request.json | 6 - .../data-source-register-hdfs-response.json | 14 - .../data-source-register-swift-request.json | 10 - .../data-source-register-swift-response.json | 14 - .../data-source-show-response.json | 14 - .../data-source-update-request.json | 4 - .../data-source-update-response.json | 14 - .../data-sources-list-response.json | 28 - .../event-log/cluster-progress-response.json | 72 - .../image-register-request.json | 4 - .../image-register-response.json | 25 - .../image-registry/image-show-response.json | 24 - .../image-tags-add-request.json | 7 - .../image-tags-add-response.json | 27 - .../image-tags-delete-request.json | 5 - .../image-tags-delete-response.json | 25 - .../image-registry/images-list-response.json | 48 - .../samples/job-binaries/create-request.json | 9 - .../samples/job-binaries/create-response.json | 13 - .../samples/job-binaries/list-response.json | 37 - 
.../samples/job-binaries/show-data-response | 3 - .../samples/job-binaries/show-response.json | 13 - .../samples/job-binaries/update-request.json | 5 - .../samples/job-binaries/update-response.json | 13 - .../job-template-create-request.json | 11 - .../job-template-create-response.json | 35 - .../job-template-show-response.json | 26 - .../job-template-update-request.json | 4 - .../job-template-update-response.json | 26 - .../job-templates-list-response.json | 66 - .../job-types/job-types-list-response.json | 209 -- .../v2/samples/jobs/cancel-response.json | 120 - .../source/v2/samples/jobs/job-request.json | 20 - .../source/v2/samples/jobs/job-response.json | 30 - .../v2/samples/jobs/job-update-request.json | 3 - .../v2/samples/jobs/job-update-response.json | 120 - .../source/v2/samples/jobs/list-response.json | 122 - .../node-group-template-create-request.json | 13 - .../node-group-template-create-response.json | 33 - .../node-group-template-show-response.json | 37 - .../node-group-template-update-request.json | 10 - .../node-group-template-update-response.json | 30 - .../node-group-templates-list-response.json | 76 - .../samples/plugins/plugin-show-response.json | 12 - .../plugins/plugin-update-request.json | 7 - .../plugins/plugin-update-response.json | 32 - .../plugins/plugin-version-show-response.json | 92 - .../plugins/plugins-list-response.json | 41 - bandit.yaml | 133 -- bindep.txt | 30 - devstack/README.rst | 22 - devstack/exercise.sh | 50 - devstack/files/apache-sahara-api.template | 27 - devstack/plugin.sh | 373 ---- devstack/settings | 62 - devstack/upgrade/from-liberty/upgrade-sahara | 14 - devstack/upgrade/from-mitaka/upgrade-sahara | 15 - devstack/upgrade/from-rocky/upgrade-sahara | 18 - devstack/upgrade/resources.sh | 243 -- devstack/upgrade/settings | 24 - devstack/upgrade/shutdown.sh | 24 - devstack/upgrade/upgrade.sh | 73 - doc/requirements.txt | 9 - doc/source/_extra/.htaccess | 9 - doc/source/_templates/sidebarlinks.html | 11 - doc/source/_theme_rtd/layout.html | 4 - doc/source/_theme_rtd/theme.conf | 4 - .../admin/advanced-configuration-guide.rst | 653 ------ doc/source/admin/configs-recommendations.rst | 44 - doc/source/admin/configuration-guide.rst | 211 -- doc/source/admin/index.rst | 10 - doc/source/admin/upgrade-guide.rst | 155 -- doc/source/cli/index.rst | 11 - doc/source/cli/sahara-status.rst | 83 - doc/source/conf.py | 282 --- doc/source/config-generator.conf | 1 - .../configuration/descriptionconfig.rst | 8 - doc/source/configuration/index.rst | 10 - doc/source/configuration/sampleconfig.rst | 8 - .../adding-database-migrations.rst | 113 - doc/source/contributor/apiv2.rst | 112 - doc/source/contributor/contributing.rst | 70 - .../dashboard-dev-environment-guide.rst | 153 -- .../contributor/development-environment.rst | 131 -- .../contributor/development-guidelines.rst | 238 -- doc/source/contributor/devstack.rst | 181 -- doc/source/contributor/gerrit.rst | 14 - doc/source/contributor/how-to-build-oozie.rst | 74 - doc/source/contributor/image-gen.rst | 344 --- doc/source/contributor/index.rst | 30 - doc/source/contributor/jenkins.rst | 41 - doc/source/contributor/log-guidelines.rst | 34 - doc/source/contributor/testing.rst | 36 - doc/source/images/hadoop-cluster-example.jpg | Bin 38120 -> 0 bytes doc/source/images/openstack-interop.png | Bin 37251 -> 0 bytes doc/source/images/sahara-architecture.svg | 1529 ------------- doc/source/index.rst | 78 - doc/source/install/dashboard-guide.rst | 83 - doc/source/install/index.rst | 9 - 
doc/source/install/installation-guide.rst | 300 --- doc/source/intro/architecture.rst | 39 - doc/source/intro/index.rst | 12 - doc/source/intro/overview.rst | 192 -- doc/source/reference/edp-spi.rst | 224 -- doc/source/reference/index.rst | 22 - doc/source/reference/plugin-spi.rst | 393 ---- doc/source/reference/plugins.rst | 23 - doc/source/reference/restapi.rst | 119 - doc/source/user/building-guest-images.rst | 49 - .../user/building-guest-images/baremetal.rst | 14 - .../sahara-image-create.rst | 80 - .../sahara-image-pack.rst | 90 - doc/source/user/dashboard-user-guide.rst | 477 ---- doc/source/user/edp-s3.rst | 87 - doc/source/user/edp.rst | 727 ------ doc/source/user/features.rst | 279 --- doc/source/user/hadoop-swift.rst | 130 -- doc/source/user/index.rst | 46 - doc/source/user/overview.rst | 77 - doc/source/user/plugins.rst | 71 - doc/source/user/quickstart.rst | 617 ------ doc/source/user/registering-image.rst | 31 - doc/source/user/sahara-on-ironic.rst | 88 - doc/source/user/statuses.rst | 169 -- doc/test/redirect-tests.txt | 11 - etc/edp-examples/README.rst | 6 - etc/sahara/README-sahara.conf.txt | 4 - etc/sahara/api-paste.ini | 45 - etc/sahara/compute.topology.sample | 6 - etc/sahara/rootwrap.conf | 34 - etc/sahara/rootwrap.d/sahara.filters | 4 - etc/sahara/swift.topology.sample | 2 - etc/sudoers.d/sahara-rootwrap | 1 - playbooks/buildimages/run.yaml | 6 - pylintrc | 48 - releasenotes/notes/.placeholder | 0 .../add-impala-2.2-c1649599649aff5c.yaml | 3 - .../notes/add-mapr-520-3ed6cd0ae9688e17.yaml | 3 - .../add-mapr-kafka-3a808bbc1aa21055.yaml | 3 - .../add-mapr-sentry-6012c08b55d679de.yaml | 3 - ...dd-scheduler-edp-job-9eda17dd174e53fa.yaml | 3 - ...-storm-version-1_1_0-3e10b34824706a62.yaml | 3 - ...rade-check-framework-9cd18dbc47b0efbd.yaml | 13 - ...-wsgi-server-support-c8fbc3d76d4e42f6.yaml | 4 - .../add_kafka_in_cdh-774c7c051480c892.yaml | 3 - ...dd_mapr_repo_configs-04af1a67350bfd24.yaml | 4 - ...nstall-timeout-param-d50e5c15e06fa51e.yaml | 4 - .../ambari-downscaling-b9ba759ce9c7325e.yaml | 3 - .../notes/ambari-hive-92b911e0a759ee88.yaml | 3 - .../ambari-server-start-856403bc280dfba3.yaml | 3 - .../ambari26-image-pack-88c9aad59bf635b2.yaml | 3 - ...eneration_validation-47eabb9fa90384c8.yaml | 4 - .../notes/api-insecure-cbd4fd5da71b29a3.yaml | 3 - ...pi-v2-return-payload-a84a609db410228a.yaml | 4 - .../apiv2-microversion-4c1a58ee8090e5a9.yaml | 5 - ...apiv2-payload-tweaks-b73c20a35263d958.yaml | 9 - ...piv2-preview-release-b1ee8cc9b2fb01da.yaml | 9 - ...apiv2-stable-release-25ba9920c8e4632a.yaml | 3 - ...auto_configs_for_hdp-011d460d37dcdf02.yaml | 4 - .../boot-from-volume-e7078452fac1a4a0.yaml | 3 - .../notes/ca-cert-fix-5c434a82f9347039.yaml | 4 - .../notes/cdh-5-5-35e582e149a05632.yaml | 3 - .../notes/cdh-513-bdce0d5d269d8f20.yaml | 3 - .../notes/cdh-labels-5695d95bce226051.yaml | 7 - ...eneration_validation-6334ef6d04950935.yaml | 4 - .../cdh_5_11_support-10d4abb91bc4475f.yaml | 3 - ...eneration_validation-308e7529a9018663.yaml | 4 - .../cdh_5_7_support-9522cb9b4dce2378.yaml | 3 - ...eneration_validation-19d10e6468e30b4f.yaml | 5 - .../cdh_5_9_support-b603a2648b2e7b32.yaml | 3 - ...config-groups-ambari-837de6d33eb0fa87.yaml | 4 - ...uster-creation-apiv2-5d5aceeb2e97c702.yaml | 5 - ...-to-cluster-template-43d502496d18625e.yaml | 4 - .../deprecate-cdh_5_5-0da56b562170566f.yaml | 3 - .../notes/deprecate-hdp-a9ff0ecf6006da49.yaml | 5 - ...ormatted-policy-file-b267f288cba7e325.yaml | 20 - .../deprecate-mapr-51-090423438e3dda20.yaml | 5 - 
...te-plugin-vanilla260-46e4b8fe96e8fe68.yaml | 3 - ...hara-all-entry-point-1446a00dab643b7b.yaml | 4 - ...te-spark-version-131-98eccc79b13b6b8f.yaml | 3 - ...orm-version-092.yaml-b9ff2b9ebbb983fc.yaml | 3 - ...esignate-integration-784c5f7f29546015.yaml | 4 - .../notes/drop-py-2-7-bc282e43b26fbf17.yaml | 6 - ...p-python-3-6-and-3-7-f37b9dc6d94620de.yaml | 5 - ...utable-configuration-2dd6b7a0e0fe4437.yaml | 6 - .../notes/engine-opt-258ff1ae9b04d628.yaml | 3 - .../notes/enhance-bfv-12bac06c4438675f.yaml | 6 - .../event_log_for_hdp-a114511c477ef16d.yaml | 3 - ...all-provision-events-c1bd2e05bf2be6bd.yaml | 3 - ...licy-inconsistencies-984020000cc3882a.yaml | 12 - .../force-delete-apiv2-e372392bbc8639f8.yaml | 4 - ...force-delete-changes-2e0881a99742c339.yaml | 6 - ...oop-swift-domain-fix-c1dfdf6c52b5aa25.yaml | 5 - ...swift-jar-for-ambari-4439913b01d42468.yaml | 4 - .../notes/hdfs-dfs-94a9c4f64cf8994f.yaml | 5 - ...emoved-from-defaults-31d1e1f15973b682.yaml | 5 - .../notes/hdp25-b35ef99c240fc127.yaml | 3 - .../notes/hdp26-5a406d7066706bf1.yaml | 3 - .../notes/healthcheck-02e429a3ffcd9482.yaml | 7 - ...ndpoint-type-neutron-4583128c383d9745.yaml | 4 - .../ironic-support-79e7ecad05f54029.yaml | 4 - .../notes/kerberos-76dd297462b7337c.yaml | 5 - ..._manager_integration-e32d141809c8cc46.yaml | 5 - .../keypair-replacement-0c0cc3db0551c112.yaml | 5 - ...eystonauth-migration-c75988975ad1a506.yaml | 4 - .../mapr-health-check-2eba3d742a2b853f.yaml | 3 - .../notes/mapr-labels-5cc318616db59403.yaml | 7 - ...ove-spark-standalone-293ca864de9a7848.yaml | 3 - ...ervices-new-versions-b32c2e8fe07d1600.yaml | 8 - ...ervices-new-versions-dc7652e33f26bbdc.yaml | 7 - .../mapr5.2.0-image-gen-c850e74977b00abe.yaml | 4 - .../neutron-default-a6baf93d857d86b3.yaml | 5 - ...nova-network-removal-debe306fd7c61268.yaml | 8 - ...ages_to_glanceclient-0266a2bd92b4be05.yaml | 3 - .../notes/ntp-config-51ed9d612132e2fa.yaml | 6 - ...nal-project-id-apiv1-2e89756f6f16bd5e.yaml | 3 - ...saging_notifications-cee206fc4f74c217.yaml | 3 - ...lit-from-sahara-core-9ffc5e5d06c9239c.yaml | 5 - .../policy_in_code-5847902775ff9861.yaml | 8 - ...proxy-user-lowercase-f116f7b7e89274cb.yaml | 6 - ...ck_awareness_for_cdh-e0cd5d4ab46aa1b5.yaml | 3 - ...ck_awareness_for_hdp-6e3d44468cc141a5.yaml | 3 - ...r-floating-ips-logic-9d37d9297f3621b3.yaml | 5 - ...move-cdh_5.0_5.3_5.4-b5f140e9b0233c07.yaml | 3 - ...coded-oozie-password-b97475c8772aa1bd.yaml | 4 - ...d-password-from-hive-eb923b518974e853.yaml | 5 - .../notes/remove-hdp-137d0ad3d2389b7a.yaml | 4 - .../remove-mapr-500-3df3041be99a864c.yaml | 3 - .../remove-spark-100-44f3d5efc3806410.yaml | 3 - ...tep-in-vanilla-2.8.2-546b2026e2f5d557.yaml | 6 - .../remove-use-neutron-2499b661dce041d4.yaml | 6 - ...stom_auth_domainname-984fd2d931e306cc.yaml | 6 - ...le_notifications_opt-4c0d46e8e79eb06f.yaml | 6 - ...-datasource-protocol-d3abd0b22f653b3b.yaml | 4 - ...-cfg-location-change-7b61454311b16ce8.yaml | 6 - ...nt-version-discovery-826e9f31093cb10f.yaml | 8 - .../some-polish-api-v2-2d2e390a74b088f9.yaml | 12 - .../notes/spark-2.2-d7c3a84bd52f735a.yaml | 3 - .../notes/spark-2.3-0277fe9feae6668a.yaml | 3 - .../notes/storm-1.2-af75fedb413de56a.yaml | 4 - ...idation-query-string-a6cadbf2f9c57d06.yaml | 5 - .../substring-matching-1d5981b8e5b1d919.yaml | 7 - ...pport-s3-data-source-a912e2cdf4cd51fb.yaml | 3 - ...upport-s3-job-binary-6d91267ae11d09d3.yaml | 3 - .../notes/transport_url-5bbbf0bb54d81727.yaml | 5 - ...trustee-conf-section-5994dcd48a9744d7.yaml | 8 - ...ing-plugins-versions-b8d27764178c3cdd.yaml | 
11 - ...anilla-2.7.5-support-ffeeb88fc4be34b4.yaml | 4 - ...anilla-2.8.2-support-84c89aad31105584.yaml | 4 - ...-configuration-steps-48c3d9706c86f227.yaml | 7 - releasenotes/source/2023.1.rst | 6 - releasenotes/source/2023.2.rst | 6 - releasenotes/source/_static/.placeholder | 0 releasenotes/source/_templates/.placeholder | 0 releasenotes/source/conf.py | 207 -- releasenotes/source/index.rst | 25 - releasenotes/source/liberty.rst | 6 - .../locale/en_GB/LC_MESSAGES/releasenotes.po | 990 --------- .../locale/fr/LC_MESSAGES/releasenotes.po | 63 - .../locale/it/LC_MESSAGES/releasenotes.po | 531 ----- releasenotes/source/mitaka.rst | 6 - releasenotes/source/newton.rst | 6 - releasenotes/source/ocata.rst | 6 - releasenotes/source/pike.rst | 6 - releasenotes/source/queens.rst | 6 - releasenotes/source/rocky.rst | 6 - releasenotes/source/stein.rst | 6 - releasenotes/source/train.rst | 6 - releasenotes/source/unreleased.rst | 5 - releasenotes/source/ussuri.rst | 6 - releasenotes/source/victoria.rst | 6 - releasenotes/source/wallaby.rst | 6 - releasenotes/source/xena.rst | 6 - releasenotes/source/yoga.rst | 6 - releasenotes/source/zed.rst | 6 - requirements.txt | 49 - roles/build-sahara-images-cli/README.rst | 13 - .../defaults/main.yaml | 4 - roles/build-sahara-images-cli/tasks/main.yaml | 9 - sahara/__init__.py | 0 sahara/api/__init__.py | 0 sahara/api/acl.py | 55 - sahara/api/base.py | 22 - sahara/api/microversion.py | 30 - sahara/api/middleware/__init__.py | 0 sahara/api/middleware/auth_valid.py | 65 - sahara/api/middleware/sahara_middleware.py | 92 - sahara/api/middleware/version_discovery.py | 78 - sahara/api/v10.py | 328 --- sahara/api/v11.py | 304 --- sahara/api/v2/__init__.py | 66 - sahara/api/v2/cluster_templates.py | 123 -- sahara/api/v2/clusters.py | 136 -- sahara/api/v2/data_sources.py | 77 - sahara/api/v2/images.py | 84 - sahara/api/v2/job_binaries.py | 87 - sahara/api/v2/job_templates.py | 101 - sahara/api/v2/job_types.py | 36 - sahara/api/v2/jobs.py | 102 - sahara/api/v2/node_group_templates.py | 126 -- sahara/api/v2/plugins.py | 55 - sahara/cli/__init__.py | 0 sahara/cli/image_pack/__init__.py | 0 sahara/cli/image_pack/api.py | 131 -- sahara/cli/image_pack/cli.py | 124 -- sahara/cli/sahara_all.py | 62 - sahara/cli/sahara_api.py | 48 - sahara/cli/sahara_engine.py | 48 - sahara/cli/sahara_status.py | 46 - sahara/cli/sahara_subprocess.py | 55 - sahara/common/__init__.py | 0 sahara/common/config.py | 52 - sahara/common/policies/__init__.py | 63 - sahara/common/policies/base.py | 52 - sahara/common/policies/cluster.py | 60 - sahara/common/policies/cluster_template.py | 57 - sahara/common/policies/cluster_templates.py | 57 - sahara/common/policies/clusters.py | 60 - sahara/common/policies/data_source.py | 57 - sahara/common/policies/data_sources.py | 57 - sahara/common/policies/image.py | 65 - sahara/common/policies/images.py | 59 - sahara/common/policies/job.py | 49 - sahara/common/policies/job_binaries.py | 62 - sahara/common/policies/job_binary.py | 62 - .../common/policies/job_binary_internals.py | 65 - sahara/common/policies/job_executions.py | 63 - sahara/common/policies/job_template.py | 61 - sahara/common/policies/job_type.py | 30 - sahara/common/policies/job_types.py | 30 - sahara/common/policies/jobs.py | 65 - sahara/common/policies/node_group_template.py | 57 - .../common/policies/node_group_templates.py | 60 - sahara/common/policies/plugin.py | 49 - sahara/common/policies/plugins.py | 57 - sahara/conductor/__init__.py | 38 - sahara/conductor/api.py | 619 ------ 
sahara/conductor/manager.py | 817 ------- sahara/conductor/objects.py | 400 ---- sahara/conductor/resource.py | 320 --- sahara/config.py | 236 -- sahara/context.py | 322 --- sahara/db/__init__.py | 20 - sahara/db/api.py | 633 ------ sahara/db/base.py | 29 - sahara/db/migration/__init__.py | 0 sahara/db/migration/alembic.ini | 54 - .../db/migration/alembic_migrations/README.md | 73 - sahara/db/migration/alembic_migrations/env.py | 94 - .../alembic_migrations/script.py.mako | 34 - .../versions/001_icehouse.py | 334 --- .../versions/002_placeholder.py | 30 - .../versions/003_placeholder.py | 30 - .../versions/004_placeholder.py | 30 - .../versions/005_placeholder.py | 30 - .../versions/006_placeholder.py | 30 - .../007_increase_status_description_size.py | 36 - .../versions/008_security_groups.py | 40 - .../versions/009_rollback_info.py | 36 - .../versions/010_auto_security_groups.py | 42 - .../versions/011_sahara_info.py | 36 - .../versions/012_availability_zone.py | 39 - .../versions/013_volumes_availability_zone.py | 40 - .../versions/014_add_volume_type.py | 41 - .../versions/015_add_events_objects.py | 93 - .../versions/016_is_proxy_gateway.py | 38 - .../versions/017_drop_progress.py | 32 - .../versions/018_volume_local_to_instance.py | 38 - .../versions/019_is_default_for_templates.py | 36 - .../020_remove_redandunt_progress_ops.py | 34 - .../versions/021_datasource_placeholders.py | 36 - .../versions/022_add_job_interface.py | 67 - .../versions/023_add_use_autoconfig.py | 42 - .../versions/024_manila_shares.py | 48 - .../versions/025_increase_ip_column_size.py | 36 - .../026_add_is_public_is_protected.py | 65 - .../versions/027_rename_oozie_job_id.py | 35 - .../versions/028_storage_devices_number.py | 35 - .../029_set_is_protected_on_is_default.py | 48 - .../versions/030-health-check.py | 63 - .../versions/031_added_plugins_table.py | 46 - .../versions/032_add_domain_name.py | 38 - ...dd_anti_affinity_ratio_field_to_cluster.py | 34 - .../versions/034_boot_from_volume.py | 40 - .../035_boot_from_volume_enhancements.py | 70 - sahara/db/migration/cli.py | 109 - sahara/db/sqlalchemy/__init__.py | 0 sahara/db/sqlalchemy/api.py | 1700 -------------- sahara/db/sqlalchemy/model_base.py | 53 - sahara/db/sqlalchemy/models.py | 570 ----- sahara/db/sqlalchemy/types.py | 116 - sahara/db/templates/README.rst | 278 --- sahara/db/templates/__init__.py | 0 sahara/db/templates/api.py | 802 ------- sahara/db/templates/cli.py | 206 -- sahara/db/templates/utils.py | 191 -- sahara/exceptions.py | 405 ---- sahara/i18n.py | 26 - sahara/locale/de/LC_MESSAGES/sahara.po | 1529 ------------- sahara/main.py | 174 -- sahara/plugins/__init__.py | 0 sahara/plugins/base.py | 162 -- sahara/plugins/castellan_utils.py | 29 - sahara/plugins/conductor.py | 38 - sahara/plugins/context.py | 74 - sahara/plugins/db.py | 29 - .../ambari/v2_3/cluster.json | 26 - .../ambari/v2_3/master-edp.json | 17 - .../default_templates/ambari/v2_3/master.json | 22 - .../default_templates/ambari/v2_3/worker.json | 18 - .../ambari/v2_4/cluster.json | 26 - .../ambari/v2_4/master-edp.json | 17 - .../default_templates/ambari/v2_4/master.json | 23 - .../default_templates/ambari/v2_4/worker.json | 18 - .../ambari/v2_5/cluster.json | 26 - .../ambari/v2_5/master-edp.json | 17 - .../default_templates/ambari/v2_5/master.json | 23 - .../default_templates/ambari/v2_5/worker.json | 18 - .../default_templates/cdh/v5_5_0/cluster.json | 30 - .../default_templates/cdh/v5_5_0/manager.json | 15 - .../cdh/v5_5_0/master-additional.json | 20 - 
.../cdh/v5_5_0/master-core.json | 18 - .../cdh/v5_5_0/worker-nm-dn.json | 16 - .../default_templates/cdh/v5_7_0/cluster.json | 30 - .../default_templates/cdh/v5_7_0/manager.json | 15 - .../cdh/v5_7_0/master-additional.json | 20 - .../cdh/v5_7_0/master-core.json | 18 - .../cdh/v5_7_0/worker-nm-dn.json | 16 - .../default_templates/cdh/v5_9_0/cluster.json | 30 - .../default_templates/cdh/v5_9_0/manager.json | 15 - .../cdh/v5_9_0/master-additional.json | 20 - .../cdh/v5_9_0/master-core.json | 18 - .../cdh/v5_9_0/worker-nm-dn.json | 16 - .../mapr/5_0_0_mrv2/cluster.json | 20 - .../mapr/5_0_0_mrv2/master.json | 19 - .../mapr/5_0_0_mrv2/worker.json | 14 - .../mapr/v5_1_0_mrv2/cluster.json | 21 - .../mapr/v5_1_0_mrv2/master.json | 28 - .../mapr/v5_1_0_mrv2/worker.json | 14 - .../mapr/v5_2_0_mrv2/cluster.json | 21 - .../mapr/v5_2_0_mrv2/master.json | 28 - .../mapr/v5_2_0_mrv2/worker.json | 14 - .../spark/v1_3_1/cluster.json | 20 - .../spark/v1_3_1/master.json | 14 - .../default_templates/spark/v1_3_1/slave.json | 14 - .../spark/v1_6_0/cluster.json | 20 - .../spark/v1_6_0/master.json | 14 - .../default_templates/spark/v1_6_0/slave.json | 14 - .../spark/v2_1_0/cluster.json | 20 - .../spark/v2_1_0/master.json | 14 - .../default_templates/spark/v2_1_0/slave.json | 14 - .../storm/v1_0_1/cluster.json | 21 - .../storm/v1_0_1/master.json | 15 - .../default_templates/storm/v1_0_1/slave.json | 15 - .../storm/v1_1_0/cluster.json | 21 - .../storm/v1_1_0/master.json | 15 - .../default_templates/storm/v1_1_0/slave.json | 15 - .../plugins/default_templates/template.conf | 60 - .../vanilla/v2_7_1/cluster.json | 20 - .../vanilla/v2_7_1/master.json | 18 - .../vanilla/v2_7_1/worker.json | 16 - sahara/plugins/edp.py | 96 - sahara/plugins/exceptions.py | 221 -- sahara/plugins/fake/__init__.py | 0 sahara/plugins/fake/edp_engine.py | 55 - sahara/plugins/fake/plugin.py | 132 -- sahara/plugins/health_check_base.py | 261 --- sahara/plugins/images.py | 1114 ---------- sahara/plugins/kerberos.py | 401 ---- sahara/plugins/labels.py | 223 -- sahara/plugins/main.py | 24 - sahara/plugins/objects.py | 20 - sahara/plugins/opts.py | 26 - sahara/plugins/provisioning.py | 360 --- sahara/plugins/recommendations_utils.py | 372 ---- sahara/plugins/resource.py | 32 - .../plugins/resources/create-principal-keytab | 12 - sahara/plugins/resources/cron-file | 2 - sahara/plugins/resources/cron-script | 3 - sahara/plugins/resources/kdc_conf | 16 - sahara/plugins/resources/kdc_conf_redhat | 13 - .../resources/krb-client-init.sh.template | 14 - sahara/plugins/resources/krb5_config | 11 - .../resources/mit-kdc-server-init.sh.template | 34 - sahara/plugins/service_api.py | 20 - sahara/plugins/swift_helper.py | 28 - sahara/plugins/swift_utils.py | 20 - sahara/plugins/testutils.py | 34 - sahara/plugins/topology_helper.py | 31 - sahara/plugins/utils.py | 239 -- sahara/service/__init__.py | 0 sahara/service/api/__init__.py | 23 - sahara/service/api/v10.py | 288 --- sahara/service/api/v11.py | 268 --- sahara/service/api/v2/__init__.py | 0 sahara/service/api/v2/cluster_templates.py | 47 - sahara/service/api/v2/clusters.py | 183 -- sahara/service/api/v2/data_sources.py | 41 - sahara/service/api/v2/images.py | 87 - sahara/service/api/v2/job_binaries.py | 47 - sahara/service/api/v2/job_templates.py | 45 - sahara/service/api/v2/job_types.py | 78 - sahara/service/api/v2/jobs.py | 102 - sahara/service/api/v2/node_group_templates.py | 47 - sahara/service/api/v2/plugins.py | 31 - sahara/service/castellan/__init__.py | 0 sahara/service/castellan/config.py | 53 - 
.../service/castellan/sahara_key_manager.py | 83 - sahara/service/castellan/utils.py | 57 - sahara/service/coordinator.py | 131 -- sahara/service/edp/__init__.py | 0 sahara/service/edp/base_engine.py | 70 - .../service/edp/binary_retrievers/__init__.py | 0 .../service/edp/binary_retrievers/dispatch.py | 59 - .../edp/binary_retrievers/internal_swift.py | 99 - .../edp/binary_retrievers/manila_share.py | 32 - .../edp/binary_retrievers/s3_storage.py | 19 - .../edp/binary_retrievers/sahara_db.py | 26 - sahara/service/edp/data_sources/__init__.py | 0 sahara/service/edp/data_sources/base.py | 109 - .../service/edp/data_sources/hdfs/__init__.py | 0 .../edp/data_sources/hdfs/implementation.py | 41 - sahara/service/edp/data_sources/manager.py | 86 - .../edp/data_sources/manila/__init__.py | 0 .../edp/data_sources/manila/implementation.py | 63 - .../edp/data_sources/maprfs/__init__.py | 0 .../edp/data_sources/maprfs/implementation.py | 33 - sahara/service/edp/data_sources/opts.py | 28 - .../service/edp/data_sources/s3/__init__.py | 0 .../edp/data_sources/s3/implementation.py | 86 - .../edp/data_sources/swift/__init__.py | 0 .../edp/data_sources/swift/implementation.py | 83 - sahara/service/edp/hdfs_helper.py | 126 -- sahara/service/edp/job_binaries/__init__.py | 0 sahara/service/edp/job_binaries/base.py | 103 - .../edp/job_binaries/internal_db/__init__.py | 0 .../internal_db/implementation.py | 70 - sahara/service/edp/job_binaries/manager.py | 86 - .../edp/job_binaries/manila/__init__.py | 0 .../edp/job_binaries/manila/implementation.py | 87 - sahara/service/edp/job_binaries/opts.py | 28 - .../service/edp/job_binaries/s3/__init__.py | 0 .../edp/job_binaries/s3/implementation.py | 51 - .../edp/job_binaries/swift/__init__.py | 0 .../edp/job_binaries/swift/implementation.py | 121 - sahara/service/edp/job_manager.py | 264 --- sahara/service/edp/job_utils.py | 287 --- sahara/service/edp/oozie/__init__.py | 0 sahara/service/edp/oozie/engine.py | 467 ---- sahara/service/edp/oozie/oozie.py | 146 -- .../edp/oozie/workflow_creator/__init__.py | 0 .../oozie/workflow_creator/base_workflow.py | 85 - .../oozie/workflow_creator/hive_workflow.py | 47 - .../oozie/workflow_creator/java_workflow.py | 57 - .../workflow_creator/mapreduce_workflow.py | 46 - .../oozie/workflow_creator/pig_workflow.py | 49 - .../oozie/workflow_creator/shell_workflow.py | 50 - .../workflow_creator/workflow_factory.py | 359 --- .../edp/resources/edp-main-wrapper.jar | Bin 5075 -> 0 bytes .../edp/resources/edp-spark-wrapper.jar | Bin 3493 -> 0 bytes sahara/service/edp/resources/hive-default.xml | 1873 ---------------- .../service/edp/resources/launch_command.py | 95 - .../service/edp/resources/mapred-default.xml | 1955 ----------------- .../edp/resources/mapred-job-config.xml | 64 - sahara/service/edp/resources/workflow.xml | 9 - sahara/service/edp/s3_common.py | 96 - sahara/service/edp/shares.py | 301 --- sahara/service/edp/spark/__init__.py | 0 sahara/service/edp/spark/engine.py | 433 ---- sahara/service/edp/storm/__init__.py | 0 sahara/service/edp/storm/engine.py | 331 --- sahara/service/edp/utils/__init__.py | 0 sahara/service/edp/utils/shares.py | 301 --- sahara/service/engine.py | 236 -- sahara/service/health/__init__.py | 0 sahara/service/health/common.py | 48 - sahara/service/health/verification_base.py | 156 -- sahara/service/heat/__init__.py | 0 sahara/service/heat/commons.py | 17 - sahara/service/heat/heat_engine.py | 279 --- sahara/service/heat/templates.py | 709 ------ sahara/service/networks.py | 66 - 
sahara/service/ntp_service.py | 120 - sahara/service/ops.py | 501 ----- sahara/service/periodic.py | 268 --- sahara/service/quotas.py | 191 -- sahara/service/sessions.py | 171 -- sahara/service/trusts.py | 180 -- sahara/service/validation.py | 219 -- sahara/service/validations/__init__.py | 0 sahara/service/validations/acl.py | 62 - sahara/service/validations/base.py | 441 ---- .../validations/cluster_template_schema.py | 132 -- .../service/validations/cluster_templates.py | 103 - sahara/service/validations/clusters.py | 127 -- .../service/validations/clusters_scaling.py | 86 - sahara/service/validations/clusters_schema.py | 163 -- sahara/service/validations/edp/__init__.py | 0 sahara/service/validations/edp/base.py | 89 - sahara/service/validations/edp/data_source.py | 77 - .../validations/edp/data_source_schema.py | 57 - sahara/service/validations/edp/job.py | 69 - sahara/service/validations/edp/job_binary.py | 29 - .../validations/edp/job_binary_internal.py | 41 - .../edp/job_binary_internal_schema.py | 35 - .../validations/edp/job_binary_schema.py | 54 - .../service/validations/edp/job_execution.py | 205 -- .../validations/edp/job_execution_schema.py | 85 - .../service/validations/edp/job_interface.py | 204 -- sahara/service/validations/edp/job_schema.py | 93 - sahara/service/validations/images.py | 51 - .../validations/node_group_template_schema.py | 143 -- .../validations/node_group_templates.py | 102 - sahara/service/validations/plugins.py | 32 - sahara/service/validations/shares.py | 76 - sahara/service/volumes.py | 269 --- sahara/swift/__init__.py | 0 sahara/swift/resources/conf-template.xml | 77 - sahara/swift/swift_helper.py | 112 - sahara/swift/utils.py | 48 - sahara/tests/README.rst | 6 - sahara/tests/__init__.py | 17 - sahara/tests/unit/__init__.py | 0 sahara/tests/unit/api/__init__.py | 0 sahara/tests/unit/api/middleware/__init__.py | 0 .../unit/api/middleware/test_auth_valid.py | 87 - sahara/tests/unit/api/test_acl.py | 54 - sahara/tests/unit/base.py | 74 - sahara/tests/unit/cli/__init__.py | 0 sahara/tests/unit/cli/image_pack/__init__.py | 0 .../cli/image_pack/test_image_pack_api.py | 72 - sahara/tests/unit/cli/test_sahara_cli.py | 86 - sahara/tests/unit/cli/test_sahara_status.py | 40 - sahara/tests/unit/conductor/__init__.py | 0 sahara/tests/unit/conductor/base.py | 52 - .../tests/unit/conductor/manager/__init__.py | 0 .../unit/conductor/manager/test_clusters.py | 434 ---- .../unit/conductor/manager/test_defaults.py | 84 - .../tests/unit/conductor/manager/test_edp.py | 1116 ---------- .../conductor/manager/test_edp_interface.py | 115 - .../conductor/manager/test_from_template.py | 98 - .../unit/conductor/manager/test_templates.py | 752 ------- sahara/tests/unit/conductor/test_api.py | 257 --- sahara/tests/unit/conductor/test_resource.py | 246 --- sahara/tests/unit/db/__init__.py | 0 sahara/tests/unit/db/migration/__init__.py | 0 .../unit/db/migration/test_db_manage_cli.py | 89 - .../unit/db/migration/test_migrations.py | 665 ------ .../unit/db/migration/test_migrations_base.py | 178 -- sahara/tests/unit/db/sqlalchemy/__init__.py | 0 sahara/tests/unit/db/sqlalchemy/test_types.py | 138 -- sahara/tests/unit/db/templates/__init__.py | 0 sahara/tests/unit/db/templates/common.py | 106 - sahara/tests/unit/db/templates/test_delete.py | 367 ---- sahara/tests/unit/db/templates/test_update.py | 798 ------- sahara/tests/unit/db/templates/test_utils.py | 147 -- sahara/tests/unit/db/test_utils.py | 113 - sahara/tests/unit/plugins/__init__.py | 0 
.../unit/plugins/test_base_plugins_support.py | 31 - sahara/tests/unit/plugins/test_images.py | 482 ---- sahara/tests/unit/plugins/test_kerberos.py | 130 -- sahara/tests/unit/plugins/test_labels.py | 261 --- .../plugins/test_provide_recommendations.py | 293 --- .../tests/unit/plugins/test_provisioning.py | 140 -- sahara/tests/unit/plugins/test_utils.py | 213 -- .../unit/resources/dfs_admin_0_nodes.txt | 11 - .../unit/resources/dfs_admin_1_nodes.txt | 15 - .../unit/resources/dfs_admin_3_nodes.txt | 23 - sahara/tests/unit/resources/test-default.xml | 31 - sahara/tests/unit/service/__init__.py | 0 sahara/tests/unit/service/api/__init__.py | 0 sahara/tests/unit/service/api/test_v10.py | 325 --- sahara/tests/unit/service/api/v2/__init__.py | 0 sahara/tests/unit/service/api/v2/base.py | 155 -- .../unit/service/api/v2/test_clusters.py | 313 --- .../tests/unit/service/api/v2/test_images.py | 68 - .../tests/unit/service/api/v2/test_plugins.py | 70 - .../tests/unit/service/castellan/__init__.py | 0 .../castellan/test_sahara_key_manager.py | 44 - sahara/tests/unit/service/edp/__init__.py | 0 .../service/edp/binary_retrievers/__init__.py | 0 .../edp/binary_retrievers/test_dispatch.py | 66 - .../binary_retrievers/test_internal_swift.py | 130 -- .../edp/binary_retrievers/test_manila.py | 80 - .../unit/service/edp/data_sources/__init__.py | 0 .../service/edp/data_sources/base_test.py | 71 - .../data_source_manager_support_test.py | 76 - .../service/edp/data_sources/hdfs/__init__.py | 0 .../edp/data_sources/hdfs/test_hdfs_type.py | 77 - .../edp/data_sources/manila/__init__.py | 0 .../data_sources/manila/test_manila_type.py | 143 -- .../edp/data_sources/maprfs/__init__.py | 0 .../maprfs/test_maprfs_type_validation.py | 65 - .../service/edp/data_sources/s3/__init__.py | 0 .../edp/data_sources/s3/test_s3_type.py | 116 - .../edp/data_sources/swift/__init__.py | 0 .../edp/data_sources/swift/test_swift_type.py | 217 -- .../tests/unit/service/edp/edp_test_utils.py | 117 - .../unit/service/edp/job_binaries/__init__.py | 0 .../edp/job_binaries/internal_db/__init__.py | 0 .../internal_db/test_internal_db_type.py | 99 - .../job_binary_manager_support.py | 69 - .../edp/job_binaries/manila/__init__.py | 0 .../job_binaries/manila/test_manila_type.py | 177 -- .../service/edp/job_binaries/s3/__init__.py | 0 .../edp/job_binaries/s3/test_s3_type.py | 69 - .../edp/job_binaries/swift/__init__.py | 0 .../edp/job_binaries/swift/test_swift_type.py | 174 -- .../service/edp/job_binaries/test_base.py | 37 - .../tests/unit/service/edp/oozie/__init__.py | 0 .../unit/service/edp/oozie/test_oozie.py | 280 --- .../tests/unit/service/edp/spark/__init__.py | 0 sahara/tests/unit/service/edp/spark/base.py | 720 ------ .../tests/unit/service/edp/storm/__init__.py | 0 .../unit/service/edp/storm/test_storm.py | 403 ---- .../unit/service/edp/test_hdfs_helper.py | 177 -- .../unit/service/edp/test_job_manager.py | 671 ------ .../service/edp/test_job_possible_configs.py | 44 - .../tests/unit/service/edp/test_job_utils.py | 259 --- .../service/edp/test_json_api_examples.py | 79 - .../tests/unit/service/edp/test_s3_common.py | 103 - .../unit/service/edp/utils/test_shares.py | 389 ---- .../service/edp/workflow_creator/__init__.py | 0 .../workflow_creator/test_create_workflow.py | 263 --- sahara/tests/unit/service/health/__init__.py | 0 .../service/health/test_verification_base.py | 171 -- sahara/tests/unit/service/heat/__init__.py | 0 .../tests/unit/service/heat/test_templates.py | 321 --- sahara/tests/unit/service/test_coordinator.py | 94 - 
sahara/tests/unit/service/test_engine.py | 153 -- sahara/tests/unit/service/test_networks.py | 149 -- sahara/tests/unit/service/test_ntp_service.py | 105 - sahara/tests/unit/service/test_ops.py | 191 -- sahara/tests/unit/service/test_periodic.py | 274 --- sahara/tests/unit/service/test_quotas.py | 257 --- sahara/tests/unit/service/test_sessions.py | 154 -- sahara/tests/unit/service/test_trusts.py | 137 -- sahara/tests/unit/service/test_volumes.py | 153 -- .../tests/unit/service/validation/__init__.py | 0 .../unit/service/validation/edp/__init__.py | 0 .../validation/edp/test_data_source.py | 181 -- .../unit/service/validation/edp/test_job.py | 245 --- .../service/validation/edp/test_job_binary.py | 113 - .../edp/test_job_binary_internal.py | 65 - .../validation/edp/test_job_executor.py | 385 ---- .../validation/edp/test_job_interface.py | 302 --- .../validation/test_add_tags_validation.py | 50 - .../test_cluster_create_validation.py | 653 ------ .../test_cluster_delete_validation.py | 87 - .../test_cluster_scaling_validation.py | 526 ----- ...test_cluster_template_create_validation.py | 293 --- ...test_cluster_template_update_validation.py | 71 - .../test_cluster_update_validation.py | 142 -- .../test_ng_template_validation_create.py | 408 ---- .../test_ng_template_validation_update.py | 90 - .../validation/test_protected_validation.py | 55 - .../validation/test_share_validations.py | 132 -- .../service/validation/test_validation.py | 49 - sahara/tests/unit/service/validation/utils.py | 446 ---- sahara/tests/unit/swift/__init__.py | 0 sahara/tests/unit/swift/test_swift_helper.py | 49 - sahara/tests/unit/swift/test_utils.py | 66 - sahara/tests/unit/test_context.py | 137 -- sahara/tests/unit/test_exceptions.py | 171 -- sahara/tests/unit/test_main.py | 60 - sahara/tests/unit/testutils.py | 54 - sahara/tests/unit/topology/__init__.py | 0 sahara/tests/unit/topology/test_topology.py | 188 -- sahara/tests/unit/utils/__init__.py | 0 .../tests/unit/utils/notification/__init__.py | 0 .../unit/utils/notification/test_sender.py | 43 - sahara/tests/unit/utils/openstack/__init__.py | 0 .../tests/unit/utils/openstack/test_base.py | 245 --- .../tests/unit/utils/openstack/test_heat.py | 45 - .../tests/unit/utils/openstack/test_images.py | 105 - .../tests/unit/utils/openstack/test_swift.py | 41 - sahara/tests/unit/utils/test_api.py | 98 - sahara/tests/unit/utils/test_api_validator.py | 344 --- sahara/tests/unit/utils/test_cinder.py | 77 - sahara/tests/unit/utils/test_cluster.py | 188 -- .../unit/utils/test_cluster_progress_ops.py | 248 --- sahara/tests/unit/utils/test_configs.py | 51 - sahara/tests/unit/utils/test_crypto.py | 39 - sahara/tests/unit/utils/test_edp.py | 65 - sahara/tests/unit/utils/test_general.py | 75 - sahara/tests/unit/utils/test_hacking.py | 71 - sahara/tests/unit/utils/test_heat.py | 68 - sahara/tests/unit/utils/test_neutron.py | 70 - sahara/tests/unit/utils/test_patches.py | 73 - sahara/tests/unit/utils/test_poll_utils.py | 134 -- sahara/tests/unit/utils/test_proxy.py | 135 -- sahara/tests/unit/utils/test_resources.py | 85 - sahara/tests/unit/utils/test_rpc.py | 96 - sahara/tests/unit/utils/test_ssh_remote.py | 480 ---- sahara/tests/unit/utils/test_types.py | 29 - sahara/tests/unit/utils/test_xml_utils.py | 218 -- sahara/topology/__init__.py | 0 sahara/topology/resources/core-template.xml | 24 - sahara/topology/resources/mapred-template.xml | 17 - sahara/topology/topology_helper.py | 167 -- sahara/utils/__init__.py | 0 sahara/utils/api.py | 441 ---- sahara/utils/api_validator.py | 166 
-- sahara/utils/cluster.py | 193 -- sahara/utils/cluster_progress_ops.py | 203 -- sahara/utils/configs.py | 32 - sahara/utils/crypto.py | 64 - sahara/utils/edp.py | 159 -- sahara/utils/files.py | 40 - sahara/utils/general.py | 77 - sahara/utils/hacking/__init__.py | 0 sahara/utils/hacking/checks.py | 129 -- sahara/utils/hacking/commit_message.py | 95 - sahara/utils/hacking/logging_checks.py | 67 - sahara/utils/network.py | 43 - sahara/utils/notification/__init__.py | 0 sahara/utils/notification/sender.py | 90 - sahara/utils/openstack/__init__.py | 0 sahara/utils/openstack/base.py | 112 - sahara/utils/openstack/cinder.py | 87 - sahara/utils/openstack/glance.py | 48 - sahara/utils/openstack/heat.py | 118 - sahara/utils/openstack/images.py | 197 -- sahara/utils/openstack/keystone.py | 319 --- sahara/utils/openstack/manila.py | 73 - sahara/utils/openstack/neutron.py | 115 - sahara/utils/openstack/nova.py | 63 - sahara/utils/openstack/swift.py | 93 - sahara/utils/patches.py | 56 - sahara/utils/poll_utils.py | 163 -- sahara/utils/procutils.py | 98 - sahara/utils/proxy.py | 303 --- sahara/utils/remote.py | 168 -- sahara/utils/resources.py | 73 - sahara/utils/rpc.py | 127 -- sahara/utils/ssh_remote.py | 1044 --------- sahara/utils/tempfiles.py | 38 - sahara/utils/types.py | 111 - sahara/utils/wsgi.py | 85 - sahara/utils/xmlutils.py | 192 -- sahara/version.py | 18 - setup.cfg | 79 - setup.py | 21 - test-requirements.txt | 20 - tools/config/config-generator.sahara.conf | 15 - tools/config/sahara-policy-generator.conf | 3 - tools/cover.sh | 85 - tools/gate/build-images | 84 - tools/lintstack.py | 196 -- tools/lintstack.sh | 61 - tools/test-setup.sh | 57 - tox.ini | 152 -- 976 files changed, 8 insertions(+), 95621 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .gitignore delete mode 100644 .stestr.conf delete mode 100644 .zuul.yaml delete mode 100644 CONTRIBUTING.rst delete mode 100644 HACKING.rst delete mode 100644 LICENSE delete mode 100644 api-ref/source/conf.py delete mode 100644 api-ref/source/index.rst delete mode 100644 api-ref/source/v1.1/cluster-templates.inc delete mode 100644 api-ref/source/v1.1/clusters.inc delete mode 100644 api-ref/source/v1.1/data-sources.inc delete mode 100644 api-ref/source/v1.1/event-log.inc delete mode 100644 api-ref/source/v1.1/image-registry.inc delete mode 100644 api-ref/source/v1.1/index.rst delete mode 100644 api-ref/source/v1.1/job-binaries.inc delete mode 100644 api-ref/source/v1.1/job-binary-internals.inc delete mode 100644 api-ref/source/v1.1/job-executions.inc delete mode 100644 api-ref/source/v1.1/job-types.inc delete mode 100644 api-ref/source/v1.1/jobs.inc delete mode 100644 api-ref/source/v1.1/node-group-templates.inc delete mode 100644 api-ref/source/v1.1/parameters.yaml delete mode 100644 api-ref/source/v1.1/plugins.inc delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-request.json delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-response.json delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-template-show-response.json delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-request.json delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-response.json delete mode 100644 api-ref/source/v1.1/samples/cluster-templates/cluster-templates-list-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-create-request.json delete mode 100644 
api-ref/source/v1.1/samples/clusters/cluster-create-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-scale-request.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-scale-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-show-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-update-request.json delete mode 100644 api-ref/source/v1.1/samples/clusters/cluster-update-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/clusters-list-response.json delete mode 100644 api-ref/source/v1.1/samples/clusters/multiple-clusters-create-request.json delete mode 100644 api-ref/source/v1.1/samples/clusters/multiple-clusters-create-response.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-request.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-response.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-register-swift-request.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-register-swift-response.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-show-response.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-update-request.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-source-update-response.json delete mode 100644 api-ref/source/v1.1/samples/data-sources/data-sources-list-response.json delete mode 100644 api-ref/source/v1.1/samples/event-log/cluster-progress-response.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-register-request.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-register-response.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-show-response.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-tags-add-request.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-tags-add-response.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-tags-delete-request.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/image-tags-delete-response.json delete mode 100644 api-ref/source/v1.1/samples/image-registry/images-list-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/create-request.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/create-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/list-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/show-data-response delete mode 100644 api-ref/source/v1.1/samples/job-binaries/show-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/update-request.json delete mode 100644 api-ref/source/v1.1/samples/job-binaries/update-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/create-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/list-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/show-data-response delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/show-response.json delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/update-request.json delete mode 100644 api-ref/source/v1.1/samples/job-binary-internals/update-response.json delete mode 100644 api-ref/source/v1.1/samples/job-executions/cancel-response.json delete mode 100644 
api-ref/source/v1.1/samples/job-executions/job-ex-response.json delete mode 100644 api-ref/source/v1.1/samples/job-executions/job-ex-update-request.json delete mode 100644 api-ref/source/v1.1/samples/job-executions/job-ex-update-response.json delete mode 100644 api-ref/source/v1.1/samples/job-executions/list-response.json delete mode 100644 api-ref/source/v1.1/samples/job-types/job-types-list-response.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-create-request.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-create-response.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-execute-request.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-execute-response.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-show-response.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-update-request.json delete mode 100644 api-ref/source/v1.1/samples/jobs/job-update-response.json delete mode 100644 api-ref/source/v1.1/samples/jobs/jobs-list-response.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-request.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-response.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-template-show-response.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-request.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-response.json delete mode 100644 api-ref/source/v1.1/samples/node-group-templates/node-group-templates-list-response.json delete mode 100644 api-ref/source/v1.1/samples/plugins/plugin-show-response.json delete mode 100644 api-ref/source/v1.1/samples/plugins/plugin-update-request.json delete mode 100644 api-ref/source/v1.1/samples/plugins/plugin-update-response.json delete mode 100644 api-ref/source/v1.1/samples/plugins/plugin-version-show-response.json delete mode 100644 api-ref/source/v1.1/samples/plugins/plugins-list-response.json delete mode 100644 api-ref/source/v2/cluster-templates.inc delete mode 100644 api-ref/source/v2/clusters.inc delete mode 100644 api-ref/source/v2/data-sources.inc delete mode 100644 api-ref/source/v2/event-log.inc delete mode 100644 api-ref/source/v2/image-registry.inc delete mode 100644 api-ref/source/v2/index.rst delete mode 100644 api-ref/source/v2/job-binaries.inc delete mode 100644 api-ref/source/v2/job-templates.inc delete mode 100644 api-ref/source/v2/job-types.inc delete mode 100644 api-ref/source/v2/jobs.inc delete mode 100644 api-ref/source/v2/node-group-templates.inc delete mode 100644 api-ref/source/v2/parameters.yaml delete mode 100644 api-ref/source/v2/plugins.inc delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-template-create-request.json delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-template-create-response.json delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-template-show-response.json delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-template-update-request.json delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-template-update-response.json delete mode 100644 api-ref/source/v2/samples/cluster-templates/cluster-templates-list-response.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-create-request.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-create-response.json delete mode 
100644 api-ref/source/v2/samples/clusters/cluster-scale-request.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-scale-response.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-show-response.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-update-request.json delete mode 100644 api-ref/source/v2/samples/clusters/cluster-update-response.json delete mode 100644 api-ref/source/v2/samples/clusters/clusters-list-response.json delete mode 100644 api-ref/source/v2/samples/clusters/multiple-clusters-create-request.json delete mode 100644 api-ref/source/v2/samples/clusters/multiple-clusters-create-response.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-register-hdfs-request.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-register-hdfs-response.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-register-swift-request.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-register-swift-response.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-show-response.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-update-request.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-source-update-response.json delete mode 100644 api-ref/source/v2/samples/data-sources/data-sources-list-response.json delete mode 100644 api-ref/source/v2/samples/event-log/cluster-progress-response.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-register-request.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-register-response.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-show-response.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-tags-add-request.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-tags-add-response.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-tags-delete-request.json delete mode 100644 api-ref/source/v2/samples/image-registry/image-tags-delete-response.json delete mode 100644 api-ref/source/v2/samples/image-registry/images-list-response.json delete mode 100644 api-ref/source/v2/samples/job-binaries/create-request.json delete mode 100644 api-ref/source/v2/samples/job-binaries/create-response.json delete mode 100644 api-ref/source/v2/samples/job-binaries/list-response.json delete mode 100644 api-ref/source/v2/samples/job-binaries/show-data-response delete mode 100644 api-ref/source/v2/samples/job-binaries/show-response.json delete mode 100644 api-ref/source/v2/samples/job-binaries/update-request.json delete mode 100644 api-ref/source/v2/samples/job-binaries/update-response.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-template-create-request.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-template-create-response.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-template-show-response.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-template-update-request.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-template-update-response.json delete mode 100644 api-ref/source/v2/samples/job-templates/job-templates-list-response.json delete mode 100644 api-ref/source/v2/samples/job-types/job-types-list-response.json delete mode 100644 api-ref/source/v2/samples/jobs/cancel-response.json delete mode 100644 api-ref/source/v2/samples/jobs/job-request.json delete mode 100644 
api-ref/source/v2/samples/jobs/job-response.json delete mode 100644 api-ref/source/v2/samples/jobs/job-update-request.json delete mode 100644 api-ref/source/v2/samples/jobs/job-update-response.json delete mode 100644 api-ref/source/v2/samples/jobs/list-response.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-template-create-request.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-template-create-response.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-template-show-response.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-template-update-request.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-template-update-response.json delete mode 100644 api-ref/source/v2/samples/node-group-templates/node-group-templates-list-response.json delete mode 100644 api-ref/source/v2/samples/plugins/plugin-show-response.json delete mode 100644 api-ref/source/v2/samples/plugins/plugin-update-request.json delete mode 100644 api-ref/source/v2/samples/plugins/plugin-update-response.json delete mode 100644 api-ref/source/v2/samples/plugins/plugin-version-show-response.json delete mode 100644 api-ref/source/v2/samples/plugins/plugins-list-response.json delete mode 100644 bandit.yaml delete mode 100644 bindep.txt delete mode 100644 devstack/README.rst delete mode 100644 devstack/exercise.sh delete mode 100644 devstack/files/apache-sahara-api.template delete mode 100755 devstack/plugin.sh delete mode 100644 devstack/settings delete mode 100644 devstack/upgrade/from-liberty/upgrade-sahara delete mode 100755 devstack/upgrade/from-mitaka/upgrade-sahara delete mode 100755 devstack/upgrade/from-rocky/upgrade-sahara delete mode 100755 devstack/upgrade/resources.sh delete mode 100644 devstack/upgrade/settings delete mode 100755 devstack/upgrade/shutdown.sh delete mode 100755 devstack/upgrade/upgrade.sh delete mode 100644 doc/requirements.txt delete mode 100644 doc/source/_extra/.htaccess delete mode 100644 doc/source/_templates/sidebarlinks.html delete mode 100644 doc/source/_theme_rtd/layout.html delete mode 100644 doc/source/_theme_rtd/theme.conf delete mode 100644 doc/source/admin/advanced-configuration-guide.rst delete mode 100644 doc/source/admin/configs-recommendations.rst delete mode 100644 doc/source/admin/configuration-guide.rst delete mode 100644 doc/source/admin/index.rst delete mode 100644 doc/source/admin/upgrade-guide.rst delete mode 100644 doc/source/cli/index.rst delete mode 100644 doc/source/cli/sahara-status.rst delete mode 100644 doc/source/conf.py delete mode 120000 doc/source/config-generator.conf delete mode 100644 doc/source/configuration/descriptionconfig.rst delete mode 100644 doc/source/configuration/index.rst delete mode 100644 doc/source/configuration/sampleconfig.rst delete mode 100644 doc/source/contributor/adding-database-migrations.rst delete mode 100644 doc/source/contributor/apiv2.rst delete mode 100644 doc/source/contributor/contributing.rst delete mode 100644 doc/source/contributor/dashboard-dev-environment-guide.rst delete mode 100644 doc/source/contributor/development-environment.rst delete mode 100644 doc/source/contributor/development-guidelines.rst delete mode 100644 doc/source/contributor/devstack.rst delete mode 100644 doc/source/contributor/gerrit.rst delete mode 100644 doc/source/contributor/how-to-build-oozie.rst delete mode 100644 doc/source/contributor/image-gen.rst delete mode 100644 
doc/source/contributor/index.rst delete mode 100644 doc/source/contributor/jenkins.rst delete mode 100644 doc/source/contributor/log-guidelines.rst delete mode 100644 doc/source/contributor/testing.rst delete mode 100644 doc/source/images/hadoop-cluster-example.jpg delete mode 100644 doc/source/images/openstack-interop.png delete mode 100644 doc/source/images/sahara-architecture.svg delete mode 100644 doc/source/index.rst delete mode 100644 doc/source/install/dashboard-guide.rst delete mode 100644 doc/source/install/index.rst delete mode 100644 doc/source/install/installation-guide.rst delete mode 100644 doc/source/intro/architecture.rst delete mode 100644 doc/source/intro/index.rst delete mode 100644 doc/source/intro/overview.rst delete mode 100644 doc/source/reference/edp-spi.rst delete mode 100644 doc/source/reference/index.rst delete mode 100644 doc/source/reference/plugin-spi.rst delete mode 100644 doc/source/reference/plugins.rst delete mode 100644 doc/source/reference/restapi.rst delete mode 100644 doc/source/user/building-guest-images.rst delete mode 100644 doc/source/user/building-guest-images/baremetal.rst delete mode 100644 doc/source/user/building-guest-images/sahara-image-create.rst delete mode 100644 doc/source/user/building-guest-images/sahara-image-pack.rst delete mode 100644 doc/source/user/dashboard-user-guide.rst delete mode 100644 doc/source/user/edp-s3.rst delete mode 100644 doc/source/user/edp.rst delete mode 100644 doc/source/user/features.rst delete mode 100644 doc/source/user/hadoop-swift.rst delete mode 100644 doc/source/user/index.rst delete mode 100644 doc/source/user/overview.rst delete mode 100644 doc/source/user/plugins.rst delete mode 100644 doc/source/user/quickstart.rst delete mode 100644 doc/source/user/registering-image.rst delete mode 100644 doc/source/user/sahara-on-ironic.rst delete mode 100644 doc/source/user/statuses.rst delete mode 100644 doc/test/redirect-tests.txt delete mode 100644 etc/edp-examples/README.rst delete mode 100644 etc/sahara/README-sahara.conf.txt delete mode 100644 etc/sahara/api-paste.ini delete mode 100644 etc/sahara/compute.topology.sample delete mode 100644 etc/sahara/rootwrap.conf delete mode 100644 etc/sahara/rootwrap.d/sahara.filters delete mode 100644 etc/sahara/swift.topology.sample delete mode 100644 etc/sudoers.d/sahara-rootwrap delete mode 100644 playbooks/buildimages/run.yaml delete mode 100644 pylintrc delete mode 100644 releasenotes/notes/.placeholder delete mode 100644 releasenotes/notes/add-impala-2.2-c1649599649aff5c.yaml delete mode 100644 releasenotes/notes/add-mapr-520-3ed6cd0ae9688e17.yaml delete mode 100644 releasenotes/notes/add-mapr-kafka-3a808bbc1aa21055.yaml delete mode 100644 releasenotes/notes/add-mapr-sentry-6012c08b55d679de.yaml delete mode 100644 releasenotes/notes/add-scheduler-edp-job-9eda17dd174e53fa.yaml delete mode 100644 releasenotes/notes/add-storm-version-1_1_0-3e10b34824706a62.yaml delete mode 100644 releasenotes/notes/add-upgrade-check-framework-9cd18dbc47b0efbd.yaml delete mode 100644 releasenotes/notes/add-wsgi-server-support-c8fbc3d76d4e42f6.yaml delete mode 100644 releasenotes/notes/add_kafka_in_cdh-774c7c051480c892.yaml delete mode 100644 releasenotes/notes/add_mapr_repo_configs-04af1a67350bfd24.yaml delete mode 100644 releasenotes/notes/ambari-agent-pkg-install-timeout-param-d50e5c15e06fa51e.yaml delete mode 100644 releasenotes/notes/ambari-downscaling-b9ba759ce9c7325e.yaml delete mode 100644 releasenotes/notes/ambari-hive-92b911e0a759ee88.yaml delete mode 100644 
releasenotes/notes/ambari-server-start-856403bc280dfba3.yaml delete mode 100644 releasenotes/notes/ambari26-image-pack-88c9aad59bf635b2.yaml delete mode 100644 releasenotes/notes/ambari_2_4_image_generation_validation-47eabb9fa90384c8.yaml delete mode 100644 releasenotes/notes/api-insecure-cbd4fd5da71b29a3.yaml delete mode 100644 releasenotes/notes/api-v2-return-payload-a84a609db410228a.yaml delete mode 100644 releasenotes/notes/apiv2-microversion-4c1a58ee8090e5a9.yaml delete mode 100644 releasenotes/notes/apiv2-payload-tweaks-b73c20a35263d958.yaml delete mode 100644 releasenotes/notes/apiv2-preview-release-b1ee8cc9b2fb01da.yaml delete mode 100644 releasenotes/notes/apiv2-stable-release-25ba9920c8e4632a.yaml delete mode 100644 releasenotes/notes/auto_configs_for_hdp-011d460d37dcdf02.yaml delete mode 100644 releasenotes/notes/boot-from-volume-e7078452fac1a4a0.yaml delete mode 100644 releasenotes/notes/ca-cert-fix-5c434a82f9347039.yaml delete mode 100644 releasenotes/notes/cdh-5-5-35e582e149a05632.yaml delete mode 100644 releasenotes/notes/cdh-513-bdce0d5d269d8f20.yaml delete mode 100644 releasenotes/notes/cdh-labels-5695d95bce226051.yaml delete mode 100644 releasenotes/notes/cdh_5_11_0_image_generation_validation-6334ef6d04950935.yaml delete mode 100644 releasenotes/notes/cdh_5_11_support-10d4abb91bc4475f.yaml delete mode 100644 releasenotes/notes/cdh_5_7_image_generation_validation-308e7529a9018663.yaml delete mode 100644 releasenotes/notes/cdh_5_7_support-9522cb9b4dce2378.yaml delete mode 100644 releasenotes/notes/cdh_5_9_0_image_generation_validation-19d10e6468e30b4f.yaml delete mode 100644 releasenotes/notes/cdh_5_9_support-b603a2648b2e7b32.yaml delete mode 100644 releasenotes/notes/config-groups-ambari-837de6d33eb0fa87.yaml delete mode 100644 releasenotes/notes/consolidate-cluster-creation-apiv2-5d5aceeb2e97c702.yaml delete mode 100644 releasenotes/notes/convert-to-cluster-template-43d502496d18625e.yaml delete mode 100644 releasenotes/notes/deprecate-cdh_5_5-0da56b562170566f.yaml delete mode 100644 releasenotes/notes/deprecate-hdp-a9ff0ecf6006da49.yaml delete mode 100644 releasenotes/notes/deprecate-json-formatted-policy-file-b267f288cba7e325.yaml delete mode 100644 releasenotes/notes/deprecate-mapr-51-090423438e3dda20.yaml delete mode 100644 releasenotes/notes/deprecate-plugin-vanilla260-46e4b8fe96e8fe68.yaml delete mode 100644 releasenotes/notes/deprecate-sahara-all-entry-point-1446a00dab643b7b.yaml delete mode 100644 releasenotes/notes/deprecate-spark-version-131-98eccc79b13b6b8f.yaml delete mode 100644 releasenotes/notes/deprecate-storm-version-092.yaml-b9ff2b9ebbb983fc.yaml delete mode 100644 releasenotes/notes/designate-integration-784c5f7f29546015.yaml delete mode 100644 releasenotes/notes/drop-py-2-7-bc282e43b26fbf17.yaml delete mode 100644 releasenotes/notes/drop-python-3-6-and-3-7-f37b9dc6d94620de.yaml delete mode 100644 releasenotes/notes/enable-mutable-configuration-2dd6b7a0e0fe4437.yaml delete mode 100644 releasenotes/notes/engine-opt-258ff1ae9b04d628.yaml delete mode 100644 releasenotes/notes/enhance-bfv-12bac06c4438675f.yaml delete mode 100644 releasenotes/notes/event_log_for_hdp-a114511c477ef16d.yaml delete mode 100644 releasenotes/notes/fix-install-provision-events-c1bd2e05bf2be6bd.yaml delete mode 100644 releasenotes/notes/fixing-policy-inconsistencies-984020000cc3882a.yaml delete mode 100644 releasenotes/notes/force-delete-apiv2-e372392bbc8639f8.yaml delete mode 100644 releasenotes/notes/force-delete-changes-2e0881a99742c339.yaml delete mode 100644 
releasenotes/notes/hadoop-swift-domain-fix-c1dfdf6c52b5aa25.yaml delete mode 100644 releasenotes/notes/hadoop-swift-jar-for-ambari-4439913b01d42468.yaml delete mode 100644 releasenotes/notes/hdfs-dfs-94a9c4f64cf8994f.yaml delete mode 100644 releasenotes/notes/hdp-removed-from-defaults-31d1e1f15973b682.yaml delete mode 100644 releasenotes/notes/hdp25-b35ef99c240fc127.yaml delete mode 100644 releasenotes/notes/hdp26-5a406d7066706bf1.yaml delete mode 100644 releasenotes/notes/healthcheck-02e429a3ffcd9482.yaml delete mode 100644 releasenotes/notes/honor-endpoint-type-neutron-4583128c383d9745.yaml delete mode 100644 releasenotes/notes/ironic-support-79e7ecad05f54029.yaml delete mode 100644 releasenotes/notes/kerberos-76dd297462b7337c.yaml delete mode 100644 releasenotes/notes/key_manager_integration-e32d141809c8cc46.yaml delete mode 100644 releasenotes/notes/keypair-replacement-0c0cc3db0551c112.yaml delete mode 100644 releasenotes/notes/keystoneclient-to-keystonauth-migration-c75988975ad1a506.yaml delete mode 100644 releasenotes/notes/mapr-health-check-2eba3d742a2b853f.yaml delete mode 100644 releasenotes/notes/mapr-labels-5cc318616db59403.yaml delete mode 100644 releasenotes/notes/mapr-remove-spark-standalone-293ca864de9a7848.yaml delete mode 100644 releasenotes/notes/mapr-services-new-versions-b32c2e8fe07d1600.yaml delete mode 100644 releasenotes/notes/mapr-services-new-versions-dc7652e33f26bbdc.yaml delete mode 100644 releasenotes/notes/mapr5.2.0-image-gen-c850e74977b00abe.yaml delete mode 100644 releasenotes/notes/neutron-default-a6baf93d857d86b3.yaml delete mode 100644 releasenotes/notes/nova-network-removal-debe306fd7c61268.yaml delete mode 100644 releasenotes/notes/novaclient_images_to_glanceclient-0266a2bd92b4be05.yaml delete mode 100644 releasenotes/notes/ntp-config-51ed9d612132e2fa.yaml delete mode 100644 releasenotes/notes/optional-project-id-apiv1-2e89756f6f16bd5e.yaml delete mode 100644 releasenotes/notes/options-to-oslo_messaging_notifications-cee206fc4f74c217.yaml delete mode 100644 releasenotes/notes/plugins-split-from-sahara-core-9ffc5e5d06c9239c.yaml delete mode 100644 releasenotes/notes/policy_in_code-5847902775ff9861.yaml delete mode 100644 releasenotes/notes/proxy-user-lowercase-f116f7b7e89274cb.yaml delete mode 100644 releasenotes/notes/rack_awareness_for_cdh-e0cd5d4ab46aa1b5.yaml delete mode 100644 releasenotes/notes/rack_awareness_for_hdp-6e3d44468cc141a5.yaml delete mode 100644 releasenotes/notes/refactor-floating-ips-logic-9d37d9297f3621b3.yaml delete mode 100644 releasenotes/notes/remove-cdh_5.0_5.3_5.4-b5f140e9b0233c07.yaml delete mode 100644 releasenotes/notes/remove-hard-coded-oozie-password-b97475c8772aa1bd.yaml delete mode 100644 releasenotes/notes/remove-hardcoded-password-from-hive-eb923b518974e853.yaml delete mode 100644 releasenotes/notes/remove-hdp-137d0ad3d2389b7a.yaml delete mode 100644 releasenotes/notes/remove-mapr-500-3df3041be99a864c.yaml delete mode 100644 releasenotes/notes/remove-spark-100-44f3d5efc3806410.yaml delete mode 100644 releasenotes/notes/remove-upload-oozie-sharelib-step-in-vanilla-2.8.2-546b2026e2f5d557.yaml delete mode 100644 releasenotes/notes/remove-use-neutron-2499b661dce041d4.yaml delete mode 100644 releasenotes/notes/remove_custom_auth_domainname-984fd2d931e306cc.yaml delete mode 100644 releasenotes/notes/remove_enable_notifications_opt-4c0d46e8e79eb06f.yaml delete mode 100644 releasenotes/notes/s3-datasource-protocol-d3abd0b22f653b3b.yaml delete mode 100644 releasenotes/notes/sahara-cfg-location-change-7b61454311b16ce8.yaml delete 
mode 100644 releasenotes/notes/sahara-endpoint-version-discovery-826e9f31093cb10f.yaml delete mode 100644 releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml delete mode 100644 releasenotes/notes/spark-2.2-d7c3a84bd52f735a.yaml delete mode 100644 releasenotes/notes/spark-2.3-0277fe9feae6668a.yaml delete mode 100644 releasenotes/notes/storm-1.2-af75fedb413de56a.yaml delete mode 100644 releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml delete mode 100644 releasenotes/notes/substring-matching-1d5981b8e5b1d919.yaml delete mode 100644 releasenotes/notes/support-s3-data-source-a912e2cdf4cd51fb.yaml delete mode 100644 releasenotes/notes/support-s3-job-binary-6d91267ae11d09d3.yaml delete mode 100644 releasenotes/notes/transport_url-5bbbf0bb54d81727.yaml delete mode 100644 releasenotes/notes/trustee-conf-section-5994dcd48a9744d7.yaml delete mode 100644 releasenotes/notes/updating-plugins-versions-b8d27764178c3cdd.yaml delete mode 100644 releasenotes/notes/vanilla-2.7.5-support-ffeeb88fc4be34b4.yaml delete mode 100644 releasenotes/notes/vanilla-2.8.2-support-84c89aad31105584.yaml delete mode 100644 releasenotes/notes/zookeeper-configuration-steps-48c3d9706c86f227.yaml delete mode 100644 releasenotes/source/2023.1.rst delete mode 100644 releasenotes/source/2023.2.rst delete mode 100644 releasenotes/source/_static/.placeholder delete mode 100644 releasenotes/source/_templates/.placeholder delete mode 100644 releasenotes/source/conf.py delete mode 100644 releasenotes/source/index.rst delete mode 100644 releasenotes/source/liberty.rst delete mode 100644 releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po delete mode 100644 releasenotes/source/locale/fr/LC_MESSAGES/releasenotes.po delete mode 100644 releasenotes/source/locale/it/LC_MESSAGES/releasenotes.po delete mode 100644 releasenotes/source/mitaka.rst delete mode 100644 releasenotes/source/newton.rst delete mode 100644 releasenotes/source/ocata.rst delete mode 100644 releasenotes/source/pike.rst delete mode 100644 releasenotes/source/queens.rst delete mode 100644 releasenotes/source/rocky.rst delete mode 100644 releasenotes/source/stein.rst delete mode 100644 releasenotes/source/train.rst delete mode 100644 releasenotes/source/unreleased.rst delete mode 100644 releasenotes/source/ussuri.rst delete mode 100644 releasenotes/source/victoria.rst delete mode 100644 releasenotes/source/wallaby.rst delete mode 100644 releasenotes/source/xena.rst delete mode 100644 releasenotes/source/yoga.rst delete mode 100644 releasenotes/source/zed.rst delete mode 100644 requirements.txt delete mode 100644 roles/build-sahara-images-cli/README.rst delete mode 100644 roles/build-sahara-images-cli/defaults/main.yaml delete mode 100644 roles/build-sahara-images-cli/tasks/main.yaml delete mode 100644 sahara/__init__.py delete mode 100644 sahara/api/__init__.py delete mode 100644 sahara/api/acl.py delete mode 100644 sahara/api/base.py delete mode 100644 sahara/api/microversion.py delete mode 100644 sahara/api/middleware/__init__.py delete mode 100644 sahara/api/middleware/auth_valid.py delete mode 100644 sahara/api/middleware/sahara_middleware.py delete mode 100644 sahara/api/middleware/version_discovery.py delete mode 100644 sahara/api/v10.py delete mode 100644 sahara/api/v11.py delete mode 100644 sahara/api/v2/__init__.py delete mode 100644 sahara/api/v2/cluster_templates.py delete mode 100644 sahara/api/v2/clusters.py delete mode 100644 sahara/api/v2/data_sources.py delete mode 100644 sahara/api/v2/images.py delete mode 100644 
sahara/api/v2/job_binaries.py delete mode 100644 sahara/api/v2/job_templates.py delete mode 100644 sahara/api/v2/job_types.py delete mode 100644 sahara/api/v2/jobs.py delete mode 100644 sahara/api/v2/node_group_templates.py delete mode 100644 sahara/api/v2/plugins.py delete mode 100644 sahara/cli/__init__.py delete mode 100644 sahara/cli/image_pack/__init__.py delete mode 100644 sahara/cli/image_pack/api.py delete mode 100644 sahara/cli/image_pack/cli.py delete mode 100644 sahara/cli/sahara_all.py delete mode 100644 sahara/cli/sahara_api.py delete mode 100644 sahara/cli/sahara_engine.py delete mode 100644 sahara/cli/sahara_status.py delete mode 100644 sahara/cli/sahara_subprocess.py delete mode 100644 sahara/common/__init__.py delete mode 100644 sahara/common/config.py delete mode 100644 sahara/common/policies/__init__.py delete mode 100644 sahara/common/policies/base.py delete mode 100644 sahara/common/policies/cluster.py delete mode 100644 sahara/common/policies/cluster_template.py delete mode 100644 sahara/common/policies/cluster_templates.py delete mode 100644 sahara/common/policies/clusters.py delete mode 100644 sahara/common/policies/data_source.py delete mode 100644 sahara/common/policies/data_sources.py delete mode 100644 sahara/common/policies/image.py delete mode 100644 sahara/common/policies/images.py delete mode 100644 sahara/common/policies/job.py delete mode 100644 sahara/common/policies/job_binaries.py delete mode 100644 sahara/common/policies/job_binary.py delete mode 100644 sahara/common/policies/job_binary_internals.py delete mode 100644 sahara/common/policies/job_executions.py delete mode 100644 sahara/common/policies/job_template.py delete mode 100644 sahara/common/policies/job_type.py delete mode 100644 sahara/common/policies/job_types.py delete mode 100644 sahara/common/policies/jobs.py delete mode 100644 sahara/common/policies/node_group_template.py delete mode 100644 sahara/common/policies/node_group_templates.py delete mode 100644 sahara/common/policies/plugin.py delete mode 100644 sahara/common/policies/plugins.py delete mode 100644 sahara/conductor/__init__.py delete mode 100644 sahara/conductor/api.py delete mode 100644 sahara/conductor/manager.py delete mode 100644 sahara/conductor/objects.py delete mode 100644 sahara/conductor/resource.py delete mode 100644 sahara/config.py delete mode 100644 sahara/context.py delete mode 100644 sahara/db/__init__.py delete mode 100644 sahara/db/api.py delete mode 100644 sahara/db/base.py delete mode 100644 sahara/db/migration/__init__.py delete mode 100644 sahara/db/migration/alembic.ini delete mode 100644 sahara/db/migration/alembic_migrations/README.md delete mode 100644 sahara/db/migration/alembic_migrations/env.py delete mode 100644 sahara/db/migration/alembic_migrations/script.py.mako delete mode 100644 sahara/db/migration/alembic_migrations/versions/001_icehouse.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/002_placeholder.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/003_placeholder.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/004_placeholder.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/005_placeholder.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/006_placeholder.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/007_increase_status_description_size.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/008_security_groups.py delete mode 100644 
sahara/db/migration/alembic_migrations/versions/009_rollback_info.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/010_auto_security_groups.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/011_sahara_info.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/012_availability_zone.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/013_volumes_availability_zone.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/014_add_volume_type.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/015_add_events_objects.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/016_is_proxy_gateway.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/017_drop_progress.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/018_volume_local_to_instance.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/019_is_default_for_templates.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/020_remove_redandunt_progress_ops.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/021_datasource_placeholders.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/022_add_job_interface.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/023_add_use_autoconfig.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/024_manila_shares.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/025_increase_ip_column_size.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/026_add_is_public_is_protected.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/027_rename_oozie_job_id.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/028_storage_devices_number.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/029_set_is_protected_on_is_default.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/030-health-check.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/031_added_plugins_table.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/032_add_domain_name.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/033_add_anti_affinity_ratio_field_to_cluster.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/034_boot_from_volume.py delete mode 100644 sahara/db/migration/alembic_migrations/versions/035_boot_from_volume_enhancements.py delete mode 100644 sahara/db/migration/cli.py delete mode 100644 sahara/db/sqlalchemy/__init__.py delete mode 100644 sahara/db/sqlalchemy/api.py delete mode 100644 sahara/db/sqlalchemy/model_base.py delete mode 100644 sahara/db/sqlalchemy/models.py delete mode 100644 sahara/db/sqlalchemy/types.py delete mode 100644 sahara/db/templates/README.rst delete mode 100644 sahara/db/templates/__init__.py delete mode 100644 sahara/db/templates/api.py delete mode 100644 sahara/db/templates/cli.py delete mode 100644 sahara/db/templates/utils.py delete mode 100644 sahara/exceptions.py delete mode 100644 sahara/i18n.py delete mode 100644 sahara/locale/de/LC_MESSAGES/sahara.po delete mode 100644 sahara/main.py delete mode 100644 sahara/plugins/__init__.py delete mode 100644 sahara/plugins/base.py delete mode 100644 sahara/plugins/castellan_utils.py delete mode 100644 sahara/plugins/conductor.py delete mode 100644 sahara/plugins/context.py delete mode 100644 sahara/plugins/db.py delete mode 100644 
sahara/plugins/default_templates/ambari/v2_3/cluster.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_3/master-edp.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_3/master.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_3/worker.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_4/cluster.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_4/master-edp.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_4/master.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_4/worker.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_5/cluster.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_5/master-edp.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_5/master.json delete mode 100644 sahara/plugins/default_templates/ambari/v2_5/worker.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_5_0/cluster.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_5_0/manager.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_5_0/master-additional.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_5_0/master-core.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_5_0/worker-nm-dn.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_7_0/cluster.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_7_0/manager.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_7_0/master-additional.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_7_0/master-core.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_7_0/worker-nm-dn.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_9_0/cluster.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_9_0/manager.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_9_0/master-additional.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_9_0/master-core.json delete mode 100644 sahara/plugins/default_templates/cdh/v5_9_0/worker-nm-dn.json delete mode 100644 sahara/plugins/default_templates/mapr/5_0_0_mrv2/cluster.json delete mode 100644 sahara/plugins/default_templates/mapr/5_0_0_mrv2/master.json delete mode 100644 sahara/plugins/default_templates/mapr/5_0_0_mrv2/worker.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_1_0_mrv2/cluster.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_1_0_mrv2/master.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_1_0_mrv2/worker.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_2_0_mrv2/cluster.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_2_0_mrv2/master.json delete mode 100644 sahara/plugins/default_templates/mapr/v5_2_0_mrv2/worker.json delete mode 100644 sahara/plugins/default_templates/spark/v1_3_1/cluster.json delete mode 100644 sahara/plugins/default_templates/spark/v1_3_1/master.json delete mode 100644 sahara/plugins/default_templates/spark/v1_3_1/slave.json delete mode 100644 sahara/plugins/default_templates/spark/v1_6_0/cluster.json delete mode 100644 sahara/plugins/default_templates/spark/v1_6_0/master.json delete mode 100644 sahara/plugins/default_templates/spark/v1_6_0/slave.json delete mode 100644 sahara/plugins/default_templates/spark/v2_1_0/cluster.json delete mode 100644 sahara/plugins/default_templates/spark/v2_1_0/master.json delete mode 100644 sahara/plugins/default_templates/spark/v2_1_0/slave.json delete mode 100644 
sahara/plugins/default_templates/storm/v1_0_1/cluster.json delete mode 100644 sahara/plugins/default_templates/storm/v1_0_1/master.json delete mode 100644 sahara/plugins/default_templates/storm/v1_0_1/slave.json delete mode 100644 sahara/plugins/default_templates/storm/v1_1_0/cluster.json delete mode 100644 sahara/plugins/default_templates/storm/v1_1_0/master.json delete mode 100644 sahara/plugins/default_templates/storm/v1_1_0/slave.json delete mode 100644 sahara/plugins/default_templates/template.conf delete mode 100644 sahara/plugins/default_templates/vanilla/v2_7_1/cluster.json delete mode 100644 sahara/plugins/default_templates/vanilla/v2_7_1/master.json delete mode 100644 sahara/plugins/default_templates/vanilla/v2_7_1/worker.json delete mode 100644 sahara/plugins/edp.py delete mode 100644 sahara/plugins/exceptions.py delete mode 100644 sahara/plugins/fake/__init__.py delete mode 100644 sahara/plugins/fake/edp_engine.py delete mode 100644 sahara/plugins/fake/plugin.py delete mode 100644 sahara/plugins/health_check_base.py delete mode 100644 sahara/plugins/images.py delete mode 100644 sahara/plugins/kerberos.py delete mode 100644 sahara/plugins/labels.py delete mode 100644 sahara/plugins/main.py delete mode 100644 sahara/plugins/objects.py delete mode 100644 sahara/plugins/opts.py delete mode 100644 sahara/plugins/provisioning.py delete mode 100644 sahara/plugins/recommendations_utils.py delete mode 100644 sahara/plugins/resource.py delete mode 100644 sahara/plugins/resources/create-principal-keytab delete mode 100644 sahara/plugins/resources/cron-file delete mode 100644 sahara/plugins/resources/cron-script delete mode 100644 sahara/plugins/resources/kdc_conf delete mode 100644 sahara/plugins/resources/kdc_conf_redhat delete mode 100644 sahara/plugins/resources/krb-client-init.sh.template delete mode 100644 sahara/plugins/resources/krb5_config delete mode 100644 sahara/plugins/resources/mit-kdc-server-init.sh.template delete mode 100644 sahara/plugins/service_api.py delete mode 100644 sahara/plugins/swift_helper.py delete mode 100644 sahara/plugins/swift_utils.py delete mode 100644 sahara/plugins/testutils.py delete mode 100644 sahara/plugins/topology_helper.py delete mode 100644 sahara/plugins/utils.py delete mode 100644 sahara/service/__init__.py delete mode 100644 sahara/service/api/__init__.py delete mode 100644 sahara/service/api/v10.py delete mode 100644 sahara/service/api/v11.py delete mode 100644 sahara/service/api/v2/__init__.py delete mode 100644 sahara/service/api/v2/cluster_templates.py delete mode 100644 sahara/service/api/v2/clusters.py delete mode 100644 sahara/service/api/v2/data_sources.py delete mode 100644 sahara/service/api/v2/images.py delete mode 100644 sahara/service/api/v2/job_binaries.py delete mode 100644 sahara/service/api/v2/job_templates.py delete mode 100644 sahara/service/api/v2/job_types.py delete mode 100644 sahara/service/api/v2/jobs.py delete mode 100644 sahara/service/api/v2/node_group_templates.py delete mode 100644 sahara/service/api/v2/plugins.py delete mode 100644 sahara/service/castellan/__init__.py delete mode 100644 sahara/service/castellan/config.py delete mode 100644 sahara/service/castellan/sahara_key_manager.py delete mode 100644 sahara/service/castellan/utils.py delete mode 100644 sahara/service/coordinator.py delete mode 100644 sahara/service/edp/__init__.py delete mode 100644 sahara/service/edp/base_engine.py delete mode 100644 sahara/service/edp/binary_retrievers/__init__.py delete mode 100644 
sahara/service/edp/binary_retrievers/dispatch.py delete mode 100644 sahara/service/edp/binary_retrievers/internal_swift.py delete mode 100644 sahara/service/edp/binary_retrievers/manila_share.py delete mode 100644 sahara/service/edp/binary_retrievers/s3_storage.py delete mode 100644 sahara/service/edp/binary_retrievers/sahara_db.py delete mode 100644 sahara/service/edp/data_sources/__init__.py delete mode 100644 sahara/service/edp/data_sources/base.py delete mode 100644 sahara/service/edp/data_sources/hdfs/__init__.py delete mode 100644 sahara/service/edp/data_sources/hdfs/implementation.py delete mode 100644 sahara/service/edp/data_sources/manager.py delete mode 100644 sahara/service/edp/data_sources/manila/__init__.py delete mode 100644 sahara/service/edp/data_sources/manila/implementation.py delete mode 100644 sahara/service/edp/data_sources/maprfs/__init__.py delete mode 100644 sahara/service/edp/data_sources/maprfs/implementation.py delete mode 100644 sahara/service/edp/data_sources/opts.py delete mode 100644 sahara/service/edp/data_sources/s3/__init__.py delete mode 100644 sahara/service/edp/data_sources/s3/implementation.py delete mode 100644 sahara/service/edp/data_sources/swift/__init__.py delete mode 100644 sahara/service/edp/data_sources/swift/implementation.py delete mode 100644 sahara/service/edp/hdfs_helper.py delete mode 100644 sahara/service/edp/job_binaries/__init__.py delete mode 100644 sahara/service/edp/job_binaries/base.py delete mode 100644 sahara/service/edp/job_binaries/internal_db/__init__.py delete mode 100644 sahara/service/edp/job_binaries/internal_db/implementation.py delete mode 100644 sahara/service/edp/job_binaries/manager.py delete mode 100644 sahara/service/edp/job_binaries/manila/__init__.py delete mode 100644 sahara/service/edp/job_binaries/manila/implementation.py delete mode 100644 sahara/service/edp/job_binaries/opts.py delete mode 100644 sahara/service/edp/job_binaries/s3/__init__.py delete mode 100644 sahara/service/edp/job_binaries/s3/implementation.py delete mode 100644 sahara/service/edp/job_binaries/swift/__init__.py delete mode 100644 sahara/service/edp/job_binaries/swift/implementation.py delete mode 100644 sahara/service/edp/job_manager.py delete mode 100644 sahara/service/edp/job_utils.py delete mode 100644 sahara/service/edp/oozie/__init__.py delete mode 100644 sahara/service/edp/oozie/engine.py delete mode 100644 sahara/service/edp/oozie/oozie.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/__init__.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/base_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/hive_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/java_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/mapreduce_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/pig_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/shell_workflow.py delete mode 100644 sahara/service/edp/oozie/workflow_creator/workflow_factory.py delete mode 100644 sahara/service/edp/resources/edp-main-wrapper.jar delete mode 100644 sahara/service/edp/resources/edp-spark-wrapper.jar delete mode 100644 sahara/service/edp/resources/hive-default.xml delete mode 100644 sahara/service/edp/resources/launch_command.py delete mode 100644 sahara/service/edp/resources/mapred-default.xml delete mode 100644 sahara/service/edp/resources/mapred-job-config.xml delete mode 100644 sahara/service/edp/resources/workflow.xml delete mode 100644 
sahara/service/edp/s3_common.py delete mode 100644 sahara/service/edp/shares.py delete mode 100644 sahara/service/edp/spark/__init__.py delete mode 100644 sahara/service/edp/spark/engine.py delete mode 100644 sahara/service/edp/storm/__init__.py delete mode 100644 sahara/service/edp/storm/engine.py delete mode 100644 sahara/service/edp/utils/__init__.py delete mode 100644 sahara/service/edp/utils/shares.py delete mode 100644 sahara/service/engine.py delete mode 100644 sahara/service/health/__init__.py delete mode 100644 sahara/service/health/common.py delete mode 100644 sahara/service/health/verification_base.py delete mode 100644 sahara/service/heat/__init__.py delete mode 100644 sahara/service/heat/commons.py delete mode 100644 sahara/service/heat/heat_engine.py delete mode 100644 sahara/service/heat/templates.py delete mode 100644 sahara/service/networks.py delete mode 100644 sahara/service/ntp_service.py delete mode 100644 sahara/service/ops.py delete mode 100644 sahara/service/periodic.py delete mode 100644 sahara/service/quotas.py delete mode 100644 sahara/service/sessions.py delete mode 100644 sahara/service/trusts.py delete mode 100644 sahara/service/validation.py delete mode 100644 sahara/service/validations/__init__.py delete mode 100644 sahara/service/validations/acl.py delete mode 100644 sahara/service/validations/base.py delete mode 100644 sahara/service/validations/cluster_template_schema.py delete mode 100644 sahara/service/validations/cluster_templates.py delete mode 100644 sahara/service/validations/clusters.py delete mode 100644 sahara/service/validations/clusters_scaling.py delete mode 100644 sahara/service/validations/clusters_schema.py delete mode 100644 sahara/service/validations/edp/__init__.py delete mode 100644 sahara/service/validations/edp/base.py delete mode 100644 sahara/service/validations/edp/data_source.py delete mode 100644 sahara/service/validations/edp/data_source_schema.py delete mode 100644 sahara/service/validations/edp/job.py delete mode 100644 sahara/service/validations/edp/job_binary.py delete mode 100644 sahara/service/validations/edp/job_binary_internal.py delete mode 100644 sahara/service/validations/edp/job_binary_internal_schema.py delete mode 100644 sahara/service/validations/edp/job_binary_schema.py delete mode 100644 sahara/service/validations/edp/job_execution.py delete mode 100644 sahara/service/validations/edp/job_execution_schema.py delete mode 100644 sahara/service/validations/edp/job_interface.py delete mode 100644 sahara/service/validations/edp/job_schema.py delete mode 100644 sahara/service/validations/images.py delete mode 100644 sahara/service/validations/node_group_template_schema.py delete mode 100644 sahara/service/validations/node_group_templates.py delete mode 100644 sahara/service/validations/plugins.py delete mode 100644 sahara/service/validations/shares.py delete mode 100644 sahara/service/volumes.py delete mode 100644 sahara/swift/__init__.py delete mode 100644 sahara/swift/resources/conf-template.xml delete mode 100644 sahara/swift/swift_helper.py delete mode 100644 sahara/swift/utils.py delete mode 100644 sahara/tests/README.rst delete mode 100644 sahara/tests/__init__.py delete mode 100644 sahara/tests/unit/__init__.py delete mode 100644 sahara/tests/unit/api/__init__.py delete mode 100644 sahara/tests/unit/api/middleware/__init__.py delete mode 100644 sahara/tests/unit/api/middleware/test_auth_valid.py delete mode 100644 sahara/tests/unit/api/test_acl.py delete mode 100644 sahara/tests/unit/base.py delete mode 100644 
sahara/tests/unit/cli/__init__.py delete mode 100644 sahara/tests/unit/cli/image_pack/__init__.py delete mode 100644 sahara/tests/unit/cli/image_pack/test_image_pack_api.py delete mode 100644 sahara/tests/unit/cli/test_sahara_cli.py delete mode 100644 sahara/tests/unit/cli/test_sahara_status.py delete mode 100644 sahara/tests/unit/conductor/__init__.py delete mode 100644 sahara/tests/unit/conductor/base.py delete mode 100644 sahara/tests/unit/conductor/manager/__init__.py delete mode 100644 sahara/tests/unit/conductor/manager/test_clusters.py delete mode 100644 sahara/tests/unit/conductor/manager/test_defaults.py delete mode 100644 sahara/tests/unit/conductor/manager/test_edp.py delete mode 100644 sahara/tests/unit/conductor/manager/test_edp_interface.py delete mode 100644 sahara/tests/unit/conductor/manager/test_from_template.py delete mode 100644 sahara/tests/unit/conductor/manager/test_templates.py delete mode 100644 sahara/tests/unit/conductor/test_api.py delete mode 100644 sahara/tests/unit/conductor/test_resource.py delete mode 100644 sahara/tests/unit/db/__init__.py delete mode 100644 sahara/tests/unit/db/migration/__init__.py delete mode 100644 sahara/tests/unit/db/migration/test_db_manage_cli.py delete mode 100644 sahara/tests/unit/db/migration/test_migrations.py delete mode 100644 sahara/tests/unit/db/migration/test_migrations_base.py delete mode 100644 sahara/tests/unit/db/sqlalchemy/__init__.py delete mode 100644 sahara/tests/unit/db/sqlalchemy/test_types.py delete mode 100644 sahara/tests/unit/db/templates/__init__.py delete mode 100644 sahara/tests/unit/db/templates/common.py delete mode 100644 sahara/tests/unit/db/templates/test_delete.py delete mode 100644 sahara/tests/unit/db/templates/test_update.py delete mode 100644 sahara/tests/unit/db/templates/test_utils.py delete mode 100644 sahara/tests/unit/db/test_utils.py delete mode 100644 sahara/tests/unit/plugins/__init__.py delete mode 100644 sahara/tests/unit/plugins/test_base_plugins_support.py delete mode 100644 sahara/tests/unit/plugins/test_images.py delete mode 100644 sahara/tests/unit/plugins/test_kerberos.py delete mode 100644 sahara/tests/unit/plugins/test_labels.py delete mode 100644 sahara/tests/unit/plugins/test_provide_recommendations.py delete mode 100644 sahara/tests/unit/plugins/test_provisioning.py delete mode 100644 sahara/tests/unit/plugins/test_utils.py delete mode 100644 sahara/tests/unit/resources/dfs_admin_0_nodes.txt delete mode 100644 sahara/tests/unit/resources/dfs_admin_1_nodes.txt delete mode 100644 sahara/tests/unit/resources/dfs_admin_3_nodes.txt delete mode 100644 sahara/tests/unit/resources/test-default.xml delete mode 100644 sahara/tests/unit/service/__init__.py delete mode 100644 sahara/tests/unit/service/api/__init__.py delete mode 100644 sahara/tests/unit/service/api/test_v10.py delete mode 100644 sahara/tests/unit/service/api/v2/__init__.py delete mode 100644 sahara/tests/unit/service/api/v2/base.py delete mode 100644 sahara/tests/unit/service/api/v2/test_clusters.py delete mode 100644 sahara/tests/unit/service/api/v2/test_images.py delete mode 100644 sahara/tests/unit/service/api/v2/test_plugins.py delete mode 100644 sahara/tests/unit/service/castellan/__init__.py delete mode 100644 sahara/tests/unit/service/castellan/test_sahara_key_manager.py delete mode 100644 sahara/tests/unit/service/edp/__init__.py delete mode 100644 sahara/tests/unit/service/edp/binary_retrievers/__init__.py delete mode 100644 sahara/tests/unit/service/edp/binary_retrievers/test_dispatch.py delete mode 100644 
sahara/tests/unit/service/edp/binary_retrievers/test_internal_swift.py delete mode 100644 sahara/tests/unit/service/edp/binary_retrievers/test_manila.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/base_test.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/data_source_manager_support_test.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/hdfs/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/hdfs/test_hdfs_type.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/manila/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/manila/test_manila_type.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/maprfs/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/maprfs/test_maprfs_type_validation.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/s3/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/s3/test_s3_type.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/swift/__init__.py delete mode 100644 sahara/tests/unit/service/edp/data_sources/swift/test_swift_type.py delete mode 100644 sahara/tests/unit/service/edp/edp_test_utils.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/__init__.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/internal_db/__init__.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/internal_db/test_internal_db_type.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/job_binary_manager_support.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/manila/__init__.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/manila/test_manila_type.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/s3/__init__.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/s3/test_s3_type.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/swift/__init__.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/swift/test_swift_type.py delete mode 100644 sahara/tests/unit/service/edp/job_binaries/test_base.py delete mode 100644 sahara/tests/unit/service/edp/oozie/__init__.py delete mode 100644 sahara/tests/unit/service/edp/oozie/test_oozie.py delete mode 100644 sahara/tests/unit/service/edp/spark/__init__.py delete mode 100644 sahara/tests/unit/service/edp/spark/base.py delete mode 100644 sahara/tests/unit/service/edp/storm/__init__.py delete mode 100644 sahara/tests/unit/service/edp/storm/test_storm.py delete mode 100644 sahara/tests/unit/service/edp/test_hdfs_helper.py delete mode 100644 sahara/tests/unit/service/edp/test_job_manager.py delete mode 100644 sahara/tests/unit/service/edp/test_job_possible_configs.py delete mode 100644 sahara/tests/unit/service/edp/test_job_utils.py delete mode 100644 sahara/tests/unit/service/edp/test_json_api_examples.py delete mode 100644 sahara/tests/unit/service/edp/test_s3_common.py delete mode 100644 sahara/tests/unit/service/edp/utils/test_shares.py delete mode 100644 sahara/tests/unit/service/edp/workflow_creator/__init__.py delete mode 100644 sahara/tests/unit/service/edp/workflow_creator/test_create_workflow.py delete mode 100644 sahara/tests/unit/service/health/__init__.py delete mode 100644 sahara/tests/unit/service/health/test_verification_base.py delete mode 100644 sahara/tests/unit/service/heat/__init__.py delete mode 100644 
sahara/tests/unit/service/heat/test_templates.py delete mode 100644 sahara/tests/unit/service/test_coordinator.py delete mode 100644 sahara/tests/unit/service/test_engine.py delete mode 100644 sahara/tests/unit/service/test_networks.py delete mode 100644 sahara/tests/unit/service/test_ntp_service.py delete mode 100644 sahara/tests/unit/service/test_ops.py delete mode 100644 sahara/tests/unit/service/test_periodic.py delete mode 100644 sahara/tests/unit/service/test_quotas.py delete mode 100644 sahara/tests/unit/service/test_sessions.py delete mode 100644 sahara/tests/unit/service/test_trusts.py delete mode 100644 sahara/tests/unit/service/test_volumes.py delete mode 100644 sahara/tests/unit/service/validation/__init__.py delete mode 100644 sahara/tests/unit/service/validation/edp/__init__.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_data_source.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_job.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_job_binary.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_job_binary_internal.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_job_executor.py delete mode 100644 sahara/tests/unit/service/validation/edp/test_job_interface.py delete mode 100644 sahara/tests/unit/service/validation/test_add_tags_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_create_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_delete_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_scaling_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_template_create_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_template_update_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_cluster_update_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_ng_template_validation_create.py delete mode 100644 sahara/tests/unit/service/validation/test_ng_template_validation_update.py delete mode 100644 sahara/tests/unit/service/validation/test_protected_validation.py delete mode 100644 sahara/tests/unit/service/validation/test_share_validations.py delete mode 100644 sahara/tests/unit/service/validation/test_validation.py delete mode 100644 sahara/tests/unit/service/validation/utils.py delete mode 100644 sahara/tests/unit/swift/__init__.py delete mode 100644 sahara/tests/unit/swift/test_swift_helper.py delete mode 100644 sahara/tests/unit/swift/test_utils.py delete mode 100644 sahara/tests/unit/test_context.py delete mode 100644 sahara/tests/unit/test_exceptions.py delete mode 100644 sahara/tests/unit/test_main.py delete mode 100644 sahara/tests/unit/testutils.py delete mode 100644 sahara/tests/unit/topology/__init__.py delete mode 100644 sahara/tests/unit/topology/test_topology.py delete mode 100644 sahara/tests/unit/utils/__init__.py delete mode 100644 sahara/tests/unit/utils/notification/__init__.py delete mode 100644 sahara/tests/unit/utils/notification/test_sender.py delete mode 100644 sahara/tests/unit/utils/openstack/__init__.py delete mode 100644 sahara/tests/unit/utils/openstack/test_base.py delete mode 100644 sahara/tests/unit/utils/openstack/test_heat.py delete mode 100644 sahara/tests/unit/utils/openstack/test_images.py delete mode 100644 sahara/tests/unit/utils/openstack/test_swift.py delete mode 100644 sahara/tests/unit/utils/test_api.py delete mode 100644 
sahara/tests/unit/utils/test_api_validator.py delete mode 100644 sahara/tests/unit/utils/test_cinder.py delete mode 100644 sahara/tests/unit/utils/test_cluster.py delete mode 100644 sahara/tests/unit/utils/test_cluster_progress_ops.py delete mode 100644 sahara/tests/unit/utils/test_configs.py delete mode 100644 sahara/tests/unit/utils/test_crypto.py delete mode 100644 sahara/tests/unit/utils/test_edp.py delete mode 100644 sahara/tests/unit/utils/test_general.py delete mode 100644 sahara/tests/unit/utils/test_hacking.py delete mode 100644 sahara/tests/unit/utils/test_heat.py delete mode 100644 sahara/tests/unit/utils/test_neutron.py delete mode 100644 sahara/tests/unit/utils/test_patches.py delete mode 100644 sahara/tests/unit/utils/test_poll_utils.py delete mode 100644 sahara/tests/unit/utils/test_proxy.py delete mode 100644 sahara/tests/unit/utils/test_resources.py delete mode 100644 sahara/tests/unit/utils/test_rpc.py delete mode 100644 sahara/tests/unit/utils/test_ssh_remote.py delete mode 100644 sahara/tests/unit/utils/test_types.py delete mode 100644 sahara/tests/unit/utils/test_xml_utils.py delete mode 100644 sahara/topology/__init__.py delete mode 100644 sahara/topology/resources/core-template.xml delete mode 100644 sahara/topology/resources/mapred-template.xml delete mode 100644 sahara/topology/topology_helper.py delete mode 100644 sahara/utils/__init__.py delete mode 100644 sahara/utils/api.py delete mode 100644 sahara/utils/api_validator.py delete mode 100644 sahara/utils/cluster.py delete mode 100644 sahara/utils/cluster_progress_ops.py delete mode 100644 sahara/utils/configs.py delete mode 100644 sahara/utils/crypto.py delete mode 100644 sahara/utils/edp.py delete mode 100644 sahara/utils/files.py delete mode 100644 sahara/utils/general.py delete mode 100644 sahara/utils/hacking/__init__.py delete mode 100644 sahara/utils/hacking/checks.py delete mode 100644 sahara/utils/hacking/commit_message.py delete mode 100644 sahara/utils/hacking/logging_checks.py delete mode 100644 sahara/utils/network.py delete mode 100644 sahara/utils/notification/__init__.py delete mode 100644 sahara/utils/notification/sender.py delete mode 100644 sahara/utils/openstack/__init__.py delete mode 100644 sahara/utils/openstack/base.py delete mode 100644 sahara/utils/openstack/cinder.py delete mode 100644 sahara/utils/openstack/glance.py delete mode 100644 sahara/utils/openstack/heat.py delete mode 100644 sahara/utils/openstack/images.py delete mode 100644 sahara/utils/openstack/keystone.py delete mode 100644 sahara/utils/openstack/manila.py delete mode 100644 sahara/utils/openstack/neutron.py delete mode 100644 sahara/utils/openstack/nova.py delete mode 100644 sahara/utils/openstack/swift.py delete mode 100644 sahara/utils/patches.py delete mode 100644 sahara/utils/poll_utils.py delete mode 100644 sahara/utils/procutils.py delete mode 100644 sahara/utils/proxy.py delete mode 100644 sahara/utils/remote.py delete mode 100644 sahara/utils/resources.py delete mode 100644 sahara/utils/rpc.py delete mode 100644 sahara/utils/ssh_remote.py delete mode 100644 sahara/utils/tempfiles.py delete mode 100644 sahara/utils/types.py delete mode 100644 sahara/utils/wsgi.py delete mode 100644 sahara/utils/xmlutils.py delete mode 100644 sahara/version.py delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 test-requirements.txt delete mode 100644 tools/config/config-generator.sahara.conf delete mode 100644 tools/config/sahara-policy-generator.conf delete mode 100755 tools/cover.sh delete mode 100755 
tools/gate/build-images
delete mode 100755 tools/lintstack.py
delete mode 100755 tools/lintstack.sh
delete mode 100755 tools/test-setup.sh
delete mode 100644 tox.ini

diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index d1aa06385e..0000000000
--- a/.coveragerc
+++ /dev/null
@@ -1,13 +0,0 @@
-[run]
-branch = True
-source = sahara
-omit =
-    .tox/*
-    sahara/tests/*
-
-[paths]
-source = sahara
-
-[report]
-ignore_errors = True
-precision = 3
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index f7c82e2e69..0000000000
--- a/.gitignore
+++ /dev/null
@@ -1,31 +0,0 @@
-*.egg-info
-*.egg[s]
-*.log
-*.py[co]
-*.un~
-.coverage
-.testrepository
-.tox
-.stestr
-.venv
-.idea
-AUTHORS
-ChangeLog
-build
-cover
-develop-eggs
-dist
-doc/build
-doc/html
-eggs
-etc/sahara.conf
-etc/sahara/*.conf
-etc/sahara/*.topology
-sdist
-target
-tools/lintstack.head.py
-tools/pylint_exceptions
-doc/source/sample.config
-
-# Files created by releasenotes build
-releasenotes/build
diff --git a/.stestr.conf b/.stestr.conf
deleted file mode 100644
index d81c293e07..0000000000
--- a/.stestr.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-[DEFAULT]
-test_path=./sahara/tests/unit
-top_dir=./
diff --git a/.zuul.yaml b/.zuul.yaml
deleted file mode 100644
index 925f7b53c4..0000000000
--- a/.zuul.yaml
+++ /dev/null
@@ -1,155 +0,0 @@
-- project:
-    queue: sahara
-    templates:
-      - openstack-python3-jobs
-      - periodic-stable-jobs
-      - publish-openstack-docs-pti
-      - check-requirements
-      - release-notes-jobs-python3
-    check:
-      jobs:
-        - openstack-tox-pylint:
-            voting: false
-        - sahara-tests-scenario:
-            voting: false
-        - sahara-tests-scenario-v2:
-            voting: false
-        - sahara-tests-tempest:
-            voting: false
-        - sahara-tests-tempest-v2:
-            voting: false
-        - openstack-tox-py38:
-            voting: false
-        - openstack-tox-py311:
-            voting: false
-        - openstack-tox-cover:
-            voting: false
-        - openstack-ansible-deploy-aio_sahara_metal-ubuntu-focal:
-            voting: false
-    gate:
-      jobs:
-        - sahara-tests-scenario:
-            voting: false
-        - sahara-tests-scenario-v2:
-            voting: false
-        - sahara-tests-tempest:
-            voting: false
-        - sahara-tests-tempest-v2:
-            voting: false
-        - openstack-tox-py38:
-            voting: false
-        - openstack-tox-py311:
-            voting: false
-        # - sahara-grenade
-        # - openstack-ansible-deploy-aio_sahara_metal-ubuntu-focal
-    experimental:
-      jobs:
-        - sahara-buildimages-ambari
-        - sahara-buildimages-cloudera
-        - sahara-buildimages-mapr
-        - sahara-buildimages-spark
-        - sahara-tests-scenario-multinode-spark
-
-- job:
-    name: sahara-grenade
-    parent: grenade
-    required-projects:
-      - opendev.org/openstack/grenade
-      - opendev.org/openstack/sahara
-      - opendev.org/openstack/python-saharaclient
-      - opendev.org/openstack/heat
-      - opendev.org/openstack/heat-tempest-plugin
-      - opendev.org/openstack/python-heatclient
-      - opendev.org/openstack/sahara-tests
-      - opendev.org/openstack/sahara-plugin-ambari
-      - opendev.org/openstack/sahara-plugin-cdh
-      - opendev.org/openstack/sahara-plugin-mapr
-      - opendev.org/openstack/sahara-plugin-spark
-      - opendev.org/openstack/sahara-plugin-storm
-      - opendev.org/openstack/sahara-plugin-vanilla
-    vars:
-      grenade_localrc:
-        RUN_HEAT_INTEGRATION_TESTS: False
-      grenade_devstack_localrc:
-        shared:
-          IMAGE_URLS: http://tarballs.openstack.org/heat-test-image/fedora-heat-test-image.qcow2,https://cloud-images.ubuntu.com/xenial/current/xenial-server-cloudimg-amd64-disk1.img
-      devstack_local_conf:
-        test-config:
-          $TEMPEST_CONFIG:
-            data_processing:
-              test_image_name: xenial-server-cloudimg-amd64-disk1
-              test_ssh_user: ubuntu
-            data-processing-feature-enabled:
-              s3: True
-      devstack_plugins:
-        sahara: https://opendev.org/openstack/sahara
-        heat: https://opendev.org/openstack/heat
-      devstack_services:
-        h-api: true
-        h-api-cfn: true
-        h-eng: true
-        heat: true
-        tls-proxy: false
-      tempest_plugins:
-        - sahara-tests
-        - heat-tempest-plugin
-      tempest_test_regex: ^(sahara_tempest_plugin.tests.)
-      tox_envlist: all
-    group-vars:
-      subnode:
-        devstack_services:
-          tls-proxy: false
-    irrelevant-files:
-      - ^(test-|)requirements.txt$
-      - ^.*\.rst$
-      - ^doc/.*$
-      - ^sahara/locale/.*$
-      - ^sahara/tests/unit/.*$
-      - ^releasenotes/.*$
-      - ^tools/.*$
-      - ^tox.ini$
-
-- job:
-    name: openstack-ansible-deploy-aio_sahara_metal-ubuntu-focal
-    parent: openstack-ansible-deploy-aio
-    nodeset: ubuntu-focal
-
-- job:
-    name: sahara-buildimages-base
-    nodeset: centos-8-stream
-    vars:
-      sahara_src_dir: src/opendev.org/openstack/sahara
-    run: playbooks/buildimages/run.yaml
-    timeout: 7200
-    required-projects:
-      - openstack/sahara
-      - openstack/sahara-plugin-ambari
-      - openstack/sahara-plugin-cdh
-      - openstack/sahara-plugin-mapr
-      - openstack/sahara-plugin-spark
-      - openstack/sahara-plugin-storm
-      - openstack/sahara-plugin-vanilla
-
-- job:
-    name: sahara-buildimages-ambari
-    parent: sahara-buildimages-base
-    vars:
-      sahara_plugin: ambari
-
-- job:
-    name: sahara-buildimages-cloudera
-    parent: sahara-buildimages-base
-    vars:
-      sahara_plugin: cdh
-
-- job:
-    name: sahara-buildimages-mapr
-    parent: sahara-buildimages-base
-    vars:
-      sahara_plugin: mapr
-
-- job:
-    name: sahara-buildimages-spark
-    parent: sahara-buildimages-base
-    vars:
-      sahara_plugin: spark
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
deleted file mode 100644
index b7df2c28ca..0000000000
--- a/CONTRIBUTING.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-The source repository for this project can be found at:
-
-   https://opendev.org/openstack/sahara
-
-Pull requests submitted through GitHub are not monitored.
-
-To start contributing to OpenStack, follow the steps in the contribution guide
-to set up and use Gerrit:
-
-   https://docs.openstack.org/contributors/code-and-documentation/quick-start.html
-
-Bugs should be filed on Storyboard:
-
-   https://storyboard.openstack.org/#!/project/openstack/sahara
-
-For more specific information about contributing to this repository, see the
-sahara contributor guide:
-
-   https://docs.openstack.org/sahara/latest/contributor/contributing.html
diff --git a/HACKING.rst b/HACKING.rst
deleted file mode 100644
index ade538fcf4..0000000000
--- a/HACKING.rst
+++ /dev/null
@@ -1,45 +0,0 @@
-Sahara Style Commandments
-=========================
-
-- Step 1: Read the OpenStack Style Commandments
-  https://docs.openstack.org/hacking/latest/
-- Step 2: Read on
-
-Sahara Specific Commandments
-----------------------------
-
-Commit Messages
----------------
-Using a common format for commit messages will help keep our git history
-readable. Follow these guidelines:
-
-- [S365] First, provide a brief summary of 50 characters or less. Summaries
-  of greater than 72 characters will be rejected by the gate.
-
-- [S364] The first line of the commit message should provide an accurate
-  description of the change, not just a reference to a bug or blueprint.
-
-Imports
--------
-- [S366, S367] Organize your imports according to the ``Import order``
-
-Dictionaries/Lists
-------------------
-
-- [S360] Ensure default arguments are not mutable.
-- [S368] Must use a dict comprehension instead of a dict constructor with a
-  sequence of key-value pairs. For more information, please refer to
-  http://legacy.python.org/dev/peps/pep-0274/
-
-Logs
-----
-
-- [S373] Don't translate logs
-
-- [S374] You used a deprecated log level
-
-Importing json
---------------
-
-- [S375] It's more preferable to use ``jsonutils`` from ``oslo_serialization``
-  instead of ``json`` for operating with ``json`` objects.
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 67db858821..0000000000
--- a/LICENSE
+++ /dev/null
@@ -1,175 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/README.rst b/README.rst index e90cb1f577..4ee2c5f138 100644 --- a/README.rst +++ b/README.rst @@ -1,34 +1,10 @@ -======================== -Team and repository tags -======================== +This project is no longer maintained. -.. 
image:: https://governance.openstack.org/tc/badges/sahara.svg - :target: https://governance.openstack.org/tc/reference/tags/index.html +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". -.. Change things from this point on - -OpenStack Data Processing ("Sahara") project -============================================ - -Sahara at wiki.openstack.org: https://wiki.openstack.org/wiki/Sahara - -Storyboard project: https://storyboard.openstack.org/#!/project/935 - -Sahara docs site: https://docs.openstack.org/sahara/latest/ - -Roadmap: https://wiki.openstack.org/wiki/Sahara/Roadmap - -Quickstart guide: https://docs.openstack.org/sahara/latest/user/quickstart.html - -How to participate: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html - -Source: https://opendev.org/openstack/sahara - -Bugs and feature requests: https://storyboard.openstack.org/#!/project/935 - -Release notes: https://docs.openstack.org/releasenotes/sahara/ - -License -------- - -Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 +For any further questions, please email +openstack-discuss@lists.openstack.org or join #openstack-dev on +OFTC. diff --git a/api-ref/source/conf.py b/api-ref/source/conf.py deleted file mode 100644 index 1e2a0147d4..0000000000 --- a/api-ref/source/conf.py +++ /dev/null @@ -1,207 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# sahara documentation build configuration file, created Fri May 6 15:19:20 -# 2016. -# -# This file is execfile()d with the current directory set to -# its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import sys - -extensions = [ - 'os_api_ref', - 'openstackdocstheme' -] - -# openstackdocstheme options -repository_name = 'openstack/sahara' -use_storyboard = True - -html_theme = 'openstackdocs' -html_theme_options = { - "sidebar_dropdown": "api_ref", - "sidebar_mode": "toc", -} - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('../../')) -sys.path.insert(0, os.path.abspath('../')) -sys.path.insert(0, os.path.abspath('./')) - -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -# -# source_encoding = 'utf-8' - -# The master toctree document. 
-master_doc = 'index' - -# General information about the project. -copyright = '2010-present, OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# The reST default role (used for this markup: `text`) to use -# for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = False - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# -- Options for man page output ---------------------------------------------- - -# Grouping the document tree for man pages. -# List of tuples 'sourcefile', 'target', 'title', 'Authors name', 'manual' - - -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -# html_theme_path = ["."] -# html_theme = '_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_use_modindex = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. 
-# html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'saharaoc' - - -# -- Options for LaTeX output ------------------------------------------------- - -# The paper size ('letter' or 'a4'). -# latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -# latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', 'Sahara.tex', 'OpenStack Data Processing API Documentation', - 'OpenStack Foundation', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# Additional stuff for the LaTeX preamble. -# latex_preamble = '' - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_use_modindex = True diff --git a/api-ref/source/index.rst b/api-ref/source/index.rst deleted file mode 100644 index fa2a090c90..0000000000 --- a/api-ref/source/index.rst +++ /dev/null @@ -1,13 +0,0 @@ -=================== -Data Processing API -=================== - -Contents: - -API content can be searched using the :ref:`search`. - -.. toctree:: - :maxdepth: 2 - - v1.1/index - v2/index diff --git a/api-ref/source/v1.1/cluster-templates.inc b/api-ref/source/v1.1/cluster-templates.inc deleted file mode 100644 index 5d699c121a..0000000000 --- a/api-ref/source/v1.1/cluster-templates.inc +++ /dev/null @@ -1,253 +0,0 @@ -.. -*- rst -*- - -================= -Cluster templates -================= - -A cluster template configures a Hadoop cluster. A cluster template -lists node groups with the number of instances in each group. You -can also define cluster-scoped configurations in a cluster -template. - - -Show cluster template details -============================= - -.. rest_method:: GET /v1.1/{project_id}/cluster-templates/{cluster_template_id} - -Shows details for a cluster template. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_template_id: url_cluster_template_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - tenant_id: tenant_id - - node_groups: node_groups - - is_public: object_is_public - - hadoop_version: hadoop_version - - id: cluster_template_id - - name: cluster_template_name - - - -Response Example ----------------- - -.. literalinclude:: samples/cluster-templates/cluster-templates-list-response.json - :language: javascript - - - - -Update cluster templates -======================== - -.. rest_method:: PUT /v1.1/{project_id}/cluster-templates/{cluster_template_id} - -Updates a cluster template. - -Normal response codes:202 - - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_template_id: cluster_template_id - -Request Example ---------------- - -.. literalinclude:: samples/cluster-templates/cluster-template-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - tenant_id: tenant_id - - node_groups: node_groups - - is_public: object_is_public - - hadoop_version: hadoop_version - - id: cluster_template_id - - name: cluster_template_name - - - - - -Delete cluster template -======================= - -.. rest_method:: DELETE /v1.1/{project_id}/cluster-templates/{cluster_template_id} - -Deletes a cluster template. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_template_id: cluster_template_id - - - - - - -List cluster templates -====================== - -.. rest_method:: GET /v1.1/{project_id}/cluster-templates - -Lists available cluster templates. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_cluster_templates - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - tenant_id: tenant_id - - node_groups: node_groups - - is_public: object_is_public - - hadoop_version: hadoop_version - - id: cluster_template_id - - name: cluster_template_name - - - -Response Example ----------------- -.. rest_method:: GET /v1.1/{project_id}/cluster-templates?limit=2 - -.. literalinclude:: samples/cluster-templates/cluster-templates-list-response.json - :language: javascript - - - - -Create cluster templates -======================== - -.. rest_method:: POST /v1.1/{project_id}/cluster-templates - -Creates a cluster template. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: project_id - - -Request Example ---------------- - -.. literalinclude:: samples/cluster-templates/cluster-template-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - tenant_id: tenant_id - - node_groups: node_groups - - is_public: object_is_public - - hadoop_version: hadoop_version - - id: cluster_template_id - - name: cluster_template_name - - - - diff --git a/api-ref/source/v1.1/clusters.inc b/api-ref/source/v1.1/clusters.inc deleted file mode 100644 index d36a06e03e..0000000000 --- a/api-ref/source/v1.1/clusters.inc +++ /dev/null @@ -1,335 +0,0 @@ -.. -*- rst -*- - -======== -Clusters -======== - -A cluster is a group of nodes with the same configuration. - - -List available clusters -======================= - -.. rest_method:: GET /v1.1/{project_id}/clusters - -Lists available clusters. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_clusters - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - clusters: clusters - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - -Response Example ----------------- -.. rest_method:: GET /v1.1/{project_id}/clusters - -.. literalinclude:: samples/clusters/clusters-list-response.json - :language: javascript - - - - -Create cluster -============== - -.. rest_method:: POST /v1.1/{project_id}/clusters - -Creates a cluster. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - - -Create multiple clusters -======================== - -.. rest_method:: POST /v1.1/{project_id}/clusters/multiple - -Creates multiple clusters. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. literalinclude:: samples/clusters/multiple-clusters-create-request.json - :language: javascript - - - - - - - -Show details of a cluster -========================= - -.. rest_method:: GET /v1.1/{project_id}/clusters/{cluster_id} - -Shows details for a cluster, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: url_cluster_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - -Response Example ----------------- - -.. literalinclude:: samples/clusters/cluster-show-response.json - :language: javascript - - - - -Delete a cluster -================ - -.. rest_method:: DELETE /v1.1/{project_id}/clusters/{cluster_id} - -Deletes a cluster. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: url_cluster_id - - - - - - -Scale cluster -============= - -.. rest_method:: PUT /v1.1/{project_id}/clusters/{cluster_id} - -Scales a cluster. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: cluster_id - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-scale-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - -Update cluster -============== - -.. rest_method:: PATCH /v1.1/{project_id}/clusters/{cluster_id} - -Updates a cluster. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: url_cluster_id - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - -Show progress -============= - -.. rest_method:: GET /v1.1/{project_id}/clusters/{cluster_id} - -Shows provisioning progress for a cluster. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: url_cluster_id - - - - -Response Example ----------------- - -.. literalinclude:: samples/event-log/cluster-progress-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/data-sources.inc b/api-ref/source/v1.1/data-sources.inc deleted file mode 100644 index 9804dae274..0000000000 --- a/api-ref/source/v1.1/data-sources.inc +++ /dev/null @@ -1,212 +0,0 @@ -.. 
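For illustration only, a minimal sketch of calling the cluster APIs described
above with the ``requests`` library; the endpoint URL, port, project id and
token below are placeholders, not values taken from this repository::

    import requests

    SAHARA = 'http://controller:8386/v1.1'   # assumed data-processing endpoint
    PROJECT_ID = 'PROJECT_ID'                # placeholder
    HEADERS = {'X-Auth-Token': 'TOKEN'}      # placeholder Keystone token

    # GET /v1.1/{project_id}/clusters with the documented paging parameter
    resp = requests.get('%s/%s/clusters' % (SAHARA, PROJECT_ID),
                        headers=HEADERS, params={'limit': 2})
    resp.raise_for_status()
    for cluster in resp.json().get('clusters', []):
        print(cluster['id'], cluster['status'])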
-*- rst -*- - -============ -Data sources -============ - -A data source object defines the location of input or output for -MapReduce jobs and might reference different types of storage. - -The Data Processing service does not validate data source -locations. - - -Show data source details -======================== - -.. rest_method:: GET /v1.1/{project_id}/data-sources/{data_source_id} - -Shows details for a data source. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - data_source_id: url_data_source_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: data_source_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - -Response Example ----------------- - -.. literalinclude:: samples/data-sources/data-source-show-response.json - :language: javascript - - - - -Delete data source -================== - -.. rest_method:: DELETE /v1.1/{project_id}/data-sources/{data_source_id} - -Deletes a data source. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - data_source_id: url_data_source_id - - - - - - -Update data source -================== - -.. rest_method:: PUT /v1.1/{project_id}/data-sources/{data_source_id} - -Updates a data source. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - data_source_id: url_data_source_id - -Request Example ---------------- - -.. literalinclude:: samples/data-sources/data-source-update-request.json - :language: javascript - - - - - - - -List data sources -================= - -.. rest_method:: GET /v1.1/{project_id}/data-sources - -Lists all data sources. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_data_sources - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: data_source_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - -Response Example ----------------- - -.. rest_method:: GET /v1.1/{project_id}/data-sourses?sort_by=-name - -.. literalinclude:: samples/data-sources/data-sources-list-response.json - :language: javascript - - - - -Create data source -================== - -.. rest_method:: POST /v1.1/{project_id}/data-sources - -Creates a data source. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. literalinclude:: samples/data-sources/data-source-register-hdfs-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. 
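A comparable sketch for the create call above; the ``type`` and ``url`` values
are illustrative, and the endpoint, project id and token are placeholders as
before::

    import requests

    url = 'http://controller:8386/v1.1/PROJECT_ID/data-sources'
    payload = {
        'name': 'hdfs-input',
        'type': 'hdfs',
        'url': 'hdfs://namenode:8020/user/hadoop/input',
        'description': 'input directory for a MapReduce job',
    }
    resp = requests.post(url, json=payload,
                         headers={'X-Auth-Token': 'TOKEN'})
    print(resp.status_code)   # 202 on success, as documented above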
rest_parameters:: parameters.yaml - - - description: data_source_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - - diff --git a/api-ref/source/v1.1/event-log.inc b/api-ref/source/v1.1/event-log.inc deleted file mode 100644 index 88299a3d2c..0000000000 --- a/api-ref/source/v1.1/event-log.inc +++ /dev/null @@ -1,42 +0,0 @@ -.. -*- rst -*- - -========= -Event log -========= - -The event log feature provides information about cluster -provisioning. In the event of errors, the event log shows the -reason for the failure. - - -Show progress -============= - -.. rest_method:: GET /v1.1/{project_id}/clusters/{cluster_id} - -Shows provisioning progress of cluster. - - -Normal response codes: 200 -Error response codes: - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - cluster_id: cluster_id - - - - -Response Example ----------------- - -.. literalinclude:: samples/event-log/cluster-progress-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/image-registry.inc b/api-ref/source/v1.1/image-registry.inc deleted file mode 100644 index 8c5c2a26eb..0000000000 --- a/api-ref/source/v1.1/image-registry.inc +++ /dev/null @@ -1,249 +0,0 @@ -.. -*- rst -*- - -============== -Image registry -============== - -Use the image registry tool to manage images, add tags to and -remove tags from images, and define the user name for an instance -operating system. Each plugin lists required tags for an image. To -run remote operations, the Data Processing service requires a user -name with which to log in to the operating system for an instance. - - -Add tags to image -================= - -.. rest_method:: POST /v1.1/{project_id}/images/{image_id}/tag - -Adds tags to an image. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - tags: tags - - image_id: url_image_id - -Request Example ---------------- - -.. literalinclude:: samples/image-registry/image-tags-add-request.json - :language: javascript - - - - - - - -Show image details -================== - -.. rest_method:: GET /v1.1/{project_id}/images/{image_id} - -Shows details for an image. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - image_id: url_image_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - -Response Example ----------------- - -.. literalinclude:: samples/image-registry/image-show-response.json - :language: javascript - - - - -Register image -============== - -.. rest_method:: POST /v1.1/{project_id}/images/{image_id} - -Registers an image in the registry. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - username: username - - description: image_description - - image_id: url_image_id - -Request Example ---------------- - -.. 
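Again for illustration, a sketch of the add-tags call described above; the
request body mirrors the ``tags`` parameter listed there, and the tag values
and all identifiers are placeholders::

    import requests

    url = 'http://controller:8386/v1.1/PROJECT_ID/images/IMAGE_ID/tag'
    resp = requests.post(url, json={'tags': ['vanilla', '2.7.1']},
                         headers={'X-Auth-Token': 'TOKEN'})
    print(resp.status_code)   # 202 on success, as documented above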
literalinclude:: samples/image-registry/image-register-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - - - -Unregister image -================ - -.. rest_method:: DELETE /v1.1/{project_id}/images/{image_id} - -Removes an image from the registry. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - image_id: url_image_id - - - - - - -Remove tags from image -====================== - -.. rest_method:: POST /v1.1/{project_id}/images/{image_id}/untag - -Removes tags from an image. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - tags: tags - - image_id: url_image_id - -Request Example ---------------- - -.. literalinclude:: samples/image-registry/image-tags-delete-request.json - :language: javascript - - - - - - - -List images -=========== - -.. rest_method:: GET /v1.1/{project_id}/images - -Lists all images registered in the registry. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - tags: tags - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - images: images - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - -Response Example ----------------- - -.. literalinclude:: samples/image-registry/images-list-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/index.rst b/api-ref/source/v1.1/index.rst deleted file mode 100644 index a733181352..0000000000 --- a/api-ref/source/v1.1/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -:tocdepth: 3 - ------------------------- -Data Processing API v1.1 ------------------------- - -.. rest_expand_all:: - -.. include:: cluster-templates.inc -.. include:: clusters.inc -.. include:: data-sources.inc -.. include:: event-log.inc -.. include:: image-registry.inc -.. include:: job-binaries.inc -.. include:: job-executions.inc -.. include:: job-types.inc -.. include:: job-binary-internals.inc -.. include:: jobs.inc -.. include:: node-group-templates.inc -.. include:: plugins.inc diff --git a/api-ref/source/v1.1/job-binaries.inc b/api-ref/source/v1.1/job-binaries.inc deleted file mode 100644 index 1ac9f8ee78..0000000000 --- a/api-ref/source/v1.1/job-binaries.inc +++ /dev/null @@ -1,266 +0,0 @@ -.. -*- rst -*- - -============ -Job binaries -============ - -Job binary objects represent data processing applications and -libraries that are stored in either the internal database or the -Object Storage service. - - -List job binaries -================= - -.. rest_method:: GET /v1.1/{project_id}/job-binaries - -Lists the available job binaries. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_job_binary - - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: job_binary_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - binaries: binaries - - id: job_binary_id - - name: job_binary_name - - - -Response Example ----------------- -.. rest_method:: GET /v1.1/{project_id}/job-binaries?sort_by=created_at - -.. literalinclude:: samples/job-binaries/list-response.json - :language: javascript - - - - -Create job binary -================= - -.. rest_method:: POST /v1.1/{project_id}/job-binaries - -Creates a job binary. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. literalinclude:: samples/job-binaries/create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_binary_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - id: job_binary_id - - name: job_binary_name - - - - - -Show job binary details -======================= - -.. rest_method:: GET /v1.1/{project_id}/job-binaries/{job_binary_id} - -Shows details for a job binary. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_id: url_job_binary_id - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_binary_description - - url: url - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - id: job_binary_id - - name: job_binary_name - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binaries/show-response.json - :language: javascript - - - - -Delete job binary -================= - -.. rest_method:: DELETE /v1.1/{project_id}/job-binaries/{job_binary_id} - -Deletes a job binary. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - - project_id: url_project_id - - job_binary_id: url_job_binary_id - - - - - - -Update job binary -================= - -.. rest_method:: PUT /v1.1/{project_id}/job-binaries/{job_binary_id} - -Updates a job binary. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - - project_id: url_project_id - - job_binary_id: url_job_binary_id - - -Request Example ---------------- - -.. literalinclude:: samples/job-binaries/update-request.json - :language: javascript - - - - - - - -Show job binary data -==================== - -.. rest_method:: GET /v1.1/{project_id}/job-binaries/{job_binary_id}/data - -Shows data for a job binary. - -The response body shows the job binary raw data and the response -headers show the data length. - -Example response: - -:: - - HTTP/1.1 200 OK - Connection: keep-alive - Content-Length: 161 - Content-Type: text/html; charset=utf-8 - Date: Sat, 28 Mar 2016 02:42:48 GMT - A = load '$INPUT' using PigStorage(':') as (fruit: chararray); - B = foreach A generate com.hadoopbook.pig.Trim(fruit); - store B into '$OUTPUT' USING PigStorage(); - - -Normal response codes: 200 - - -Request -------- - -.. 
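A small sketch of fetching the raw data shown in the example response above;
identifiers, endpoint and token are placeholders::

    import requests

    url = ('http://controller:8386/v1.1/PROJECT_ID/'
           'job-binaries/JOB_BINARY_ID/data')
    resp = requests.get(url, headers={'X-Auth-Token': 'TOKEN'})
    resp.raise_for_status()
    print(resp.headers.get('Content-Length'))   # data length, per the headers above
    print(resp.text)                            # raw job binary contents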
rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_id: url_job_binary_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - Content-Length: Content-Length - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binaries/show-data-response - :language: text - - - diff --git a/api-ref/source/v1.1/job-binary-internals.inc b/api-ref/source/v1.1/job-binary-internals.inc deleted file mode 100644 index 81b8f989a1..0000000000 --- a/api-ref/source/v1.1/job-binary-internals.inc +++ /dev/null @@ -1,258 +0,0 @@ -.. -*- rst -*- - -==================== -Job binary internals -==================== - -Job binary internal objects represent data processing applications -and libraries that are stored in the internal database. - - -Create job binary internal -========================== - -.. rest_method:: PUT /v1.1/{project_id}/job-binary-internals/{name} - -Creates a job binary internal. - -Job binary internals are objects that represent data processing -applications and libraries that are stored in the internal -database. - -Specify the file contents (raw data or script text) in the request -body. Specify the file name in the URI. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - name: url_job_binary_internals_name - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - name: job_binary_internals_name - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - datasize: datasize - - id: job_binary_internals_id - - - - - -Show job binary internal data -============================= - -.. rest_method:: GET /v1.1/{project_id}/job-binary-internals/{job_binary_internals_id}/data - -Shows data for a job binary internal. - -The response body shows the job binary raw data and the response -headers show the data length. - -Example response: - -:: - - HTTP/1.1 200 OK - Connection: keep-alive - Content-Length: 161 - Content-Type: text/html; charset=utf-8 - Date: Sat, 28 Mar 2016 02:21:13 GMT - A = load '$INPUT' using PigStorage(':') as (fruit: chararray); - B = foreach A generate com.hadoopbook.pig.Trim(fruit); - store B into '$OUTPUT' USING PigStorage(); - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_internals_id: url_job_binary_internals_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - Content-Length: Content-Length - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binary-internals/show-data-response - :language: text - - - - -Show job binary internal details -================================ - -.. rest_method:: GET /v1.1/{project_id}/job-binary-internals/{job_binary_internals_id} - -Shows details for a job binary internal. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_internals_id: url_job_binary_internals_id - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - name: job_binary_internals_name - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - datasize: datasize - - id: job_binary_internals_id - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binary-internals/show-response.json - :language: javascript - - - - -Delete job binary internal -========================== - -.. rest_method:: DELETE /v1.1/{project_id}/job-binary-internals/{job_binary_internals_id} - -Deletes a job binary internal. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_internals_id: url_job_binary_internals_id - - - - - - -Update job binary internal -========================== - -.. rest_method:: PATCH /v1.1/{project_id}/job-binary-internals/{job_binary_internals_id} - -Updates a job binary internal. - -Normal respose codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_binary_internals_id: url_job_binary_internals_id - -Request Example ---------------- - -.. literalinclude:: samples/job-binary-internals/update-request.json - :language: javascript - - - - - - - -List job binary internals -========================= - -.. rest_method:: GET /v1.1/{project_id}/job-binary-internals - -Lists the available job binary internals. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_job_binary_internals - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - binaries: binaries - - name: job_binary_internals_name - - tenant_id: tenant_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - datasize: datasize - - id: job_binary_internals_id - - - -Response Example ----------------- -.. rest_method:: GET /v1.1/{project_id}/job-binary-internals - -.. literalinclude:: samples/job-binary-internals/list-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/job-executions.inc b/api-ref/source/v1.1/job-executions.inc deleted file mode 100644 index 9d9f5628bd..0000000000 --- a/api-ref/source/v1.1/job-executions.inc +++ /dev/null @@ -1,325 +0,0 @@ -.. -*- rst -*- - -============== -Job executions -============== - -A job execution object represents a Hadoop job that runs on a -cluster. A job execution polls the status of a running job and -reports it to the user. Also a user can cancel a running job. - - -Refresh job execution status -============================ - -.. rest_method:: GET /v1.1/{project_id}/job-executions/{job_execution_id}/refresh-status - -Refreshes the status of and shows information for a job execution. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_execution_id: url_job_execution_id - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_id: job_id - - updated_at: updated_at - - tenant_id: tenant_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_execution_is_public - - input_id: input_id - - configs: configs - - job_execution: job_execution - - id: job_execution_id - - - -Response Example ----------------- - -.. literalinclude:: samples/job-executions/job-ex-response.json - :language: javascript - - - - -List job executions -=================== - -.. rest_method:: GET /v1.1/{project_id}/job-executions - -Lists available job executions. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_job_execution - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - info: info - - output_id: output_id - - start_time: start_time - - job_id: job_id - - updated_at: updated_at - - tenant_id: tenant_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_execution_is_public - - input_id: input_id - - configs: configs - - job_execution: job_execution - - id: job_execution_id - - job_executions: job_executions - - - -Response Example ----------------- -.. rest_method:: /v1.1/{project_id}/job-executions - -.. literalinclude:: samples/job-executions/list-response.json - :language: javascript - - - - -Show job execution details -========================== - -.. rest_method:: GET /v1.1/{project_id}/job-executions/{job_execution_id} - -Shows details for a job execution, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_execution_id: url_job_execution_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_id: job_id - - updated_at: updated_at - - tenant_id: tenant_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_execution_is_public - - input_id: input_id - - configs: configs - - job_execution: job_execution - - id: job_execution_id - - - -Response Example ----------------- - -.. literalinclude:: samples/job-executions/job-ex-response.json - :language: javascript - - - - -Delete job execution -==================== - -.. rest_method:: DELETE /v1.1/{project_id}/job-executions/{job_execution_id} - -Deletes a job execution. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_execution_id: url_job_execution_id - - - - - - -Update job execution -==================== - -.. rest_method:: PATCH /v1.1/{project_id}/job-executions/{job_execution_id} - -Updates a job execution. 
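The refresh-status call documented above lends itself to a simple polling
loop; the sketch below assumes the response is wrapped in a ``job_execution``
object with a ``status`` field under ``info``, and the terminal state names
are assumptions rather than a definitive list::

    import time

    import requests

    base = ('http://controller:8386/v1.1/PROJECT_ID/'
            'job-executions/JOB_EXECUTION_ID')
    headers = {'X-Auth-Token': 'TOKEN'}

    while True:
        resp = requests.get(base + '/refresh-status', headers=headers)
        resp.raise_for_status()
        info = resp.json().get('job_execution', {}).get('info', {})
        if info.get('status') in ('SUCCEEDED', 'KILLED', 'FAILED'):
            break
        time.sleep(30)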
- -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_execution_id: url_job_execution_id - -Request Example ---------------- - -.. literalinclude:: samples/job-executions/job-ex-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_id: job_id - - updated_at: updated_at - - tenant_id: tenant_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_execution_is_public - - input_id: input_id - - configs: configs - - job_execution: job_execution - - id: job_execution_id - - - - - -Cancel job execution -==================== - -.. rest_method:: GET /v1.1/{project_id}/job-executions/{job_execution_id}/cancel - -Cancels a job execution. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_execution_id: url_job_execution_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_id: job_id - - updated_at: updated_at - - tenant_id: tenant_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_execution_is_public - - input_id: input_id - - configs: configs - - job_execution: job_execution - - id: job_execution_id - - - -Response Example ----------------- - -.. literalinclude:: samples/job-executions/cancel-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/job-types.inc b/api-ref/source/v1.1/job-types.inc deleted file mode 100644 index ab719dfdd7..0000000000 --- a/api-ref/source/v1.1/job-types.inc +++ /dev/null @@ -1,61 +0,0 @@ -.. -*- rst -*- - -========= -Job types -========= - -Each plugin that supports EDP also supports specific job types. -Different versions of a plugin might actually support different job -types. Configuration options vary by plugin, version, and job type. - -The job types provide information about which plugins support which -job types and how to configure the job types. - - -List job types -============== - -.. rest_method:: GET /v1.1/{project_id}/job-types - -Lists all job types. - -You can use query parameters to filter the response. - - -Normal response codes: 200 -Error response codes: - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - plugin: plugin - - version: version - - type: type - - hints: hints - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - job_types: job_types - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/job-types/job-types-list-response.json - :language: javascript - - - diff --git a/api-ref/source/v1.1/jobs.inc b/api-ref/source/v1.1/jobs.inc deleted file mode 100644 index c4e0cb7a9f..0000000000 --- a/api-ref/source/v1.1/jobs.inc +++ /dev/null @@ -1,265 +0,0 @@ -.. 
-*- rst -*- - -==== -Jobs -==== - -A job object lists the binaries that a job needs to run. To run a -job, you must specify data sources and job parameters. - -You can run a job on an existing or new transient cluster. - - -Run job -======= - -.. rest_method:: POST /v1.1/{project_id}/jobs/{job_id}/execute - -Runs a job. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_id: url_job_id - -Request Example ---------------- - -.. literalinclude:: samples/jobs/job-execute-request.json - :language: javascript - - - - - - - -List jobs -========= - -.. rest_method:: GET /v1.1/{project_id}/jobs - -Lists all jobs. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_jobs - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - jobs: jobs - - description: job_description - - tenant_id: tenant_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_id - - name: job_name - - markers: markers - - prev: prev - - next: next - - -Response Example ----------------- -..rest_method:: GET /v1.1/{project_id}/jobs?limit=2 - -.. literalinclude:: samples/jobs/jobs-list-response.json - :language: javascript - - - - -Create job -========== - -.. rest_method:: POST /v1.1/{project_id}/jobs - -Creates a job object. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. literalinclude:: samples/jobs/job-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_description - - tenant_id: tenant_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_id - - name: job_name - - - - - -Show job details -================ - -.. rest_method:: GET /v1.1/{project_id}/jobs/{job_id} - -Shows details for a job. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_id: url_job_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_description - - tenant_id: tenant_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_id - - name: job_name - - - -Response Example ----------------- - -.. literalinclude:: samples/jobs/job-show-response.json - :language: javascript - - - - -Remove job -========== - -.. rest_method:: DELETE /v1.1/{project_id}/jobs/{job_id} - -Removes a job. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_id: url_job_id - - - - - - -Update job object -================= - -.. rest_method:: PATCH /v1.1/{project_id}/jobs/{job_id} - -Updates a job object. - -Normal response codes:202 - - -Request -------- - -.. 
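A rough sketch of the run-job call above; the payload keys mirror the request
sample referenced there and should be read as illustrative rather than
normative, with all identifiers, the endpoint and the token being
placeholders::

    import requests

    url = 'http://controller:8386/v1.1/PROJECT_ID/jobs/JOB_ID/execute'
    payload = {
        'cluster_id': 'CLUSTER_ID',
        'input_id': 'INPUT_DATA_SOURCE_ID',
        'output_id': 'OUTPUT_DATA_SOURCE_ID',
        'job_configs': {'configs': {}, 'args': [], 'params': {}},
    }
    resp = requests.post(url, json=payload,
                         headers={'X-Auth-Token': 'TOKEN'})
    print(resp.status_code)   # 202 on success, as documented above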
rest_parameters:: parameters.yaml - - - project_id: url_project_id - - job_id: url_job_id - -Request Example ---------------- - -.. literalinclude:: samples/jobs/job-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_description - - tenant_id: tenant_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_id - - name: job_name - - - - diff --git a/api-ref/source/v1.1/node-group-templates.inc b/api-ref/source/v1.1/node-group-templates.inc deleted file mode 100644 index e61a61981d..0000000000 --- a/api-ref/source/v1.1/node-group-templates.inc +++ /dev/null @@ -1,269 +0,0 @@ -.. -*- rst -*- - -==================== -Node group templates -==================== - -A cluster is a group of nodes with the same configuration. A node -group template configures a node in the cluster. - -A template configures Hadoop processes and VM characteristics, such -as the number of reduced slots for task tracker, the number of -CPUs, and the amount of RAM. The template specifies the VM -characteristics through an OpenStack flavor. - - -List node group templates -========================= - -.. rest_method:: GET /v1.1/{project_id}/node-group-templates - -Lists available node group templates. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - limit: limit - - marker: marker - - sort_by: sort_by_node_group_templates - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - hadoop_version: hadoop_version - - name: node_group_template_name - - tenant_id: tenant_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - -Response Example ----------------- -.. rest_method:: GET /v1.1/{project_id}/node-group-templates?limit=2&marker=38b4e146-1d39-4822-bad2-fef1bf304a52&sort_by=name - -.. literalinclude:: samples/node-group-templates/node-group-templates-list-response.json - :language: javascript - - - - -Create node group template -========================== - -.. rest_method:: POST /v1.1/{project_id}/node-group-templates - -Creates a node group template. - -Normal response codes: 202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - -Request Example ---------------- - -.. 
literalinclude:: samples/node-group-templates/node-group-template-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - hadoop_version: hadoop_version - - name: node_group_template_name - - tenant_id: tenant_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - - - -Show node group template details -================================ - -.. rest_method:: GET /v1.1/{project_id}/node-group-templates/{node_group_template_id} - -Shows a node group template, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - node_group_template_id: url_node_group_template_id - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - hadoop_version: hadoop_version - - name: node_group_template_name - - tenant_id: tenant_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - -Response Example ----------------- - -.. literalinclude:: samples/node-group-templates/node-group-template-show-response.json - :language: javascript - - - - -Delete node group template -========================== - -.. rest_method:: DELETE /v1.1/{project_id}/node-group-templates/{node_group_template_id} - -Deletes a node group template. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - node_group_template_id: url_node_group_template_id - - - - - - -Update node group template -========================== - -.. rest_method:: PUT /v1.1/{project_id}/node-group-templates/{node_group_template_id} - -Updates a node group template. - -Normal respose codes:202 - - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - project_id: url_project_id - - node_group_template_id: url_node_group_template_id - -Request Example ---------------- - -.. literalinclude:: samples/node-group-templates/node-group-template-update-request.json - :language: javascript - - - - - - diff --git a/api-ref/source/v1.1/parameters.yaml b/api-ref/source/v1.1/parameters.yaml deleted file mode 100644 index fc28dc6ec0..0000000000 --- a/api-ref/source/v1.1/parameters.yaml +++ /dev/null @@ -1,1159 +0,0 @@ -# variables in header -Content-Length: - description: | - The length of the data, in bytes. - in: header - required: true - type: string - -# variables in path -hints: - description: | - Includes configuration hints in the response. - in: path - required: false - type: boolean -job_binary_id: - description: | - The UUID of the job binary. - in: path - required: true - type: string -limit: - description: | - Maximum number of objects in response data. - in: path - required: false - type: integer -marker: - description: | - ID of the last element on the list which - won't be in response. - in: path - required: false - type: string -plugin: - description: | - Filters the response by a plugin name. - in: path - required: false - type: string -sort_by_cluster_templates: - description: | - The field for sorting cluster templates. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``hadoop_version``, - ``created_at``, ``updated_at``, ``id``. Also - this values can started with ``-`` prefix for - descending sort. For example: ``-name``. - in: path - required: false - type: string - -sort_by_clusters: - description: | - The field for sorting clusters. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``hadoop_version``, - ``status``, ``id``. Also this values can - started with ``-`` prefix for descending sort. - For example: ``-name``. - in: path - required: false - type: string - -sort_by_data_sources: - description: | - The field for sorting data sources. - this parameter accepts the following values: - ``id``, ``name``, ``type``, ``created_at``, - ``updated_at``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-name``. - in: path - required: false - type: string - -sort_by_job_binary: - description: | - The field for sorting job binaries. - this parameter accepts the following values: - ``id``, ``name``, ``created_at``, ``updated_at``. - Also this values can started with ``-`` prefix - for descending sort. For example: ``-name``. - in: path - required: false - type: string - -sort_by_job_binary_internals: - description: | - The field for sorting job binary internals. - this parameter accepts the following values: - ``id``, ``name``, ``created_at``, ``updated_at``. - Also this values can started with ``-`` prefix - for descending sort. For example: ``-name``. - in: path - required: false - type: string - -sort_by_job_execution: - description: | - The field for sorting job executions. - this parameter accepts the following values: - ``id``, ``job_template``, ``cluster``, - ``status``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-cluster``. - in: path - required: false - type: string - -sort_by_jobs: - description: | - The field for sorting jobs. - this parameter accepts the following values: - ``id``, ``name``, ``type``, ``created_at``, - ``updated_at``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-name``. 
- in: path - required: false - type: string - -sort_by_node_group_templates: - description: | - The field for sorting node group templates. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``hadoop_version``, - ``created_at``, ``updated_at``, ``id``. Also - this values can started with ``-`` prefix for - descending sort. For example: ``-name``. - in: path - required: false - type: string - -type_2: - description: | - Filters the response by a job type. - in: path - required: false - type: string -url_cluster_id: - description: | - The ID of the cluster - in: path - required: true - type: string -url_cluster_template_id: - description: | - The unique identifier of the cluster template. - in: path - required: true - type: string -url_data_source_id: - description: | - The UUID of the data source. - in: path - required: true - type: string -url_image_id: - description: | - The UUID of the image. - in: path - required: true - type: string -url_job_binary_id: - description: | - The UUID of the job binary. - in: path - required: true - type: string -url_job_binary_internals_id: - description: | - The UUID of the job binary internal. - in: path - required: true - type: string -url_job_binary_internals_name: - description: | - The name of the job binary internal. - in: path - required: true - type: string -url_job_execution_id: - description: | - The UUID of the job execution. - in: path - required: true - type: string -url_job_id: - description: | - The UUID of the job. - in: path - required: true - type: string -url_node_group_template_id: - description: | - The UUID of the node group template. - in: path - required: true - type: string -url_plugin_name: - description: | - Name of the plugin. - in: path - required: true - type: string -url_project_id: - description: | - UUID of the project. - in: path - required: true - type: string -version: - description: | - Filters the response by a plugin version. - in: path - required: true - type: string -version_1: - description: | - Version of the plugin. - in: path - required: false - type: string - - -# variables in body -args: - description: | - The list of arguments. - in: body - required: true - type: array -auto_security_group: - description: | - If set to ``True``, the cluster group is - automatically secured. - in: body - required: true - type: boolean -availability_zone: - description: | - The availability of the node in the cluster. - in: body - required: true - type: string -binaries: - description: | - The list of job binary internal objects. - in: body - required: true - type: array -cluster_configs: - description: | - A set of key and value pairs that contain the - cluster configuration. - in: body - required: true - type: object -cluster_id: - description: | - The UUID of the cluster. - in: body - required: true - type: string -cluster_template_description: - description: | - Description of the cluster template - in: body - required: false - type: string -cluster_template_id: - description: | - The UUID of the cluster template. - in: body - required: true - type: string -cluster_template_name: - description: | - The name of the cluster template. - in: body - required: true - type: string -clusters: - description: | - The list of clusters. - in: body - required: true - type: array -configs: - description: | - The mappings of the job tasks. - in: body - required: true - type: object -count: - description: | - The number of nodes in the cluster. 
- in: body - required: true - type: integer -created: - description: | - The date and time when the image was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - For example, ``2015-08-27T09:49:58-05:00``. - - The ``±hh:mm`` value, if included, is the time zone as an offset - from UTC. - in: body - required: true - type: string -created_at: - description: | - The date and time when the cluster was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_1: - description: | - The date and time when the object was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_2: - description: | - The date and time when the node was created in the cluster. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_3: - description: | - The date and time when the job execution object was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -data_source_description: - description: | - The description of the data source object. - in: body - required: true - type: string -data_source_id: - description: | - The UUID of the data source. - in: body - required: true - type: string -data_source_name: - description: | - The name of the data source. - in: body - required: true - type: string -data_source_urls: - description: | - The data source URLs. - in: body - required: true - type: object -datasize: - description: | - The size of the data stored in the internal - database. - in: body - required: true - type: integer -default_image_id: - description: | - The default ID of the image. - in: body - required: true - type: string -description: - description: | - The description of the cluster. - in: body - required: true - type: string -description_3: - description: | - The description of the node in the cluster. - in: body - required: true - type: string -description_7: - description: | - Description of the image. - in: body - required: false - type: string -description_plugin: - description: | - The full description of the plugin. - in: body - required: true - type: string -domain_name: - description: | - Domain name for internal and external hostname resolution. - Required if DNS service is enabled. - in: body - required: false - type: string -end_time: - description: | - The end date and time of the job execution. - - The date and time when the job completed execution. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. 
- in: body - required: true - type: string -flavor_id: - description: | - The ID of the flavor. - in: body - required: true - type: string -floating_ip_pool: - description: | - The UUID of the pool in the template. - in: body - required: true - type: string -hadoop_version: - description: | - The version of the Hadoop used in the cluster. - in: body - required: true - type: string -hadoop_version_1: - description: | - The version of the Hadoop. - in: body - required: true - type: string -id: - description: | - The UUID of the cluster. - in: body - required: true - type: string -id_1: - description: | - The ID of the object. - in: body - required: true - type: string -image: - description: | - A set of key and value pairs that contain image - properties. - in: body - required: true - type: object -image_description: - description: | - The description of the image. - in: body - required: true - type: string -image_id: - description: | - The UUID of the image. - in: body - required: true - type: string -image_name: - description: | - The name of the operating system image. - in: body - required: true - type: string -images: - description: | - The list of images and their properties. - in: body - required: true - type: array -info: - description: | - A set of key and value pairs that contain cluster - information. - in: body - required: true - type: object -info_1: - description: | - The report of the executed job objects. - in: body - required: true - type: object -input_id: - description: | - The UUID of the input. - in: body - required: true - type: string -interface: - description: | - The interfaces of the job object. - in: body - required: true - type: array -is_default: - description: | - If set to ``true``, the cluster is the default - cluster. - in: body - required: true - type: boolean -is_protected: - description: | - If set to ``true``, the cluster is protected. - in: body - required: true - type: boolean -is_protected_2: - description: | - If set to ``true``, the node is protected. - in: body - required: true - type: boolean -is_protected_3: - description: | - If set to ``true``, the job execution object is - protected. - in: body - required: true - type: boolean -is_proxy_gateway: - description: | - If set to ``true``, the node is the proxy - gateway. - in: body - required: true - type: boolean -is_public: - description: | - If set to ``true``, the cluster is public. - in: body - required: true - type: boolean -is_transient: - description: | - If set to ``true``, the cluster is transient. - in: body - required: true - type: boolean -job_binary_description: - description: | - The description of the job binary object. - in: body - required: true - type: string -job_binary_internals_id: - description: | - The UUID of the job binary internal. - in: body - required: true - type: string -job_binary_internals_name: - description: | - The name of the job binary internal. - in: body - required: true - type: string -job_binary_name: - description: | - The name of the object. - in: body - required: true - type: string -job_description: - description: | - The description of the job object. - in: body - required: true - type: string -job_execution: - description: | - A set of key and value pairs that contain the job - object. - in: body - required: true - type: object -job_execution_id: - description: | - The UUID of the job execution object. - in: body - required: true - type: string -job_execution_is_public: - description: | - If set to ``true``, the job execution object is - public. 
- in: body - required: true - type: boolean -job_executions: - description: | - The list of job execution objects. - in: body - required: true - type: array -job_id: - description: | - The UUID of the job object. - in: body - required: true - type: string -job_name: - description: | - The name of the job object. - in: body - required: true - type: string -job_types: - description: | - The list of plugins and their job types. - in: body - required: true - type: array -jobs: - description: | - The list of the jobs. - in: body - required: true - type: array -libs: - description: | - The list of the job object properties. - in: body - required: true - type: array -mains: - description: | - The list of the job object and their properties. - in: body - required: true - type: array -management_public_key: - description: | - The SSH key for the management network. - in: body - required: true - type: string -markers: - description: | - The markers of previous and following pages of data. - This field exists only if ``limit`` is passed to - request. - in: body - required: false - type: object -metadata: - description: | - A set of key and value pairs that contain image - metadata. - in: body - required: true - type: object -minDisk: - description: | - The minimum disk space, in GB. - in: body - required: true - type: integer -minRam: - description: | - The minimum amount of random access memory (RAM) - for the image, in GB. - in: body - required: true - type: integer -name: - description: | - The name of the cluster. - in: body - required: true - type: string -name_1: - description: | - The name of the object. - in: body - required: true - type: string -neutron_management_network: - description: | - The UUID of the neutron management network. - in: body - required: true - type: string -next: - description: | - The marker of next page of list data. - in: body - required: false - type: string -node_configs: - description: | - A set of key and value pairs that contain the - node configuration in the cluster. - in: body - required: true - type: object -node_group_template_description: - description: | - Description of the node group template - in: body - required: false - type: string -node_group_template_id: - description: | - The UUID of the node group template. - in: body - required: true - type: string -node_group_template_name: - description: | - The name of the node group template. - in: body - required: true - type: string -node_groups: - description: | - The detail properties of the node in key-value - pairs. - in: body - required: true - type: object -node_processes: - description: | - The list of the processes performed by the node. - in: body - required: true - type: array -object_is_protected: - description: | - If set to ``true``, the object is protected. - in: body - required: true - type: boolean -object_is_public: - description: | - If set to ``true``, the object is public. - in: body - required: true - type: boolean -object_shares: - description: | - The sharing of resources in the cluster. - in: body - required: true - type: string -oozie_job_id: - description: | - The UUID of the ``oozie_job``. - in: body - required: true - type: string -output_id: - description: | - The UUID of the output of job execution object. - in: body - required: true - type: string -params: - description: | - The mappings of values to the parameters. - in: body - required: true - type: object -plugin_name: - description: | - The name of the plugin. 
- in: body - required: true - type: string -plugins: - description: | - The list of plugins. - in: body - required: true - type: array -prev: - description: | - The marker of previous page. May be ``null`` if - previous page is first or if current page is first. - in: body - required: false - type: string -progress: - description: | - A progress indicator, as a percentage value, for - the amount of image content that has been processed. - in: body - required: true - type: integer -project_id: - description: | - The UUID of the project. - in: body - required: true - type: string -provision_progress: - description: | - A list of the cluster progresses. - in: body - required: true - type: array -return_code: - description: | - The code returned after job has executed. - in: body - required: true - type: string -security_groups: - description: | - The security groups of the node. - in: body - required: true - type: string -shares: - description: | - The shares of the cluster. - in: body - required: true - type: string -start_time: - description: | - The date and time when the job started. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -status: - description: | - The status of the cluster. - in: body - required: true - type: string -status_1: - description: | - The current status of the image. - in: body - required: true - type: string -status_description: - description: | - The description of the cluster status. - in: body - required: true - type: string -tags: - description: | - List of tags to add. - in: body - required: true - type: array -tags_1: - description: | - Lists images only with specific tag. Can be used - multiple times. - in: body - required: false - type: string -tags_2: - description: | - One or more image tags. - in: body - required: true - type: array -tags_3: - description: | - List of tags to remove. - in: body - required: true - type: array -tenant_id: - description: | - The UUID of the tenant. - in: body - required: true - type: string -title: - description: | - The title of the plugin. - in: body - required: true - type: string -trust_id: - description: | - The id of the trust. - in: body - required: true - type: integer -type: - description: | - The type of the data source object. - in: body - required: true - type: string -type_1: - description: | - The type of the job object. - in: body - required: true - type: string -updated: - description: | - The date and time when the image was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - For example, ``2015-08-27T09:49:58-05:00``. - - The ``±hh:mm`` value, if included, is the time zone as an offset - from UTC. - in: body - required: true - type: string -updated_at: - description: | - The date and time when the cluster was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_1: - description: | - The date and time when the object was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. 
- - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_2: - description: | - The date and time when the node was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_3: - description: | - The date and time when the job execution object was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -url: - description: | - The url of the data source object. - in: body - required: true - type: string -url_1: - description: | - The url of the job binary object. - in: body - required: true - type: string -use_autoconfig: - description: | - If set to ``true``, the cluster is auto - configured. - in: body - required: true - type: boolean -use_autoconfig_1: - description: | - If set to ``true``, the node is auto configured. - in: body - required: true - type: boolean -username: - description: | - The name of the user for the image. - in: body - required: true - type: string -username_1: - description: | - The user name to log in to an instance operating - system for remote operations execution. - in: body - required: true - type: string -versions: - description: | - The list of plugin versions. - in: body - required: true - type: array -volume_local_to_instance: - description: | - If set to ``true``, the volume is local to the - instance. - in: body - required: true - type: boolean -volume_mount_prefix: - description: | - The mount point of the node. - in: body - required: true - type: string -volume_type: - description: | - The type of volume in a node. - in: body - required: true - type: string -volumes_availability_zone: - description: | - The availability zone of the volumes. - in: body - required: true - type: string -volumes_per_node: - description: | - The number of volumes for the node. - in: body - required: true - type: integer -volumes_size: - description: | - The size of the volumes in a node. - in: body - required: true - type: integer - diff --git a/api-ref/source/v1.1/plugins.inc b/api-ref/source/v1.1/plugins.inc deleted file mode 100644 index 0708b70fb7..0000000000 --- a/api-ref/source/v1.1/plugins.inc +++ /dev/null @@ -1,187 +0,0 @@ -.. -*- rst -*- - -======= -Plugins -======= - -A plugin object defines the Hadoop or Spark version that it can -install and which configurations can be set for the cluster. - - -Show plugin details -=================== - -.. rest_method:: GET /v1.1/{project_id}/plugins/{plugin_name} - -Shows details for a plugin. - - -Normal response codes: 200 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - plugin_name: url_plugin_name - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugin-show-response.json - :language: javascript - - - - -List plugins -============ - -.. rest_method:: GET /v1.1/{project_id}/plugins - -Lists all registered plugins. 
- - -Normal response codes: 200 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - title: title - - versions: versions - - plugins: plugins - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugins-list-response.json - :language: javascript - - - - -Show plugin version details -=========================== - -.. rest_method:: GET /v1.1/{project_id}/plugins/{plugin_name}/{version} - -Shows details for a plugin version. - - -Normal response codes: 200 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - plugin_name: url_plugin_name - - version: version - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugin-version-show-response.json - :language: javascript - - - - -Update plugin details -===================== - -.. rest_method:: PATCH /v1.1/{project_id}/plugins/{plugin_name} - -Updates details for a plugin. - - -Normal response codes: 202 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - project_id: url_project_id - - plugin_name: url_plugin_name - - -Request Example ---------------- - -.. literalinclude:: samples/plugins/plugin-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - title: title - - versions: versions - - description: description_plugin - - name: plugin_name - - -Response Example ----------------- - -.. 
literalinclude:: samples/plugins/plugin-update-response.json - :language: javascript - - - - diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-request.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-request.json deleted file mode 100644 index e7d9027f6a..0000000000 --- a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-request.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "node_groups": [ - { - "name": "worker", - "count": 3, - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251" - }, - { - "name": "master", - "count": 1, - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae" - } - ], - "name": "cluster-template" -} diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-response.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-response.json deleted file mode 100644 index 0e24853bfb..0000000000 --- a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-create-response.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-show-response.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-template-show-response.json deleted file mode 100644 index 2c70d53e8a..0000000000 --- 
a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-show-response.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "domain_name": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-request.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-request.json deleted file mode 100644 index 885150e607..0000000000 --- a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "description": "Updated template", - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "name": "vanilla-updated", - "cluster_configs": { - "HDFS": { - "dfs.replication": 2 - } - } -} diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-response.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-response.json deleted file mode 100644 index bc0800b0e6..0000000000 --- a/api-ref/source/v1.1/samples/cluster-templates/cluster-template-update-response.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "vanilla-updated", - "created_at": "2015-08-21T08:41:24", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": { - "HDFS": { - "dfs.replication": 2 - } - }, - "shares": null, - "id": "84d47e85-6094-473f-bf6d-5a7e6e86564e", - "default_image_id": null, - "is_default": false, - "updated_at": 
"2015-09-14T10:45:57", - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": {}, - "JobFlow": {}, - "MapReduce": {}, - "Hive": {}, - "Hadoop": {}, - "HDFS": {} - }, - "auto_security_group": true, - "availability_zone": "", - "count": 1, - "flavor_id": "3", - "id": "57b966ab-617e-4735-bf60-0cb991208a52", - "security_groups": [], - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-08-21T08:41:24", - "node_group_template_id": "a5533187-3f14-42c3-ba3a-196c13fe0fb5", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "all", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "datanode", - "historyserver", - "resourcemanager", - "nodemanager", - "oozie" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "description": "Updated template", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/cluster-templates/cluster-templates-list-response.json b/api-ref/source/v1.1/samples/cluster-templates/cluster-templates-list-response.json deleted file mode 100644 index a5ebbf7b88..0000000000 --- a/api-ref/source/v1.1/samples/cluster-templates/cluster-templates-list-response.json +++ /dev/null @@ -1,140 +0,0 @@ -{ - "cluster_templates": [ - { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "domain_name": null, - 
"hadoop_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - }, - { - "is_public": true, - "anti_affinity": [], - "name": "asd", - "created_at": "2015-08-18T08:39:39", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": { - "general": {} - }, - "shares": null, - "id": "5a9c787c-2078-4f7d-9a66-27759be9051b", - "default_image_id": null, - "is_default": false, - "updated_at": "2015-09-14T08:41:15", - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": true, - "availability_zone": "", - "count": 1, - "flavor_id": "2", - "id": "a65864dd-3f99-4d29-a011-f7711cc23fa0", - "security_groups": [], - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-08-18T08:39:39", - "node_group_template_id": "42ce49de-1b8f-41d5-8f4a-244ec0826d92", - "updated_at": null, - "volumes_per_node": 1, - "is_proxy_gateway": false, - "name": "asd", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "jobtracker" - ], - "volumes_size": 10, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "description": "", - "is_protected": false - } - ], - "markers": { - "prev": null, - "next": "2c76e0d3-56cd-4d28-bb4f-4808e538c7b9" - } -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-create-request.json b/api-ref/source/v1.1/samples/clusters/cluster-create-request.json deleted file mode 100644 index c579a285cb..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-create-request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "user_keypair_id": "test", - "name": "vanilla-cluster", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd" -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-create-response.json b/api-ref/source/v1.1/samples/clusters/cluster-create-response.json deleted file mode 100644 index 992c22eaa0..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-create-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - 
"yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-scale-request.json b/api-ref/source/v1.1/samples/clusters/cluster-scale-request.json deleted file mode 100644 index 8b61d5ea0f..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-scale-request.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "add_node_groups": [ - { - "count": 1, - "name": "b-worker", - "node_group_template_id": "bc270ffe-a086-4eeb-9baa-2f5a73504622" - } - ], - "resize_node_groups": [ - { - "count": 4, - "name": "worker" - } - ] -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-scale-response.json b/api-ref/source/v1.1/samples/clusters/cluster-scale-response.json deleted file mode 100644 
index fa33ae3639..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-scale-response.json +++ /dev/null @@ -1,370 +0,0 @@ -{ - "cluster": { - "info": { - "YARN": { - "Web UI": "http://172.18.168.115:8088", - "ResourceManager": "http://172.18.168.115:8032" - }, - "HDFS": { - "Web UI": "http://172.18.168.115:50070", - "NameNode": "hdfs://vanilla-cluster-master-0:9000" - }, - "MapReduce JobHistory Server": { - "Web UI": "http://172.18.168.115:19888" - }, - "JobFlow": { - "Oozie": "http://172.18.168.115:11000" - } - }, - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "updated_at": "2015-09-14T11:01:15", - "name": "vanilla-cluster", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "trust_id": null, - "status_description": "", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "is_protected": false, - "is_transient": false, - "provision_progress": [ - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Create Heat stack", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:57:38", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:18", - "id": "0a6d95f9-30f4-4434-823a-a38a7999a5af" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Configure instances", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:22", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:16", - "id": "29f2b587-c34c-4871-9ed9-9235b411cd9a" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): Oozie", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:01:15", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:27", - "id": "36f1efde-90f9-41c1-b409-aa1cf9623e3e" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Configure instances", - "step_type": "Plugin: configure cluster", - "updated_at": "2015-09-14T10:59:21", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:22", - "id": "602bcc27-3a2d-42c8-8aca-ebc475319c72" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Configure topology data", - "step_type": "Plugin: configure cluster", - "updated_at": "2015-09-14T10:59:37", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:21", - "id": "7e291df1-2d32-410d-ae89-33ab6f83cf17" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 3, - "successful": true, - "step_name": "Start the following process(es): DataNodes, NodeManagers", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:11", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:01", - "id": 
"8ab7933c-ad61-4a4f-88db-23ce78ee10f6" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Await DataNodes start up", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:21", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:11", - "id": "9c8dc016-8c5b-4e80-9857-80c41f6bd971" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): HistoryServer", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:27", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:21", - "id": "c6327532-222b-416c-858f-73dbb32b8e97" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Wait for instance accessibility", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:14", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:41", - "id": "d3eca726-8b44-473a-ac29-fba45a893725" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 0, - "successful": true, - "step_name": "Mount volumes to instances", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:15", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:14", - "id": "d7a875ff-64bf-41aa-882d-b5061c8ee152" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): ResourceManager", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:00", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:55", - "id": "ded7d227-10b8-4cb0-ab6c-25da1462bb7a" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): NameNode", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T10:59:54", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:38", - "id": "e1701ff5-930a-4212-945a-43515dfe24d1" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Assign IPs", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:57:41", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:38", - "id": "eaf0ab1b-bf8f-48f0-8f2c-fa4f82f539b9" - } - ], - "status": "Active", - "description": null, - "use_autoconfig": true, - "shares": null, - "domain_name": null, - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "node_groups": [ - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:37", - "name": "b-worker", - "id": "b7a6dea4-c898-446b-8c67-4f378d4c06c4", - "node_group_template_id": "bc270ffe-a086-4eeb-9baa-2f5a73504622", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - 
"mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 - } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "datanode", - "nodemanager" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 1, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - }, - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:36", - "name": "master", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 - } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 1, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [ - { - "instance_id": "b9f16a07-88fc-423e-83a3-489598fe6737", - "internal_ip": "10.50.0.60", - "instance_name": "vanilla-cluster-master-0", - "updated_at": "2015-09-14T10:57:39", - "management_ip": "172.18.168.115", - "created_at": "2015-09-14T10:57:36", - "id": "4867d92e-cc7b-4cde-9a1a-149e91caa491" - } - ], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - }, - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:37", - "name": "worker", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 - } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "datanode", - "nodemanager" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 4, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [ - { - "instance_id": 
"0cf1ee81-aa72-48da-be2c-65bc2fa51f8f", - "internal_ip": "10.50.0.63", - "instance_name": "vanilla-cluster-worker-0", - "updated_at": "2015-09-14T10:57:39", - "management_ip": "172.18.168.118", - "created_at": "2015-09-14T10:57:37", - "id": "f3633b30-c1e4-4144-930b-ab5b780b87be" - }, - { - "instance_id": "4a937391-b594-4ad0-9a53-00a99a691383", - "internal_ip": "10.50.0.62", - "instance_name": "vanilla-cluster-worker-1", - "updated_at": "2015-09-14T10:57:40", - "management_ip": "172.18.168.117", - "created_at": "2015-09-14T10:57:37", - "id": "0d66fd93-f277-4a94-b46a-f5866aa0c38f" - }, - { - "instance_id": "839b1d56-6d0d-4aa4-9d05-30e029c276f8", - "internal_ip": "10.50.0.61", - "instance_name": "vanilla-cluster-worker-2", - "updated_at": "2015-09-14T10:57:40", - "management_ip": "172.18.168.116", - "created_at": "2015-09-14T10:57:37", - "id": "0982cefd-5c58-436e-8f1e-c1d0830f18a7" - } - ], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - } - ], - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "user_keypair_id": "apavlov", - "anti_affinity": [], - "created_at": "2015-09-14T10:57:11" - } -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-show-response.json b/api-ref/source/v1.1/samples/clusters/cluster-show-response.json deleted file mode 100644 index 992c22eaa0..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-show-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - 
"historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-update-request.json b/api-ref/source/v1.1/samples/clusters/cluster-update-request.json deleted file mode 100644 index ab01348afa..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "public-vanilla-cluster", - "is_public": true -} diff --git a/api-ref/source/v1.1/samples/clusters/cluster-update-response.json b/api-ref/source/v1.1/samples/clusters/cluster-update-response.json deleted file mode 100644 index 4dae13f4c9..0000000000 --- a/api-ref/source/v1.1/samples/clusters/cluster-update-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": true, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - 
"yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "public-vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v1.1/samples/clusters/clusters-list-response.json b/api-ref/source/v1.1/samples/clusters/clusters-list-response.json deleted file mode 100644 index 13d9d0650c..0000000000 --- a/api-ref/source/v1.1/samples/clusters/clusters-list-response.json +++ /dev/null @@ -1,327 +0,0 @@ -{ - "clusters": [ - { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": { 
- "YARN": { - "Web UI": "http://172.18.168.115:8088", - "ResourceManager": "http://172.18.168.115:8032" - }, - "HDFS": { - "Web UI": "http://172.18.168.115:50070", - "NameNode": "hdfs://vanilla-cluster-master-0:9000" - }, - "JobFlow": { - "Oozie": "http://172.18.168.115:11000" - }, - "MapReduce JobHistory Server": { - "Web UI": "http://172.18.168.115:19888" - } - }, - "user_keypair_id": "apavlov", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [ - { - "created_at": "2015-09-14T10:57:36", - "id": "4867d92e-cc7b-4cde-9a1a-149e91caa491", - "management_ip": "172.18.168.115", - "updated_at": "2015-09-14T10:57:39", - "instance_id": "b9f16a07-88fc-423e-83a3-489598fe6737", - "internal_ip": "10.50.0.60", - "instance_name": "vanilla-cluster-master-0" - } - ], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:36", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [ - { - "created_at": "2015-09-14T10:57:37", - "id": 
"f3633b30-c1e4-4144-930b-ab5b780b87be", - "management_ip": "172.18.168.118", - "updated_at": "2015-09-14T10:57:39", - "instance_id": "0cf1ee81-aa72-48da-be2c-65bc2fa51f8f", - "internal_ip": "10.50.0.63", - "instance_name": "vanilla-cluster-worker-0" - }, - { - "created_at": "2015-09-14T10:57:37", - "id": "0d66fd93-f277-4a94-b46a-f5866aa0c38f", - "management_ip": "172.18.168.117", - "updated_at": "2015-09-14T10:57:40", - "instance_id": "4a937391-b594-4ad0-9a53-00a99a691383", - "internal_ip": "10.50.0.62", - "instance_name": "vanilla-cluster-worker-1" - }, - { - "created_at": "2015-09-14T10:57:37", - "id": "0982cefd-5c58-436e-8f1e-c1d0830f18a7", - "management_ip": "172.18.168.116", - "updated_at": "2015-09-14T10:57:40", - "instance_id": "839b1d56-6d0d-4aa4-9d05-30e029c276f8", - "internal_ip": "10.50.0.61", - "instance_name": "vanilla-cluster-worker-2" - } - ], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:37", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [ - { - "created_at": "2015-09-14T10:57:18", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "0a6d95f9-30f4-4434-823a-a38a7999a5af", - "step_type": "Engine: create cluster", - "step_name": "Create Heat stack", - "updated_at": "2015-09-14T10:57:38", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:16", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "29f2b587-c34c-4871-9ed9-9235b411cd9a", - "step_type": "Engine: create cluster", - "step_name": "Configure instances", - "updated_at": "2015-09-14T10:58:22", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:27", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "36f1efde-90f9-41c1-b409-aa1cf9623e3e", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): Oozie", - "updated_at": "2015-09-14T11:01:15", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:22", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "602bcc27-3a2d-42c8-8aca-ebc475319c72", - "step_type": "Plugin: configure cluster", - "step_name": "Configure instances", - "updated_at": "2015-09-14T10:59:21", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:21", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "7e291df1-2d32-410d-ae89-33ab6f83cf17", - "step_type": "Plugin: configure cluster", - "step_name": "Configure topology data", - "updated_at": "2015-09-14T10:59:37", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:01", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "8ab7933c-ad61-4a4f-88db-23ce78ee10f6", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): DataNodes, NodeManagers", - "updated_at": "2015-09-14T11:00:11", - "successful": true, - "total": 3, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - 
"created_at": "2015-09-14T11:00:11", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "9c8dc016-8c5b-4e80-9857-80c41f6bd971", - "step_type": "Plugin: start cluster", - "step_name": "Await DataNodes start up", - "updated_at": "2015-09-14T11:00:21", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:21", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "c6327532-222b-416c-858f-73dbb32b8e97", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): HistoryServer", - "updated_at": "2015-09-14T11:00:27", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:57:41", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "d3eca726-8b44-473a-ac29-fba45a893725", - "step_type": "Engine: create cluster", - "step_name": "Wait for instance accessibility", - "updated_at": "2015-09-14T10:58:14", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:14", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "d7a875ff-64bf-41aa-882d-b5061c8ee152", - "step_type": "Engine: create cluster", - "step_name": "Mount volumes to instances", - "updated_at": "2015-09-14T10:58:15", - "successful": true, - "total": 0, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:55", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "ded7d227-10b8-4cb0-ab6c-25da1462bb7a", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): ResourceManager", - "updated_at": "2015-09-14T11:00:00", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:38", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "e1701ff5-930a-4212-945a-43515dfe24d1", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): NameNode", - "updated_at": "2015-09-14T10:59:54", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:57:38", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "id": "eaf0ab1b-bf8f-48f0-8f2c-fa4f82f539b9", - "step_type": "Engine: create cluster", - "step_name": "Assign IPs", - "updated_at": "2015-09-14T10:57:41", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - } - ], - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T11:01:15", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Active" - } - ] -} diff --git a/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-request.json b/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-request.json deleted file mode 100644 index d5f0d29e87..0000000000 --- a/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "plugin_name": "vanilla", - "hadoop_version": "2.6.0", - "cluster_template_id": "9951f86d-57ba-43d6-9cb0-14ed2ec7a6cf", - "default_image_id": "bc3c3d3c-2684-4bf8-a9fa-388fb71288a9", - 
"user_keypair_id": "test", - "name": "def-cluster", - "count": 2, - "cluster_configs": {}, - "neutron_management_network": "7e31648b-4b2e-4f32-9b0a-113581c27076" -} diff --git a/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-response.json b/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-response.json deleted file mode 100644 index 5b13bca55d..0000000000 --- a/api-ref/source/v1.1/samples/clusters/multiple-clusters-create-response.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "clusters": [ - "a007a3e7-658f-4568-b0f2-fe2fd5efc554", - "b012a6et-65hf-4566-b0f2-fe3fd7efc567" - ] -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-request.json b/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-request.json deleted file mode 100644 index 9d9c9c945c..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs", - "name": "hdfs_input" -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-response.json b/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-response.json deleted file mode 100644 index 6d637044f9..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-register-hdfs-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:09:36.148464", - "id": "d7fffe9c-3b42-46a9-8be8-e98f586fa7a9", - "updated_at": null, - "name": "hdfs_input", - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs" - } -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-request.json b/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-request.json deleted file mode 100644 index 30a1e535dd..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-request.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "description": "This is input", - "url": "swift://container/text", - "credentials": { - "password": "swordfish", - "user": "dev" - }, - "type": "swift", - "name": "swift_input" -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-response.json b/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-response.json deleted file mode 100644 index 66a8c7bf0d..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-register-swift-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:18:10.691493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": null, - "name": "swift_input", - "description": "This is input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-show-response.json b/api-ref/source/v1.1/samples/data-sources/data-source-show-response.json deleted file mode 100644 index 66a8c7bf0d..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-show-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 
11:18:10.691493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": null, - "name": "swift_input", - "description": "This is input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-update-request.json b/api-ref/source/v1.1/samples/data-sources/data-source-update-request.json deleted file mode 100644 index 8397ae6545..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "description": "This is public input", - "is_protected": true -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-source-update-response.json b/api-ref/source/v1.1/samples/data-sources/data-source-update-response.json deleted file mode 100644 index d874ed1cd1..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-source-update-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": true, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-09-15 12:32:24.847493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": "2015-09-15 12:34:42.597435", - "name": "swift_input", - "description": "This is public input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v1.1/samples/data-sources/data-sources-list-response.json b/api-ref/source/v1.1/samples/data-sources/data-sources-list-response.json deleted file mode 100644 index 724542ec7c..0000000000 --- a/api-ref/source/v1.1/samples/data-sources/data-sources-list-response.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "data_sources": [ - { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:18:10", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "name": "swift_input", - "updated_at": null, - "description": "This is input", - "url": "swift://container/text", - "type": "swift" - }, - { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:09:36", - "id": "d7fffe9c-3b42-46a9-8be8-e98f586fa7a9", - "name": "hdfs_input", - "updated_at": null, - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs" - } - ] -} diff --git a/api-ref/source/v1.1/samples/event-log/cluster-progress-response.json b/api-ref/source/v1.1/samples/event-log/cluster-progress-response.json deleted file mode 100644 index f1923093e3..0000000000 --- a/api-ref/source/v1.1/samples/event-log/cluster-progress-response.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "status": "Error", - "neutron_management_network": "7e31648b-4b2e-4f32-9b0a-113581c27076", - "is_transient": false, - "description": "", - "user_keypair_id": "vgridnev", - "updated_at": "2015-03-31 14:10:59", - "plugin_name": "spark", - "provision_progress": [ - { - "successful": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-31 14:10:20", - "step_type": "Engine: create cluster", - "updated_at": "2015-03-31 14:10:35", - "events": [ - { - "instance_name": "sample-worker-spark-004", - "successful": false, - "created_at": "2015-03-31 14:10:35", - "updated_at": null, - "event_info": "Node sample-worker-spark-004 has error status\nError ID: 3e238c82-d1f5-4560-8ed8-691e923e16a0", - "instance_id": "b5ba5ba8-e9c1-47f7-9355-3ce0ec0e449d", - "node_group_id": "145cf2fb-dcdf-42af-a4b9-a4047d2919d4", - "step_id": 
"3f243c67-2c27-47c7-a0c0-0834ad17f8b6", - "id": "34afcfc7-bdb0-43cb-b142-283d560dc6ad" - }, - { - "instance_name": "sample-worker-spark-001", - "successful": true, - "created_at": "2015-03-31 14:10:35", - "updated_at": null, - "event_info": null, - "instance_id": "c532ab71-38da-475a-95f8-f8eb93b8f1c2", - "node_group_id": "145cf2fb-dcdf-42af-a4b9-a4047d2919d4", - "step_id": "3f243c67-2c27-47c7-a0c0-0834ad17f8b6", - "id": "4ba50414-5216-4161-bc7a-12716122b99d" - } - ], - "cluster_id": "c26ec982-ba6b-4d75-818c-a50240164af0", - "step_name": "Wait for instances to become active", - "total": 5, - "id": "3f243c67-2c27-47c7-a0c0-0834ad17f8b6" - }, - { - "successful": true, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-31 14:10:12", - "step_type": "Engine: create cluster", - "updated_at": "2015-03-31 14:10:19", - "events": [], - "cluster_id": "c26ec982-ba6b-4d75-818c-a50240164af0", - "step_name": "Run instances", - "total": 5, - "id": "407ba50a-c799-46af-9dfb-6aa5f6ade426" - } - ], - "anti_affinity": [], - "node_groups": [], - "management_public_key": "Sahara", - "status_description": "Creating cluster failed for the following reason(s): Node sample-worker-spark-004 has error status\nError ID: 3e238c82-d1f5-4560-8ed8-691e923e16a0", - "hadoop_version": "1.0.0", - "id": "c26ec982-ba6b-4d75-1f8c-a50240164af0", - "trust_id": null, - "info": {}, - "cluster_template_id": "5a9a09a3-9349-43bd-9058-16c401fad2d5", - "name": "sample", - "cluster_configs": {}, - "created_at": "2015-03-31 14:10:07", - "default_image_id": "e6a6c5da-67be-4017-a7d2-81f466efe67e", - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda" -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-register-request.json b/api-ref/source/v1.1/samples/image-registry/image-register-request.json deleted file mode 100644 index 7bd4d15efd..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-register-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "ubuntu", - "description": "Ubuntu image for Hadoop 2.7.1" -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-register-response.json b/api-ref/source/v1.1/samples/image-registry/image-register-response.json deleted file mode 100644 index 5851a58ec2..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-register-response.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:05:10Z", - "metadata": { - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.7.1-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-show-response.json b/api-ref/source/v1.1/samples/image-registry/image-show-response.json deleted file mode 100644 index 0f09f23f56..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-show-response.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "image": { - "updated": "2015-02-03T10:29:32Z", - "metadata": { - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.6.0": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.6.0" - ], - 
"minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.6.0-ubuntu-14.04", - "description": null, - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-tags-add-request.json b/api-ref/source/v1.1/samples/image-registry/image-tags-add-request.json deleted file mode 100644 index aa69662a6a..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-tags-add-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "tags": [ - "vanilla", - "2.7.1", - "some_other_tag" - ] -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-tags-add-response.json b/api-ref/source/v1.1/samples/image-registry/image-tags-add-response.json deleted file mode 100644 index 2c66b2930d..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-tags-add-response.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:18:33Z", - "metadata": { - "_sahara_tag_vanilla": true, - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_some_other_tag": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "some_other_tag", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.6.0-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-tags-delete-request.json b/api-ref/source/v1.1/samples/image-registry/image-tags-delete-request.json deleted file mode 100644 index 44e1cef468..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-tags-delete-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "tags": [ - "some_other_tag" - ] -} diff --git a/api-ref/source/v1.1/samples/image-registry/image-tags-delete-response.json b/api-ref/source/v1.1/samples/image-registry/image-tags-delete-response.json deleted file mode 100644 index 44eb131390..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/image-tags-delete-response.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:19:28Z", - "metadata": { - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.7.1-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v1.1/samples/image-registry/images-list-response.json b/api-ref/source/v1.1/samples/image-registry/images-list-response.json deleted file mode 100644 index d40f0c215f..0000000000 --- a/api-ref/source/v1.1/samples/image-registry/images-list-response.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "images": [ - { - "name": "ubuntu-vanilla-2.7.1", - "id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "created": "2015-08-06T08:17:14Z", - "metadata": { - "_sahara_tag_2.7.1": true, - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true - }, - "username": "ubuntu", - "progress": 100, - "OS-EXT-IMG-SIZE:size": 998716928, - "status": "ACTIVE", - "minDisk": 0, - "tags": [ - "vanilla", - 
"2.7.1" - ], - "updated": "2015-09-04T09:35:09Z", - "minRam": 0, - "description": null - }, - { - "name": "cdh-latest", - "id": "ff74035b-9da7-4edf-981d-57f270ed337d", - "created": "2015-09-04T11:56:44Z", - "metadata": { - "_sahara_username": "ubuntu", - "_sahara_tag_5.4.0": true, - "_sahara_tag_cdh": true - }, - "username": "ubuntu", - "progress": 100, - "OS-EXT-IMG-SIZE:size": 3281453056, - "status": "ACTIVE", - "minDisk": 0, - "tags": [ - "5.4.0", - "cdh" - ], - "updated": "2015-09-04T12:46:42Z", - "minRam": 0, - "description": null - } - ] -} diff --git a/api-ref/source/v1.1/samples/job-binaries/create-request.json b/api-ref/source/v1.1/samples/job-binaries/create-request.json deleted file mode 100644 index f32e15b40f..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/create-request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "url": "swift://container/jar-example.jar", - "name": "jar-example.jar", - "description": "This is a job binary", - "extra": { - "password": "swordfish", - "user": "admin" - } -} diff --git a/api-ref/source/v1.1/samples/job-binaries/create-response.json b/api-ref/source/v1.1/samples/job-binaries/create-response.json deleted file mode 100644 index feba49ef79..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/create-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "This is a job binary", - "url": "swift://container/jar-example.jar", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:49:20.106452", - "id": "07f86352-ee8a-4b08-b737-d705ded5ff9c", - "updated_at": null, - "name": "jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-binaries/list-response.json b/api-ref/source/v1.1/samples/job-binaries/list-response.json deleted file mode 100644 index 3e5e5539a4..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/list-response.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "binaries": [ - { - "is_public": false, - "description": "", - "url": "internal-db://d2498cbf-4589-484a-a814-81436c18beb3", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:36:59.375060", - "updated_at": null, - "id": "84248975-3c82-4206-a58d-6e7fb3a563fd", - "name": "example.pig", - "is_protected": false - }, - { - "is_public": false, - "description": "", - "url": "internal-db://22f1d87a-23c8-483e-a0dd-cb4a16dde5f9", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:43:52.265899", - "updated_at": null, - "id": "508fc62d-1d58-4412-b603-bdab307bb926", - "name": "udf.jar", - "is_protected": false - }, - { - "is_public": false, - "description": "", - "url": "swift://container/jar-example.jar", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:25:04.970513", - "updated_at": null, - "id": "a716a9cd-9add-4b12-b1b6-cdb71aaef350", - "name": "jar-example.jar", - "is_protected": false - } - ] -} diff --git a/api-ref/source/v1.1/samples/job-binaries/show-data-response b/api-ref/source/v1.1/samples/job-binaries/show-data-response deleted file mode 100644 index 8765f0c6c5..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/show-data-response +++ /dev/null @@ -1,3 +0,0 @@ -A = load '$INPUT' using PigStorage(':') as (fruit: chararray); -B = foreach A generate com.hadoopbook.pig.Trim(fruit); -store B into '$OUTPUT' USING PigStorage(); \ No newline at end of file diff --git a/api-ref/source/v1.1/samples/job-binaries/show-response.json 
b/api-ref/source/v1.1/samples/job-binaries/show-response.json deleted file mode 100644 index a46f819067..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/show-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "an example jar file", - "url": "swift://container/jar-example.jar", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:25:04.970513", - "updated_at": null, - "id": "a716a9cd-9add-4b12-b1b6-cdb71aaef350", - "name": "jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-binaries/update-request.json b/api-ref/source/v1.1/samples/job-binaries/update-request.json deleted file mode 100644 index 456b0b209c..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/update-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "url": "swift://container/new-jar-example.jar", - "name": "new-jar-example.jar", - "description": "This is a new job binary" -} diff --git a/api-ref/source/v1.1/samples/job-binaries/update-response.json b/api-ref/source/v1.1/samples/job-binaries/update-response.json deleted file mode 100644 index 947049e6eb..0000000000 --- a/api-ref/source/v1.1/samples/job-binaries/update-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "This is a new job binary", - "url": "swift://container/new-jar-example.jar", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2015-09-15 12:42:51.421542", - "updated_at": null, - "id": "b713d7ad-4add-4f12-g1b6-cdg71aaef350", - "name": "new-jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-binary-internals/create-response.json b/api-ref/source/v1.1/samples/job-binary-internals/create-response.json deleted file mode 100644 index a8d23d58ce..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/create-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "job_binary_internal": { - "is_public": false, - "name": "script.pig", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 13:17:35.994466", - "updated_at": null, - "datasize": 160, - "id": "4833dc4b-8682-4d5b-8a9f-2036b47a0996", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-binary-internals/list-response.json b/api-ref/source/v1.1/samples/job-binary-internals/list-response.json deleted file mode 100644 index d5a2909ab3..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/list-response.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "binaries": [ - { - "is_public": false, - "name": "example.pig", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:36:59.329034", - "updated_at": null, - "datasize": 161, - "id": "d2498cbf-4589-484a-a814-81436c18beb3", - "is_protected": false - }, - { - "is_public": false, - "name": "udf.jar", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:43:52.008620", - "updated_at": null, - "datasize": 3745, - "id": "22f1d87a-23c8-483e-a0dd-cb4a16dde5f9", - "is_protected": false - } - ] -} diff --git a/api-ref/source/v1.1/samples/job-binary-internals/show-data-response b/api-ref/source/v1.1/samples/job-binary-internals/show-data-response deleted file mode 100644 index 12df7a847a..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/show-data-response +++ /dev/null @@ -1,3 +0,0 @@ -A = load '$INPUT' using PigStorage(':') as (fruit: chararray); -B = foreach A generate com.hadoopbook.pig.Trim(fruit); 
-store B into '$OUTPUT' USING PigStorage() \ No newline at end of file diff --git a/api-ref/source/v1.1/samples/job-binary-internals/show-response.json b/api-ref/source/v1.1/samples/job-binary-internals/show-response.json deleted file mode 100644 index a8d23d58ce..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/show-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "job_binary_internal": { - "is_public": false, - "name": "script.pig", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 13:17:35.994466", - "updated_at": null, - "datasize": 160, - "id": "4833dc4b-8682-4d5b-8a9f-2036b47a0996", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-binary-internals/update-request.json b/api-ref/source/v1.1/samples/job-binary-internals/update-request.json deleted file mode 100644 index d6e2920525..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "public-jbi", - "is_public": true -} diff --git a/api-ref/source/v1.1/samples/job-binary-internals/update-response.json b/api-ref/source/v1.1/samples/job-binary-internals/update-response.json deleted file mode 100644 index 19fed48c5f..0000000000 --- a/api-ref/source/v1.1/samples/job-binary-internals/update-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "job_binary_internal": { - "is_public": true, - "name": "public-jbi", - "tenant_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2015-09-15 13:21:54.485912", - "updated_at": "2015-09-15 13:24:24.590124", - "datasize": 200, - "id": "2433dc4b-8682-4d5b-8a9f-2036d47a0996", - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/job-executions/cancel-response.json b/api-ref/source/v1.1/samples/job-executions/cancel-response.json deleted file mode 100644 index 251c746c6b..0000000000 --- a/api-ref/source/v1.1/samples/job-executions/cancel-response.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "job_execution": { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": false, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - 
"trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } -} diff --git a/api-ref/source/v1.1/samples/job-executions/job-ex-response.json b/api-ref/source/v1.1/samples/job-executions/job-ex-response.json deleted file mode 100644 index 251c746c6b..0000000000 --- a/api-ref/source/v1.1/samples/job-executions/job-ex-response.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "job_execution": { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": false, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - 
"start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } -} diff --git a/api-ref/source/v1.1/samples/job-executions/job-ex-update-request.json b/api-ref/source/v1.1/samples/job-executions/job-ex-update-request.json deleted file mode 100644 index 647a4175b9..0000000000 --- a/api-ref/source/v1.1/samples/job-executions/job-ex-update-request.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "is_public": true -} diff --git 
a/api-ref/source/v1.1/samples/job-executions/job-ex-update-response.json b/api-ref/source/v1.1/samples/job-executions/job-ex-update-response.json deleted file mode 100644 index a5d2484792..0000000000 --- a/api-ref/source/v1.1/samples/job-executions/job-ex-update-response.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "job_execution": { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": true, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n 
hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } -} diff --git a/api-ref/source/v1.1/samples/job-executions/list-response.json b/api-ref/source/v1.1/samples/job-executions/list-response.json deleted file mode 100644 index fb085254a3..0000000000 --- a/api-ref/source/v1.1/samples/job-executions/list-response.json +++ /dev/null @@ -1,122 +0,0 @@ -{ - "job_executions": [ - { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": false, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": 
"0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } - ] -} diff --git a/api-ref/source/v1.1/samples/job-types/job-types-list-response.json b/api-ref/source/v1.1/samples/job-types/job-types-list-response.json deleted file mode 100644 index c321c4fbd3..0000000000 --- a/api-ref/source/v1.1/samples/job-types/job-types-list-response.json +++ /dev/null @@ -1,209 +0,0 @@ -{ - "job_types": [ - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Hive" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. 
It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Java" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "MapReduce" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "MapReduce.Streaming" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Pig" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. 
It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Shell" - }, - { - "plugins": [ - { - "description": "This plugin provides an ability to launch Spark on Hadoop CDH cluster without any management consoles.", - "versions": { - "1.0.0": {} - }, - "title": "Apache Spark", - "name": "spark" - } - ], - "name": "Spark" - } - ] -} diff --git a/api-ref/source/v1.1/samples/jobs/job-create-request.json b/api-ref/source/v1.1/samples/jobs/job-create-request.json deleted file mode 100644 index b8d1a8ed19..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-create-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "description": "This is pig job example", - "mains": [ - "90d9d5ec-11aa-48bd-bc8c-34936ce0db6e" - ], - "libs": [ - "320a2ca7-25fd-4b48-9bc3-4fb1b6c4ff27" - ], - "type": "Pig", - "name": "pig-job-example" -} diff --git a/api-ref/source/v1.1/samples/jobs/job-create-response.json b/api-ref/source/v1.1/samples/jobs/job-create-response.json deleted file mode 100644 index 1413d83d22..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-create-response.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "job": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-27 08:48:38.630827", - "id": "71defc8f-d005-484f-9d86-1aedf644d1ef", - "name": "pig-job-example", - "description": "This is pig job example", - "interface": [], - "libs": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:53", - "id": "320a2ca7-25fd-4b48-9bc3-4fb1b6c4ff27", - "name": "binary-job", - "updated_at": null, - "description": "", - "url": "internal-db://c6a925fa-ac1d-4b2e-b88a-7054e1927521" - } - ], - "type": "Pig", - "is_protected": false, - "mains": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-03 10:47:51", - "id": "90d9d5ec-11aa-48bd-bc8c-34936ce0db6e", - "name": "pig", - "updated_at": null, - "description": "", - "url": "internal-db://872878f6-72ea-44db-8d1d-e6a6396d2df0" - } - ] - } -} diff --git a/api-ref/source/v1.1/samples/jobs/job-execute-request.json b/api-ref/source/v1.1/samples/jobs/job-execute-request.json deleted file mode 100644 index 588358c819..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-execute-request.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "job_configs": { - "configs": { - "mapred.map.tasks": "1", - "mapred.reduce.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - } -} diff --git a/api-ref/source/v1.1/samples/jobs/job-execute-response.json b/api-ref/source/v1.1/samples/jobs/job-execute-response.json deleted file mode 100644 index 28e66fe908..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-execute-response.json +++ /dev/null @@ -1,30 +0,0 @@ 
-{ - "job_execution": { - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "is_protected": false, - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "created_at": "2015-09-15T09:49:24", - "is_public": false, - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "info": { - "status": "PENDING" - } - } -} diff --git a/api-ref/source/v1.1/samples/jobs/job-show-response.json b/api-ref/source/v1.1/samples/jobs/job-show-response.json deleted file mode 100644 index 43653e971e..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-show-response.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "job": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "Edp-test-job", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/jobs/job-update-request.json b/api-ref/source/v1.1/samples/jobs/job-update-request.json deleted file mode 100644 index 810b8a60b1..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "description": "This is public pig job example", - "name": "public-pig-job-example" -} diff --git a/api-ref/source/v1.1/samples/jobs/job-update-response.json b/api-ref/source/v1.1/samples/jobs/job-update-response.json deleted file mode 100644 index 0ee7e27343..0000000000 --- a/api-ref/source/v1.1/samples/jobs/job-update-response.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "job": { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "public-pig-job-example", - "updated_at": null, - "description": "This is public pig job example", - "interface": [], - "libs": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - } -} diff --git a/api-ref/source/v1.1/samples/jobs/jobs-list-response.json b/api-ref/source/v1.1/samples/jobs/jobs-list-response.json deleted file mode 100644 index e19b3d59c4..0000000000 --- a/api-ref/source/v1.1/samples/jobs/jobs-list-response.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "jobs": [ - { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "Edp-test-job-3d60854e", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": 
"0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job-339c2d1a.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job-339c2d1a.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - }, - { - "is_public": false, - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "4d1f3759-3497-4927-8352-910bacf24e62", - "name": "Edp-test-job-6b6953c8", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "e0d47800-4ac1-4d63-a2e1-c92d669a44e2", - "name": "binary-job-6f21a2f8.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-b409ec68.sahara/binary-job-6f21a2f8.jar" - } - ], - "type": "Pig", - "mains": [ - { - "tenant_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "e073e896-f123-4b76-995f-901d786262df", - "name": "binary-job-d4f8bd75.pig", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-b409ec68.sahara/binary-job-d4f8bd75.pig" - } - ], - "is_protected": false - } - ], - "markers": { - "prev": null, - "next": "c53832da-6e7b-449e-a166-9f9ce1718d03" - } -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-request.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-request.json deleted file mode 100644 index 802fcfb307..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-request.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "name": "master", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "flavor_id": "2" -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-response.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-response.json deleted file mode 100644 index c9f9851a41..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-create-response.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "node_group_template": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "is_protected": false, - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "security_groups": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-show-response.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-template-show-response.json deleted file mode 100644 index 78fa9f970c..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-show-response.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "node_group_template": { - "is_public": false, 
- "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "description": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - } -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-request.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-request.json deleted file mode 100644 index 124ef61875..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-request.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "plugin_name": "vanilla", - "hadoop_version": "2.7.1", - "node_processes": [ - "datanode" - ], - "name": "new", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "flavor_id": "2" -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-response.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-response.json deleted file mode 100644 index bbb3161f2e..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-template-update-response.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "node_group_template": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "is_protected": false, - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "security_groups": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "new", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } -} diff --git a/api-ref/source/v1.1/samples/node-group-templates/node-group-templates-list-response.json b/api-ref/source/v1.1/samples/node-group-templates/node-group-templates-list-response.json deleted file mode 100644 index c41683e926..0000000000 --- a/api-ref/source/v1.1/samples/node-group-templates/node-group-templates-list-response.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "node_group_templates": [ - { - "is_public": false, - "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "description": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - 
"is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - }, - { - "is_public": false, - "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "description": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:27:00", - "is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - } - ], - "markers": { - "prev":"39dfc852-8588-4b61-8d2b-eb08a67ab240", - "next":"eaa0bd97-ab54-43df-83ab-77a9774d7358" - } -} diff --git a/api-ref/source/v1.1/samples/plugins/plugin-show-response.json b/api-ref/source/v1.1/samples/plugins/plugin-show-response.json deleted file mode 100644 index 00b948a0e6..0000000000 --- a/api-ref/source/v1.1/samples/plugins/plugin-show-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "plugin": { - "name": "vanilla", - "versions": [ - "1.2.1", - "2.4.1", - "2.6.0" - ], - "title": "Vanilla Apache Hadoop", - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component." - } -} diff --git a/api-ref/source/v1.1/samples/plugins/plugin-update-request.json b/api-ref/source/v1.1/samples/plugins/plugin-update-request.json deleted file mode 100644 index 97a17c38f2..0000000000 --- a/api-ref/source/v1.1/samples/plugins/plugin-update-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "plugin_labels": { - "enabled": { - "status": false - } - } -} diff --git a/api-ref/source/v1.1/samples/plugins/plugin-update-response.json b/api-ref/source/v1.1/samples/plugins/plugin-update-response.json deleted file mode 100644 index 7541ae939c..0000000000 --- a/api-ref/source/v1.1/samples/plugins/plugin-update-response.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "plugin": { - "plugin_labels": { - "hidden": { - "status": true, - "mutable": true, - "description": "Existence of plugin or its version is hidden, but still can be used for cluster creation by CLI and directly by client." - }, - "enabled": { - "status": false, - "mutable": true, - "description": "Plugin or its version is enabled and can be used by user." - } - }, - "description": "It's a fake plugin that aimed to work on the CirrOS images. It doesn't install Hadoop. It's needed to be able to test provisioning part of Sahara codebase itself.", - "versions": [ - "0.1" - ], - "tenant_id": "993f53c1f51845e48e013aeb632358d8", - "title": "Fake Plugin", - "version_labels": { - "0.1": { - "enabled": { - "status": true, - "mutable": true, - "description": "Plugin or its version is enabled and can be used by user." 
- } - } - }, - "name": "fake" - } -} diff --git a/api-ref/source/v1.1/samples/plugins/plugin-version-show-response.json b/api-ref/source/v1.1/samples/plugins/plugin-version-show-response.json deleted file mode 100644 index cb1c175a59..0000000000 --- a/api-ref/source/v1.1/samples/plugins/plugin-version-show-response.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "plugin": { - "name": "vanilla", - "versions": [ - "1.2.1", - "2.4.1", - "2.6.0" - ], - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "required_image_tags": [ - "vanilla", - "2.6.0" - ], - "node_processes": { - "JobFlow": [ - "oozie" - ], - "HDFS": [ - "namenode", - "datanode", - "secondarynamenode" - ], - "YARN": [ - "resourcemanager", - "nodemanager" - ], - "MapReduce": [ - "historyserver" - ], - "Hadoop": [], - "Hive": [ - "hiveserver" - ] - }, - "configs": [ - { - "default_value": "/tmp/hadoop-${user.name}", - "name": "hadoop.tmp.dir", - "priority": 2, - "config_type": "string", - "applicable_target": "HDFS", - "is_optional": true, - "scope": "node", - "description": "A base for other temporary directories." - }, - { - "default_value": true, - "name": "hadoop.native.lib", - "priority": 2, - "config_type": "bool", - "applicable_target": "HDFS", - "is_optional": true, - "scope": "node", - "description": "Should native hadoop libraries, if present, be used." - }, - { - "default_value": 1024, - "name": "NodeManager Heap Size", - "config_values": null, - "priority": 1, - "config_type": "int", - "applicable_target": "YARN", - "is_optional": false, - "scope": "node", - "description": null - }, - { - "default_value": true, - "name": "Enable Swift", - "config_values": null, - "priority": 1, - "config_type": "bool", - "applicable_target": "general", - "is_optional": false, - "scope": "cluster", - "description": null - }, - { - "default_value": true, - "name": "Enable MySQL", - "config_values": null, - "priority": 1, - "config_type": "bool", - "applicable_target": "general", - "is_optional": true, - "scope": "cluster", - "description": null - } - ], - "title": "Vanilla Apache Hadoop" - } -} diff --git a/api-ref/source/v1.1/samples/plugins/plugins-list-response.json b/api-ref/source/v1.1/samples/plugins/plugins-list-response.json deleted file mode 100644 index d92d85c114..0000000000 --- a/api-ref/source/v1.1/samples/plugins/plugins-list-response.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "plugins": [ - { - "name": "vanilla", - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. 
It can also deploy the Oozie component.", - "versions": [ - "1.2.1", - "2.4.1", - "2.6.0" - ], - "title": "Vanilla Apache Hadoop" - }, - { - "name": "hdp", - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": [ - "1.3.2", - "2.0.6" - ], - "title": "Hortonworks Data Platform" - }, - { - "name": "spark", - "description": "This plugin provides an ability to launch Spark on Hadoop CDH cluster without any management consoles.", - "versions": [ - "1.0.0", - "0.9.1" - ], - "title": "Apache Spark" - }, - { - "name": "cdh", - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": [ - "5", - "5.3.0" - ], - "title": "Cloudera Plugin" - } - ] -} diff --git a/api-ref/source/v2/cluster-templates.inc b/api-ref/source/v2/cluster-templates.inc deleted file mode 100644 index 575af2a737..0000000000 --- a/api-ref/source/v2/cluster-templates.inc +++ /dev/null @@ -1,241 +0,0 @@ -.. -*- rst -*- - -================= -Cluster templates -================= - -A cluster template configures a cluster. A cluster template -lists node groups with the number of instances in each group. You -can also define cluster-scoped configurations in a cluster -template. - - -Show cluster template details -============================= - -.. rest_method:: GET /v2/cluster-templates/{cluster_template_id} - -Shows details for a cluster template. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_template_id: url_cluster_template_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - project_id: project_id - - node_groups: node_groups - - is_public: object_is_public - - plugin_version: plugin_version - - id: cluster_template_id - - name: cluster_template_name - - - -Response Example ----------------- - -.. literalinclude:: samples/cluster-templates/cluster-templates-list-response.json - :language: javascript - - - - -Update cluster templates -======================== - -.. rest_method:: PATCH /v2/cluster-templates/{cluster_template_id} - -Updates a cluster template. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_template_id: cluster_template_id - -Request Example ---------------- - -.. literalinclude:: samples/cluster-templates/cluster-template-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - project_id: project_id - - node_groups: node_groups - - is_public: object_is_public - - plugin_version: plugin_version - - id: cluster_template_id - - name: cluster_template_name - - - - - -Delete cluster template -======================= - -.. rest_method:: DELETE /v2/cluster-templates/{cluster_template_id} - -Deletes a cluster template. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_template_id: cluster_template_id - - - - - - -List cluster templates -====================== - -.. rest_method:: GET /v2/cluster-templates - -Lists available cluster templates. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_cluster_templates - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - project_id: project_id - - node_groups: node_groups - - is_public: object_is_public - - plugin_version: plugin_version - - id: cluster_template_id - - name: cluster_template_name - - - -Response Example ----------------- -.. rest_method:: GET /v2/cluster-templates?limit=2 - -.. literalinclude:: samples/cluster-templates/cluster-templates-list-response.json - :language: javascript - - - - -Create cluster templates -======================== - -.. rest_method:: POST /v2/cluster-templates - -Creates a cluster template. - -Normal response codes:202 - - -Request Example ---------------- - -.. literalinclude:: samples/cluster-templates/cluster-template-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: cluster_template_description - - use_autoconfig: use_autoconfig - - cluster_configs: cluster_configs - - created_at: created_at - - default_image_id: default_image_id - - updated_at: updated_at - - plugin_name: plugin_name - - is_default: is_default - - is_protected: object_is_protected - - shares: object_shares - - domain_name: domain_name - - project_id: project_id - - node_groups: node_groups - - is_public: object_is_public - - plugin_version: plugin_version - - id: cluster_template_id - - name: cluster_template_name - - - - diff --git a/api-ref/source/v2/clusters.inc b/api-ref/source/v2/clusters.inc deleted file mode 100644 index 6fad3aa4a4..0000000000 --- a/api-ref/source/v2/clusters.inc +++ /dev/null @@ -1,293 +0,0 @@ -.. -*- rst -*- - -======== -Clusters -======== - -A cluster is a group of nodes with the same configuration. - - -List available clusters -======================= - -.. rest_method:: GET /v2/clusters - -Lists available clusters. - - -Normal response codes: 200 - - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_clusters - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - clusters: clusters - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - -Response Example ----------------- -.. rest_method:: GET /v2/clusters - -.. literalinclude:: samples/clusters/clusters-list-response.json - :language: javascript - - - - -Create cluster -============== - -.. rest_method:: POST /v2/clusters - -Creates a cluster. - -Normal response codes: 202 - - - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - - -Show details of a cluster -========================= - -.. rest_method:: GET /v2/clusters/{cluster_id} - -Shows details for a cluster, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: url_cluster_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - -Response Example ----------------- - -.. literalinclude:: samples/clusters/cluster-show-response.json - :language: javascript - - - - -Delete a cluster -================ - -.. rest_method:: DELETE /v2/clusters/{cluster_id} - -Deletes a cluster. - -Normal response codes: 204 or 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: url_cluster_id - - force: force - - - - - - -Scale cluster -============= - -.. rest_method:: PUT /v2/clusters/{cluster_id} - -Scales a cluster. - -Normal response codes: 202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: cluster_id - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-scale-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - -Update cluster -============== - -.. 
rest_method:: PATCH /v2/clusters/{cluster_id} - -Updates a cluster. - -Normal response codes: 202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: url_cluster_id - -Request Example ---------------- - -.. literalinclude:: samples/clusters/cluster-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - count: count - - info: info - - cluster_template_id: cluster_template_id - - is_transient: is_transient - - provision_progress: provision_progress - - status: status - - neutron_management_network: neutron_management_network - - management_public_key: management_public_key - - status_description: status_description - - trust_id: trust_id - - domain_name: domain_name - - - - -Show progress -============= - -.. rest_method:: GET /v2/clusters/{cluster_id} - -Shows provisioning progress for a cluster. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: url_cluster_id - - - - -Response Example ----------------- - -.. literalinclude:: samples/event-log/cluster-progress-response.json - :language: javascript - - - diff --git a/api-ref/source/v2/data-sources.inc b/api-ref/source/v2/data-sources.inc deleted file mode 100644 index f4e98ae7bf..0000000000 --- a/api-ref/source/v2/data-sources.inc +++ /dev/null @@ -1,202 +0,0 @@ -.. -*- rst -*- - -============ -Data sources -============ - -A data source object defines the location of input or output for -MapReduce jobs and might reference different types of storage. - -The Data Processing service does not validate data source -locations. - - -Show data source details -======================== - -.. rest_method:: GET /v2/data-sources/{data_source_id} - -Shows details for a data source. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - data_source_id: url_data_source_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: data_source_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - -Response Example ----------------- - -.. literalinclude:: samples/data-sources/data-source-show-response.json - :language: javascript - - - - -Delete data source -================== - -.. rest_method:: DELETE /v2/data-sources/{data_source_id} - -Deletes a data source. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - data_source_id: url_data_source_id - - - - - - -Update data source -================== - -.. rest_method:: PATCH /v2/data-sources/{data_source_id} - -Updates a data source. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - data_source_id: url_data_source_id - -Request Example ---------------- - -.. literalinclude:: samples/data-sources/data-source-update-request.json - :language: javascript - - - - - - - -List data sources -================= - -.. rest_method:: GET /v2/data-sources - -Lists all data sources. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_data_sources - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: data_source_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - -Response Example ----------------- - -.. rest_method:: GET /v2/data-sources?sort_by=-name - -.. literalinclude:: samples/data-sources/data-sources-list-response.json - :language: javascript - - - - -Create data source -================== - -.. rest_method:: POST /v2/data-sources - -Creates a data source. - -Normal response codes: 202 - - - -Request Example ---------------- - -.. literalinclude:: samples/data-sources/data-source-register-hdfs-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: data_source_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - type: type - - id: data_source_id - - name: data_source_name - - - - - diff --git a/api-ref/source/v2/event-log.inc b/api-ref/source/v2/event-log.inc deleted file mode 100644 index 11de7c1fc9..0000000000 --- a/api-ref/source/v2/event-log.inc +++ /dev/null @@ -1,42 +0,0 @@ -.. -*- rst -*- - -========= -Event log -========= - -The event log feature provides information about cluster -provisioning. In the event of errors, the event log shows the -reason for the failure. - - -Show progress -============= - -.. rest_method:: GET /v2/clusters/{cluster_id} - -Shows provisioning progress of a cluster. - - -Normal response codes: 200 -Error response codes: - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cluster_id: cluster_id - - - - -Response Example ----------------- - -.. literalinclude:: samples/event-log/cluster-progress-response.json - :language: javascript - - - - diff --git a/api-ref/source/v2/image-registry.inc b/api-ref/source/v2/image-registry.inc deleted file mode 100644 index 89acb09dde..0000000000 --- a/api-ref/source/v2/image-registry.inc +++ /dev/null @@ -1,244 +0,0 @@ -.. -*- rst -*- - -============== -Image registry -============== - -Use the image registry tool to manage images, add tags to and -remove tags from images, and define the user name for an instance -operating system. Each plugin lists required tags for an image. To -run remote operations, the Data Processing service requires a user -name with which to log in to the operating system for an instance. - - -Add tags to image -================= - -.. rest_method:: PUT /v2/images/{image_id}/tags - -Adds tags to an image. - -Normal response codes: 202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - tags: tags - - image_id: url_image_id - -Request Example ---------------- - -.. literalinclude:: samples/image-registry/image-tags-add-request.json - :language: javascript - - - - - - - -Show image details -================== - -.. rest_method:: GET /v2/images/{image_id} - -Shows details for an image. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - image_id: url_image_id - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - -Response Example ----------------- - -.. literalinclude:: samples/image-registry/image-show-response.json - :language: javascript - - - - -Register image -============== - -.. rest_method:: POST /v2/images/{image_id} - -Registers an image in the registry. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - username: username - - description: image_description - - image_id: url_image_id - -Request Example ---------------- - -.. literalinclude:: samples/image-registry/image-register-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - - - -Unregister image -================ - -.. rest_method:: DELETE /v2/images/{image_id} - -Removes an image from the registry. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - image_id: url_image_id - - - - - - -Remove tags from image -====================== - -.. rest_method:: DELETE /v2/images/{image_id}/tag - -Removes tags from an image. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - tags: tags - - image_id: url_image_id - -Request Example ---------------- - -.. literalinclude:: samples/image-registry/image-tags-delete-request.json - :language: javascript - - - - - - - -List images -=========== - -.. rest_method:: GET /v2/images - -Lists all images registered in the registry. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - tags: tags - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - status: status - - username: username - - updated: updated - - description: image_description - - created: created - - image: image - - tags: tags - - minDisk: minDisk - - name: image_name - - images: images - - progress: progress - - minRam: minRam - - id: image_id - - metadata: metadata - - - -Response Example ----------------- - -.. literalinclude:: samples/image-registry/images-list-response.json - :language: javascript - - - - diff --git a/api-ref/source/v2/index.rst b/api-ref/source/v2/index.rst deleted file mode 100644 index 3f346bf0dd..0000000000 --- a/api-ref/source/v2/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -:tocdepth: 3 - ----------------------- -Data Processing API v2 ----------------------- - -.. rest_expand_all:: - -.. include:: cluster-templates.inc -.. include:: clusters.inc -.. include:: data-sources.inc -.. include:: event-log.inc -.. include:: image-registry.inc -.. include:: job-binaries.inc -.. include:: job-templates.inc -.. include:: job-types.inc -.. include:: jobs.inc -.. include:: node-group-templates.inc -.. include:: plugins.inc - diff --git a/api-ref/source/v2/job-binaries.inc b/api-ref/source/v2/job-binaries.inc deleted file mode 100644 index 9093b17a5e..0000000000 --- a/api-ref/source/v2/job-binaries.inc +++ /dev/null @@ -1,256 +0,0 @@ -.. 
-*- rst -*- - -============ -Job binaries -============ - -Job binary objects represent data processing applications and -libraries that are stored in Object Storage service(S3 or Swift) or -in Manila Shares. - - -List job binaries -================= - -.. rest_method:: GET /v2/job-binaries - -Lists the available job binaries. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_job_binary - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - description: job_binary_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - binaries: binaries - - id: job_binary_id - - name: job_binary_name - - - -Response Example ----------------- -.. rest_method:: GET /v2/job-binaries?sort_by=created_at - -.. literalinclude:: samples/job-binaries/list-response.json - :language: javascript - - - - -Create job binary -================= - -.. rest_method:: POST /v2/job-binaries - -Creates a job binary. - -Normal response codes:202 - - - - -Request Example ---------------- - -.. literalinclude:: samples/job-binaries/create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_binary_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - id: job_binary_id - - name: job_binary_name - - - - - -Show job binary details -======================= - -.. rest_method:: GET /v2/job-binaries/{job_binary_id} - -Shows details for a job binary. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_binary_id: url_job_binary_id - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_binary_description - - url: url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - is_protected: object_is_protected - - is_public: object_is_public - - id: job_binary_id - - name: job_binary_name - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binaries/show-response.json - :language: javascript - - - - -Delete job binary -================= - -.. rest_method:: DELETE /v2/job-binaries/{job_binary_id} - -Deletes a job binary. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - - job_binary_id: url_job_binary_id - - - - - - -Update job binary -================= - -.. rest_method:: PATCH /v2/job-binaries/{job_binary_id} - -Updates a job binary. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - - job_binary_id: url_job_binary_id - - -Request Example ---------------- - -.. literalinclude:: samples/job-binaries/update-request.json - :language: javascript - - - - - - - -Show job binary data -==================== - -.. rest_method:: GET /v2/job-binaries/{job_binary_id}/data - -Shows data for a job binary. - -The response body shows the job binary raw data and the response -headers show the data length. 
- -Example response: - -:: - - HTTP/1.1 200 OK - Connection: keep-alive - Content-Length: 161 - Content-Type: text/html; charset=utf-8 - Date: Sat, 28 Mar 2016 02:42:48 GMT - A = load '$INPUT' using PigStorage(':') as (fruit: chararray); - B = foreach A generate com.hadoopbook.pig.Trim(fruit); - store B into '$OUTPUT' USING PigStorage(); - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_binary_id: url_job_binary_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - Content-Length: Content-Length - - - -Response Example ----------------- - -.. literalinclude:: samples/job-binaries/show-data-response - :language: text - - - - diff --git a/api-ref/source/v2/job-templates.inc b/api-ref/source/v2/job-templates.inc deleted file mode 100644 index 9d94645854..0000000000 --- a/api-ref/source/v2/job-templates.inc +++ /dev/null @@ -1,257 +0,0 @@ -.. -*- rst -*- - -============= -Job templates -============= - -A job template object lists the binaries that a job needs to run. -To run a job, you must specify data sources and job parameters. - -You can run a job on an existing or new transient cluster. - - -List job templates -================== - -.. rest_method:: GET /v2/job-templates - -Lists all job templates. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_job_templates - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - job_templates: job_templates - - description: job_description - - project_id: project_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_template_id - - name: job_template_name - - markers: markers - - prev: prev - - next: next - - -Response Example ----------------- -.. rest_method:: GET /v2/job-templates?limit=2 - -.. literalinclude:: samples/job-templates/job-templates-list-response.json - :language: javascript - - - - -Create job template -=================== - -.. rest_method:: POST /v2/job-templates - -Creates a job template. - -Normal response codes: 202 - - - - -Request Example ---------------- - -.. literalinclude:: samples/job-templates/job-template-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_description - - project_id: project_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_template_id - - name: job_template_name - - - - - -Show job template details -========================= - -.. rest_method:: GET /v2/job-templates/{job_template_id} - -Shows details for a job template. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_template_id: url_job_template_id - - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - description: job_description - - project_id: project_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_template_id - - name: job_template_name - - - -Response Example ----------------- - -.. literalinclude:: samples/job-templates/job-template-show-response.json - :language: javascript - - - - -Remove job template -=================== - -.. rest_method:: DELETE /v2/job-templates/{job_template_id} - -Removes a job. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_template_id: url_job_template_id - - - - - - -Update job template object -========================== - -.. rest_method:: PATCH /v2/job-templates/{job_template_id} - -Updates a job template object. - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_template_id: url_job_template_id - -Request Example ---------------- - -.. literalinclude:: samples/job-templates/job-template-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - description: job_description - - project_id: project_id - - created_at: created_at - - mains: mains - - updated_at: updated_at - - libs: libs - - is_protected: object_is_protected - - interface: interface - - is_public: object_is_public - - type: type - - id: job_template_id - - name: job_template_name - - - - - -Get job template config hints -============================= - -.. rest_method:: GET /v2/job-templates/config-hints/{job_type} - -Get job template config hints - -Normal response codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_type: url_job_type - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - job_config: job_config - - args: args - - configs: configs - - - - - diff --git a/api-ref/source/v2/job-types.inc b/api-ref/source/v2/job-types.inc deleted file mode 100644 index ff7dc9b50d..0000000000 --- a/api-ref/source/v2/job-types.inc +++ /dev/null @@ -1,61 +0,0 @@ -.. -*- rst -*- - -========= -Job types -========= - -Each plugin that supports EDP also supports specific job types. -Different versions of a plugin might actually support different job -types. Configuration options vary by plugin, version, and job type. - -The job types provide information about which plugins support which -job types and how to configure the job types. - - -List job types -============== - -.. rest_method:: GET /v2/job-types - -Lists all job types. - -You can use query parameters to filter the response. - - -Normal response codes: 200 -Error response codes: - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - plugin: plugin - - version: version - - type: type - - hints: hints - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - job_types: job_types - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/job-types/job-types-list-response.json - :language: javascript - - - - diff --git a/api-ref/source/v2/jobs.inc b/api-ref/source/v2/jobs.inc deleted file mode 100644 index 7e5b1024cc..0000000000 --- a/api-ref/source/v2/jobs.inc +++ /dev/null @@ -1,262 +0,0 @@ -.. 
-*- rst -*- - -==== -Jobs -==== - -A job object represents a job that runs on a cluster. -A job polls the status of a running job and reports it to the user. - - -Execute Job -=========== - -.. rest_method:: POST /v2/jobs - -Executes a job. - - -Normal response codes: 200 - -Request Example ----------------- -.. rest_method:: /v2/jobs - -.. literalinclude:: samples/jobs/job-request.json - :language: javascript - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_template_id: job_template_id - - updated_at: updated_at - - project_id: project_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_is_public - - input_id: input_id - - configs: configs - - job: job - - id: job_id - - - -Response Example ----------------- - -.. literalinclude:: samples/jobs/job-response.json - :language: javascript - - - - -List jobs -========= - -.. rest_method:: GET /v2/jobs - -Lists available jobs. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_job - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - info: info - - output_id: output_id - - start_time: start_time - - job_template_id: job_template_id - - updated_at: updated_at - - project_id: project_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_is_public - - input_id: input_id - - configs: configs - - job: job - - id: job_id - - jobs: jobs - - - -Response Example ----------------- -.. rest_method:: /v2/jobs - -.. literalinclude:: samples/jobs/list-response.json - :language: javascript - - - - -Show job -======== - -.. rest_method:: GET /v2/jobs/{job_id} - -Shows details for a job, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_id: url_job_id - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_template_id: job_template_id - - updated_at: updated_at - - project_id: project_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_is_public - - input_id: input_id - - configs: configs - - job: job - - id: job_id - - - -Response Example ----------------- - -.. literalinclude:: samples/jobs/job-response.json - :language: javascript - - - - -Delete job -========== - -.. rest_method:: DELETE /v2/jobs/{job_id} - -Deletes a job. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - job_id: url_job_id - - - - - - -Update job -========== - -.. rest_method:: PATCH /v2/jobs/{job_id} - -Updates a job. - -Normal response codes:202 - - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - job_id: url_job_id - -Request Example ---------------- - -.. literalinclude:: samples/jobs/job-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - info: info - - output_id: output_id - - start_time: start_time - - job_template_id: job_template_id - - updated_at: updated_at - - project_id: project_id - - created_at: created_at - - args: args - - data_source_urls: data_source_urls - - return_code: return_code - - oozie_job_id: oozie_job_id - - is_protected: is_protected_3 - - cluster_id: cluster_id - - end_time: end_time - - params: params - - is_public: job_is_public - - input_id: input_id - - configs: configs - - job: job - - id: job_id - - - - - diff --git a/api-ref/source/v2/node-group-templates.inc b/api-ref/source/v2/node-group-templates.inc deleted file mode 100644 index c91e4e36ae..0000000000 --- a/api-ref/source/v2/node-group-templates.inc +++ /dev/null @@ -1,289 +0,0 @@ -.. -*- rst -*- - -==================== -Node group templates -==================== - -A cluster is a group of nodes with the same configuration. A node -group template configures a node in the cluster. - -A template configures Hadoop processes and VM characteristics, such -as the number of reduced slots for task tracker, the number of -CPUs, and the amount of RAM. The template specifies the VM -characteristics through an OpenStack flavor. - - -List node group templates -========================= - -.. rest_method:: GET /v2/node-group-templates - -Lists available node group templates. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - marker: marker - - sort_by: sort_by_node_group_templates - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - markers: markers - - prev: prev - - next: next - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - plugin_version: plugin_version - - name: node_group_template_name - - project_id: project_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - -Response Example ----------------- -.. rest_method:: GET /v2/node-group-templates?limit=2&marker=38b4e146-1d39-4822-bad2-fef1bf304a52&sort_by=name - -.. literalinclude:: samples/node-group-templates/node-group-templates-list-response.json - :language: javascript - - - - -Create node group template -========================== - -.. rest_method:: POST /v2/node-group-templates - -Creates a node group template. - -Normal response codes: 202 - - - - -Request Example ---------------- - -.. 
literalinclude:: samples/node-group-templates/node-group-template-create-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - plugin_version: plugin_version - - name: node_group_template_name - - project_id: project_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - - - -Show node group template details -================================ - -.. rest_method:: GET /v2/node-group-templates/{node_group_template_id} - -Shows a node group template, by ID. - - -Normal response codes: 200 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - node_group_template_id: url_node_group_template_id - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - volume_local_to_instance: volume_local_to_instance - - availability_zone: availability_zone - - updated_at: updated_at - - use_autoconfig: use_autoconfig - - volumes_per_node: volumes_per_node - - id: node_group_template_id - - security_groups: security_groups - - shares: object_shares - - node_configs: node_configs - - auto_security_group: auto_security_group - - volumes_availability_zone: volumes_availability_zone - - description: node_group_template_description - - volume_mount_prefix: volume_mount_prefix - - plugin_name: plugin_name - - floating_ip_pool: floating_ip_pool - - is_default: is_default - - image_id: image_id - - volumes_size: volumes_size - - is_proxy_gateway: is_proxy_gateway - - is_public: object_is_public - - plugin_version: plugin_version - - name: node_group_template_name - - project_id: project_id - - created_at: created_at - - volume_type: volume_type - - is_protected: object_is_protected - - node_processes: node_processes - - flavor_id: flavor_id - - - -Response Example ----------------- - -.. literalinclude:: samples/node-group-templates/node-group-template-show-response.json - :language: javascript - - - - -Delete node group template -========================== - -.. rest_method:: DELETE /v2/node-group-templates/{node_group_template_id} - -Deletes a node group template. - -Normal response codes:204 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - node_group_template_id: url_node_group_template_id - - - - - - -Update node group template -========================== - -.. rest_method:: PATCH /v2/node-group-templates/{node_group_template_id} - -Updates a node group template. - -Normal respose codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - node_group_template_id: url_node_group_template_id - -Request Example ---------------- - -.. 
literalinclude:: samples/node-group-templates/node-group-template-update-request.json - :language: javascript - - - - - - - -Export node group template -========================== - -.. rest_method:: GET /v2/node-group-templates/{node_group_template_id}/export - -Exports a node group template. - -Normal respose codes:202 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - node_group_template_id: url_node_group_template_id - -Request Example ---------------- - -.. literalinclude:: samples/node-group-templates/node-group-template-update-request.json - :language: javascript - - - - - - - diff --git a/api-ref/source/v2/parameters.yaml b/api-ref/source/v2/parameters.yaml deleted file mode 100644 index 52d568afc1..0000000000 --- a/api-ref/source/v2/parameters.yaml +++ /dev/null @@ -1,1183 +0,0 @@ -# variables in header -Content-Length: - description: | - The length of the data, in bytes. - in: header - required: true - type: string - -# variables in path -hints: - description: | - Includes configuration hints in the response. - in: path - required: false - type: boolean -job_binary_id: - description: | - The UUID of the job binary. - in: path - required: true - type: string -limit: - description: | - Maximum number of objects in response data. - in: path - required: false - type: integer -marker: - description: | - ID of the last element on the list which - won't be in response. - in: path - required: false - type: string -plugin: - description: | - Filters the response by a plugin name. - in: path - required: false - type: string -sort_by_cluster_templates: - description: | - The field for sorting cluster templates. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``plugin_version``, - ``created_at``, ``updated_at``, ``id``. Also - this values can started with ``-`` prefix for - descending sort. For example: ``-name``. - in: path - required: false - type: string - -sort_by_clusters: - description: | - The field for sorting clusters. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``plugin_version``, - ``status``, ``id``. Also this values can - started with ``-`` prefix for descending sort. - For example: ``-name``. - in: path - required: false - type: string - -sort_by_data_sources: - description: | - The field for sorting data sources. - this parameter accepts the following values: - ``id``, ``name``, ``type``, ``created_at``, - ``updated_at``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-name``. - in: path - required: false - type: string - -sort_by_job: - description: | - The field for sorting job executions. - this parameter accepts the following values: - ``id``, ``job_template``, ``cluster``, - ``status``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-cluster``. - in: path - required: false - type: string - -sort_by_job_binary: - description: | - The field for sorting job binaries. - this parameter accepts the following values: - ``id``, ``name``, ``created_at``, ``updated_at``. - Also this values can started with ``-`` prefix - for descending sort. For example: ``-name``. - in: path - required: false - type: string - -sort_by_job_binary_internals: - description: | - The field for sorting job binary internals. - this parameter accepts the following values: - ``id``, ``name``, ``created_at``, ``updated_at``. - Also this values can started with ``-`` prefix - for descending sort. For example: ``-name``. 
- in: path - required: false - type: string - -sort_by_job_templates: - description: | - The field for sorting jobs. - this parameter accepts the following values: - ``id``, ``name``, ``type``, ``created_at``, - ``updated_at``. Also this values can started - with ``-`` prefix for descending sort. - For example: ``-name``. - in: path - required: false - type: string - -sort_by_node_group_templates: - description: | - The field for sorting node group templates. - this parameter accepts the following values: - ``name``, ``plugin_name``, ``plugin_version``, - ``created_at``, ``updated_at``, ``id``. Also - this values can started with ``-`` prefix for - descending sort. For example: ``-name``. - in: path - required: false - type: string - -type_2: - description: | - Filters the response by a job type. - in: path - required: false - type: string -url_cluster_id: - description: | - The ID of the cluster - in: path - required: true - type: string -url_cluster_template_id: - description: | - The unique identifier of the cluster template. - in: path - required: true - type: string -url_data_source_id: - description: | - The UUID of the data source. - in: path - required: true - type: string -url_image_id: - description: | - The UUID of the image. - in: path - required: true - type: string -url_job_binary_id: - description: | - The UUID of the job binary. - in: path - required: true - type: string -url_job_binary_internals_id: - description: | - The UUID of the job binary internal. - in: path - required: true - type: string -url_job_binary_internals_name: - description: | - The name of the job binary internal. - in: path - required: true - type: string -url_job_id: - description: | - The UUID of the job. - in: path - required: true - type: string -url_job_template_id: - description: | - The UUID of the template job. - in: path - required: true - type: string -url_job_type: - description: | - The job type. - in: path - required: true - type: string -url_node_group_template_id: - description: | - The UUID of the node group template. - in: path - required: true - type: string -url_plugin_name: - description: | - Name of the plugin. - in: path - required: true - type: string -url_project_id: - description: | - UUID of the project. - in: path - required: true - type: string -version: - description: | - Filters the response by a plugin version. - in: path - required: true - type: string -version_1: - description: | - Version of the plugin. - in: path - required: false - type: string - - -# variables in body -args: - description: | - The list of arguments. - in: body - required: true - type: array -auto_security_group: - description: | - If set to ``True``, the cluster group is - automatically secured. - in: body - required: true - type: boolean -availability_zone: - description: | - The availability of the node in the cluster. - in: body - required: true - type: string -binaries: - description: | - The list of job binary internal objects. - in: body - required: true - type: array -cluster_configs: - description: | - A set of key and value pairs that contain the - cluster configuration. - in: body - required: true - type: object -cluster_id: - description: | - The UUID of the cluster. - in: body - required: true - type: string -cluster_template_description: - description: | - Description of the cluster template - in: body - required: false - type: string -cluster_template_id: - description: | - The UUID of the cluster template. 
- in: body - required: true - type: string -cluster_template_name: - description: | - The name of the cluster template. - in: body - required: true - type: string -clusters: - description: | - The list of clusters. - in: body - required: true - type: array -configs: - description: | - The mappings of the job tasks. - in: body - required: true - type: object -count: - description: | - The number of nodes in the cluster. - in: body - required: true - type: integer -created: - description: | - The date and time when the image was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - For example, ``2015-08-27T09:49:58-05:00``. - - The ``±hh:mm`` value, if included, is the time zone as an offset - from UTC. - in: body - required: true - type: string -created_at: - description: | - The date and time when the cluster was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_1: - description: | - The date and time when the object was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_2: - description: | - The date and time when the node was created in the cluster. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -created_at_3: - description: | - The date and time when the job execution object was created. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -data_source_description: - description: | - The description of the data source object. - in: body - required: true - type: string -data_source_id: - description: | - The UUID of the data source. - in: body - required: true - type: string -data_source_name: - description: | - The name of the data source. - in: body - required: true - type: string -data_source_urls: - description: | - The data source URLs. - in: body - required: true - type: object -datasize: - description: | - The size of the data stored in the internal - database. - in: body - required: true - type: integer -default_image_id: - description: | - The default ID of the image. - in: body - required: true - type: string -description: - description: | - The description of the cluster. - in: body - required: true - type: string -description_3: - description: | - The description of the node in the cluster. - in: body - required: true - type: string -description_7: - description: | - Description of the image. - in: body - required: false - type: string -description_plugin: - description: | - The full description of the plugin. - in: body - required: true - type: string -domain_name: - description: | - Domain name for internal and external hostname resolution. - Required if DNS service is enabled. 
- in: body - required: false - type: string -end_time: - description: | - The end date and time of the job execution. - - The date and time when the job completed execution. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -flavor_id: - description: | - The ID of the flavor. - in: body - required: true - type: string -floating_ip_pool: - description: | - The UUID of the pool in the template. - in: body - required: true - type: string -force: - description: | - If set to ``true``, Sahara will force cluster deletion. - in: body - required: false - type: boolean -id: - description: | - The UUID of the cluster. - in: body - required: true - type: string -id_1: - description: | - The ID of the object. - in: body - required: true - type: string -image: - description: | - A set of key and value pairs that contain image - properties. - in: body - required: true - type: object -image_description: - description: | - The description of the image. - in: body - required: true - type: string -image_id: - description: | - The UUID of the image. - in: body - required: true - type: string -image_name: - description: | - The name of the operating system image. - in: body - required: true - type: string -images: - description: | - The list of images and their properties. - in: body - required: true - type: array -info: - description: | - A set of key and value pairs that contain cluster - information. - in: body - required: true - type: object -info_1: - description: | - The report of the executed job objects. - in: body - required: true - type: object -input_id: - description: | - The UUID of the input. - in: body - required: true - type: string -interface: - description: | - The interfaces of the job object. - in: body - required: true - type: array -is_default: - description: | - If set to ``true``, the cluster is the default - cluster. - in: body - required: true - type: boolean -is_protected: - description: | - If set to ``true``, the cluster is protected. - in: body - required: true - type: boolean -is_protected_2: - description: | - If set to ``true``, the node is protected. - in: body - required: true - type: boolean -is_protected_3: - description: | - If set to ``true``, the job execution object is - protected. - in: body - required: true - type: boolean -is_proxy_gateway: - description: | - If set to ``true``, the node is the proxy - gateway. - in: body - required: true - type: boolean -is_public: - description: | - If set to ``true``, the cluster is public. - in: body - required: true - type: boolean -is_transient: - description: | - If set to ``true``, the cluster is transient. - in: body - required: true - type: boolean -job: - description: | - A set of key and value pairs that contain the job - object. - in: body - required: true - type: object -job_binary_description: - description: | - The description of the job binary object. - in: body - required: true - type: string -job_binary_internals_id: - description: | - The UUID of the job binary internal. - in: body - required: true - type: string -job_binary_internals_name: - description: | - The name of the job binary internal. - in: body - required: true - type: string -job_binary_name: - description: | - The name of the object. - in: body - required: true - type: string -job_config: - description: | - The job configuration. 
- in: body - required: true - type: string -job_description: - description: | - The description of the job object. - in: body - required: true - type: string -job_id: - description: | - The UUID of the job object. - in: body - required: true - type: string -job_is_public: - description: | - If set to ``true``, the job object is - public. - in: body - required: true - type: boolean -job_name: - description: | - The name of the job object. - in: body - required: true - type: string -job_template_id: - description: | - The UUID of the job template object. - in: body - required: true - type: string -job_template_name: - description: | - The name of the job template object. - in: body - required: true - type: string -job_templates: - description: | - The list of the job templates. - in: body - required: true - type: array -job_types: - description: | - The list of plugins and their job types. - in: body - required: true - type: array -jobs: - description: | - The list of job objects. - in: body - required: true - type: array -libs: - description: | - The list of the job object properties. - in: body - required: true - type: array -mains: - description: | - The list of the job object and their properties. - in: body - required: true - type: array -management_public_key: - description: | - The SSH key for the management network. - in: body - required: true - type: string -markers: - description: | - The markers of previous and following pages of data. - This field exists only if ``limit`` is passed to - request. - in: body - required: false - type: object -metadata: - description: | - A set of key and value pairs that contain image - metadata. - in: body - required: true - type: object -minDisk: - description: | - The minimum disk space, in GB. - in: body - required: true - type: integer -minRam: - description: | - The minimum amount of random access memory (RAM) - for the image, in GB. - in: body - required: true - type: integer -name: - description: | - The name of the cluster. - in: body - required: true - type: string -name_1: - description: | - The name of the object. - in: body - required: true - type: string -neutron_management_network: - description: | - The UUID of the neutron management network. - in: body - required: true - type: string -next: - description: | - The marker of next page of list data. - in: body - required: false - type: string -node_configs: - description: | - A set of key and value pairs that contain the - node configuration in the cluster. - in: body - required: true - type: object -node_group_template_description: - description: | - Description of the node group template - in: body - required: false - type: string -node_group_template_id: - description: | - The UUID of the node group template. - in: body - required: true - type: string -node_group_template_name: - description: | - The name of the node group template. - in: body - required: true - type: string -node_groups: - description: | - The detail properties of the node in key-value - pairs. - in: body - required: true - type: object -node_processes: - description: | - The list of the processes performed by the node. - in: body - required: true - type: array -object_is_protected: - description: | - If set to ``true``, the object is protected. - in: body - required: true - type: boolean -object_is_public: - description: | - If set to ``true``, the object is public. - in: body - required: true - type: boolean -object_shares: - description: | - The sharing of resources in the cluster. 
- in: body - required: true - type: string -oozie_job_id: - description: | - The UUID of the ``oozie_job``. - in: body - required: true - type: string -output_id: - description: | - The UUID of the output of job execution object. - in: body - required: true - type: string -params: - description: | - The mappings of values to the parameters. - in: body - required: true - type: object -plugin_name: - description: | - The name of the plugin. - in: body - required: true - type: string -plugin_version: - description: | - The version of the Plugin used in the cluster. - in: body - required: true - type: string -plugin_version_1: - description: | - The version of the Plugin. - in: body - required: true - type: string -plugins: - description: | - The list of plugins. - in: body - required: true - type: array -prev: - description: | - The marker of previous page. May be ``null`` if - previous page is first or if current page is first. - in: body - required: false - type: string -progress: - description: | - A progress indicator, as a percentage value, for - the amount of image content that has been processed. - in: body - required: true - type: integer -project_id: - description: | - The UUID of the project. - in: body - required: true - type: string -provision_progress: - description: | - A list of the cluster progresses. - in: body - required: true - type: array -return_code: - description: | - The code returned after job has executed. - in: body - required: true - type: string -security_groups: - description: | - The security groups of the node. - in: body - required: true - type: string -shares: - description: | - The shares of the cluster. - in: body - required: true - type: string -start_time: - description: | - The date and time when the job started. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -status: - description: | - The status of the cluster. - in: body - required: true - type: string -status_1: - description: | - The current status of the image. - in: body - required: true - type: string -status_description: - description: | - The description of the cluster status. - in: body - required: true - type: string -tags: - description: | - List of tags to add. - in: body - required: true - type: array -tags_1: - description: | - Lists images only with specific tag. Can be used - multiple times. - in: body - required: false - type: string -tags_2: - description: | - One or more image tags. - in: body - required: true - type: array -tags_3: - description: | - List of tags to remove. - in: body - required: true - type: array -tenant_id: - description: | - The UUID of the tenant. - in: body - required: true - type: string -title: - description: | - The title of the plugin. - in: body - required: true - type: string -trust_id: - description: | - The id of the trust. - in: body - required: true - type: integer -type: - description: | - The type of the data source object. - in: body - required: true - type: string -type_1: - description: | - The type of the job object. - in: body - required: true - type: string -updated: - description: | - The date and time when the image was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - For example, ``2015-08-27T09:49:58-05:00``. 
- - The ``±hh:mm`` value, if included, is the time zone as an offset - from UTC. - in: body - required: true - type: string -updated_at: - description: | - The date and time when the cluster was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_1: - description: | - The date and time when the object was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_2: - description: | - The date and time when the node was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -updated_at_3: - description: | - The date and time when the job execution object was updated. - - The date and time stamp format is `ISO 8601 - `_: - - :: - - CCYY-MM-DDThh:mm:ss±hh:mm - - The ``±hh:mm`` value, if included, returns the time zone as an - offset from UTC. - - For example, ``2015-08-27T09:49:58-05:00``. - in: body - required: true - type: string -url: - description: | - The url of the data source object. - in: body - required: true - type: string -url_1: - description: | - The url of the job binary object. - in: body - required: true - type: string -use_autoconfig: - description: | - If set to ``true``, the cluster is auto - configured. - in: body - required: true - type: boolean -use_autoconfig_1: - description: | - If set to ``true``, the node is auto configured. - in: body - required: true - type: boolean -username: - description: | - The name of the user for the image. - in: body - required: true - type: string -username_1: - description: | - The user name to log in to an instance operating - system for remote operations execution. - in: body - required: true - type: string -versions: - description: | - The list of plugin versions. - in: body - required: true - type: array -volume_local_to_instance: - description: | - If set to ``true``, the volume is local to the - instance. - in: body - required: true - type: boolean -volume_mount_prefix: - description: | - The mount point of the node. - in: body - required: true - type: string -volume_type: - description: | - The type of volume in a node. - in: body - required: true - type: string -volumes_availability_zone: - description: | - The availability zone of the volumes. - in: body - required: true - type: string -volumes_per_node: - description: | - The number of volumes for the node. - in: body - required: true - type: integer -volumes_size: - description: | - The size of the volumes in a node. - in: body - required: true - type: integer - diff --git a/api-ref/source/v2/plugins.inc b/api-ref/source/v2/plugins.inc deleted file mode 100644 index 6ece649d9c..0000000000 --- a/api-ref/source/v2/plugins.inc +++ /dev/null @@ -1,179 +0,0 @@ -.. -*- rst -*- - -======= -Plugins -======= - -A plugin object defines the Hadoop or Spark version that it can -install and which configurations can be set for the cluster. - - -Show plugin details -=================== - -.. 
rest_method:: GET /v2/plugins/{plugin_name} - -Shows details for a plugin. - - -Normal response codes: 200 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - plugin_name: url_plugin_name - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugin-show-response.json - :language: javascript - - - - -List plugins -============ - -.. rest_method:: GET /v2/plugins - -Lists all registered plugins. - - -Normal response codes: 200 -Error response codes: 400, 500 - - - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - title: title - - versions: versions - - plugins: plugins - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugins-list-response.json - :language: javascript - - - - -Show plugin version details -=========================== - -.. rest_method:: GET /v2/plugins/{plugin_name}/{version} - -Shows details for a plugin version. - - -Normal response codes: 200 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - plugin_name: url_plugin_name - - version: version - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - versions: versions - - title: title - - description: description_plugin - - name: plugin_name - - - -Response Example ----------------- - -.. literalinclude:: samples/plugins/plugin-version-show-response.json - :language: javascript - - - - -Update plugin details -===================== - -.. rest_method:: PATCH /v2/plugins/{plugin_name} - -Updates details for a plugin. - - -Normal response codes: 202 -Error response codes: 400, 500 - - -Request -------- - -.. rest_parameters:: parameters.yaml - - - plugin_name: url_plugin_name - - -Request Example ---------------- - -.. literalinclude:: samples/plugins/plugin-update-request.json - :language: javascript - - - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - title: title - - versions: versions - - description: description_plugin - - name: plugin_name - - -Response Example ----------------- - -.. 
literalinclude:: samples/plugins/plugin-update-response.json - :language: javascript - - - - - diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-template-create-request.json b/api-ref/source/v2/samples/cluster-templates/cluster-template-create-request.json deleted file mode 100644 index cc6869790a..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-template-create-request.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "node_groups": [ - { - "name": "worker", - "count": 3, - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251" - }, - { - "name": "master", - "count": 1, - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae" - } - ], - "name": "cluster-template" -} diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-template-create-response.json b/api-ref/source/v2/samples/cluster-templates/cluster-template-create-response.json deleted file mode 100644 index 423d09eafe..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-template-create-response.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "project_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-template-show-response.json b/api-ref/source/v2/samples/cluster-templates/cluster-template-show-response.json deleted file mode 100644 index 286571b9a9..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-template-show-response.json +++ 
/dev/null @@ -1,82 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "project_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "domain_name": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-template-update-request.json b/api-ref/source/v2/samples/cluster-templates/cluster-template-update-request.json deleted file mode 100644 index f4583b4143..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-template-update-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "description": "Updated template", - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "name": "vanilla-updated", - "cluster_configs": { - "HDFS": { - "dfs.replication": 2 - } - } -} diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-template-update-response.json b/api-ref/source/v2/samples/cluster-templates/cluster-template-update-response.json deleted file mode 100644 index 6d3e365df7..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-template-update-response.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "cluster_template": { - "is_public": false, - "anti_affinity": [], - "name": "vanilla-updated", - "created_at": "2015-08-21T08:41:24", - "project_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": { - "HDFS": { - "dfs.replication": 2 - } - }, - "shares": null, - "id": "84d47e85-6094-473f-bf6d-5a7e6e86564e", - "default_image_id": null, - "is_default": false, - "updated_at": "2015-09-14T10:45:57", - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - 
"shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": {}, - "JobFlow": {}, - "MapReduce": {}, - "Hive": {}, - "Hadoop": {}, - "HDFS": {} - }, - "auto_security_group": true, - "availability_zone": "", - "count": 1, - "flavor_id": "3", - "id": "57b966ab-617e-4735-bf60-0cb991208a52", - "security_groups": [], - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-08-21T08:41:24", - "node_group_template_id": "a5533187-3f14-42c3-ba3a-196c13fe0fb5", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "all", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "datanode", - "historyserver", - "resourcemanager", - "nodemanager", - "oozie" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "description": "Updated template", - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/cluster-templates/cluster-templates-list-response.json b/api-ref/source/v2/samples/cluster-templates/cluster-templates-list-response.json deleted file mode 100644 index a8f9f559c8..0000000000 --- a/api-ref/source/v2/samples/cluster-templates/cluster-templates-list-response.json +++ /dev/null @@ -1,140 +0,0 @@ -{ - "cluster_templates": [ - { - "is_public": false, - "anti_affinity": [], - "name": "cluster-template", - "created_at": "2015-09-14T10:38:44", - "project_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": {}, - "shares": null, - "id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": null, - "is_default": false, - "updated_at": null, - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "1751c04e-8f39-467e-a421-480961172d4b", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "3ee85068-c455-4391-9db2-b54a20b99df3", - "security_groups": null, - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:38:44", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "domain_name": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "description": null, - "is_protected": false - }, - 
{ - "is_public": true, - "anti_affinity": [], - "name": "asd", - "created_at": "2015-08-18T08:39:39", - "project_id": "808d5032ea0446889097723bfc8e919d", - "cluster_configs": { - "general": {} - }, - "shares": null, - "id": "5a9c787c-2078-4f7d-9a66-27759be9051b", - "default_image_id": null, - "is_default": false, - "updated_at": "2015-09-14T08:41:15", - "plugin_name": "vanilla", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": true, - "availability_zone": "", - "count": 1, - "flavor_id": "2", - "id": "a65864dd-3f99-4d29-a011-f7711cc23fa0", - "security_groups": [], - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-08-18T08:39:39", - "node_group_template_id": "42ce49de-1b8f-41d5-8f4a-244ec0826d92", - "updated_at": null, - "volumes_per_node": 1, - "is_proxy_gateway": false, - "name": "asd", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "jobtracker" - ], - "volumes_size": 10, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "neutron_management_network": null, - "domain_name": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "description": "", - "is_protected": false - } - ], - "markers": { - "prev": null, - "next": "2c76e0d3-56cd-4d28-bb4f-4808e538c7b9" - } -} diff --git a/api-ref/source/v2/samples/clusters/cluster-create-request.json b/api-ref/source/v2/samples/clusters/cluster-create-request.json deleted file mode 100644 index c2db34de03..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-create-request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "user_keypair_id": "test", - "name": "vanilla-cluster", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd" -} diff --git a/api-ref/source/v2/samples/clusters/cluster-create-response.json b/api-ref/source/v2/samples/clusters/cluster-create-response.json deleted file mode 100644 index 47b3a911ea..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-create-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": false, - "project_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - 
"yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "plugin_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v2/samples/clusters/cluster-scale-request.json b/api-ref/source/v2/samples/clusters/cluster-scale-request.json deleted file mode 100644 index 8b61d5ea0f..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-scale-request.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "add_node_groups": [ - { - "count": 1, - "name": "b-worker", - "node_group_template_id": "bc270ffe-a086-4eeb-9baa-2f5a73504622" - } - ], - "resize_node_groups": [ - { - "count": 4, - "name": "worker" - } - ] -} diff --git a/api-ref/source/v2/samples/clusters/cluster-scale-response.json b/api-ref/source/v2/samples/clusters/cluster-scale-response.json deleted file mode 100644 index 57bc007bf6..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-scale-response.json +++ /dev/null @@ -1,370 
+0,0 @@ -{ - "cluster": { - "info": { - "YARN": { - "Web UI": "http://172.18.168.115:8088", - "ResourceManager": "http://172.18.168.115:8032" - }, - "HDFS": { - "Web UI": "http://172.18.168.115:50070", - "NameNode": "hdfs://vanilla-cluster-master-0:9000" - }, - "MapReduce JobHistory Server": { - "Web UI": "http://172.18.168.115:19888" - }, - "JobFlow": { - "Oozie": "http://172.18.168.115:11000" - } - }, - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "updated_at": "2015-09-14T11:01:15", - "name": "vanilla-cluster", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "trust_id": null, - "status_description": "", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "is_protected": false, - "is_transient": false, - "provision_progress": [ - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Create Heat stack", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:57:38", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:18", - "id": "0a6d95f9-30f4-4434-823a-a38a7999a5af" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Configure instances", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:22", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:16", - "id": "29f2b587-c34c-4871-9ed9-9235b411cd9a" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): Oozie", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:01:15", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:27", - "id": "36f1efde-90f9-41c1-b409-aa1cf9623e3e" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Configure instances", - "step_type": "Plugin: configure cluster", - "updated_at": "2015-09-14T10:59:21", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:22", - "id": "602bcc27-3a2d-42c8-8aca-ebc475319c72" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Configure topology data", - "step_type": "Plugin: configure cluster", - "updated_at": "2015-09-14T10:59:37", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:21", - "id": "7e291df1-2d32-410d-ae89-33ab6f83cf17" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 3, - "successful": true, - "step_name": "Start the following process(es): DataNodes, NodeManagers", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:11", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:01", - "id": "8ab7933c-ad61-4a4f-88db-23ce78ee10f6" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - 
"successful": true, - "step_name": "Await DataNodes start up", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:21", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:11", - "id": "9c8dc016-8c5b-4e80-9857-80c41f6bd971" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): HistoryServer", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:27", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T11:00:21", - "id": "c6327532-222b-416c-858f-73dbb32b8e97" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Wait for instance accessibility", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:14", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:41", - "id": "d3eca726-8b44-473a-ac29-fba45a893725" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 0, - "successful": true, - "step_name": "Mount volumes to instances", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:58:15", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:58:14", - "id": "d7a875ff-64bf-41aa-882d-b5061c8ee152" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): ResourceManager", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T11:00:00", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:55", - "id": "ded7d227-10b8-4cb0-ab6c-25da1462bb7a" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 1, - "successful": true, - "step_name": "Start the following process(es): NameNode", - "step_type": "Plugin: start cluster", - "updated_at": "2015-09-14T10:59:54", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:59:38", - "id": "e1701ff5-930a-4212-945a-43515dfe24d1" - }, - { - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "total": 4, - "successful": true, - "step_name": "Assign IPs", - "step_type": "Engine: create cluster", - "updated_at": "2015-09-14T10:57:41", - "project_id": "808d5032ea0446889097723bfc8e919d", - "created_at": "2015-09-14T10:57:38", - "id": "eaf0ab1b-bf8f-48f0-8f2c-fa4f82f539b9" - } - ], - "status": "Active", - "description": null, - "use_autoconfig": true, - "shares": null, - "domain_name": null, - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "is_public": false, - "project_id": "808d5032ea0446889097723bfc8e919d", - "node_groups": [ - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:37", - "name": "b-worker", - "id": "b7a6dea4-c898-446b-8c67-4f378d4c06c4", - "node_group_template_id": "bc270ffe-a086-4eeb-9baa-2f5a73504622", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 
- } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "datanode", - "nodemanager" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 1, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - }, - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:36", - "name": "master", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 - } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 1, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [ - { - "instance_id": "b9f16a07-88fc-423e-83a3-489598fe6737", - "internal_ip": "10.50.0.60", - "instance_name": "vanilla-cluster-master-0", - "updated_at": "2015-09-14T10:57:39", - "management_ip": "172.18.168.115", - "created_at": "2015-09-14T10:57:36", - "id": "4867d92e-cc7b-4cde-9a1a-149e91caa491" - } - ], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - }, - { - "volumes_per_node": 0, - "volume_type": null, - "updated_at": "2015-09-14T10:57:37", - "name": "worker", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048, - "yarn.scheduler.maximum-allocation-mb": 2048 - }, - "MapReduce": { - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "yarn.app.mapreduce.am.resource.mb": 256 - } - }, - "auto_security_group": false, - "volumes_availability_zone": null, - "use_autoconfig": true, - "security_groups": null, - "shares": null, - "node_processes": [ - "datanode", - "nodemanager" - ], - "availability_zone": null, - "flavor_id": "2", - "image_id": null, - "volume_local_to_instance": false, - "count": 4, - "volumes_size": 0, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "volume_mount_prefix": "/volumes/disk", - "instances": [ - { - "instance_id": "0cf1ee81-aa72-48da-be2c-65bc2fa51f8f", - "internal_ip": "10.50.0.63", - "instance_name": "vanilla-cluster-worker-0", - 
"updated_at": "2015-09-14T10:57:39", - "management_ip": "172.18.168.118", - "created_at": "2015-09-14T10:57:37", - "id": "f3633b30-c1e4-4144-930b-ab5b780b87be" - }, - { - "instance_id": "4a937391-b594-4ad0-9a53-00a99a691383", - "internal_ip": "10.50.0.62", - "instance_name": "vanilla-cluster-worker-1", - "updated_at": "2015-09-14T10:57:40", - "management_ip": "172.18.168.117", - "created_at": "2015-09-14T10:57:37", - "id": "0d66fd93-f277-4a94-b46a-f5866aa0c38f" - }, - { - "instance_id": "839b1d56-6d0d-4aa4-9d05-30e029c276f8", - "internal_ip": "10.50.0.61", - "instance_name": "vanilla-cluster-worker-2", - "updated_at": "2015-09-14T10:57:40", - "management_ip": "172.18.168.116", - "created_at": "2015-09-14T10:57:37", - "id": "0982cefd-5c58-436e-8f1e-c1d0830f18a7" - } - ], - "is_proxy_gateway": false, - "created_at": "2015-09-14T10:57:11" - } - ], - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "user_keypair_id": "apavlov", - "anti_affinity": [], - "created_at": "2015-09-14T10:57:11" - } -} diff --git a/api-ref/source/v2/samples/clusters/cluster-show-response.json b/api-ref/source/v2/samples/clusters/cluster-show-response.json deleted file mode 100644 index 47b3a911ea..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-show-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": false, - "project_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - 
"shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "plugin_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v2/samples/clusters/cluster-update-request.json b/api-ref/source/v2/samples/clusters/cluster-update-request.json deleted file mode 100644 index ab01348afa..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "public-vanilla-cluster", - "is_public": true -} diff --git a/api-ref/source/v2/samples/clusters/cluster-update-response.json b/api-ref/source/v2/samples/clusters/cluster-update-response.json deleted file mode 100644 index f78247fa69..0000000000 --- a/api-ref/source/v2/samples/clusters/cluster-update-response.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "cluster": { - "is_public": true, - "project_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": {}, - "user_keypair_id": "test", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - 
"yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:12", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [], - "plugin_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T10:57:12", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "public-vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Validating" - } -} diff --git a/api-ref/source/v2/samples/clusters/clusters-list-response.json b/api-ref/source/v2/samples/clusters/clusters-list-response.json deleted file mode 100644 index e8d6e9b3c2..0000000000 --- a/api-ref/source/v2/samples/clusters/clusters-list-response.json +++ /dev/null @@ -1,327 +0,0 @@ -{ - "clusters": [ - { - "is_public": false, - "project_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "domain_name": null, - "status_description": "", - "plugin_name": "vanilla", - "neutron_management_network": "b1610452-2933-46b0-bf31-660cfa5621bd", - "info": { - "YARN": { - "Web UI": "http://172.18.168.115:8088", - "ResourceManager": "http://172.18.168.115:8032" - }, - "HDFS": { - "Web UI": 
"http://172.18.168.115:50070", - "NameNode": "hdfs://vanilla-cluster-master-0:9000" - }, - "JobFlow": { - "Oozie": "http://172.18.168.115:11000" - }, - "MapReduce JobHistory Server": { - "Web UI": "http://172.18.168.115:19888" - } - }, - "user_keypair_id": "apavlov", - "management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfe9ARO+t9CybtuC1+cusDTeQL7wos1+U2dKPlCUJvNUn0PcunGefqWI4MUZPY9yGmvRqfINy7/xRQCzL0AwgqzwcCXamcK8JCC80uH7j8Vxa4kJheG1jxMoz/FpDSdRnzNZ+m7H5rjOwAQANhL7KatGLyCPQg9fqOoaIyCZE/A3fztm/XjJMpWnuANpUZubZtISEfu4UZKVk/DPSlBrbTZkTOvEog1LwZCZoTt0rq6a7PJFzJJkq0YecRudu/f3tpXbNe/F84sd9PhOSqcrRbm72WzglyEE8PuS1kuWpEz8G+Y5/0tQxnoh6khj9mgflrdCFuvpdutFLH4eN5MFDh Generated-by-Sahara\n", - "id": "e172d86c-906d-418e-a29c-6189f53bfa42", - "cluster_template_id": "57c92a7c-5c6a-42ea-9c6f-9f40a5aa4b36", - "node_groups": [ - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 1, - "flavor_id": "2", - "id": "0fe07f2a-0275-4bc0-93b2-c3c1e48e2815", - "security_groups": null, - "use_autoconfig": true, - "instances": [ - { - "created_at": "2015-09-14T10:57:36", - "id": "4867d92e-cc7b-4cde-9a1a-149e91caa491", - "management_ip": "172.18.168.115", - "updated_at": "2015-09-14T10:57:39", - "instance_id": "b9f16a07-88fc-423e-83a3-489598fe6737", - "internal_ip": "10.50.0.60", - "instance_name": "vanilla-cluster-master-0" - } - ], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "updated_at": "2015-09-14T10:57:36", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - }, - { - "image_id": null, - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": { - "YARN": { - "yarn.nodemanager.vmem-check-enabled": "false", - "yarn.scheduler.maximum-allocation-mb": 2048, - "yarn.scheduler.minimum-allocation-mb": 256, - "yarn.nodemanager.resource.memory-mb": 2048 - }, - "MapReduce": { - "yarn.app.mapreduce.am.resource.mb": 256, - "mapreduce.task.io.sort.mb": 102, - "mapreduce.reduce.java.opts": "-Xmx409m", - "mapreduce.reduce.memory.mb": 512, - "mapreduce.map.memory.mb": 256, - "yarn.app.mapreduce.am.command-opts": "-Xmx204m", - "mapreduce.map.java.opts": "-Xmx204m" - } - }, - "auto_security_group": false, - "availability_zone": null, - "count": 3, - "flavor_id": "2", - "id": "c7a3bea4-c898-446b-8c67-6d378d4c06c4", - "security_groups": null, - "use_autoconfig": true, - "instances": [ - { - "created_at": "2015-09-14T10:57:37", - "id": "f3633b30-c1e4-4144-930b-ab5b780b87be", - "management_ip": "172.18.168.118", - "updated_at": "2015-09-14T10:57:39", - "instance_id": 
"0cf1ee81-aa72-48da-be2c-65bc2fa51f8f", - "internal_ip": "10.50.0.63", - "instance_name": "vanilla-cluster-worker-0" - }, - { - "created_at": "2015-09-14T10:57:37", - "id": "0d66fd93-f277-4a94-b46a-f5866aa0c38f", - "management_ip": "172.18.168.117", - "updated_at": "2015-09-14T10:57:40", - "instance_id": "4a937391-b594-4ad0-9a53-00a99a691383", - "internal_ip": "10.50.0.62", - "instance_name": "vanilla-cluster-worker-1" - }, - { - "created_at": "2015-09-14T10:57:37", - "id": "0982cefd-5c58-436e-8f1e-c1d0830f18a7", - "management_ip": "172.18.168.116", - "updated_at": "2015-09-14T10:57:40", - "instance_id": "839b1d56-6d0d-4aa4-9d05-30e029c276f8", - "internal_ip": "10.50.0.61", - "instance_name": "vanilla-cluster-worker-2" - } - ], - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:57:11", - "node_group_template_id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "updated_at": "2015-09-14T10:57:37", - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } - ], - "provision_progress": [ - { - "created_at": "2015-09-14T10:57:18", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "0a6d95f9-30f4-4434-823a-a38a7999a5af", - "step_type": "Engine: create cluster", - "step_name": "Create Heat stack", - "updated_at": "2015-09-14T10:57:38", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:16", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "29f2b587-c34c-4871-9ed9-9235b411cd9a", - "step_type": "Engine: create cluster", - "step_name": "Configure instances", - "updated_at": "2015-09-14T10:58:22", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:27", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "36f1efde-90f9-41c1-b409-aa1cf9623e3e", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): Oozie", - "updated_at": "2015-09-14T11:01:15", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:22", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "602bcc27-3a2d-42c8-8aca-ebc475319c72", - "step_type": "Plugin: configure cluster", - "step_name": "Configure instances", - "updated_at": "2015-09-14T10:59:21", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:21", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "7e291df1-2d32-410d-ae89-33ab6f83cf17", - "step_type": "Plugin: configure cluster", - "step_name": "Configure topology data", - "updated_at": "2015-09-14T10:59:37", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:01", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "8ab7933c-ad61-4a4f-88db-23ce78ee10f6", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): DataNodes, NodeManagers", - "updated_at": "2015-09-14T11:00:11", - "successful": true, - "total": 3, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:11", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": 
"9c8dc016-8c5b-4e80-9857-80c41f6bd971", - "step_type": "Plugin: start cluster", - "step_name": "Await DataNodes start up", - "updated_at": "2015-09-14T11:00:21", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T11:00:21", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "c6327532-222b-416c-858f-73dbb32b8e97", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): HistoryServer", - "updated_at": "2015-09-14T11:00:27", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:57:41", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "d3eca726-8b44-473a-ac29-fba45a893725", - "step_type": "Engine: create cluster", - "step_name": "Wait for instance accessibility", - "updated_at": "2015-09-14T10:58:14", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:58:14", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "d7a875ff-64bf-41aa-882d-b5061c8ee152", - "step_type": "Engine: create cluster", - "step_name": "Mount volumes to instances", - "updated_at": "2015-09-14T10:58:15", - "successful": true, - "total": 0, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:55", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "ded7d227-10b8-4cb0-ab6c-25da1462bb7a", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): ResourceManager", - "updated_at": "2015-09-14T11:00:00", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:59:38", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "e1701ff5-930a-4212-945a-43515dfe24d1", - "step_type": "Plugin: start cluster", - "step_name": "Start the following process(es): NameNode", - "updated_at": "2015-09-14T10:59:54", - "successful": true, - "total": 1, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - }, - { - "created_at": "2015-09-14T10:57:38", - "project_id": "808d5032ea0446889097723bfc8e919d", - "id": "eaf0ab1b-bf8f-48f0-8f2c-fa4f82f539b9", - "step_type": "Engine: create cluster", - "step_name": "Assign IPs", - "updated_at": "2015-09-14T10:57:41", - "successful": true, - "total": 4, - "cluster_id": "e172d86c-906d-418e-a29c-6189f53bfa42" - } - ], - "plugin_version": "2.7.1", - "use_autoconfig": true, - "trust_id": null, - "description": null, - "created_at": "2015-09-14T10:57:11", - "is_protected": false, - "updated_at": "2015-09-14T11:01:15", - "is_transient": false, - "cluster_configs": { - "HDFS": { - "dfs.replication": 3 - } - }, - "anti_affinity": [], - "name": "vanilla-cluster", - "default_image_id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "status": "Active" - } - ] -} diff --git a/api-ref/source/v2/samples/clusters/multiple-clusters-create-request.json b/api-ref/source/v2/samples/clusters/multiple-clusters-create-request.json deleted file mode 100644 index 3bd5dca7f1..0000000000 --- a/api-ref/source/v2/samples/clusters/multiple-clusters-create-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "plugin_name": "vanilla", - "plugin_version": "2.6.0", - "cluster_template_id": "9951f86d-57ba-43d6-9cb0-14ed2ec7a6cf", - "default_image_id": "bc3c3d3c-2684-4bf8-a9fa-388fb71288a9", - "user_keypair_id": "test", - "name": "def-cluster", - "count": 2, - "cluster_configs": {}, - 
"neutron_management_network": "7e31648b-4b2e-4f32-9b0a-113581c27076" -} diff --git a/api-ref/source/v2/samples/clusters/multiple-clusters-create-response.json b/api-ref/source/v2/samples/clusters/multiple-clusters-create-response.json deleted file mode 100644 index 5b13bca55d..0000000000 --- a/api-ref/source/v2/samples/clusters/multiple-clusters-create-response.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "clusters": [ - "a007a3e7-658f-4568-b0f2-fe2fd5efc554", - "b012a6et-65hf-4566-b0f2-fe3fd7efc567" - ] -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-request.json b/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-request.json deleted file mode 100644 index 9d9c9c945c..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs", - "name": "hdfs_input" -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-response.json b/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-response.json deleted file mode 100644 index 45cda02bad..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-register-hdfs-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:09:36.148464", - "id": "d7fffe9c-3b42-46a9-8be8-e98f586fa7a9", - "updated_at": null, - "name": "hdfs_input", - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs" - } -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-register-swift-request.json b/api-ref/source/v2/samples/data-sources/data-source-register-swift-request.json deleted file mode 100644 index 30a1e535dd..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-register-swift-request.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "description": "This is input", - "url": "swift://container/text", - "credentials": { - "password": "swordfish", - "user": "dev" - }, - "type": "swift", - "name": "swift_input" -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-register-swift-response.json b/api-ref/source/v2/samples/data-sources/data-source-register-swift-response.json deleted file mode 100644 index 7579ae9f68..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-register-swift-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:18:10.691493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": null, - "name": "swift_input", - "description": "This is input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-show-response.json b/api-ref/source/v2/samples/data-sources/data-source-show-response.json deleted file mode 100644 index 7579ae9f68..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-show-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:18:10.691493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": null, - "name": "swift_input", - "description": "This is 
input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-update-request.json b/api-ref/source/v2/samples/data-sources/data-source-update-request.json deleted file mode 100644 index 8397ae6545..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "description": "This is public input", - "is_protected": true -} diff --git a/api-ref/source/v2/samples/data-sources/data-source-update-response.json b/api-ref/source/v2/samples/data-sources/data-source-update-response.json deleted file mode 100644 index ce6ad9206b..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-source-update-response.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "data_source": { - "is_public": true, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-09-15 12:32:24.847493", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "updated_at": "2015-09-15 12:34:42.597435", - "name": "swift_input", - "description": "This is public input", - "url": "swift://container/text", - "type": "swift" - } -} diff --git a/api-ref/source/v2/samples/data-sources/data-sources-list-response.json b/api-ref/source/v2/samples/data-sources/data-sources-list-response.json deleted file mode 100644 index 9ed7f0980b..0000000000 --- a/api-ref/source/v2/samples/data-sources/data-sources-list-response.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "data_sources": [ - { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:18:10", - "id": "953831f2-0852-49d8-ac71-af5805e25256", - "name": "swift_input", - "updated_at": null, - "description": "This is input", - "url": "swift://container/text", - "type": "swift" - }, - { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "is_protected": false, - "created_at": "2015-03-26 11:09:36", - "id": "d7fffe9c-3b42-46a9-8be8-e98f586fa7a9", - "name": "hdfs_input", - "updated_at": null, - "description": "This is hdfs input", - "url": "hdfs://test-master-node:8020/user/hadoop/input", - "type": "hdfs" - } - ] -} diff --git a/api-ref/source/v2/samples/event-log/cluster-progress-response.json b/api-ref/source/v2/samples/event-log/cluster-progress-response.json deleted file mode 100644 index 7f4f6c3668..0000000000 --- a/api-ref/source/v2/samples/event-log/cluster-progress-response.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "status": "Error", - "neutron_management_network": "7e31648b-4b2e-4f32-9b0a-113581c27076", - "is_transient": false, - "description": "", - "user_keypair_id": "vgridnev", - "updated_at": "2015-03-31 14:10:59", - "plugin_name": "spark", - "provision_progress": [ - { - "successful": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-31 14:10:20", - "step_type": "Engine: create cluster", - "updated_at": "2015-03-31 14:10:35", - "events": [ - { - "instance_name": "sample-worker-spark-004", - "successful": false, - "created_at": "2015-03-31 14:10:35", - "updated_at": null, - "event_info": "Node sample-worker-spark-004 has error status\nError ID: 3e238c82-d1f5-4560-8ed8-691e923e16a0", - "instance_id": "b5ba5ba8-e9c1-47f7-9355-3ce0ec0e449d", - "node_group_id": "145cf2fb-dcdf-42af-a4b9-a4047d2919d4", - "step_id": "3f243c67-2c27-47c7-a0c0-0834ad17f8b6", - "id": "34afcfc7-bdb0-43cb-b142-283d560dc6ad" - }, - { - "instance_name": "sample-worker-spark-001", - "successful": true, - "created_at": "2015-03-31 
14:10:35", - "updated_at": null, - "event_info": null, - "instance_id": "c532ab71-38da-475a-95f8-f8eb93b8f1c2", - "node_group_id": "145cf2fb-dcdf-42af-a4b9-a4047d2919d4", - "step_id": "3f243c67-2c27-47c7-a0c0-0834ad17f8b6", - "id": "4ba50414-5216-4161-bc7a-12716122b99d" - } - ], - "cluster_id": "c26ec982-ba6b-4d75-818c-a50240164af0", - "step_name": "Wait for instances to become active", - "total": 5, - "id": "3f243c67-2c27-47c7-a0c0-0834ad17f8b6" - }, - { - "successful": true, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-31 14:10:12", - "step_type": "Engine: create cluster", - "updated_at": "2015-03-31 14:10:19", - "events": [], - "cluster_id": "c26ec982-ba6b-4d75-818c-a50240164af0", - "step_name": "Run instances", - "total": 5, - "id": "407ba50a-c799-46af-9dfb-6aa5f6ade426" - } - ], - "anti_affinity": [], - "node_groups": [], - "management_public_key": "Sahara", - "status_description": "Creating cluster failed for the following reason(s): Node sample-worker-spark-004 has error status\nError ID: 3e238c82-d1f5-4560-8ed8-691e923e16a0", - "plugin_version": "1.0.0", - "id": "c26ec982-ba6b-4d75-1f8c-a50240164af0", - "trust_id": null, - "info": {}, - "cluster_template_id": "5a9a09a3-9349-43bd-9058-16c401fad2d5", - "name": "sample", - "cluster_configs": {}, - "created_at": "2015-03-31 14:10:07", - "default_image_id": "e6a6c5da-67be-4017-a7d2-81f466efe67e", - "project_id": "9cd1314a0a31493282b6712b76a8fcda" -} diff --git a/api-ref/source/v2/samples/image-registry/image-register-request.json b/api-ref/source/v2/samples/image-registry/image-register-request.json deleted file mode 100644 index 7bd4d15efd..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-register-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "ubuntu", - "description": "Ubuntu image for Hadoop 2.7.1" -} diff --git a/api-ref/source/v2/samples/image-registry/image-register-response.json b/api-ref/source/v2/samples/image-registry/image-register-response.json deleted file mode 100644 index 5851a58ec2..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-register-response.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:05:10Z", - "metadata": { - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.7.1-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v2/samples/image-registry/image-show-response.json b/api-ref/source/v2/samples/image-registry/image-show-response.json deleted file mode 100644 index 0f09f23f56..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-show-response.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "image": { - "updated": "2015-02-03T10:29:32Z", - "metadata": { - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.6.0": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.6.0" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.6.0-ubuntu-14.04", - "description": null, - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} 
diff --git a/api-ref/source/v2/samples/image-registry/image-tags-add-request.json b/api-ref/source/v2/samples/image-registry/image-tags-add-request.json deleted file mode 100644 index aa69662a6a..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-tags-add-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "tags": [ - "vanilla", - "2.7.1", - "some_other_tag" - ] -} diff --git a/api-ref/source/v2/samples/image-registry/image-tags-add-response.json b/api-ref/source/v2/samples/image-registry/image-tags-add-response.json deleted file mode 100644 index 2c66b2930d..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-tags-add-response.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:18:33Z", - "metadata": { - "_sahara_tag_vanilla": true, - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_some_other_tag": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "some_other_tag", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.6.0-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v2/samples/image-registry/image-tags-delete-request.json b/api-ref/source/v2/samples/image-registry/image-tags-delete-request.json deleted file mode 100644 index 44e1cef468..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-tags-delete-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "tags": [ - "some_other_tag" - ] -} diff --git a/api-ref/source/v2/samples/image-registry/image-tags-delete-response.json b/api-ref/source/v2/samples/image-registry/image-tags-delete-response.json deleted file mode 100644 index 44eb131390..0000000000 --- a/api-ref/source/v2/samples/image-registry/image-tags-delete-response.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "image": { - "updated": "2015-03-24T10:19:28Z", - "metadata": { - "_sahara_description": "Ubuntu image for Hadoop 2.7.1", - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true, - "_sahara_tag_2.7.1": true - }, - "id": "bb8d12b5-f9bb-49f0-aecb-739b8a9bec89", - "minDisk": 0, - "status": "ACTIVE", - "tags": [ - "vanilla", - "2.7.1" - ], - "minRam": 0, - "progress": 100, - "username": "ubuntu", - "created": "2015-02-03T10:28:39Z", - "name": "sahara-vanilla-2.7.1-ubuntu-14.04", - "description": "Ubuntu image for Hadoop 2.7.1", - "OS-EXT-IMG-SIZE:size": 1101856768 - } -} diff --git a/api-ref/source/v2/samples/image-registry/images-list-response.json b/api-ref/source/v2/samples/image-registry/images-list-response.json deleted file mode 100644 index d40f0c215f..0000000000 --- a/api-ref/source/v2/samples/image-registry/images-list-response.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "images": [ - { - "name": "ubuntu-vanilla-2.7.1", - "id": "4118a476-dfdc-4b0e-8d5c-463cba08e9ae", - "created": "2015-08-06T08:17:14Z", - "metadata": { - "_sahara_tag_2.7.1": true, - "_sahara_username": "ubuntu", - "_sahara_tag_vanilla": true - }, - "username": "ubuntu", - "progress": 100, - "OS-EXT-IMG-SIZE:size": 998716928, - "status": "ACTIVE", - "minDisk": 0, - "tags": [ - "vanilla", - "2.7.1" - ], - "updated": "2015-09-04T09:35:09Z", - "minRam": 0, - "description": null - }, - { - "name": "cdh-latest", - "id": "ff74035b-9da7-4edf-981d-57f270ed337d", - "created": "2015-09-04T11:56:44Z", - "metadata": { - "_sahara_username": 
"ubuntu", - "_sahara_tag_5.4.0": true, - "_sahara_tag_cdh": true - }, - "username": "ubuntu", - "progress": 100, - "OS-EXT-IMG-SIZE:size": 3281453056, - "status": "ACTIVE", - "minDisk": 0, - "tags": [ - "5.4.0", - "cdh" - ], - "updated": "2015-09-04T12:46:42Z", - "minRam": 0, - "description": null - } - ] -} diff --git a/api-ref/source/v2/samples/job-binaries/create-request.json b/api-ref/source/v2/samples/job-binaries/create-request.json deleted file mode 100644 index f32e15b40f..0000000000 --- a/api-ref/source/v2/samples/job-binaries/create-request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "url": "swift://container/jar-example.jar", - "name": "jar-example.jar", - "description": "This is a job binary", - "extra": { - "password": "swordfish", - "user": "admin" - } -} diff --git a/api-ref/source/v2/samples/job-binaries/create-response.json b/api-ref/source/v2/samples/job-binaries/create-response.json deleted file mode 100644 index 2d6aed83ea..0000000000 --- a/api-ref/source/v2/samples/job-binaries/create-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "This is a job binary", - "url": "swift://container/jar-example.jar", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:49:20.106452", - "id": "07f86352-ee8a-4b08-b737-d705ded5ff9c", - "updated_at": null, - "name": "jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/job-binaries/list-response.json b/api-ref/source/v2/samples/job-binaries/list-response.json deleted file mode 100644 index f77380eb58..0000000000 --- a/api-ref/source/v2/samples/job-binaries/list-response.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "binaries": [ - { - "is_public": false, - "description": "", - "url": "internal-db://d2498cbf-4589-484a-a814-81436c18beb3", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:36:59.375060", - "updated_at": null, - "id": "84248975-3c82-4206-a58d-6e7fb3a563fd", - "name": "example.pig", - "is_protected": false - }, - { - "is_public": false, - "description": "", - "url": "internal-db://22f1d87a-23c8-483e-a0dd-cb4a16dde5f9", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 12:43:52.265899", - "updated_at": null, - "id": "508fc62d-1d58-4412-b603-bdab307bb926", - "name": "udf.jar", - "is_protected": false - }, - { - "is_public": false, - "description": "", - "url": "swift://container/jar-example.jar", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:25:04.970513", - "updated_at": null, - "id": "a716a9cd-9add-4b12-b1b6-cdb71aaef350", - "name": "jar-example.jar", - "is_protected": false - } - ] -} diff --git a/api-ref/source/v2/samples/job-binaries/show-data-response b/api-ref/source/v2/samples/job-binaries/show-data-response deleted file mode 100644 index 8765f0c6c5..0000000000 --- a/api-ref/source/v2/samples/job-binaries/show-data-response +++ /dev/null @@ -1,3 +0,0 @@ -A = load '$INPUT' using PigStorage(':') as (fruit: chararray); -B = foreach A generate com.hadoopbook.pig.Trim(fruit); -store B into '$OUTPUT' USING PigStorage(); \ No newline at end of file diff --git a/api-ref/source/v2/samples/job-binaries/show-response.json b/api-ref/source/v2/samples/job-binaries/show-response.json deleted file mode 100644 index 36e12c85e4..0000000000 --- a/api-ref/source/v2/samples/job-binaries/show-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "an example jar file", - "url": 
"swift://container/jar-example.jar", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2013-10-15 14:25:04.970513", - "updated_at": null, - "id": "a716a9cd-9add-4b12-b1b6-cdb71aaef350", - "name": "jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/job-binaries/update-request.json b/api-ref/source/v2/samples/job-binaries/update-request.json deleted file mode 100644 index 456b0b209c..0000000000 --- a/api-ref/source/v2/samples/job-binaries/update-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "url": "swift://container/new-jar-example.jar", - "name": "new-jar-example.jar", - "description": "This is a new job binary" -} diff --git a/api-ref/source/v2/samples/job-binaries/update-response.json b/api-ref/source/v2/samples/job-binaries/update-response.json deleted file mode 100644 index 6dcbfa8c54..0000000000 --- a/api-ref/source/v2/samples/job-binaries/update-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "job_binary": { - "is_public": false, - "description": "This is a new job binary", - "url": "swift://container/new-jar-example.jar", - "project_id": "11587919cc534bcbb1027a161c82cf58", - "created_at": "2015-09-15 12:42:51.421542", - "updated_at": null, - "id": "b713d7ad-4add-4f12-g1b6-cdg71aaef350", - "name": "new-jar-example.jar", - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/job-templates/job-template-create-request.json b/api-ref/source/v2/samples/job-templates/job-template-create-request.json deleted file mode 100644 index b8d1a8ed19..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-template-create-request.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "description": "This is pig job example", - "mains": [ - "90d9d5ec-11aa-48bd-bc8c-34936ce0db6e" - ], - "libs": [ - "320a2ca7-25fd-4b48-9bc3-4fb1b6c4ff27" - ], - "type": "Pig", - "name": "pig-job-example" -} diff --git a/api-ref/source/v2/samples/job-templates/job-template-create-response.json b/api-ref/source/v2/samples/job-templates/job-template-create-response.json deleted file mode 100644 index c7d15f4fec..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-template-create-response.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "job_template": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-03-27 08:48:38.630827", - "id": "71defc8f-d005-484f-9d86-1aedf644d1ef", - "name": "pig-job-example", - "description": "This is pig job example", - "interface": [], - "libs": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:53", - "id": "320a2ca7-25fd-4b48-9bc3-4fb1b6c4ff27", - "name": "binary-job", - "updated_at": null, - "description": "", - "url": "internal-db://c6a925fa-ac1d-4b2e-b88a-7054e1927521" - } - ], - "type": "Pig", - "is_protected": false, - "mains": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-03 10:47:51", - "id": "90d9d5ec-11aa-48bd-bc8c-34936ce0db6e", - "name": "pig", - "updated_at": null, - "description": "", - "url": "internal-db://872878f6-72ea-44db-8d1d-e6a6396d2df0" - } - ] - } -} diff --git a/api-ref/source/v2/samples/job-templates/job-template-show-response.json b/api-ref/source/v2/samples/job-templates/job-template-show-response.json deleted file mode 100644 index dafbdc474c..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-template-show-response.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "job_template": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": 
"2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "Edp-test-job", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/job-templates/job-template-update-request.json b/api-ref/source/v2/samples/job-templates/job-template-update-request.json deleted file mode 100644 index 810b8a60b1..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-template-update-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "description": "This is public pig job example", - "name": "public-pig-job-example" -} diff --git a/api-ref/source/v2/samples/job-templates/job-template-update-response.json b/api-ref/source/v2/samples/job-templates/job-template-update-response.json deleted file mode 100644 index 5d4970457c..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-template-update-response.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "job_template": { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "public-pig-job-example", - "updated_at": null, - "description": "This is public pig job example", - "interface": [], - "libs": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - } -} diff --git a/api-ref/source/v2/samples/job-templates/job-templates-list-response.json b/api-ref/source/v2/samples/job-templates/job-templates-list-response.json deleted file mode 100644 index d7250dab47..0000000000 --- a/api-ref/source/v2/samples/job-templates/job-templates-list-response.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "job_templates": [ - { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "1a674c31-9aaa-4d07-b844-2bf200a1b836", - "name": "Edp-test-job-3d60854e", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:48", - "id": "0ff4ac10-94a4-4e25-9ac9-603afe27b100", - "name": "binary-job-339c2d1a.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-c71e6bce.sahara/binary-job-339c2d1a.jar" - } - ], - "type": "MapReduce", - "mains": [], - "is_protected": false - }, - { - "is_public": false, - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "4d1f3759-3497-4927-8352-910bacf24e62", - "name": "Edp-test-job-6b6953c8", - "updated_at": null, - "description": "", - "interface": [], - "libs": [ - { - "project_id": "9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "e0d47800-4ac1-4d63-a2e1-c92d669a44e2", - "name": "binary-job-6f21a2f8.jar", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-b409ec68.sahara/binary-job-6f21a2f8.jar" - } - ], - "type": "Pig", - "mains": [ - { - "project_id": 
"9cd1314a0a31493282b6712b76a8fcda", - "created_at": "2015-02-10 14:25:44", - "id": "e073e896-f123-4b76-995f-901d786262df", - "name": "binary-job-d4f8bd75.pig", - "updated_at": null, - "description": "", - "url": "swift://Edp-test-b409ec68.sahara/binary-job-d4f8bd75.pig" - } - ], - "is_protected": false - } - ], - "markers": { - "prev": null, - "next": "c53832da-6e7b-449e-a166-9f9ce1718d03" - } -} diff --git a/api-ref/source/v2/samples/job-types/job-types-list-response.json b/api-ref/source/v2/samples/job-types/job-types-list-response.json deleted file mode 100644 index c321c4fbd3..0000000000 --- a/api-ref/source/v2/samples/job-types/job-types-list-response.json +++ /dev/null @@ -1,209 +0,0 @@ -{ - "job_types": [ - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Hive" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Java" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "MapReduce" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. 
It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "MapReduce.Streaming" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Pig" - }, - { - "plugins": [ - { - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": { - "1.2.1": {}, - "2.6.0": {} - }, - "title": "Vanilla Apache Hadoop", - "name": "vanilla" - }, - { - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": { - "1.3.2": {}, - "2.0.6": {} - }, - "title": "Hortonworks Data Platform", - "name": "hdp" - }, - { - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": { - "5": {}, - "5.3.0": {} - }, - "title": "Cloudera Plugin", - "name": "cdh" - } - ], - "name": "Shell" - }, - { - "plugins": [ - { - "description": "This plugin provides an ability to launch Spark on Hadoop CDH cluster without any management consoles.", - "versions": { - "1.0.0": {} - }, - "title": "Apache Spark", - "name": "spark" - } - ], - "name": "Spark" - } - ] -} diff --git a/api-ref/source/v2/samples/jobs/cancel-response.json b/api-ref/source/v2/samples/jobs/cancel-response.json deleted file mode 100644 index 61b7e3547b..0000000000 --- a/api-ref/source/v2/samples/jobs/cancel-response.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "job": { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": false, - "updated_at": "2015-09-15T09:50:46", - 
"return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } -} diff --git a/api-ref/source/v2/samples/jobs/job-request.json b/api-ref/source/v2/samples/jobs/job-request.json deleted file mode 100644 index eabb89075d..0000000000 --- 
a/api-ref/source/v2/samples/jobs/job-request.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "job_template_id": "548ea8d4-a5sd-33a4-bt22-asf4n87a8e2dh", - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "job_configs": { - "configs": { - "mapred.map.tasks": "1", - "mapred.reduce.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - } -} diff --git a/api-ref/source/v2/samples/jobs/job-response.json b/api-ref/source/v2/samples/jobs/job-response.json deleted file mode 100644 index d461f6752f..0000000000 --- a/api-ref/source/v2/samples/jobs/job-response.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "job": { - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "is_protected": false, - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "created_at": "2015-09-15T09:49:24", - "is_public": false, - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "project_id": "808d5032ea0446889097723bfc8e919d", - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "info": { - "status": "PENDING" - } - } -} diff --git a/api-ref/source/v2/samples/jobs/job-update-request.json b/api-ref/source/v2/samples/jobs/job-update-request.json deleted file mode 100644 index 647a4175b9..0000000000 --- a/api-ref/source/v2/samples/jobs/job-update-request.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "is_public": true -} diff --git a/api-ref/source/v2/samples/jobs/job-update-response.json b/api-ref/source/v2/samples/jobs/job-update-response.json deleted file mode 100644 index 3121f0a53e..0000000000 --- a/api-ref/source/v2/samples/jobs/job-update-response.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "job": { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": true, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": "20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": 
"job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } -} diff --git a/api-ref/source/v2/samples/jobs/list-response.json b/api-ref/source/v2/samples/jobs/list-response.json deleted file mode 100644 index 118645fb25..0000000000 --- a/api-ref/source/v2/samples/jobs/list-response.json +++ /dev/null @@ -1,122 +0,0 @@ -{ - "jobs": [ - { - "job_configs": { - "configs": { - "mapred.reduce.tasks": "1", - "mapred.map.tasks": "1" - }, - "args": [ - "arg1", - "arg2" - ], - "params": { - "param2": "value2", - "param1": "value1" - } - }, - "is_protected": false, - "input_id": "3e1bc8e6-8c69-4749-8e52-90d9341d15bc", - "job_id": "310b0fc6-e1db-408e-8798-312e7500f3ac", - "cluster_id": "811e1134-666f-4c48-bc92-afb5b10c9d8c", - "created_at": "2015-09-15T09:49:24", - "end_time": "2015-09-15T12:50:46", - "output_id": "52146b52-6540-4aac-a024-fee253cf52a9", - "is_public": false, - "updated_at": "2015-09-15T09:50:46", - "return_code": null, - "data_source_urls": { - "3e1bc8e6-8c69-4749-8e52-90d9341d15bc": "swift://ap-cont/input", - "52146b52-6540-4aac-a024-fee253cf52a9": "swift://ap-cont/output" - }, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "start_time": "2015-09-15T12:49:43", - "id": 
"20da9edb-12ce-4b45-a473-41baeefef997", - "oozie_job_id": "0000001-150915094349962-oozie-hado-W", - "info": { - "user": "hadoop", - "actions": [ - { - "name": ":start:", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": "job-node", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "errorCode": null, - "id": "0000001-150915094349962-oozie-hado-W@:start:", - "consoleUrl": "-", - "errorMessage": null, - "toString": "Action name[:start:] status[OK]", - "stats": null, - "type": ":START:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "job-node", - "trackerUri": "http://172.18.168.119:8032", - "externalStatus": "FAILED/KILLED", - "status": "ERROR", - "externalId": "job_1442310173665_0002", - "transition": "fail", - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "JA018", - "id": "0000001-150915094349962-oozie-hado-W@job-node", - "consoleUrl": "http://ap-cluster-all-0:8088/proxy/application_1442310173665_0002/", - "errorMessage": "Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]", - "toString": "Action name[job-node] status[ERROR]", - "stats": null, - "type": "pig", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "externalChildIDs": null, - "cred": "null" - }, - { - "name": "fail", - "trackerUri": "-", - "externalStatus": "OK", - "status": "OK", - "externalId": "-", - "transition": null, - "data": null, - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "errorCode": "E0729", - "id": "0000001-150915094349962-oozie-hado-W@fail", - "consoleUrl": "-", - "errorMessage": "Workflow failed, error message[Main class [org.apache.oozie.action.hadoop.PigMain], exit code [2]]", - "toString": "Action name[fail] status[OK]", - "stats": null, - "type": ":KILL:", - "retries": 0, - "startTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "externalChildIDs": null, - "cred": "null" - } - ], - "createdTime": "Tue, 15 Sep 2015 09:49:58 GMT", - "status": "KILLED", - "group": null, - "externalId": null, - "acl": null, - "run": 0, - "appName": "job-wf", - "parentId": null, - "conf": "\r\n \r\n user.name\r\n hadoop\r\n \r\n \r\n oozie.use.system.libpath\r\n true\r\n \r\n \r\n mapreduce.job.user.name\r\n hadoop\r\n \r\n \r\n nameNode\r\n hdfs://ap-cluster-all-0:9000\r\n \r\n \r\n jobTracker\r\n http://172.18.168.119:8032\r\n \r\n \r\n oozie.wf.application.path\r\n hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml\r\n \r\n", - "id": "0000001-150915094349962-oozie-hado-W", - "startTime": "Tue, 15 Sep 2015 09:49:59 GMT", - "appPath": "hdfs://ap-cluster-all-0:9000/user/hadoop/pig-job-example/3038025d-9974-4993-a778-26a074cdfb8d/workflow.xml", - "endTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "toString": "Workflow id[0000001-150915094349962-oozie-hado-W] status[KILLED]", - "lastModTime": "Tue, 15 Sep 2015 09:50:17 GMT", - "consoleUrl": "http://ap-cluster-all-0.novalocal:11000/oozie?job=0000001-150915094349962-oozie-hado-W" - } - } - ] -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-template-create-request.json b/api-ref/source/v2/samples/node-group-templates/node-group-template-create-request.json deleted file mode 100644 index 96c40f097c..0000000000 --- a/api-ref/source/v2/samples/node-group-templates/node-group-template-create-request.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "node_processes": [ 
- "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "name": "master", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "flavor_id": "2" -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-template-create-response.json b/api-ref/source/v2/samples/node-group-templates/node-group-template-create-response.json deleted file mode 100644 index c9b8e96312..0000000000 --- a/api-ref/source/v2/samples/node-group-templates/node-group-template-create-response.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "node_group_template": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "is_protected": false, - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "plugin_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "security_groups": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-template-show-response.json b/api-ref/source/v2/samples/node-group-templates/node-group-template-show-response.json deleted file mode 100644 index d948694db9..0000000000 --- a/api-ref/source/v2/samples/node-group-templates/node-group-template-show-response.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "node_group_template": { - "is_public": false, - "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "description": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - } -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-template-update-request.json b/api-ref/source/v2/samples/node-group-templates/node-group-template-update-request.json deleted file mode 100644 index 27428f7aee..0000000000 --- a/api-ref/source/v2/samples/node-group-templates/node-group-template-update-request.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "plugin_name": "vanilla", - "plugin_version": "2.7.1", - "node_processes": [ - "datanode" - ], - "name": "new", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "flavor_id": "2" -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-template-update-response.json b/api-ref/source/v2/samples/node-group-templates/node-group-template-update-response.json deleted file mode 100644 index 84a549ac44..0000000000 --- 
a/api-ref/source/v2/samples/node-group-templates/node-group-template-update-response.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "node_group_template": { - "is_public": false, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "is_protected": false, - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "plugin_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "security_groups": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "new", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "volume_type": null - } -} diff --git a/api-ref/source/v2/samples/node-group-templates/node-group-templates-list-response.json b/api-ref/source/v2/samples/node-group-templates/node-group-templates-list-response.json deleted file mode 100644 index bccd121c6e..0000000000 --- a/api-ref/source/v2/samples/node-group-templates/node-group-templates-list-response.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "node_group_templates": [ - { - "is_public": false, - "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "0bb9f1a4-0c44-4dc5-9452-6741c62ed9ae", - "description": null, - "plugin_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:20:11", - "is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "master", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "namenode", - "resourcemanager", - "oozie", - "historyserver" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - }, - { - "is_public": false, - "image_id": null, - "tenant_id": "808d5032ea0446889097723bfc8e919d", - "shares": null, - "floating_ip_pool": "033debed-aeb8-488c-b7d0-adb74c61faa5", - "node_configs": {}, - "auto_security_group": false, - "is_default": false, - "availability_zone": null, - "plugin_name": "vanilla", - "flavor_id": "2", - "id": "846edb31-add5-46e6-a4ee-a4c339f99251", - "description": null, - "hadoop_version": "2.7.1", - "use_autoconfig": true, - "volumes_availability_zone": null, - "created_at": "2015-09-14T10:27:00", - "is_protected": false, - "updated_at": null, - "volumes_per_node": 0, - "is_proxy_gateway": false, - "name": "worker", - "volume_mount_prefix": "/volumes/disk", - "node_processes": [ - "datanode", - "nodemanager" - ], - "volumes_size": 0, - "volume_local_to_instance": false, - "security_groups": null, - "volume_type": null - } - ], - "markers": { - "prev":"39dfc852-8588-4b61-8d2b-eb08a67ab240", - "next":"eaa0bd97-ab54-43df-83ab-77a9774d7358" - } -} diff --git a/api-ref/source/v2/samples/plugins/plugin-show-response.json b/api-ref/source/v2/samples/plugins/plugin-show-response.json deleted file mode 100644 index 00b948a0e6..0000000000 --- a/api-ref/source/v2/samples/plugins/plugin-show-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "plugin": { - "name": "vanilla", - "versions": [ - "1.2.1", - 
"2.4.1", - "2.6.0" - ], - "title": "Vanilla Apache Hadoop", - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component." - } -} diff --git a/api-ref/source/v2/samples/plugins/plugin-update-request.json b/api-ref/source/v2/samples/plugins/plugin-update-request.json deleted file mode 100644 index 97a17c38f2..0000000000 --- a/api-ref/source/v2/samples/plugins/plugin-update-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "plugin_labels": { - "enabled": { - "status": false - } - } -} diff --git a/api-ref/source/v2/samples/plugins/plugin-update-response.json b/api-ref/source/v2/samples/plugins/plugin-update-response.json deleted file mode 100644 index 7541ae939c..0000000000 --- a/api-ref/source/v2/samples/plugins/plugin-update-response.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "plugin": { - "plugin_labels": { - "hidden": { - "status": true, - "mutable": true, - "description": "Existence of plugin or its version is hidden, but still can be used for cluster creation by CLI and directly by client." - }, - "enabled": { - "status": false, - "mutable": true, - "description": "Plugin or its version is enabled and can be used by user." - } - }, - "description": "It's a fake plugin that aimed to work on the CirrOS images. It doesn't install Hadoop. It's needed to be able to test provisioning part of Sahara codebase itself.", - "versions": [ - "0.1" - ], - "tenant_id": "993f53c1f51845e48e013aeb632358d8", - "title": "Fake Plugin", - "version_labels": { - "0.1": { - "enabled": { - "status": true, - "mutable": true, - "description": "Plugin or its version is enabled and can be used by user." - } - } - }, - "name": "fake" - } -} diff --git a/api-ref/source/v2/samples/plugins/plugin-version-show-response.json b/api-ref/source/v2/samples/plugins/plugin-version-show-response.json deleted file mode 100644 index cb1c175a59..0000000000 --- a/api-ref/source/v2/samples/plugins/plugin-version-show-response.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "plugin": { - "name": "vanilla", - "versions": [ - "1.2.1", - "2.4.1", - "2.6.0" - ], - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "required_image_tags": [ - "vanilla", - "2.6.0" - ], - "node_processes": { - "JobFlow": [ - "oozie" - ], - "HDFS": [ - "namenode", - "datanode", - "secondarynamenode" - ], - "YARN": [ - "resourcemanager", - "nodemanager" - ], - "MapReduce": [ - "historyserver" - ], - "Hadoop": [], - "Hive": [ - "hiveserver" - ] - }, - "configs": [ - { - "default_value": "/tmp/hadoop-${user.name}", - "name": "hadoop.tmp.dir", - "priority": 2, - "config_type": "string", - "applicable_target": "HDFS", - "is_optional": true, - "scope": "node", - "description": "A base for other temporary directories." - }, - { - "default_value": true, - "name": "hadoop.native.lib", - "priority": 2, - "config_type": "bool", - "applicable_target": "HDFS", - "is_optional": true, - "scope": "node", - "description": "Should native hadoop libraries, if present, be used." 
- }, - { - "default_value": 1024, - "name": "NodeManager Heap Size", - "config_values": null, - "priority": 1, - "config_type": "int", - "applicable_target": "YARN", - "is_optional": false, - "scope": "node", - "description": null - }, - { - "default_value": true, - "name": "Enable Swift", - "config_values": null, - "priority": 1, - "config_type": "bool", - "applicable_target": "general", - "is_optional": false, - "scope": "cluster", - "description": null - }, - { - "default_value": true, - "name": "Enable MySQL", - "config_values": null, - "priority": 1, - "config_type": "bool", - "applicable_target": "general", - "is_optional": true, - "scope": "cluster", - "description": null - } - ], - "title": "Vanilla Apache Hadoop" - } -} diff --git a/api-ref/source/v2/samples/plugins/plugins-list-response.json b/api-ref/source/v2/samples/plugins/plugins-list-response.json deleted file mode 100644 index d92d85c114..0000000000 --- a/api-ref/source/v2/samples/plugins/plugins-list-response.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "plugins": [ - { - "name": "vanilla", - "description": "The Apache Vanilla plugin provides the ability to launch upstream Vanilla Apache Hadoop cluster without any management consoles. It can also deploy the Oozie component.", - "versions": [ - "1.2.1", - "2.4.1", - "2.6.0" - ], - "title": "Vanilla Apache Hadoop" - }, - { - "name": "hdp", - "description": "The Hortonworks Sahara plugin automates the deployment of the Hortonworks Data Platform (HDP) on OpenStack.", - "versions": [ - "1.3.2", - "2.0.6" - ], - "title": "Hortonworks Data Platform" - }, - { - "name": "spark", - "description": "This plugin provides an ability to launch Spark on Hadoop CDH cluster without any management consoles.", - "versions": [ - "1.0.0", - "0.9.1" - ], - "title": "Apache Spark" - }, - { - "name": "cdh", - "description": "The Cloudera Sahara plugin provides the ability to launch the Cloudera distribution of Apache Hadoop (CDH) with Cloudera Manager management console.", - "versions": [ - "5", - "5.3.0" - ], - "title": "Cloudera Plugin" - } - ] -} diff --git a/bandit.yaml b/bandit.yaml deleted file mode 100644 index 09e5cd50dd..0000000000 --- a/bandit.yaml +++ /dev/null @@ -1,133 +0,0 @@ -# optional: after how many files to update progress -#show_progress_every: 100 - -# optional: plugins directory name -#plugins_dir: 'plugins' - -# optional: plugins discovery name pattern -plugin_name_pattern: '*.py' - -# optional: terminal escape sequences to display colors -#output_colors: -# DEFAULT: '\033[0m' -# HEADER: '\033[95m' -# LOW: '\033[94m' -# WARN: '\033[93m' -# ERROR: '\033[91m' - -# optional: log format string -#log_format: "[%(module)s]\t%(levelname)s\t%(message)s" - -# globs of files which should be analyzed -include: - - '*.py' - - '*.pyw' - -# a list of strings, which if found in the path will cause files to be excluded -# for example /tests/ - to remove all files in tests directory -exclude_dirs: - -profiles: - sahara_default: - include: - - hardcoded_password_string - - hardcoded_password_funcarg - # - hardcoded_password_default - - blacklist_calls - - blacklist_imports - - subprocess_popen_with_shell_equals_true - - subprocess_without_shell_equals_true - - any_other_function_with_shell_equals_true - - start_process_with_a_shell - - start_process_with_no_shell - - hardcoded_sql_expressions - - jinja2_autoescape_false - - use_of_mako_templates - -blacklist_calls: - bad_name_sets: - - pickle: - qualnames: [pickle.loads, pickle.load, pickle.Unpickler, - cPickle.loads, cPickle.load, 
cPickle.Unpickler] - message: "Pickle library appears to be in use, possible security issue." - - marshal: - qualnames: [marshal.load, marshal.loads] - message: "Deserialization with the marshal module is possibly dangerous." - - md5: - qualnames: [hashlib.md5] - message: "Use of insecure MD5 hash function." - - mktemp_q: - qualnames: [tempfile.mktemp] - message: "Use of insecure and deprecated function (mktemp)." - - eval: - qualnames: [eval] - message: "Use of possibly insecure function - consider using safer ast.literal_eval." - - mark_safe: - qualnames: [mark_safe] - message: "Use of mark_safe() may expose cross-site scripting vulnerabilities and should be reviewed." - - httpsconnection: - qualnames: [httplib.HTTPSConnection] - message: "Use of HTTPSConnection does not provide security, see https://wiki.openstack.org/wiki/OSSN/OSSN-0033" - - yaml_load: - qualnames: [yaml.load] - message: "Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load()." - - urllib_urlopen: - qualnames: [urllib.urlopen, urllib.urlretrieve, urllib.URLopener, urllib.FancyURLopener, urllib2.urlopen, urllib2.Request] - message: "Audit url open for permitted schemes. Allowing use of file:/ or custom schemes is often unexpected." - -shell_injection: - # Start a process using the subprocess module, or one of its wrappers. - subprocess: [subprocess.Popen, subprocess.call, subprocess.check_call, - subprocess.check_output, utils.execute, utils.execute_with_timeout] - # Start a process with a function vulnerable to shell injection. - shell: [os.system, os.popen, os.popen2, os.popen3, os.popen4, - popen2.popen2, popen2.popen3, popen2.popen4, popen2.Popen3, - popen2.Popen4, commands.getoutput, commands.getstatusoutput] - # Start a process with a function that is not vulnerable to shell injection. - no_shell: [os.execl, os.execle, os.execlp, os.execlpe, os.execv,os.execve, - os.execvp, os.execvpe, os.spawnl, os.spawnle, os.spawnlp, - os.spawnlpe, os.spawnv, os.spawnve, os.spawnvp, os.spawnvpe, - os.startfile] - -blacklist_imports: - bad_import_sets: - - telnet: - imports: [telnetlib] - level: ERROR - message: "Telnet is considered insecure. Use SSH or some other encrypted protocol." - - info_libs: - imports: [pickle, cPickle, subprocess, Crypto] - level: LOW - message: "Consider possible security implications associated with {module} module." 
- -hardcoded_tmp_directory: - tmp_dirs: [/tmp, /var/tmp, /dev/shm] - -hardcoded_password: - word_list: "wordlist/default-passwords" - -ssl_with_bad_version: - bad_protocol_versions: - - 'PROTOCOL_SSLv2' - - 'SSLv2_METHOD' - - 'SSLv23_METHOD' - - 'PROTOCOL_SSLv3' # strict option - - 'PROTOCOL_TLSv1' # strict option - - 'SSLv3_METHOD' # strict option - - 'TLSv1_METHOD' # strict option - -password_config_option_not_marked_secret: - function_names: - - oslo.config.cfg.StrOpt - - oslo_config.cfg.StrOpt - -execute_with_run_as_root_equals_true: - function_names: - - ceilometer.utils.execute - - cinder.utils.execute - - neutron.agent.linux.utils.execute - - nova.utils.execute - - nova.utils.trycmd - -try_except_pass: - check_typed_exception: True diff --git a/bindep.txt b/bindep.txt deleted file mode 100644 index 95d8031819..0000000000 --- a/bindep.txt +++ /dev/null @@ -1,30 +0,0 @@ -# This file contains runtime (non-python) dependencies -# More info at: https://docs.openstack.org/infra/bindep/readme.html - -libssl-dev [platform:dpkg] -openssl-devel [platform:rpm] - -# updates of the localized release notes require msgmerge -gettext - -# Define the basic (test) requirements extracted from bindata-fallback.txt -# - mysqladmin and psql -mariadb [platform:rpm] -mariadb-devel [platform:rpm] -dev-db/mariadb [platform:gentoo] -mysql-client [platform:dpkg !platform:debian] -mysql-server [platform:dpkg !platform:debian] -mariadb-server [platform:debian] -postgresql -postgresql-client [platform:dpkg] -libpq-dev [platform:dpkg] -postgresql-server [platform:rpm] -postgresql-devel [platform:rpm] - -# The Python bindings for libguestfs are used by the sahara-image-pack -# command. -python3-guestfs [platform:dpkg] -libguestfs-xfs [platform:dpkg] -python3-libguestfs [platform:rpm] -libguestfs-xfs [platform:redhat] -xfsprogs [platform:suse] diff --git a/devstack/README.rst b/devstack/README.rst deleted file mode 100644 index 464d5c428f..0000000000 --- a/devstack/README.rst +++ /dev/null @@ -1,22 +0,0 @@ -==================== -Enabling in Devstack -==================== - -1. Download DevStack - -2. Add this repo as an external repository in ``local.conf`` - -.. sourcecode:: bash - - [[local|localrc]] - enable_plugin sahara https://opendev.org/openstack/sahara - enable_plugin heat https://opendev.org/openstack/heat - -Optionally, a git refspec may be provided as follows: - -.. sourcecode:: bash - - [[local|localrc]] - enable_plugin sahara https://opendev.org/openstack/sahara - -3. Run ``stack.sh`` diff --git a/devstack/exercise.sh b/devstack/exercise.sh deleted file mode 100644 index e100169608..0000000000 --- a/devstack/exercise.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# Sanity check that Sahara started if enabled - -echo "*********************************************************************" -echo "Begin DevStack Exercise: $0" -echo "*********************************************************************" - -# This script exits on an error so that errors don't compound and you see -# only the first error that occurred. -set -o errexit - -# Print the commands being run so that we can see the command that triggers -# an error. It is also useful for following along as the install occurs. -set -o xtrace - - -# Settings -# ======== - -# Keep track of the current directory -EXERCISE_DIR=$(cd $(dirname "$0") && pwd) -TOP_DIR=$(cd $EXERCISE_DIR/..; pwd) - -# Import common functions -. $TOP_DIR/functions - -# Import configuration -. $TOP_DIR/openrc - -# Import exercise configuration -. 
$TOP_DIR/exerciserc - -is_service_enabled sahara || exit 55 - -if is_ssl_enabled_service "sahara" ||\ - is_ssl_enabled_service "sahara-api" ||\ - is_service_enabled tls-proxy; then - SAHARA_SERVICE_PROTOCOL="https" -fi - -SAHARA_SERVICE_PROTOCOL=${SAHARA_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} - -$CURL_GET $SAHARA_SERVICE_PROTOCOL://$SERVICE_HOST:8386/ 2>/dev/null \ - | grep -q 'Auth' || die $LINENO "Sahara API isn't functioning!" - -set +o xtrace -echo "*********************************************************************" -echo "SUCCESS: End DevStack Exercise: $0" -echo "*********************************************************************" diff --git a/devstack/files/apache-sahara-api.template b/devstack/files/apache-sahara-api.template deleted file mode 100644 index 591739155c..0000000000 --- a/devstack/files/apache-sahara-api.template +++ /dev/null @@ -1,27 +0,0 @@ -Listen %PUBLICPORT% - - - WSGIDaemonProcess sahara-api processes=2 threads=1 user=%USER% display-name=%{GROUP} %VIRTUALENV% - WSGIProcessGroup sahara-api - WSGIScriptAlias / %SAHARA_BIN_DIR%/sahara-wsgi-api - WSGIApplicationGroup %{GLOBAL} - WSGIPassAuthorization On - AllowEncodedSlashes On - = 2.4> - ErrorLogFormat "%{cu}t %M" - - ErrorLog /var/log/%APACHE_NAME%/sahara-api.log - %SSLENGINE% - %SSLCERTFILE% - %SSLKEYFILE% - - - = 2.4> - Require all granted - - - Order allow,deny - Allow from all - - - diff --git a/devstack/plugin.sh b/devstack/plugin.sh deleted file mode 100755 index 44b5ba4763..0000000000 --- a/devstack/plugin.sh +++ /dev/null @@ -1,373 +0,0 @@ -#!/bin/bash -# -# lib/sahara - -# Dependencies: -# ``functions`` file -# ``DEST``, ``DATA_DIR``, ``STACK_USER`` must be defined - -# ``stack.sh`` calls the entry points in this order: -# -# install_sahara -# install_python_saharaclient -# configure_sahara -# start_sahara -# stop_sahara -# cleanup_sahara - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -o xtrace - - -# Functions -# --------- - -# create_sahara_accounts() - Set up common required sahara accounts -# -# Tenant User Roles -# ------------------------------ -# service sahara admin -function create_sahara_accounts { - - create_service_user "sahara" - - get_or_create_service "sahara" "data-processing" "Sahara Data Processing" - get_or_create_endpoint "data-processing" \ - "$REGION_NAME" \ - "$SAHARA_SERVICE_PROTOCOL://$SAHARA_SERVICE_HOST:$SAHARA_SERVICE_PORT" \ - "$SAHARA_SERVICE_PROTOCOL://$SAHARA_SERVICE_HOST:$SAHARA_SERVICE_PORT" \ - "$SAHARA_SERVICE_PROTOCOL://$SAHARA_SERVICE_HOST:$SAHARA_SERVICE_PORT" -} - -# cleanup_sahara() - Remove residual data files, anything left over from -# previous runs that would need to clean up. 
-function cleanup_sahara { - - # Cleanup auth cache dir - if [ "$SAHARA_USE_MOD_WSGI" == "True" ]; then - sudo rm -f $(apache_site_config_for sahara-api) - fi -} - -function configure_sahara_apache_wsgi { - - local sahara_apache_conf=$(apache_site_config_for sahara-api) - local sahara_ssl="" - local sahara_certfile="" - local sahara_keyfile="" - local venv_path="" - - if is_ssl_enabled_service sahara; then - sahara_ssl="SSLEngine On" - sahara_certfile="SSLCertificateFile $SAHARA_SSL_CERT" - sahara_keyfile="SSLCertificateKeyFile $SAHARA_SSL_KEY" - fi - - sudo cp $SAHARA_DIR/devstack/files/apache-sahara-api.template $sahara_apache_conf - sudo sed -e " - s|%PUBLICPORT%|$SAHARA_SERVICE_PORT|g; - s|%APACHE_NAME%|$APACHE_NAME|g; - s|%SAHARA_BIN_DIR%|$SAHARA_BIN_DIR|g; - s|%SSLENGINE%|$sahara_ssl|g; - s|%SSLCERTFILE%|$sahara_certfile|g; - s|%SSLKEYFILE%|$sahara_keyfile|g; - s|%USER%|$STACK_USER|g; - s|%VIRTUALENV%|$venv_path|g - " -i $sahara_apache_conf - -} - -# configure_sahara() - Set config files, create data dirs, etc -function configure_sahara { - sudo install -d -o $STACK_USER $SAHARA_CONF_DIR - - cp -p $SAHARA_DIR/etc/sahara/api-paste.ini $SAHARA_CONF_DIR - - configure_keystone_authtoken_middleware $SAHARA_CONF_FILE sahara - - # Set admin user parameters needed for trusts creation - iniset $SAHARA_CONF_FILE \ - trustee project_name $SERVICE_TENANT_NAME - iniset $SAHARA_CONF_FILE trustee username sahara - iniset $SAHARA_CONF_FILE \ - trustee password $SERVICE_PASSWORD - iniset $SAHARA_CONF_FILE \ - trustee user_domain_name "$SERVICE_DOMAIN_NAME" - iniset $SAHARA_CONF_FILE \ - trustee project_domain_name "$SERVICE_DOMAIN_NAME" - iniset $SAHARA_CONF_FILE \ - trustee auth_url "$KEYSTONE_SERVICE_URI/v3" - - iniset_rpc_backend sahara $SAHARA_CONF_FILE DEFAULT - - # Set configuration to send notifications - - if is_service_enabled ceilometer; then - iniset $SAHARA_CONF_FILE oslo_messaging_notifications driver "messaging" - fi - - iniset $SAHARA_CONF_FILE DEFAULT debug $ENABLE_DEBUG_LOG_LEVEL - - iniset $SAHARA_CONF_FILE DEFAULT plugins $SAHARA_ENABLED_PLUGINS - - iniset $SAHARA_CONF_FILE \ - database connection `database_connection_url sahara` - - if is_service_enabled neutron; then - iniset $SAHARA_CONF_FILE neutron endpoint_type $SAHARA_ENDPOINT_TYPE - if is_ssl_enabled_service "neutron" \ - || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE neutron ca_file $SSL_BUNDLE_FILE - fi - fi - - if is_ssl_enabled_service "heat" || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE heat ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE heat endpoint_type $SAHARA_ENDPOINT_TYPE - - if is_ssl_enabled_service "cinder" || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE cinder ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE cinder endpoint_type $SAHARA_ENDPOINT_TYPE - - if is_ssl_enabled_service "nova" || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE nova ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE nova endpoint_type $SAHARA_ENDPOINT_TYPE - - if is_ssl_enabled_service "swift" || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE swift ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE swift endpoint_type $SAHARA_ENDPOINT_TYPE - - if is_ssl_enabled_service "key" || is_service_enabled tls-proxy; then - iniset $SAHARA_CONF_FILE keystone ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE keystone endpoint_type $SAHARA_ENDPOINT_TYPE - - if is_ssl_enabled_service "glance" || is_service_enabled tls-proxy; then 
- iniset $SAHARA_CONF_FILE glance ca_file $SSL_BUNDLE_FILE - fi - iniset $SAHARA_CONF_FILE glance endpoint_type $SAHARA_ENDPOINT_TYPE - - # Register SSL certificates if provided - if is_ssl_enabled_service sahara; then - ensure_certificates SAHARA - - iniset $SAHARA_CONF_FILE ssl cert_file "$SAHARA_SSL_CERT" - iniset $SAHARA_CONF_FILE ssl key_file "$SAHARA_SSL_KEY" - fi - - iniset $SAHARA_CONF_FILE DEFAULT use_syslog $SYSLOG - - # Format logging - if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then - if [ "$SAHARA_USE_MOD_WSGI" == "False" ]; then - setup_colorized_logging $SAHARA_CONF_FILE DEFAULT - fi - fi - - if is_service_enabled tls-proxy; then - # Set the service port for a proxy to take the original - iniset $SAHARA_CONF_FILE DEFAULT port $SAHARA_SERVICE_PORT_INT - fi - - if [ "$SAHARA_ENABLE_DISTRIBUTED_PERIODICS" == "True" ]; then - # Enable distributed periodic tasks - iniset $SAHARA_CONF_FILE DEFAULT periodic_coordinator_backend_url\ - $SAHARA_PERIODIC_COORDINATOR_URL - pip_install tooz[memcached] - - restart_service memcached - fi - - recreate_database sahara - $SAHARA_BIN_DIR/sahara-db-manage \ - --config-file $SAHARA_CONF_FILE upgrade head - - if [ "$SAHARA_USE_MOD_WSGI" == "True" ]; then - configure_sahara_apache_wsgi - fi -} - -# install_sahara() - Collect source and prepare -function install_sahara { - setup_develop $SAHARA_DIR - if [ "$SAHARA_USE_MOD_WSGI" == "True" ]; then - install_apache_wsgi - fi -} - -# install_ambari() - Collect source and prepare -function install_ambari { - git_clone $AMBARI_PLUGIN_REPO $AMBARI_PLUGIN_DIR $AMBARI_PLUGIN_BRANCH - setup_develop $AMBARI_PLUGIN_DIR -} - -# install_cdh() - Collect source and prepare -function install_cdh { - git_clone $CDH_PLUGIN_REPO $CDH_PLUGIN_DIR $CDH_PLUGIN_BRANCH - setup_develop $CDH_PLUGIN_DIR -} - -# install_mapr() - Collect source and prepare -function install_mapr { - git_clone $MAPR_PLUGIN_REPO $MAPR_PLUGIN_DIR $MAPR_PLUGIN_BRANCH - setup_develop $MAPR_PLUGIN_DIR -} - -# install_spark() - Collect source and prepare -function install_spark { - git_clone $SPARK_PLUGIN_REPO $SPARK_PLUGIN_DIR $SPARK_PLUGIN_BRANCH - setup_develop $SPARK_PLUGIN_DIR -} - -# install_storm() - Collect source and prepare -function install_storm { - git_clone $STORM_PLUGIN_REPO $STORM_PLUGIN_DIR $STORM_PLUGIN_BRANCH - setup_develop $STORM_PLUGIN_DIR -} - -# install_vanilla() - Collect source and prepare -function install_vanilla { - git_clone $VANILLA_PLUGIN_REPO $VANILLA_PLUGIN_DIR $VANILLA_PLUGIN_BRANCH - setup_develop $VANILLA_PLUGIN_DIR -} - -# install_python_saharaclient() - Collect source and prepare -function install_python_saharaclient { - if use_library_from_git "python-saharaclient"; then - git_clone $SAHARACLIENT_REPO $SAHARACLIENT_DIR $SAHARACLIENT_BRANCH - setup_develop $SAHARACLIENT_DIR - fi -} - -# start_sahara() - Start running processes, including screen -function start_sahara { - local service_port=$SAHARA_SERVICE_PORT - local service_protocol=$SAHARA_SERVICE_PROTOCOL - if is_service_enabled tls-proxy; then - service_port=$SAHARA_SERVICE_PORT_INT - service_protocol="http" - fi - - if [ "$SAHARA_USE_MOD_WSGI" == "True" ] ; then - enable_apache_site sahara-api - restart_apache_server - else - run_process sahara-api "$SAHARA_BIN_DIR/sahara-api \ - --config-file $SAHARA_CONF_FILE" - fi - - run_process sahara-eng "$SAHARA_BIN_DIR/sahara-engine \ - --config-file $SAHARA_CONF_FILE" - - echo "Waiting for Sahara to start..." - if ! 
wait_for_service $SERVICE_TIMEOUT \ - $service_protocol://$SAHARA_SERVICE_HOST:$service_port; then - die $LINENO "Sahara did not start" - fi - - # Start proxies if enabled - if is_service_enabled tls-proxy; then - start_tls_proxy sahara '*' $SAHARA_SERVICE_PORT \ - $SAHARA_SERVICE_HOST \ - $SAHARA_SERVICE_PORT_INT & - fi -} - -# configure_tempest_for_sahara() - Tune Tempest configuration for Sahara -function configure_tempest_for_sahara { - if is_service_enabled tempest; then - iniset $TEMPEST_CONFIG service_available sahara True - iniset $TEMPEST_CONFIG data-processing-feature-enabled plugins $SAHARA_INSTALLED_PLUGINS - fi -} - -# stop_sahara() - Stop running processes -function stop_sahara { - # Kill the Sahara screen windows - if [ "$SAHARA_USE_MOD_WSGI" == "True" ]; then - disable_apache_site sahara-api - restart_apache_server - else - stop_process sahara-all - stop_process sahara-api - stop_process sahara-eng - fi -} - -# is_sahara_enabled. This allows is_service_enabled sahara work -# correctly throughout devstack. -function is_sahara_enabled { - if is_service_enabled sahara-api || \ - is_service_enabled sahara-eng; then - return 0 - else - return 1 - fi -} - -function is_plugin_required { - if [ "${SAHARA_INSTALLED_PLUGINS/$1}" = "$SAHARA_INSTALLED_PLUGINS" ] ; then - return 1 - else - return 0 - fi -} - -# Dispatcher for Sahara plugin -if is_service_enabled sahara; then - if [[ "$1" == "stack" && "$2" == "install" ]]; then - echo_summary "Installing sahara" - install_sahara - if is_plugin_required ambari; then - install_ambari - fi - if is_plugin_required cdh; then - install_cdh - fi - if is_plugin_required mapr; then - install_mapr - fi - if is_plugin_required spark; then - install_spark - fi - if is_plugin_required storm; then - install_storm - fi - if is_plugin_required vanilla; then - install_vanilla - fi - install_python_saharaclient - cleanup_sahara - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - echo_summary "Configuring sahara" - configure_sahara - create_sahara_accounts - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - echo_summary "Initializing sahara" - start_sahara - elif [[ "$1" == "stack" && "$2" == "test-config" ]]; then - echo_summary "Configuring tempest" - configure_tempest_for_sahara - fi - - if [[ "$1" == "unstack" ]]; then - stop_sahara - fi - - if [[ "$1" == "clean" ]]; then - cleanup_sahara - fi -fi - - -# Restore xtrace -$XTRACE - -# Local variables: -# mode: shell-script -# End: diff --git a/devstack/settings b/devstack/settings deleted file mode 100644 index 22278a2f5c..0000000000 --- a/devstack/settings +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -# Settings needed for the Sahara plugin -# ------------------------------------- - -# Set up default directories -SAHARACLIENT_DIR=$DEST/python-saharaclient -SAHARA_DIR=$DEST/sahara -AMBARI_PLUGIN_DIR=$DEST/sahara-plugin-ambari -CDH_PLUGIN_DIR=$DEST/sahara-plugin-cdh -MAPR_PLUGIN_DIR=$DEST/sahara-plugin-mapr -SPARK_PLUGIN_DIR=$DEST/sahara-plugin-spark -STORM_PLUGIN_DIR=$DEST/sahara-plugin-storm -VANILLA_PLUGIN_DIR=$DEST/sahara-plugin-vanilla - -SAHARACLIENT_REPO=${SAHARACLIENT_REPO:-\ -${GIT_BASE}/openstack/python-saharaclient.git} -SAHARACLIENT_BRANCH=${SAHARACLIENT_BRANCH:-master} -AMBARI_PLUGIN_REPO=${AMBARI_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-ambari/} -AMBARI_PLUGIN_BRANCH=${AMBARI_PLUGIN_BRANCH:-master} -CDH_PLUGIN_REPO=${CDH_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-cdh/} -CDH_PLUGIN_BRANCH=${CDH_PLUGIN_BRANCH:-master} 
-MAPR_PLUGIN_REPO=${MAPR_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-mapr/} -MAPR_PLUGIN_BRANCH=${MAPR_PLUGIN_BRANCH:-master} -SPARK_PLUGIN_REPO=${SPARK_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-spark/} -SPARK_PLUGIN_BRANCH=${SPARK_PLUGIN_BRANCH:-master} -STORM_PLUGIN_REPO=${STORM_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-storm/} -STORM_PLUGIN_BRANCH=${STORM_PLUGIN_BRANCH:-master} -VANILLA_PLUGIN_REPO=${VANILLA_PLUGIN_REPO:-https://opendev.org/openstack/sahara-plugin-vanilla/} -VANILLA_PLUGIN_BRANCH=${VANILLA_PLUGIN_BRANCH:-master} - - -SAHARA_CONF_DIR=${SAHARA_CONF_DIR:-/etc/sahara} -SAHARA_CONF_FILE=${SAHARA_CONF_DIR}/sahara.conf - -# TODO(slukjanov): Should we append sahara to SSL_ENABLED_SERVICES? - -if is_ssl_enabled_service "sahara" || is_service_enabled tls-proxy; then - SAHARA_SERVICE_PROTOCOL="https" -fi -SAHARA_SERVICE_HOST=${SAHARA_SERVICE_HOST:-$SERVICE_HOST} -SAHARA_SERVICE_PORT=${SAHARA_SERVICE_PORT:-8386} -SAHARA_SERVICE_PORT_INT=${SAHARA_SERVICE_PORT_INT:-18386} -SAHARA_SERVICE_PROTOCOL=${SAHARA_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} -SAHARA_ENDPOINT_TYPE=${SAHARA_ENDPOINT_TYPE:-publicURL} - -SAHARA_ENABLED_PLUGINS=${SAHARA_ENABLED_PLUGINS:-\ -vanilla,ambari,cdh,mapr,spark,storm,fake} -SAHARA_INSTALLED_PLUGINS=${SAHARA_INSTALLED_PLUGINS:-\ -vanilla,ambari,cdh,mapr,spark,storm,fake} -SAHARA_BIN_DIR=$(get_python_exec_prefix) - -SAHARA_ENABLE_DISTRIBUTED_PERIODICS=${SAHARA_ENABLE_DISTRIBUTED_PERIODICS:-\ -True} -SAHARA_PERIODIC_COORDINATOR_URL=${SAHARA_PERIODIC_COORDINATOR_URL:-\ -memcached://127.0.0.1:11211} - -#Toggle for deploying Sahara API with Apache + mod_wsgi -SAHARA_USE_MOD_WSGI=${SAHARA_USE_MOD_WSGI:-True} - -enable_service sahara-api sahara-eng -enable_service heat h-eng h-api h-api-cfn h-api-cw diff --git a/devstack/upgrade/from-liberty/upgrade-sahara b/devstack/upgrade/from-liberty/upgrade-sahara deleted file mode 100644 index 4ddcf50314..0000000000 --- a/devstack/upgrade/from-liberty/upgrade-sahara +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -# ``upgrade-sahara`` - -function configure_sahara_upgrade { - XTRACE=$(set +o | grep xtrace) - set -o xtrace - - # Copy api-paste.ini to configuration directory - cp -p $SAHARA_DIR/etc/sahara/api-paste.ini $SAHARA_CONF_DIR - - # reset to previous state - $XTRACE -} diff --git a/devstack/upgrade/from-mitaka/upgrade-sahara b/devstack/upgrade/from-mitaka/upgrade-sahara deleted file mode 100755 index d916b0df05..0000000000 --- a/devstack/upgrade/from-mitaka/upgrade-sahara +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -# ``upgrade-sahara`` - -function configure_sahara_upgrade { - XTRACE=$(set +o | grep xtrace) - set -o xtrace - local old_plugins - - old_plugins=$(cat $SAHARA_CONF_DIR/sahara.conf | grep ^plugins) - sed -i.bak "s/$old_plugins/plugins=fake,vanilla,cdh/g" $SAHARA_CONF_DIR/sahara.conf - - # reset to previous state - $XTRACE -} diff --git a/devstack/upgrade/from-rocky/upgrade-sahara b/devstack/upgrade/from-rocky/upgrade-sahara deleted file mode 100755 index 940e7ca7dd..0000000000 --- a/devstack/upgrade/from-rocky/upgrade-sahara +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -# ``upgrade-sahara`` - -function configure_sahara_upgrade { - XTRACE=$(set +o | grep xtrace) - set -o xtrace - - install_ambari - install_cdh - install_mapr - install_spark - install_storm - install_vanilla - - # reset to previous state - $XTRACE -} diff --git a/devstack/upgrade/resources.sh b/devstack/upgrade/resources.sh deleted file mode 100755 index 3b0c886eaa..0000000000 --- 
a/devstack/upgrade/resources.sh +++ /dev/null @@ -1,243 +0,0 @@ -#!/bin/bash - -set -o errexit - -. $GRENADE_DIR/grenaderc -. $GRENADE_DIR/functions - -. $TOP_DIR/openrc admin admin - -set -o xtrace - -SAHARA_USER=sahara_grenade -SAHARA_PROJECT=sahara_grenade -SAHARA_PASS=pass -SAHARA_KEY=sahara_key -SAHARA_KEY_FILE=$SAVE_DIR/sahara_key.pem - -PUBLIC_NETWORK_NAME=${PUBLIC_NETWORK_NAME:-public} - -# cirros image is not appropriate for cluster creation -SAHARA_IMAGE_NAME=${SAHARA_IMAGE_NAME:-fedora-heat-test-image} -SAHARA_IMAGE_USER=${SAHARA_IMAGE_USER:-fedora} - -# custom flavor parameters -SAHARA_FLAVOR_NAME=${SAHARA_FLAVOR_NAME:-sahara_flavor} -SAHARA_FLAVOR_RAM=${SAHARA_FLAVOR_RAM:-1024} -SAHARA_FLAVOR_DISK=${SAHARA_FLAVOR_DISK:-10} - -NG_TEMPLATE_NAME=ng-template-grenade -CLUSTER_TEMPLATE_NAME=cluster-template-grenade -CLUSTER_NAME=cluster-grenade - -function sahara_set_user { - # set ourselves to the created sahara user - OS_TENANT_NAME=$SAHARA_PROJECT - OS_PROJECT_NAME=$SAHARA_PROJECT - OS_USERNAME=$SAHARA_USER - OS_PASSWORD=$SAHARA_PASS -} - -function create_tenant { - # create a tenant for the server - eval $(openstack project create -f shell -c id $SAHARA_PROJECT) - if [[ -z "$id" ]]; then - die $LINENO "Didn't create $SAHARA_PROJECT project" - fi - resource_save sahara project_id $id -} - -function create_user { - local project_id=$id - eval $(openstack user create $SAHARA_USER \ - --project $project_id \ - --password $SAHARA_PASS \ - -f shell -c id) - if [[ -z "$id" ]]; then - die $LINENO "Didn't create $SAHARA_USER user" - fi - resource_save sahara user_id $id - - # Workaround for bug: https://bugs.launchpad.net/keystone/+bug/1662911 - openstack role add member --user $id --project $project_id -} - -function create_keypair { - # create key pair for access - openstack keypair create $SAHARA_KEY > $SAHARA_KEY_FILE - chmod 600 $SAHARA_KEY_FILE -} - -function create_flavor { - eval $(openstack flavor create -f shell -c id \ - --ram $SAHARA_FLAVOR_RAM \ - --disk $SAHARA_FLAVOR_DISK \ - $SAHARA_FLAVOR_NAME) - resource_save sahara flavor_id $id -} - -function register_image { - eval $(openstack image show \ - -f shell -c id $SAHARA_IMAGE_NAME) - resource_save sahara image_id $id - openstack dataprocessing image register $id --username $SAHARA_IMAGE_USER - openstack dataprocessing image tags set $id --tags fake 0.1 -} - -function create_node_group_template { - eval $(openstack network show -f shell -c id $PUBLIC_NETWORK_NAME) - local public_net_id=$id - local flavor_id=$(resource_get sahara flavor_id) - openstack dataprocessing node group template create \ - --name $NG_TEMPLATE_NAME \ - --flavor $flavor_id \ - --plugin fake \ - --plugin-version 0.1 \ - --processes jobtracker namenode tasktracker datanode \ - --floating-ip-pool $public_net_id \ - --auto-security-group -} - -function create_cluster_template { - openstack dataprocessing cluster template create \ - --name $CLUSTER_TEMPLATE_NAME \ - --node-groups $NG_TEMPLATE_NAME:1 -} - -function create_cluster { - local net_id=$(resource_get network net_id) - local image_id=$(resource_get sahara image_id) - if [[ -n "$net_id" ]]; then - eval $(openstack dataprocessing cluster create \ - --name $CLUSTER_NAME \ - --cluster-template $CLUSTER_TEMPLATE_NAME \ - --image $image_id \ - --user-keypair $SAHARA_KEY \ - --neutron-network $net_id \ - -f shell -c id) - else - eval $(openstack dataprocessing cluster create \ - --name $CLUSTER_NAME \ - --cluster-template $CLUSTER_TEMPLATE_NAME \ - --image $image_id \ - --user-keypair $SAHARA_KEY \ - -f 
shell -c id) - fi - resource_save sahara cluster_id $id -} - -function wait_active_state { - # wait until cluster moves to active state - local timeleft=1000 - while [[ $timeleft -gt 0 ]]; do - eval $(openstack dataprocessing cluster show -f shell \ - -c Status $CLUSTER_NAME) - if [[ "$status" != "Active" ]]; then - if [[ "$status" == "Error" ]]; then - die $LINENO "Cluster is in Error state" - fi - echo "Cluster is still not in Active state" - sleep 10 - timeleft=$((timeleft - 10)) - if [[ $timeleft == 0 ]]; then - die $LINENO "Cluster hasn't moved to Active state \ - during 1000 seconds" - fi - else - break - fi - done -} - -function check_active { - # check that cluster is in Active state - eval $(openstack dataprocessing cluster show -f shell \ - -c Status $CLUSTER_NAME) - if [[ "$status" != "Active" ]]; then - die $LINENO "Cluster is not in Active state anymore" - fi - echo "Sahara verification: SUCCESS" -} - -function create { - create_tenant - - create_user - - create_flavor - - register_image - - sahara_set_user - - create_keypair - - create_node_group_template - - create_cluster_template - - create_cluster - - wait_active_state -} - -function verify { - : -} - -function verify_noapi { - : -} - -function destroy { - sahara_set_user - set +o errexit - - # delete cluster - check_active - - openstack dataprocessing cluster delete $CLUSTER_NAME --wait - - set -o errexit - - # delete cluster template - openstack dataprocessing cluster template delete $CLUSTER_TEMPLATE_NAME - - # delete node group template - openstack dataprocessing node group template delete $NG_TEMPLATE_NAME - - source_quiet $TOP_DIR/openrc admin admin - - # unregister image - local image_id=$(resource_get sahara image_id) - openstack dataprocessing image unregister $image_id - - # delete flavor - openstack flavor delete $SAHARA_FLAVOR_NAME - - # delete user and project - local user_id=$(resource_get sahara user_id) - local project_id=$(resource_get sahara project_id) - openstack user delete $user_id - openstack project delete $project_id -} - -# Dispatcher -case $1 in - "create") - create - ;; - "verify_noapi") - verify_noapi - ;; - "verify") - verify - ;; - "destroy") - destroy - ;; - "force_destroy") - set +o errexit - destroy - ;; -esac diff --git a/devstack/upgrade/settings b/devstack/upgrade/settings deleted file mode 100644 index d50dcfc0b2..0000000000 --- a/devstack/upgrade/settings +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -register_project_for_upgrade sahara -register_db_to_save sahara -devstack_localrc base IMAGE_URLS=\ -"http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-uec.tar.gz,\ -http://tarballs.openstack.org/heat-test-image/fedora-heat-test-image.qcow2" -devstack_localrc base enable_plugin sahara \ - https://opendev.org/openstack/sahara \ - stable/train -devstack_localrc base enable_plugin heat \ - https://opendev.org/openstack/heat \ - stable/train -devstack_localrc base DEFAULT_IMAGE_NAME="cirros-0.3.5-x86_64-uec" - -devstack_localrc target IMAGE_URLS=\ -"http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-uec.tar.gz,\ -http://tarballs.openstack.org/heat-test-image/fedora-heat-test-image.qcow2" -devstack_localrc target enable_plugin sahara \ - https://opendev.org/openstack/sahara -devstack_localrc target enable_plugin heat \ - https://opendev.org/openstack/heat -devstack_localrc target LIBS_FROM_GIT=python-saharaclient -devstack_localrc target DEFAULT_IMAGE_NAME="cirros-0.3.5-x86_64-uec" diff --git a/devstack/upgrade/shutdown.sh b/devstack/upgrade/shutdown.sh deleted file mode 
100755 index 22ac599c5c..0000000000 --- a/devstack/upgrade/shutdown.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -# ``shutdown-sahara`` - -set -o errexit - -. $GRENADE_DIR/grenaderc -. $GRENADE_DIR/functions - -# We need base DevStack functions for this -. $BASE_DEVSTACK_DIR/functions -. $BASE_DEVSTACK_DIR/stackrc # needed for status directory - -. $BASE_DEVSTACK_DIR/lib/tls -. $BASE_DEVSTACK_DIR/lib/apache -. ${GITDIR[sahara]}/devstack/plugin.sh - -set -o xtrace - -export ENABLED_SERVICES+=,sahara-api,sahara-eng, -stop_sahara - -# sanity check that service is actually down -ensure_services_stopped sahara-eng diff --git a/devstack/upgrade/upgrade.sh b/devstack/upgrade/upgrade.sh deleted file mode 100755 index f754529b7e..0000000000 --- a/devstack/upgrade/upgrade.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash - -# ``upgrade-sahara`` - -echo "*********************************************************************" -echo "Begin $0" -echo "*********************************************************************" - -# Clean up any resources that may be in use -cleanup() { - set +o errexit - - echo "********************************************************************" - echo "ERROR: Abort $0" - echo "********************************************************************" - - # Kill ourselves to signal any calling process - trap 2; kill -2 $$ -} - -trap cleanup SIGHUP SIGINT SIGTERM - -# Keep track of the grenade directory -RUN_DIR=$(cd $(dirname "$0") && pwd) - -# Source params -. $GRENADE_DIR/grenaderc - -# Import common functions -. $GRENADE_DIR/functions - -# This script exits on an error so that errors don't compound and you see -# only the first error that occurred. -set -o errexit - -# Upgrade Sahara -# ============ - -# Get functions from current DevStack -. $TARGET_DEVSTACK_DIR/stackrc -. $TARGET_DEVSTACK_DIR/lib/apache -. $TARGET_DEVSTACK_DIR/lib/tls -. $(dirname $(dirname $BASH_SOURCE))/plugin.sh -. $(dirname $(dirname $BASH_SOURCE))/settings - -# Print the commands being run so that we can see the command that triggers -# an error. It is also useful for following allowing as the install occurs. -set -o xtrace - -# Save current config files for posterity -[[ -d $SAVE_DIR/etc.sahara ]] || cp -pr $SAHARA_CONF_DIR $SAVE_DIR/etc.sahara - -# install_sahara() -stack_install_service sahara -install_python_saharaclient - -# calls upgrade-sahara for specific release -upgrade_project sahara $RUN_DIR $BASE_DEVSTACK_BRANCH $TARGET_DEVSTACK_BRANCH - -# Migrate the database -$SAHARA_BIN_DIR/sahara-db-manage --config-file $SAHARA_CONF_FILE \ - upgrade head || die $LINENO "DB sync error" - -# Start Sahara -start_sahara - -# Don't succeed unless the service come up -ensure_services_started sahara-eng - -set +o xtrace -echo "*********************************************************************" -echo "SUCCESS: End $0" -echo "*********************************************************************" diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index 9b1ab70dad..0000000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. 
-openstackdocstheme>=1.31.2 # Apache-2.0 -os-api-ref>=1.6.0 # Apache-2.0 -reno>=2.5.0 # Apache-2.0 -sphinx!=1.6.6,!=1.6.7,!=2.1.0,>=1.6.2 # BSD -sphinxcontrib-httpdomain>=1.3.0 # BSD -whereto>=0.3.0 # Apache-2.0 diff --git a/doc/source/_extra/.htaccess b/doc/source/_extra/.htaccess deleted file mode 100644 index 0858c21f22..0000000000 --- a/doc/source/_extra/.htaccess +++ /dev/null @@ -1,9 +0,0 @@ -# renamed after the switch to Storyboard -redirectmatch 301 ^/sahara/([^/]+)/contributor/launchpad.html$ /sahara/$1/contributor/project.html -# renamed after some documentation reshuffling -redirectmatch 301 ^/sahara/(?!ocata|pike|queens)([^/]+)/user/vanilla-imagebuilder.html$ /sahara/$1/user/vanilla-plugin.html -redirectmatch 301 ^/sahara/(?!ocata|pike|queens)([^/]+)/user/cdh-imagebuilder.html$ /sahara/$1/user/cdh-plugin.html -redirectmatch 301 ^/sahara/(?!ocata|pike|queens)([^/]+)/user/guest-requirements.html$ /sahara/$1/user/building-guest-images.html -redirectmatch 301 ^/sahara/([^/]+)/user/([^-]+)-plugin.html$ /sahara-plugin-$2/$1/ -redirectmatch 301 ^/sahara/([^/]+)/contributor/how-to-participate.html$ /sahara/$1/contributor/contributing.html -redirectmatch 301 ^/sahara/([^/]+)/contributor/project.html$ /sahara/$1/contributor/contributing.html diff --git a/doc/source/_templates/sidebarlinks.html b/doc/source/_templates/sidebarlinks.html deleted file mode 100644 index 09ad156cb3..0000000000 --- a/doc/source/_templates/sidebarlinks.html +++ /dev/null @@ -1,11 +0,0 @@ -

Useful Links

- - -{% if READTHEDOCS %} - -{% endif %} diff --git a/doc/source/_theme_rtd/layout.html b/doc/source/_theme_rtd/layout.html deleted file mode 100644 index cd7ade1d70..0000000000 --- a/doc/source/_theme_rtd/layout.html +++ /dev/null @@ -1,4 +0,0 @@ -{% extends "basic/layout.html" %} -{% set css_files = css_files + ['_static/tweaks.css'] %} - -{% block relbar1 %}{% endblock relbar1 %} \ No newline at end of file diff --git a/doc/source/_theme_rtd/theme.conf b/doc/source/_theme_rtd/theme.conf deleted file mode 100644 index 8c44b0ce46..0000000000 --- a/doc/source/_theme_rtd/theme.conf +++ /dev/null @@ -1,4 +0,0 @@ -[theme] -inherit = nature -stylesheet = nature.css -pygments_style = tango \ No newline at end of file diff --git a/doc/source/admin/advanced-configuration-guide.rst b/doc/source/admin/advanced-configuration-guide.rst deleted file mode 100644 index 94445e0424..0000000000 --- a/doc/source/admin/advanced-configuration-guide.rst +++ /dev/null @@ -1,653 +0,0 @@ -Sahara Advanced Configuration Guide -=================================== - -This guide addresses specific aspects of Sahara configuration that pertain to -advanced usage. It is divided into sections about various features that can be -utilized, and their related configurations. - -.. _custom_network_topologies: - -Custom network topologies -------------------------- - -Sahara accesses instances at several stages of cluster spawning through -SSH and HTTP. Floating IPs and network namespaces will be automatically -used for access when present. When floating IPs are not assigned to -instances and namespaces are not being used, sahara will need an -alternative method to reach them. - -The ``proxy_command`` parameter of the configuration file can be used to -give sahara a command to access instances. This command is run on the -sahara host and must open a netcat socket to the instance destination -port. The ``{host}`` and ``{port}`` keywords should be used to describe the -destination, they will be substituted at runtime. Other keywords that -can be used are: ``{tenant_id}``, ``{network_id}`` and ``{router_id}``. - -Additionally, if ``proxy_command_use_internal_ip`` is set to ``True``, -then the internal IP will be substituted for ``{host}`` in the command. -Otherwise (if ``False``, by default) the management IP will be used: this -corresponds to floating IP if present in the relevant node group, else the -internal IP. The option is ignored if ``proxy_command`` is not also set. - -For example, the following parameter in the sahara configuration file -would be used if instances are accessed through a relay machine: - -.. code-block:: - - [DEFAULT] - proxy_command='ssh relay-machine-{tenant_id} nc {host} {port}' - -Whereas the following shows an example of accessing instances though -a custom network namespace: - -.. code-block:: - - [DEFAULT] - proxy_command='ip netns exec ns_for_{network_id} nc {host} {port}' - -.. _dns_hostname_resolution: - -DNS Hostname Resolution ------------------------ - -Sahara can resolve hostnames of cluster instances by using DNS. For this Sahara -uses Designate. With this feature, for each instance of the cluster Sahara will -create two ``A`` records (for internal and external ips) under one hostname -and one ``PTR`` record. Also all links in the Sahara dashboard will be -displayed as hostnames instead of just ip addresses. - -You should configure DNS server with Designate. Designate service should be -properly installed and registered in Keystone catalog. 
The detailed -instructions about Designate configuration can be found here: -:designate-doc:`Designate manual installation ` -and here: :neutron-doc:`Configuring OpenStack Networking with Designate -`. -Also, if you use devstack you can just enable the -:designate-doc:`Designate devstack plugin `. - -When Designate is configured you should create domain(s) for hostname -resolution. This can be done by using the Designate dashboard or by CLI. You also -have to create an ``in-addr.arpa.`` domain for reverse hostname resolution -because some plugins (e.g. ``HDP``) determine the hostname by ip. - -Sahara should also be properly configured. In ``sahara.conf`` you must specify -two config properties: - -.. code-block:: - - [DEFAULT] - # Use Designate for internal and external hostnames resolution: - use_designate=true - # IP addresses of Designate nameservers: - nameservers=1.1.1.1,2.2.2.2 - -An OpenStack operator should properly configure the network. It must enable -DHCP and specify DNS server ip addresses (e.g. 1.1.1.1 and 2.2.2.2) in the -``DNS Name Servers`` field in the ``Subnet Details``. If the subnet already -exists and changing it or creating a new one is impossible, then Sahara will -manually change the ``/etc/resolv.conf`` file on every instance of the cluster (if -the ``nameservers`` list has been specified in ``sahara.conf``). In this case, -though, Sahara cannot guarantee that these changes will not be overwritten by -DHCP or other services of the existing network. Sahara has a health check to -track this situation (and if it occurs the health status will be red). - -In order to resolve hostnames from your local machine you should properly -change your ``/etc/resolv.conf`` file by adding the appropriate ip addresses of -the DNS servers (e.g. 1.1.1.1 and 2.2.2.2). Also, the VMs with the DNS servers should -be reachable from your local machine. - -.. _data_locality_configuration: - -Data-locality configuration --------------------------- - -Hadoop provides the data-locality feature, which gives task trackers and -data nodes the capability of spawning on the same rack, Compute node, -or virtual machine. Sahara exposes this functionality to the user -through a few configuration parameters and user-defined topology files. - -To enable data-locality, set the ``enable_data_locality`` parameter to -``true`` in the sahara configuration file: - -.. code-block:: - - [DEFAULT] - enable_data_locality=true - -With data locality enabled, you must now specify the topology files -for the Compute and Object Storage services. These files are -specified in the sahara configuration file as follows: - -.. code-block:: - - [DEFAULT] - compute_topology_file=/etc/sahara/compute.topology - swift_topology_file=/etc/sahara/swift.topology - -The ``compute_topology_file`` should contain mappings between Compute -nodes and racks in the following format: - -.. code-block:: - - compute1 /rack1 - compute2 /rack2 - compute3 /rack2 - -Note that the Compute node names must be exactly the same as configured in -OpenStack (``host`` column in the admin list for instances). - -The ``swift_topology_file`` should contain mappings between Object Storage -nodes and racks in the following format: - -.. code-block:: - - node1 /rack1 - node2 /rack2 - node3 /rack2 - -Note that the Object Storage node names must be exactly the same as -configured in the object ring. Also, you should ensure that instances -with the task tracker process have direct access to the Object Storage -nodes.
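As a quick sanity check for the mappings above, something along the lines of the following sketch can be used to confirm that every hypervisor known to the Compute service has a rack entry in the compute topology file. The ``/etc/sahara/compute.topology`` path simply matches the example configuration; the loop itself is only an illustration, not a tool shipped with sahara.

.. code-block:: bash

    # Compare the hypervisors known to the Compute service with the entries
    # in the compute topology file used by sahara's data-locality feature.
    TOPOLOGY_FILE=/etc/sahara/compute.topology

    openstack hypervisor list -f value -c "Hypervisor Hostname" |
    while read -r host; do
        if ! grep -q "^${host} " "$TOPOLOGY_FILE"; then
            echo "WARNING: ${host} has no rack mapping in ${TOPOLOGY_FILE}"
        fi
    done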
- -Hadoop versions after 1.2.0 support four-layer topology (for more detail -please see `HADOOP-8468 JIRA issue`_). To enable this feature set the -``enable_hypervisor_awareness`` parameter to ``true`` in the configuration -file. In this case sahara will add the Compute node ID as a second level of -topology for virtual machines. - -.. _HADOOP-8468 JIRA issue: https://issues.apache.org/jira/browse/HADOOP-8468 - -.. _distributed-mode-configuration: - -Distributed mode configuration ------------------------------- - -Sahara can be configured to run in a distributed mode that creates a -separation between the API and engine processes. This allows the API -process to remain relatively free to handle requests while offloading -intensive tasks to the engine processes. - -The ``sahara-api`` application works as a front-end and serves user -requests. It offloads 'heavy' tasks to the ``sahara-engine`` process -via RPC mechanisms. While the ``sahara-engine`` process could be loaded -with tasks, ``sahara-api`` stays free and hence may quickly respond to -user queries. - -If sahara runs on several hosts, the API requests could be -balanced between several ``sahara-api`` hosts using a load balancer. -It is not required to balance load between different ``sahara-engine`` -hosts as this will be automatically done via the message broker. - -If a single host becomes unavailable, other hosts will continue -serving user requests. Hence, a better scalability is achieved and some -fault tolerance as well. Note that distributed mode is not a true -high availability. While the failure of a single host does not -affect the work of the others, all of the operations running on -the failed host will stop. For example, if a cluster scaling is -interrupted, the cluster will be stuck in a half-scaled state. The -cluster might continue working, but it will be impossible to scale it -further or run jobs on it via EDP. - -To run sahara in distributed mode pick several hosts on which -you want to run sahara services and follow these steps: - -* On each host install and configure sahara using the - `installation guide <../install/installation-guide.html>`_ - except: - - * Do not run ``sahara-db-manage`` or launch sahara with ``sahara-all`` - * Ensure that each configuration file provides a database connection - string to a single database for all hosts. - -* Run ``sahara-db-manage`` as described in the installation guide, - but only on a single (arbitrarily picked) host. - -* The ``sahara-api`` and ``sahara-engine`` processes use oslo.messaging to - communicate with each other. You will need to configure it properly on - each host (see below). - -* Run ``sahara-api`` and ``sahara-engine`` on the desired hosts. You may - run both processes on the same or separate hosts as long as they are - configured to use the same message broker and database. - -To configure ``oslo.messaging``, first you need to choose a message -broker driver. The recommended driver is ``RabbitMQ``. For the ``RabbitMQ`` -drivers please see the :ref:`notification-configuration` documentation -for an explanation of common configuration options; the entire list of -configuration options is found in the -:oslo.messaging-doc:`oslo_messaging_rabbit documentation -`. - -These options will also be present in the generated sample configuration -file. For instructions on creating the configuration file please see the -:doc:`configuration-guide`. - -.. 
_distributed-periodic-tasks: - -Distributed periodic tasks configuration ----------------------------------------- - -If sahara is configured to run in distributed mode (see -:ref:`distributed-mode-configuration`), periodic tasks can also be launched in -distributed mode. In this case tasks will be split across all ``sahara-engine`` -processes. This will reduce overall load. - -Distributed periodic tasks are based on Hash Ring implementation and the Tooz -library that provides group membership support for a set of backends. In order -to use periodic tasks distribution, the following steps are required: - -* One of the :tooz-doc:`supported backends ` - should be configured and started. -* Backend URL should be set in the sahara configuration file with the - ``periodic_coordinator_backend_url`` parameter. For example, if the - ZooKeeper backend is being used: - - .. code-block:: - - [DEFAULT] - periodic_coordinator_backend_url=kazoo://IP:PORT - -* Tooz extras should be installed. When using Zookeeper as coordination - backend, ``kazoo`` library should be installed. It can be done with pip: - - .. code-block:: - - pip install tooz[zookeeper] - -* Periodic tasks can be performed in parallel. Number of threads to run - periodic tasks on a single engine can be set with - ``periodic_workers_number`` parameter (only 1 thread will be launched by - default). Example: - - .. code-block:: - - [DEFAULT] - periodic_workers_number=2 - -* ``coordinator_heartbeat_interval`` can be set to change the interval between - heartbeat execution (1 second by default). Heartbeats are needed to make - sure that connection to the coordination backend is active. Example: - - .. code-block:: - - [DEFAULT] - coordinator_heartbeat_interval=2 - -* ``hash_ring_replicas_count`` can be set to change the number of replicas for - each engine on a Hash Ring. Each replica is a point on a Hash Ring that - belongs to a particular engine. A larger number of replicas leads to better - task distribution across the set of engines. (40 by default). Example: - - .. code-block:: - - [DEFAULT] - hash_ring_replicas_count=100 - -.. _external_key_manager_usage: - -External key manager usage --------------------------- - -Sahara generates and stores several passwords during the course of operation. -To harden sahara's usage of passwords it can be instructed to use an -external key manager for storage and retrieval of these secrets. To enable -this feature there must first be an OpenStack Key Manager service deployed -within the stack. - -With a Key Manager service deployed on the stack, sahara must be configured -to enable the external storage of secrets. Sahara uses the -:castellan-doc:`castellan <>` library -to interface with the OpenStack Key Manager service. This library provides -configurable access to a key manager. To configure sahara to use barbican as -the key manager, edit the sahara configuration file as follows: - -.. code-block:: - - [DEFAULT] - use_barbican_key_manager=true - -Enabling the ``use_barbican_key_manager`` option will configure castellan -to use barbican as its key management implementation. By default it will -attempt to find barbican in the Identity service's service catalog. - -For added control of the barbican server location, optional configuration -values may be added to specify the URL for the barbican API server. - -.. 
code-block:: - - [castellan] - barbican_api_endpoint=http://{barbican controller IP:PORT}/ - barbican_api_version=v1 - -The specific values for the barbican endpoint will be dictated by the -IP address of the controller for your installation. - -With all of these values configured and the Key Manager service deployed, -sahara will begin storing its secrets in the external manager. - -Indirect instance access through proxy nodes --------------------------------------------- - -.. warning:: - The indirect VMs access feature is in alpha state. We do not - recommend using it in a production environment. - -Sahara needs to access instances through SSH during cluster setup. This -access can be obtained a number of different ways (see -:ref:`floating_ip_management`,:ref:`custom_network_topologies`).Sometimes -it is impossible to provide access to all nodes (because of limited -numbers of floating IPs or security policies). In these cases access can -be gained using other nodes of the cluster as proxy gateways. To enable -this set ``is_proxy_gateway=true`` for the node group you want to use as -proxy. Sahara will communicate with all other cluster instances through -the instances of this node group. - -Note, if ``use_floating_ips=true`` and the cluster contains a node group with -``is_proxy_gateway=true``, the requirement to have ``floating_ip_pool`` -specified is applied only to the proxy node group. Other instances will be -accessed through proxy instances using the standard private network. - -Note, the Cloudera Hadoop plugin doesn't support access to Cloudera manager -through a proxy node. This means that for CDH clusters only nodes with -the Cloudera manager can be designated as proxy gateway nodes. - -Multi region deployment ------------------------ - -Sahara supports multi region deployment. To enable this option each -instance of sahara should have the ``os_region_name=`` -parameter set in the configuration file. The following example demonstrates -configuring sahara to use the ``RegionOne`` region: - -.. code-block:: - - [DEFAULT] - os_region_name=RegionOne - -.. _non-root-users: - -Non-root users --------------- - -In cases where a proxy command is being used to access cluster instances -(for example, when using namespaces or when specifying a custom proxy -command), rootwrap functionality is provided to allow users other than -``root`` access to the needed operating system facilities. To use rootwrap -the following configuration parameter is required to be set: - -.. code-block:: - - [DEFAULT] - use_rootwrap=true - -Assuming you elect to leverage the default rootwrap command -(``sahara-rootwrap``), you will need to perform the following additional setup -steps: - -* Copy the provided sudoers configuration file from the local project file - ``etc/sudoers.d/sahara-rootwrap`` to the system specific location, usually - ``/etc/sudoers.d``. This file is setup to allow a user named ``sahara`` - access to the rootwrap script. It contains the following: - -.. code-block:: - - sahara ALL = (root) NOPASSWD: /usr/bin/sahara-rootwrap /etc/sahara/rootwrap.conf * - -When using devstack to deploy sahara, please pay attention that you need to -change user in script from ``sahara`` to ``stack``. - -* Copy the provided rootwrap configuration file from the local project file - ``etc/sahara/rootwrap.conf`` to the system specific location, usually - ``/etc/sahara``. This file contains the default configuration for rootwrap. 
- -* Copy the provided rootwrap filters file from the local project file - ``etc/sahara/rootwrap.d/sahara.filters`` to the location specified in the - rootwrap configuration file, usually ``/etc/sahara/rootwrap.d``. This file - contains the filters that will allow the ``sahara`` user to access the - ``ip netns exec``, ``nc``, and ``kill`` commands through the rootwrap - (depending on ``proxy_command`` you may need to set additional filters). - It should look similar to the followings: - -.. code-block:: - - [Filters] - ip: IpNetnsExecFilter, ip, root - nc: CommandFilter, nc, root - kill: CommandFilter, kill, root - -If you wish to use a rootwrap command other than ``sahara-rootwrap`` you can -set the following parameter in your sahara configuration file: - -.. code-block:: - - [DEFAULT] - rootwrap_command='sudo sahara-rootwrap /etc/sahara/rootwrap.conf' - -For more information on rootwrap please refer to the -`official Rootwrap documentation `_ - -Object Storage access using proxy users ---------------------------------------- - -To improve security for clusters accessing files in Object Storage, -sahara can be configured to use proxy users and delegated trusts for -access. This behavior has been implemented to reduce the need for -storing and distributing user credentials. - -The use of proxy users involves creating an Identity domain that will be -designated as the home for these users. Proxy users will be -created on demand by sahara and will only exist during a job execution -which requires Object Storage access. The domain created for the -proxy users must be backed by a driver that allows sahara's admin user to -create new user accounts. This new domain should contain no roles, to limit -the potential access of a proxy user. - -Once the domain has been created, sahara must be configured to use it by -adding the domain name and any potential delegated roles that must be used -for Object Storage access to the sahara configuration file. With the -domain enabled in sahara, users will no longer be required to enter -credentials for their data sources and job binaries referenced in -Object Storage. - -Detailed instructions -^^^^^^^^^^^^^^^^^^^^^ - -First a domain must be created in the Identity service to hold proxy -users created by sahara. This domain must have an identity backend driver -that allows for sahara to create new users. The default SQL engine is -sufficient but if your keystone identity is backed by LDAP or similar -then domain specific configurations should be used to ensure sahara's -access. Please see the :keystone-doc:`Keystone documentation -` for more information. - -With the domain created, sahara's configuration file should be updated to -include the new domain name and any potential roles that will be needed. For -this example let's assume that the name of the proxy domain is -``sahara_proxy`` and the roles needed by proxy users will be ``member`` and -``SwiftUser``. - -.. code-block:: - - [DEFAULT] - use_domain_for_proxy_users=true - proxy_user_domain_name=sahara_proxy - proxy_user_role_names=member,SwiftUser - -A note on the use of roles. In the context of the proxy user, any roles -specified here are roles intended to be delegated to the proxy user from the -user with access to Object Storage. More specifically, any roles that -are required for Object Storage access by the project owning the object -store must be delegated to the proxy user for authentication to be -successful. 
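For reference, the Identity-side setup described above can be sketched with the standard OpenStack CLI. The ``sahara_proxy`` domain and ``SwiftUser`` role names simply follow the example configuration and are otherwise arbitrary; the ``member`` role normally exists already, and real deployments may create these objects through other tooling.

.. code-block:: bash

    # Create the dedicated domain that will hold sahara's short-lived proxy
    # users, plus the extra role referenced in proxy_user_role_names.
    openstack domain create --description "Sahara proxy users" sahara_proxy
    openstack role create SwiftUser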
- -Finally, the stack administrator must ensure that images registered with -sahara have the latest version of the Hadoop swift filesystem plugin -installed. The sources for this plugin can be found in the -`sahara extra repository`_. For more information on images or swift -integration see the sahara documentation sections -:ref:`building-guest-images-label` and :ref:`swift-integration-label`. - -.. _Sahara extra repository: https://opendev.org/openstack/sahara-extra - -.. _volume_instance_locality_configuration: - -Volume instance locality configuration --------------------------------------- - -The Block Storage service provides the ability to define volume instance -locality to ensure that instance volumes are created on the same host -as the hypervisor. The ``InstanceLocalityFilter`` provides the mechanism -for the selection of a storage provider located on the same physical -host as an instance. - -To enable this functionality for instances of a specific node group, the -``volume_local_to_instance`` field in the node group template should be -set to ``true`` and some extra configurations are needed: - -* The cinder-volume service should be launched on every physical host and at - least one physical host should run both cinder-scheduler and - cinder-volume services. -* ``InstanceLocalityFilter`` should be added to the list of default filters - (``scheduler_default_filters`` in cinder) for the Block Storage - configuration. -* The Extended Server Attributes extension needs to be active in the Compute - service (this is true by default in nova), so that the - ``OS-EXT-SRV-ATTR:host`` property is returned when requesting instance - info. -* The user making the call needs to have sufficient rights for the property to - be returned by the Compute service. - This can be done by: - - * by changing nova's ``policy.yaml`` to allow the user access to the - ``extended_server_attributes`` option. - * by designating an account with privileged rights in the cinder - configuration: - - .. code-block:: - - os_privileged_user_name = - os_privileged_user_password = - os_privileged_user_tenant = - -It should be noted that in a situation when the host has no space for volume -creation, the created volume will have an ``Error`` state and can not be used. - -Autoconfiguration for templates -------------------------------- - -:doc:`configs-recommendations` - - -NTP service configuration -------------------------- - -By default sahara will enable the NTP service on all cluster instances if the -NTP package is included in the image (the sahara disk image builder will -include NTP in all images it generates). The default NTP server will be -``pool.ntp.org``; this can be overridden using the ``default_ntp_server`` -setting in the ``DEFAULT`` section of the sahara configuration file. - -If you are creating cluster templates using the sahara UI and would like to -specify a different NTP server for a particular cluster template, use the ``URL -of NTP server`` setting in the ``General Parameters`` section when you create -the template. If you would like to disable NTP for a particular cluster -template, deselect the ``Enable NTP service`` checkbox in the ``General -Parameters`` section when you create the template. - -If you are creating clusters using the sahara CLI, you can specify another NTP -server or disable NTP service using the examples below. - -If you want to enable configuring the NTP service, you should specify the -following configs for the cluster: - -.. 
code-block:: - - { - "cluster_configs": { - "general": { - "URL of NTP server": "your_server.net" - } - } - } - -If you want to disable configuring NTP service, you should specify following -configs for the cluster: - -.. code-block:: - - { - "cluster_configs": { - "general": { - "Enable NTP service": false - } - } - } - -CORS (Cross Origin Resource Sharing) Configuration --------------------------------------------------- - -Sahara provides direct API access to user-agents (browsers) via the HTTP -CORS protocol. Detailed documentation, as well as troubleshooting examples, -may be found in the :oslo.middleware-doc:`documentation of the oslo.db -cross-project features `. - -To get started quickly, use the example configuration block below, replacing -the :code:`allowed origin` field with the host(s) from which your API expects -access. - -.. code-block:: - - [cors] - allowed_origin=https://we.example.com:443 - max_age=3600 - allow_credentials=true - - [cors.additional_domain_1] - allowed_origin=https://additional_domain_1.example.com:443 - - [cors.additional_domain_2] - allowed_origin=https://additional_domain_2.example.com:443 - -For more information on Cross Origin Resource Sharing, please review the `W3C -CORS specification`_. - -.. _W3C CORS specification: http://www.w3.org/TR/cors/ - -Cleanup time for incomplete clusters ------------------------------------- - -Sahara provides maximal time (in hours) for clusters allowed to be in states -other than "Active", "Deleting" or "Error". If a cluster is not in "Active", -"Deleting" or "Error" state and last update of it was longer than -``cleanup_time_for_incomplete_clusters`` hours ago then it will be deleted -automatically. You can enable this feature by adding appropriate config -property in the ``DEFAULT`` section (by default it set up to ``0`` value which -means that automatic clean up is disabled). For example, if you want cluster to -be deleted after 3 hours if it didn't leave "Starting" state then you should -specify: - -.. code-block:: - - [DEFAULT] - cleanup_time_for_incomplete_clusters = 3 - -Security Group Rules Configuration ----------------------------------- - -When auto_security_group is used, the amount of created security group rules -may be bigger than the default values configured in ``neutron.conf``. Then the -default limit should be raised up to some bigger value which is proportional to -the number of cluster node groups. You can change it in ``neutron.conf`` file: - -.. code-block:: - - [quotas] - quota_security_group = 1000 - quota_security_group_rule = 10000 - -Or you can execute openstack CLI command: - -.. code-block:: - - openstack quota set --secgroups 1000 --secgroup-rules 10000 $PROJECT_ID diff --git a/doc/source/admin/configs-recommendations.rst b/doc/source/admin/configs-recommendations.rst deleted file mode 100644 index fb647610a1..0000000000 --- a/doc/source/admin/configs-recommendations.rst +++ /dev/null @@ -1,44 +0,0 @@ -:orphan: - -Autoconfiguring templates -========================= - -During the Liberty development cycle sahara implemented a tool that recommends -and applies configuration values for cluster templates and node group -templates. These recommendations are based on the number of specific instances -and on flavors of the cluster node groups. Currently the following plugins -support this feature: - - * CDH; - * Ambari; - * Spark; - * the Vanilla Apache Hadoop plugin. - -By default this feature is enabled for all cluster templates and node group -templates. 
If you want to disable this feature for a particular cluster or -node group template you should set the ``use_autoconfig`` field to ``false``. - -.. NOTE - Also, if you manually set configs from the list below, the recommended - configs will not be applied. - -The following describes the settings for which sahara can recommend -autoconfiguration: - -The Cloudera, Spark and Vanilla Apache Hadoop plugins support configuring -``dfs.replication`` (``dfs_replication`` for the Cloudera plugin) which is -calculated as the minimum of the number of ``datanode`` (``HDFS_DATANODE`` for -the Cloudera plugin) instances in the cluster and the default value for -``dfs.replication``. - -The Vanilla Apache Hadoop plugin and Cloudera plugin support autoconfiguration -of basic YARN and MapReduce configs. These autoconfigurations are based on the -following documentation: -http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.0.9.1/bk_installing_manually_book/content/rpm-chap1-11.html - -The Ambari plugin has its own strategies for configuration recommendations. You -can choose one of ``ALWAYS_APPLY``, ``NEVER_APPLY``, and -``ONLY_STACK_DEFAULTS_APPLY``. By default the Ambari plugin follows the -``NEVER_APPLY`` strategy. You can get more information about strategies in -Ambari's official documentation: -https://cwiki.apache.org/confluence/display/AMBARI/Blueprints#Blueprints-ClusterCreationTemplateStructure diff --git a/doc/source/admin/configuration-guide.rst b/doc/source/admin/configuration-guide.rst deleted file mode 100644 index a0400f58c3..0000000000 --- a/doc/source/admin/configuration-guide.rst +++ /dev/null @@ -1,211 +0,0 @@ -Sahara Configuration Guide -========================== - -This guide covers the steps for a basic configuration of sahara. -It will help you configure the service in the simplest -manner. - -Basic configuration ------------------- - -A full configuration file showing all possible configuration options and their -defaults can be generated with the following command: - -.. sourcecode:: cfg - - $ tox -e genconfig - -Running this command will create a file named ``sahara.conf.sample`` -in the ``etc/sahara`` directory of the project. - -After creating a configuration file by either generating one or starting with -an empty file, edit the ``connection`` parameter in the -``[database]`` section. The URL provided here should point to an empty -database. For example, the connection string for a MySQL database will be: - -.. sourcecode:: cfg - - connection=mysql+pymysql://username:password@host:port/database - -Next you will configure the Identity service parameters in the -``[keystone_authtoken]`` section. The ``www_authenticate_uri`` parameter -should point to the public Identity API endpoint. The ``auth_url`` -should point to the internal Identity API endpoint. For example: - -.. sourcecode:: cfg - - www_authenticate_uri=http://127.0.0.1:5000/v3/ - auth_url=http://127.0.0.1:5000/v3/ - -Specify the ``username``, ``user_domain_name``, ``password``, ``project_name``, and ``project_domain_name``. These parameters must specify an Identity user who -has the ``admin`` role in the given project. These credentials allow sahara to -authenticate and authorize its users. - -Next you will configure the default Networking service. If using -neutron for networking the following parameter should be set -in the ``[DEFAULT]`` section: - -With these parameters set, sahara is ready to run. - -By default sahara's log level is set to INFO.
If you wish to increase -the logging levels for troubleshooting, set ``debug`` to ``true`` in the -``[DEFAULT]`` section of the configuration file. - -Networking configuration ------------------------- - -By default sahara is configured to use the neutron. Additionally, if the -cluster supports network namespaces the ``use_namespaces`` property can -be used to enable their usage. - -.. sourcecode:: cfg - - [DEFAULT] - use_namespaces=True - -.. note:: - If a user other than ``root`` will be running the Sahara server - instance and namespaces are used, some additional configuration is - required, please see :ref:`non-root-users` for more information. - -.. _floating_ip_management: - -Floating IP management -++++++++++++++++++++++ - -During cluster setup sahara must access instances through a secure -shell (SSH). To establish this connection it may use either the fixed -or floating IP address of an instance. By default sahara is configured -to use floating IP addresses for access. This is controlled by the -``use_floating_ips`` configuration parameter. With this setup the user -has two options for ensuring that the instances in the node groups -templates that requires floating IPs gain a floating IP address: - -* The user may specify a floating IP address pool for each node - group that requires floating IPs directly. - -From Newton changes were made to allow the coexistence of clusters using -floating IPs and clusters using fixed IPs. If ``use_floating_ips`` is -True it means that the floating IPs can be used by Sahara to spawn clusters. -But, differently from previous versions, this does not mean that all -instances in the cluster must have floating IPs and that all clusters -must use floating IPs. It is possible in a single Sahara deploy to have -clusters setup using fixed IPs, clusters using floating IPs and cluster that -use both. - -If not using floating IP addresses (``use_floating_ips=False``) sahara -will use fixed IP addresses for instance management. When using neutron -for the Networking service the user will be able to choose the -fixed IP network for all instances in a cluster. - -.. _notification-configuration: - -Notifications configuration ---------------------------- - -Sahara can be configured to send notifications to the OpenStack -Telemetry module. To enable this functionality the following parameter -``enable`` should be set in the ``[oslo_messaging_notifications]`` section -of the configuration file: - -.. sourcecode:: cfg - - [oslo_messaging_notifications] - enable = true - -And the following parameter ``driver`` should be set in the -``[oslo_messaging_notifications]`` section of the configuration file: - -.. sourcecode:: cfg - - [oslo_messaging_notifications] - driver = messaging - -By default sahara is configured to use RabbitMQ as its message broker. - -If you are using RabbitMQ as the message broker, then you should set the -following parameter in the ``[DEFAULT]`` section: - -.. sourcecode:: cfg - - rpc_backend = rabbit - -You may also need to specify the connection parameters for your -RabbitMQ installation. The following example shows the default -values in the ``[oslo_messaging_rabbit]`` section which may need -adjustment: - -.. sourcecode:: cfg - - rabbit_host=localhost - rabbit_port=5672 - rabbit_hosts=$rabbit_host:$rabbit_port - rabbit_userid=guest - rabbit_password=guest - rabbit_virtual_host=/ -.. - -.. 
_orchestration-configuration: - -Orchestration configuration ---------------------------- - -By default sahara is configured to use the heat engine for instance -creation. The heat engine uses the OpenStack Orchestration service to -provision instances. This engine makes calls directly to the services required -for instance provisioning. - -.. _policy-configuration-label: - -Policy configuration --------------------- - -.. warning:: - - JSON formatted policy file is deprecated since Sahara 15.0.0 (Xena). - This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing - JSON-formatted policy file to YAML in a backward-compatible way. - -.. __: https://docs.openstack.org/oslo.policy/victoria/cli/oslopolicy-convert-json-to-yaml.html - -Sahara's public API calls may be restricted to certain sets of users by -using a policy configuration file. The location of the policy file(s) -is controlled by the ``policy_file`` and ``policy_dirs`` parameters -in the ``[oslo_policy]`` section. By default sahara will search for -a ``policy.yaml`` file in the same directory as the ``sahara.conf`` -configuration file. - -Examples -++++++++ - -Example 1. Allow all method to all users (default policy). - -.. sourcecode:: json - - { - "default": "" - } - - -Example 2. Disallow image registry manipulations to non-admin users. - -.. sourcecode:: json - - { - "default": "", - - "data-processing:images:register": "role:admin", - "data-processing:images:unregister": "role:admin", - "data-processing:images:add_tags": "role:admin", - "data-processing:images:remove_tags": "role:admin" - } - -API configuration ------------------ - -Sahara uses the ``api-paste.ini`` file to configure the data processing API -service. For middleware injection sahara uses pastedeploy library. The location -of the api-paste file is controlled by the ``api_paste_config`` parameter in -the ``[default]`` section. By default sahara will search for a -``api-paste.ini`` file in the same directory as the configuration file. diff --git a/doc/source/admin/index.rst b/doc/source/admin/index.rst deleted file mode 100644 index 150ffe8972..0000000000 --- a/doc/source/admin/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -====================== -Operator Documentation -====================== - -.. toctree:: - :maxdepth: 2 - - configuration-guide - advanced-configuration-guide - upgrade-guide diff --git a/doc/source/admin/upgrade-guide.rst b/doc/source/admin/upgrade-guide.rst deleted file mode 100644 index 65fcd0f207..0000000000 --- a/doc/source/admin/upgrade-guide.rst +++ /dev/null @@ -1,155 +0,0 @@ -Sahara Upgrade Guide -==================== - -This page contains details about upgrading sahara between releases such as -configuration file updates, database migrations, and architectural changes. - -Icehouse -> Juno ----------------- - -Main binary renamed to sahara-all -+++++++++++++++++++++++++++++++++ - -The All-In-One sahara binary has been renamed from ``sahara-api`` -to ``sahara-all``. The new name should be used in all cases where the -All-In-One sahara is desired. - -Authentication middleware changes -+++++++++++++++++++++++++++++++++ - -The custom auth_token middleware has been deprecated in favor of the keystone -middleware. This change requires an update to the sahara configuration file. 
To -update your configuration file you should replace the following parameters from -the ``[DEFAULT]`` section with the new parameters in the -``[keystone_authtoken]`` section: - -+-----------------------+--------------------+ -| Old parameter name | New parameter name | -+=======================+====================+ -| os_admin_username | admin_user | -+-----------------------+--------------------+ -| os_admin_password | admin_password | -+-----------------------+--------------------+ -| os_admin_tenant_name | admin_tenant_name | -+-----------------------+--------------------+ - -Additionally, the parameters ``os_auth_protocol``, ``os_auth_host``, -and ``os_auth_port`` have been combined to create the ``auth_uri`` -and ``identity_uri`` parameters. These new parameters should be -full URIs to the keystone public and admin endpoints, respectively. - -For more information about these configuration parameters please see -the :doc:`../admin/configuration-guide`. - -Database package changes -++++++++++++++++++++++++ - -The oslo based code from sahara.openstack.common.db has been replaced by -the usage of the oslo.db package. This change does not require any -update to sahara's configuration file. - -Additionally, the usage of SQLite databases has been deprecated. Please use -MySQL or PostgreSQL databases for sahara. SQLite has been deprecated because it -does not, and is not going to, support the ``ALTER COLUMN`` and ``DROP COLUMN`` -commands required for migrations between versions. For more information please -see http://www.sqlite.org/omitted.html - -Sahara integration into OpenStack Dashboard -+++++++++++++++++++++++++++++++++++++++++++ - -The sahara dashboard package has been deprecated in the Juno release. The -functionality of the dashboard has been fully incorporated into the -OpenStack Dashboard. The sahara interface is available under the -"Project" -> "Data Processing" tab. - -The Data processing service endpoints must be registered in the Identity -service catalog for the Dashboard to properly recognize and display -those user interface components. For more details on this process please see -:ref:`registering Sahara in installation guide `. - -The -`sahara-dashboard `_ -project is now used solely to host sahara user interface integration tests. - -Virtual machine user name changes -+++++++++++++++++++++++++++++++++ - -The HEAT infrastructure engine has been updated to use the same rules for -instance user names as the direct engine. In previous releases the user -name for instances created by sahara using HEAT was always 'ec2-user'. As -of Juno, the user name is taken from the image registry as described in -the :doc:`../user/registering-image` document. - -This change breaks backward compatibility for clusters created using the HEAT -infrastructure engine prior to the Juno release. Clusters will continue to -operate, but we do not recommended using the scaling operations with them. - -Anti affinity implementation changed -++++++++++++++++++++++++++++++++++++ - -Starting with the Juno release the anti affinity feature is implemented -using server groups. From the user perspective there will be no -noticeable changes with this feature. Internally this change has -introduced the following behavior: - -1) Server group objects will be created for any clusters with anti affinity - enabled. -2) Affected instances on the same host will not be allowed even if they - do not have common processes. Prior to Juno, instances with differing - processes were allowed on the same host. 
The new implementation - guarantees that all affected instances will be on different hosts - regardless of their processes. - -The new anti affinity implementation will only be applied for new clusters. -Clusters created with previous versions will continue to operate under -the older implementation, this applies to scaling operations on these -clusters as well. - -Juno -> Kilo ------------- - -Sahara requires policy configuration -++++++++++++++++++++++++++++++++++++ - -Sahara now requires a policy configuration file. The ``policy.json`` file -should be placed in the same directory as the sahara configuration file or -specified using the ``policy_file`` parameter. For more details about the -policy file please see the -:ref:`policy section in the configuration guide `. - -Kilo -> Liberty ---------------- - -Direct engine deprecation -+++++++++++++++++++++++++ - -In the Liberty release the direct infrastructure engine has been deprecated and -the heat infrastructure engine is now default. This means, that it is -preferable to use heat engine instead now. In the Liberty release you can -continue to operate clusters with the direct engine (create, delete, scale). -Using heat engine only the delete operation is available on clusters that were -created by the direct engine. After the Liberty release the direct engine will -be removed, this means that you will only be able to delete clusters created -with the direct engine. - -Policy namespace changed (policy.json) -++++++++++++++++++++++++++++++++++++++ - -The "data-processing:" namespace has been added to the beginning of the all -Sahara's policy based actions, so, you need to update the policy.json file by -prepending all actions with "data-processing:". - -Liberty -> Mitaka ------------------ - -Direct engine is removed. - -Mitaka -> Newton ----------------- - -Sahara CLI command is deprecated, please use OpenStack Client. - -.. note:: - - Since Mitaka release sahara actively uses release notes so you can see all - required upgrade actions here: https://docs.openstack.org/releasenotes/sahara/ diff --git a/doc/source/cli/index.rst b/doc/source/cli/index.rst deleted file mode 100644 index 6c8fcf7072..0000000000 --- a/doc/source/cli/index.rst +++ /dev/null @@ -1,11 +0,0 @@ -======================== -Sahara CLI Documentation -======================== - -In this section you will find information on Sahara’s command line -interface. - -.. toctree:: - :maxdepth: 1 - - sahara-status diff --git a/doc/source/cli/sahara-status.rst b/doc/source/cli/sahara-status.rst deleted file mode 100644 index a1ba218665..0000000000 --- a/doc/source/cli/sahara-status.rst +++ /dev/null @@ -1,83 +0,0 @@ -============= -sahara-status -============= - ----------------------------------------- -CLI interface for Sahara status commands ----------------------------------------- - -Synopsis -======== - -:: - - sahara-status [] - -Description -=========== - -:program:`sahara-status` is a tool that provides routines for checking the -status of a Sahara deployment. - -Options -======= - -The standard pattern for executing a :program:`sahara-status` command is:: - - sahara-status [] - -Run without arguments to see a list of available command categories:: - - sahara-status - -Categories are: - -* ``upgrade`` - -Detailed descriptions are below: - -You can also run with a category argument such as ``upgrade`` to see a list of -all commands in that category:: - - sahara-status upgrade - -These sections describe the available categories and arguments for -:program:`sahara-status`. 
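As an illustration, the ``sahara-status upgrade check`` command documented below can be scripted around its exit status; the meaning of each return code is given in the table that follows. This is only a minimal sketch of how an operator might wire the check into an upgrade script.

.. code-block:: bash

    # Run the readiness check and branch on its exit status
    # (see the return code table below).
    sahara-status upgrade check
    case $? in
        0) echo "All checks passed; safe to proceed." ;;
        1) echo "Warnings reported; review the output before upgrading." ;;
        2) echo "A check failed; do not upgrade until it is resolved." ;;
        *) echo "Unexpected error while running sahara-status." ;;
    esac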
- -Upgrade -~~~~~~~ - -.. _sahara-status-checks: - -``sahara-status upgrade check`` - Performs a release-specific readiness check before restarting services with - new code. For example, missing or changed configuration options, - incompatible object states, or other conditions that could lead to - failures while upgrading. - - **Return Codes** - - .. list-table:: - :widths: 20 80 - :header-rows: 1 - - * - Return code - - Description - * - 0 - - All upgrade readiness checks passed successfully and there is nothing - to do. - * - 1 - - At least one check encountered an issue and requires further - investigation. This is considered a warning but the upgrade may be OK. - * - 2 - - There was an upgrade status check failure that needs to be - investigated. This should be considered something that stops an - upgrade. - * - 255 - - An unexpected error occurred. - - **History of Checks** - - **10.0.0 (Stein)** - - * Sample check to be filled in with checks as they are added in Stein. diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index 86ded5ac84..0000000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,282 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2013 Mirantis Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../../sahara')) -sys.path.append(os.path.abspath('..')) -sys.path.append(os.path.abspath('../bin')) - -# -- General configuration ----------------------------------------------------- - -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', - 'sphinx.ext.viewcode', 'sphinxcontrib.httpdomain', 'oslo_config.sphinxconfiggen', - 'oslo_config.sphinxext', 'openstackdocstheme'] - -# openstackdocstheme options -repository_name = 'openstack/sahara' -use_storyboard = True - -config_generator_config_file = 'config-generator.conf' -config_sample_basename = 'sahara' - -openstack_projects = [ - 'barbican', - 'castellan', - 'designate', - 'devstack', - 'ironic', - 'keystone', - 'keystoneauth', - 'kolla-ansible', - 'neutron', - 'nova', - 'oslo.messaging', - 'oslo.middleware', - 'sahara-plugin-ambari', - 'sahara-plugin-cdh', - 'sahara-plugin-mapr', - 'sahara-plugin-spark', - 'sahara-plugin-storm', - 'sahara-plugin-vanilla', - 'tooz' -] - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] - -# Add any paths that contain "extra" files, such as .htaccess or -# robots.txt. -html_extra_path = ['_extra'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -copyright = '2014, OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. - -if on_rtd: - html_theme_path = ['.'] - html_theme = '_theme_rtd' - -html_theme = 'openstackdocs' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = {"show_other_versions": "True",} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = 'Sahara' - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -html_sidebars = { - 'index': ['sidebarlinks.html', 'localtoc.html', 'searchbox.html', 'sourcelink.html'], - '**': ['localtoc.html', 'relations.html', - 'searchbox.html', 'sourcelink.html'] -} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'SaharaDoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - #'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'saharadoc.tex', 'Sahara', - 'OpenStack Foundation', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'sahara', 'Sahara', - ['OpenStack Foundation'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'Sahara', 'Sahara', - 'OpenStack Foundation', 'Sahara', 'Sahara', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-#texinfo_show_urls = 'footnote' diff --git a/doc/source/config-generator.conf b/doc/source/config-generator.conf deleted file mode 120000 index 1a611114f8..0000000000 --- a/doc/source/config-generator.conf +++ /dev/null @@ -1 +0,0 @@ -../../tools/config/config-generator.sahara.conf \ No newline at end of file diff --git a/doc/source/configuration/descriptionconfig.rst b/doc/source/configuration/descriptionconfig.rst deleted file mode 100644 index 065ab0321b..0000000000 --- a/doc/source/configuration/descriptionconfig.rst +++ /dev/null @@ -1,8 +0,0 @@ -Configuration options -===================== - -This section provides a list of the configuration options that can -be set in the sahara configuration file. - -.. show-options:: - :config-file: tools/config/config-generator.sahara.conf diff --git a/doc/source/configuration/index.rst b/doc/source/configuration/index.rst deleted file mode 100644 index fad26d5e4a..0000000000 --- a/doc/source/configuration/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -======================= -Configuration Reference -======================= - - -.. toctree:: - :maxdepth: 1 - - descriptionconfig - sampleconfig diff --git a/doc/source/configuration/sampleconfig.rst b/doc/source/configuration/sampleconfig.rst deleted file mode 100644 index e4fa2dfd15..0000000000 --- a/doc/source/configuration/sampleconfig.rst +++ /dev/null @@ -1,8 +0,0 @@ -Sample sahara.conf file -======================= - -This is an automatically generated sample of the sahara.conf file. - -.. literalinclude:: ../sample.config - :language: ini - :linenos: diff --git a/doc/source/contributor/adding-database-migrations.rst b/doc/source/contributor/adding-database-migrations.rst deleted file mode 100644 index 96f70f1d3a..0000000000 --- a/doc/source/contributor/adding-database-migrations.rst +++ /dev/null @@ -1,113 +0,0 @@ -Adding Database Migrations -========================== - -The migrations in ``sahara/db/migration/alembic_migrations/versions`` contain -the changes needed to migrate between Sahara database revisions. A migration -occurs by executing a script that details the changes needed to upgrade or -downgrade the database. The migration scripts are ordered so that multiple -scripts can run sequentially. The scripts are executed by Sahara's migration -wrapper which uses the Alembic library to manage the migration. Sahara supports -migration from Icehouse or later. - -Any code modifications that change the structure of the database require a -migration script so that previously existing databases will continue to -function when the new code is released. This page gives a brief overview of how -to add the migration. - -Generate a New Migration Script -+++++++++++++++++++++++++++++++ - -New migration scripts can be generated using the ``sahara-db-manage`` command. - -To generate a migration stub to be filled in by the developer:: - -$ sahara-db-manage --config-file /path/to/sahara.conf revision -m "description of revision" - -To autogenerate a migration script that reflects the current structure of the -database:: - -$ sahara-db-manage --config-file /path/to/sahara.conf revision -m "description of revision" --autogenerate - -Each of these commands will create a file of the form ``revision_description`` -where ``revision`` is a string generated by Alembic and ``description`` is -based on the text passed with the ``-m`` option. 
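Before renaming anything, it can help to see roughly what such a generated stub looks like. The following is only a hedged sketch: the exact boilerplate depends on the Alembic template in use, and the table and column touched in ``upgrade()`` are purely illustrative (the revision string mirrors the example used in the next section, which describes how the file is renamed and its identifiers rewritten).

.. sourcecode:: python

    """my new revision

    Revision ID: 507eb70202af
    Revises: 006

    """

    from alembic import op
    import sqlalchemy as sa

    # revision identifiers, used by Alembic.
    revision = '507eb70202af'
    down_revision = '006'


    def upgrade():
        # Illustrative only: add a nullable column to an existing table.
        op.add_column('clusters',
                      sa.Column('example_field', sa.String(length=80),
                                nullable=True))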
- -Follow the Sahara Naming Convention -+++++++++++++++++++++++++++++++++++ - -By convention Sahara uses 3-digit revision numbers, and this scheme differs -from the strings generated by Alembic. Consequently, it's necessary to rename -the generated script and modify the revision identifiers in the script. - -Open the new script and look for the variable ``down_revision``. The value -should be a 3-digit numeric string, and it identifies the current revision -number of the database. Set the ``revision`` value to the ``down_revision`` -value + 1. For example, the lines:: - - # revision identifiers, used by Alembic. - revision = '507eb70202af' - down_revision = '006' - -will become:: - - # revision identifiers, used by Alembic. - revision = '007' - down_revision = '006' - -Modify any comments in the file to match the changes and rename the file to -match the new revision number:: - -$ mv 507eb70202af_my_new_revision.py 007_my_new_revision.py - - -Add Alembic Operations to the Script -++++++++++++++++++++++++++++++++++++ - -The migration script contains method ``upgrade()``. Sahara has not supported -downgrades since the Kilo release. Fill in this method with the appropriate -Alembic operations to perform upgrades. In the above example, an upgrade will -move from revision '006' to revision '007'. - -Command Summary for sahara-db-manage -++++++++++++++++++++++++++++++++++++ - -You can upgrade to the latest database version via:: - -$ sahara-db-manage --config-file /path/to/sahara.conf upgrade head - -To check the current database version:: - -$ sahara-db-manage --config-file /path/to/sahara.conf current - -To create a script to run the migration offline:: - -$ sahara-db-manage --config-file /path/to/sahara.conf upgrade head --sql - -To run the offline migration between specific migration versions:: - -$ sahara-db-manage --config-file /path/to/sahara.conf upgrade : --sql - -To upgrade the database incrementally:: - -$ sahara-db-manage --config-file /path/to/sahara.conf upgrade --delta <# of revs> - -To create a new revision:: - -$ sahara-db-manage --config-file /path/to/sahara.conf revision -m "description of revision" --autogenerate - -To create a blank file:: - -$ sahara-db-manage --config-file /path/to/sahara.conf revision -m "description of revision" - -This command does not perform any migrations, it only sets the revision. -Revision may be any existing revision. Use this command carefully:: - -$ sahara-db-manage --config-file /path/to/sahara.conf stamp - -To verify that the timeline does branch, you can run this command:: - -$ sahara-db-manage --config-file /path/to/sahara.conf check_migration - -If the migration path does branch, you can find the branch point via:: - -$ sahara-db-manage --config-file /path/to/sahara.conf history diff --git a/doc/source/contributor/apiv2.rst b/doc/source/contributor/apiv2.rst deleted file mode 100644 index 5ebe95eb31..0000000000 --- a/doc/source/contributor/apiv2.rst +++ /dev/null @@ -1,112 +0,0 @@ -API Version 2 Development -========================= - -The sahara project is currently in the process of creating a new -RESTful application programming interface (API). This interface is -by-default enabled, although it remains experimental. - -This document defines the steps necessary to enable and communicate -with the new API. This API has a few fundamental changes from the -previous APIs and they should be noted before proceeding with -development work. - -.. warning:: - This API is currently marked as experimental. 
It is not supported - by the sahara python client. These instructions are included purely - for developers who wish to help participate in the development - effort. - -Enabling the experimental API ------------------------------ - -There are a few changes to the WSGI pipeline that must be made to -enable the new v2 API. These changes will leave the 1.0 and 1.1 API -versions in place and will not adjust their communication parameters. - -To begin, uncomment, or add, the following sections in your -api-paste.ini file: - -.. sourcecode:: ini - - [app:sahara_apiv2] - paste.app_factory = sahara.api.middleware.sahara_middleware:RouterV2.factory - - [filter:auth_validator_v2] - paste.filter_factory = sahara.api.middleware.auth_valid:AuthValidatorV2.factory - -These lines define a new authentication filter for the v2 API, and -define the application that will handle the new calls. - -With these new entries in the paste configuration, we can now enable -them with the following changes to the api-paste.ini file: - -.. sourcecode:: ini - - [pipeline:sahara] - pipeline = cors request_id acl auth_validator_v2 sahara_api - - [composite:sahara_api] - use = egg:Paste#urlmap - /: sahara_apiv2 - -There are two significant changes occurring here: changing the -authentication validator in the pipeline, and changing the root "/" -application to the new v2 handler. - -At this point the sahara API server should be configured to accept -requests on the new v2 endpoints. - -Communicating with the v2 API ------------------------------ - -The v2 API makes at least one major change from the previous versions, -removing the OpenStack project identifier from the URL. Now users of -the API do not provide their project ID explicitly; instead we fully -trust keystonemiddleware to provide it in the WSGI environment based -on the given user token. - -For example, in previous versions of the API, a call to get the list of -clusters for project "12345678-1234-1234-1234-123456789ABC" would have -been made as follows:: - - GET /v1.1/12345678-1234-1234-1234-123456789ABC/clusters - X-Auth-Token: {valid auth token} - -This call would now be made to the following URL:: - - GET /v2/clusters - X-Auth-Token: {valid auth token} - -Using a tool like `HTTPie `_, the -same request could be made like this:: - - $ httpie http://{sahara service ip:port}/v2/clusters \ - X-Auth-Token:{valid auth token} - -Following the implementation progress -------------------------------------- - -As the creation of this API will be under regular change until it moves -out of the experimental phase, a wiki page has been established to help -track the progress. - -https://wiki.openstack.org/wiki/Sahara/api-v2 - -This page will help to coordinate the various reviews, specs, and work -items that are a continuing facet of this work. - -The API service layer ---------------------- - -When contributing to the version 2 API, it will be necessary to add code -that modifies the data and behavior of HTTP calls as they are sent to -and from the processing engine and data abstraction layers. Most -frequently in the sahara codebase, these interactions are handled in the -modules of the ``sahara.service.api`` package. This package contains -code for all versions of the API and follows a namespace mapping that is -similar to the routing functions of ``sahara.api``. - -Although these modules are not the definitive end of all answers to API -related code questions, they are a solid starting point when examining -the extent of new work.
Furthermore, they serve as a central point to -begin API debugging efforts when the need arises. diff --git a/doc/source/contributor/contributing.rst b/doc/source/contributor/contributing.rst deleted file mode 100644 index 553a5d6754..0000000000 --- a/doc/source/contributor/contributing.rst +++ /dev/null @@ -1,70 +0,0 @@ -============================ -So You Want to Contribute... -============================ - -For general information on contributing to OpenStack, please check out the -`contributor guide `_ to get started. -It covers all the basics that are common to all OpenStack projects: the -accounts you need, the basics of interacting with our Gerrit review system, how -we communicate as a community, etc. - -The sections below cover the project-specific information you need to get started -with Sahara. - -Communication -~~~~~~~~~~~~~ -* If you have something to discuss, use the - `OpenStack development mail-list `_. - Prefix the mail subject with ``[sahara]``. - -* Join the ``#openstack-sahara`` IRC channel on `OFTC `_ - -* Attend Sahara team meetings - - * Weekly on Thursdays at 1400 UTC - - * IRC channel: ``#openstack-meeting-3`` - -Contacting the Core Team -~~~~~~~~~~~~~~~~~~~~~~~~ -* The core team has coverage in the timezones of Europe and the Americas. - -* Just pop over to IRC; we keep a close eye on it! - -* You can also find the email addresses of the core team `here - <https://review.opendev.org/#/admin/groups/133,members>`_. - -New Feature Planning -~~~~~~~~~~~~~~~~~~~~ -Sahara uses specs to track feature requests. They provide a high-level summary -of proposed changes and track associated commits. Sahara also uses specs for -in-depth descriptions and discussions of blueprints. Specs follow a defined -format and are submitted as change requests to the openstack/sahara-specs -repository. - -Task Tracking -~~~~~~~~~~~~~ -We track our tasks in Storyboard. - -The Sahara project group homepage on Storyboard is -https://storyboard.openstack.org/#!/project_group/sahara. - -If you're looking for a smaller, easier work item to pick up and get started -on, search for the 'low-hanging-fruit' or 'new-contributor' tag. - -Reporting a Bug -~~~~~~~~~~~~~~~ -You found an issue and want to make sure we are aware of it? You can do so on -https://storyboard.openstack.org/#!/project_group/sahara. - -Getting Your Patch Merged -~~~~~~~~~~~~~~~~~~~~~~~~~ -Typically two +2s are required before merging. - -Project Team Lead Duties -~~~~~~~~~~~~~~~~~~~~~~~~ -If you are the PTL of Sahara then you should follow the `PTL guide -`_. You should also -keep track of new versions of the various Hadoop distros/components coming out -(this can also be delegated to another contributor, but the PTL needs to track -it either way). diff --git a/doc/source/contributor/dashboard-dev-environment-guide.rst b/doc/source/contributor/dashboard-dev-environment-guide.rst deleted file mode 100644 index 05ba9f4dfd..0000000000 --- a/doc/source/contributor/dashboard-dev-environment-guide.rst +++ /dev/null @@ -1,153 +0,0 @@ -Sahara UI Dev Environment Setup -=============================== - -This page describes how to set up Horizon for developing Sahara by either -installing it as part of DevStack with Sahara or installing it in an -isolated environment and running from the command line. - -Install as a part of DevStack ----------------------------- - -See the `DevStack guide `_ for more information -on installing and configuring DevStack with Sahara.
- -Sahara UI can be installed as a DevStack plugin by adding the following line -to your ``local.conf`` file - -.. sourcecode:: bash - - # Enable sahara-dashboard - enable_plugin sahara-dashboard https://opendev.org/openstack/sahara-dashboard - - -Isolated Dashboard for Sahara ------------------------------ - -These installation steps serve two purposes: - 1. Setup a dev environment - 2. Setup an isolated Dashboard for Sahara - -**Note** The host where you are going to perform installation has to be able -to connect to all OpenStack endpoints. You can list all available endpoints -using the following command: - -.. sourcecode:: console - - $ openstack endpoint list - -You can list the registered services with this command: - -.. sourcecode:: console - - $ openstack service list - -Sahara service should be present in keystone service list with service type -*data-processing* - -1. Install prerequisites - -.. sourcecode:: console - - $ sudo apt-get update - $ sudo apt-get install git-core python-dev gcc python-setuptools \ - python-virtualenv node-less libssl-dev libffi-dev libxslt-dev -.. - -On Ubuntu 12.10 and higher you have to install the following lib as well: - -.. sourcecode:: console - - $ sudo apt-get install nodejs-legacy -.. - -2. Checkout Horizon from git and switch to your version of OpenStack - -Here is an example: - -.. sourcecode:: console - - $ git clone https://opendev.org/openstack/horizon/ {HORIZON_DIR} -.. - -Then install the virtual environment: - -.. sourcecode:: console - - $ python {HORIZON_DIR}/tools/install_venv.py -.. - -3. Create a ``local_settings.py`` file - -.. sourcecode:: console - - $ cp {HORIZON_DIR}/openstack_dashboard/local/local_settings.py.example \ - {HORIZON_DIR}/openstack_dashboard/local/local_settings.py -.. - -4. Modify ``{HORIZON_DIR}/openstack_dashboard/local/local_settings.py`` - -Set the proper values for host and url variables: - -.. sourcecode:: python - - OPENSTACK_HOST = "ip of your controller" -.. - -If you wish to disable floating IP options during node group template -creation, add the following parameter: - -.. sourcecode:: python - - SAHARA_FLOATING_IP_DISABLED = True -.. - -5. Clone sahara-dashboard repository and checkout the desired branch - -.. sourcecode:: console - - $ git clone https://opendev.org/openstack/sahara-dashboard/ \ - {SAHARA_DASHBOARD_DIR} -.. - -6. Copy plugin-enabling files from sahara-dashboard repository to horizon - -.. sourcecode:: console - - $ cp -a {SAHARA_DASHBOARD_DIR}/sahara_dashboard/enabled/* {HORIZON_DIR}/openstack_dashboard/local/enabled/ -.. - -7. Install sahara-dashboard project into your horizon virtualenv - in editable mode - -.. sourcecode:: console - - $ . {HORIZON_DIR}/.venv/bin/activate - $ pip install -e {SAHARA_DASHBOARD_DIR} -.. - -8. Start Horizon - -.. sourcecode:: console - - $ . {HORIZON_DIR}/.venv/bin/activate - $ python {HORIZON_DIR}/manage.py runserver 0.0.0.0:8080 -.. - -This will start Horizon in debug mode. That means the logs will be written to -console and if any exceptions happen, you will see the stack-trace rendered -as a web-page. - -Debug mode can be disabled by changing ``DEBUG=True`` to ``False`` in -``local_settings.py``. In that case Horizon should be started slightly -differently, otherwise it will not serve static files: - -.. sourcecode:: console - - $ . {HORIZON_DIR}/.venv/bin/activate - $ python {HORIZON_DIR}/manage.py runserver --insecure 0.0.0.0:8080 -.. - -.. note:: - - It is not recommended to use Horizon in this mode for production. 
- diff --git a/doc/source/contributor/development-environment.rst b/doc/source/contributor/development-environment.rst deleted file mode 100644 index 77972e7c69..0000000000 --- a/doc/source/contributor/development-environment.rst +++ /dev/null @@ -1,131 +0,0 @@ -Setting Up a Development Environment -==================================== - -This page describes how to setup a Sahara development environment by either -installing it as a part of DevStack or pointing a local running instance at an -external OpenStack. You should be able to debug and test your changes without -having to deploy Sahara. - -Setup a Local Environment with Sahara inside DevStack ------------------------------------------------------ - -See :doc:`the main article `. - -Setup a Local Environment with an external OpenStack ----------------------------------------------------- - -1. Install prerequisites - -On OS X Systems: - -.. sourcecode:: console - - # we actually need pip, which is part of python package - $ brew install python mysql postgresql rabbitmq - $ pip install virtualenv tox - -On Ubuntu: - -.. sourcecode:: console - - $ sudo apt-get update - $ sudo apt-get install git-core python-dev python-virtualenv gcc libpq-dev libmysqlclient-dev python-pip rabbitmq-server - $ sudo pip install tox - -On Red Hat and related distributions (CentOS/Fedora/RHEL/Scientific Linux): - -.. sourcecode:: console - - $ sudo yum install git-core python-devel python-virtualenv gcc python-pip mariadb-devel postgresql-devel erlang - $ sudo pip install tox - $ sudo wget http://www.rabbitmq.com/releases/rabbitmq-server/v3.2.2/rabbitmq-server-3.2.2-1.noarch.rpm - $ sudo rpm --import http://www.rabbitmq.com/rabbitmq-signing-key-public.asc - $ sudo yum install rabbitmq-server-3.2.2-1.noarch.rpm - -On openSUSE-based distributions (SLES 12, openSUSE, Factory or Tumbleweed): - -.. sourcecode:: console - - $ sudo zypper in gcc git libmysqlclient-devel postgresql-devel python-devel python-pip python-tox python-virtualenv - -2. Grab the code - -.. sourcecode:: console - - $ git clone https://opendev.org/openstack/sahara.git - $ cd sahara - -3. Generate Sahara sample using tox - -.. sourcecode:: console - - tox -e genconfig - -4. Create config file from the sample - -.. sourcecode:: console - - $ cp ./etc/sahara/sahara.conf.sample ./etc/sahara/sahara.conf - -5. Look through the sahara.conf and modify parameter values as needed - For details see - :doc:`Sahara Configuration Guide <../admin/configuration-guide>` - -6. Create database schema - -.. sourcecode:: console - - $ tox -e venv -- sahara-db-manage --config-file etc/sahara/sahara.conf upgrade head - -7. To start Sahara API and Engine processes call - -.. sourcecode:: console - - $ tox -e venv -- sahara-api --config-file etc/sahara/sahara.conf --debug - $ tox -e venv -- sahara-engine --config-file etc/sahara/sahara.conf --debug - - -Setup local OpenStack dashboard with Sahara plugin --------------------------------------------------- - -.. toctree:: - :maxdepth: 1 - - - dashboard-dev-environment-guide - -Tips and tricks for dev environment ------------------------------------ - -1. Pip speedup - -Add the following lines to ~/.pip/pip.conf - -.. sourcecode:: cfg - - [global] - download-cache = /home//.pip/cache - index-url = - -Note that the ``~/.pip/cache`` folder should be created manually. - -2. Git hook for fast checks - -Just add the following lines to .git/hooks/pre-commit and do chmod +x for it. - -.. 
sourcecode:: - - #!/bin/sh - # Run fast checks (PEP8 style check and PyFlakes fast static analysis) - tox -epep8 - -You can add also other checks for pre-push, for example pylint (see below) -and tests (tox -epy27). - -3. Running static analysis (PyLint) - -Just run the following command - -.. sourcecode:: - - tox -e pylint diff --git a/doc/source/contributor/development-guidelines.rst b/doc/source/contributor/development-guidelines.rst deleted file mode 100644 index 3b6136440b..0000000000 --- a/doc/source/contributor/development-guidelines.rst +++ /dev/null @@ -1,238 +0,0 @@ -Development Guidelines -====================== - -Coding Guidelines ------------------ - -For all the Python code in Sahara we have a rule - it should pass `PEP 8`_. -All Bash code should pass `bashate`_. - -To check your code against PEP 8 and bashate run: - -.. sourcecode:: console - - $ tox -e pep8 - -.. note:: - For more details on coding guidelines see file ``HACKING.rst`` in the root - of Sahara repo. - -Static analysis ---------------- - -The static analysis checks are optional in Sahara, but they are still very -useful. The gate job will inform you if the number of static analysis warnings -has increased after your change. We recommend to always check the static -warnings. - -To run check first commit your change, then execute the following command: - -.. sourcecode:: console - - $ tox -e pylint - -Modification of Upstream Files ------------------------------- - -We never modify upstream files in Sahara. Any changes in upstream files should -be made in the upstream project and then merged back in to Sahara. This -includes whitespace changes, comments, and typos. Any change requests -containing upstream file modifications are almost certain to receive lots of -negative reviews. Be warned. - -Examples of upstream files are default xml configuration files used to -configure Hadoop, or code imported from the OpenStack Oslo project. The xml -files will usually be found in ``resource`` directories with an accompanying -``README`` file that identifies where the files came from. For example: - -.. sourcecode:: console - - $ pwd - /home/me/sahara/sahara/plugins/vanilla/v2_7_1/resources - - $ ls - core-default.xml hdfs-default.xml oozie-default.xml README.rst - create_oozie_db.sql mapred-default.xml post_conf.template yarn-default.xml -.. - -Testing Guidelines ------------------- - -Sahara has a suite of tests that are run on all submitted code, -and it is recommended that developers execute the tests themselves to -catch regressions early. Developers are also expected to keep the -test suite up-to-date with any submitted code changes. - -Unit tests are located at ``sahara/tests/unit``. - -Sahara's suite of unit tests can be executed in an isolated environment -with `Tox`_. To execute the unit tests run the following from the root of -Sahara repo: - -.. sourcecode:: console - - $ tox -e py27 - - -Documentation Guidelines ------------------------- - -All Sahara docs are written using Sphinx / RST and located in the main repo -in the ``doc`` directory. You can add or edit pages here to update the -https://docs.openstack.org/sahara/latest/ site. - -The documentation in docstrings should follow the `PEP 257`_ conventions -(as mentioned in the `PEP 8`_ guidelines). - -More specifically: - -1. Triple quotes should be used for all docstrings. -2. If the docstring is simple and fits on one line, then just use - one line. -3. 
For docstrings that take multiple lines, there should be a newline - after the opening quotes, and before the closing quotes. -4. `Sphinx`_ is used to build documentation, so use the restructured text - markup to designate parameters, return values, etc. - -Run the following command to build docs locally. - -.. sourcecode:: console - - $ tox -e docs - -After it you can access generated docs in ``doc/build/`` directory, for -example, main page - ``doc/build/html/index.html``. - -To make the doc generation process faster you can use: - -.. sourcecode:: console - - $ SPHINX_DEBUG=1 tox -e docs - -To avoid sahara reinstallation to virtual env each time you want to rebuild -docs you can use the following command (it can be executed only after -running ``tox -e docs`` first time): - -.. sourcecode:: console - - $ SPHINX_DEBUG=1 .tox/docs/bin/python setup.py build_sphinx - - - -.. note:: - For more details on documentation guidelines see HACKING.rst in the root of - the Sahara repo. - - -.. _PEP 8: http://www.python.org/dev/peps/pep-0008/ -.. _bashate: https://opendev.org/openstack/bashate -.. _PEP 257: http://www.python.org/dev/peps/pep-0257/ -.. _Tox: http://tox.testrun.org/ -.. _Sphinx: http://sphinx.pocoo.org/markup/index.html - -Event log Guidelines --------------------- - -Currently Sahara keeps useful information about provisioning for each cluster. -Cluster provisioning can be represented as a linear series of provisioning -steps, which are executed one after another. Each step may consist of several -events. The number of events depends on the step and the number of instances -in the cluster. Also each event can contain information about its cluster, -instance, and node group. In case of errors, events contain useful information -for identifying the error. Additionally, each exception in sahara contains a -unique identifier that allows the user to find extra information about that -error in the sahara logs. You can see an example of provisioning progress -information here: -https://docs.openstack.org/api-ref/data-processing/#event-log - -This means that if you add some important phase for cluster provisioning to -the sahara code, it's recommended to add a new provisioning step for this -phase. This will allow users to use event log for handling errors during this -phase. - -Sahara already has special utils for operating provisioning steps and events -in the module ``sahara/utils/cluster_progress_ops.py``. - -.. note:: - It's strictly recommended not to use ``conductor`` event log ops directly - to assign events and operate provisioning steps. - -.. note:: - You should not start a new provisioning step until the previous step has - successfully completed. - -.. note:: - It's strictly recommended to use ``event_wrapper`` for event handling. - -OpenStack client usage guidelines ---------------------------------- - -The sahara project uses several OpenStack clients internally. These clients -are all wrapped by utility functions which make using them more convenient. -When developing sahara, if you need to use an OpenStack client you should -check the ``sahara.utils.openstack`` package for the appropriate one. - -When developing new OpenStack client interactions in sahara, it is important -to understand the ``sahara.service.sessions`` package and the usage of the -keystone ``Session`` and auth plugin objects (for example, ``Token`` and -``Password``). Sahara is migrating all clients to use this authentication -methodology, where available. 
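For reference, the keystoneauth objects mentioned above are created roughly as follows. This is a generic sketch with a placeholder endpoint and credentials, shown only to illustrate the ``Session`` and ``Password`` plugin objects; inside sahara itself, sessions should be obtained through ``sahara.service.sessions`` and the wrappers in ``sahara.utils.openstack`` rather than built by hand.

.. sourcecode:: python

    from keystoneauth1.identity import v3
    from keystoneauth1 import session

    # The auth plugin holds the credentials; the session wraps it and
    # handles token management, retries and TLS settings.
    auth = v3.Password(auth_url='http://keystone.example.com:5000/v3',
                       username='sahara',
                       password='secret',
                       project_name='service',
                       user_domain_id='default',
                       project_domain_id='default')
    sess = session.Session(auth=auth)

    # Client libraries accept the session directly, for example:
    #   novaclient.client.Client('2', session=sess)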
For more information on using sessions with -keystone, please see -:keystoneauth-doc:`the keystoneauth documentation ` - -Storing sensitive information ------------------------------ - -During the course of development, there is often cause to store sensitive -information (for example, login credentials) in the records for a cluster, -job, or some other record. Storing secret information this way is **not** -safe. To mitigate the risk of storing this information, sahara provides -access to the OpenStack Key Manager service (implemented by the -:barbican-doc:`barbican project <>`) through -the :castellan-doc:`castellan library <>`. - -To utilize the external key manager, the functions in -``sahara.service.castellan.utils`` are provided as wrappers around the -castellan library. These functions allow a developer to store, retrieve, and -delete secrets from the manager. Secrets that are managed through the key -manager have an identifier associated with them. These identifiers are -considered safe to store in the database. - -The following are some examples of working with secrets in the sahara -codebase. These examples are considered basic, any developer wishing to -learn more about the advanced features of storing secrets should look to -the code and docstrings contained in the ``sahara.service.castellan`` module. - -**Storing a secret** - -.. sourcecode:: python - - from sahara.service.castellan import utils as key_manager - - password = 'SooperSecretPassword' - identifier = key_manager.store_secret(password) - -**Retrieving a secret** - -.. sourcecode:: python - - from sahara.service.castellan import utils as key_manager - - password = key_manager.get_secret(identifier) - -**Deleting a secret** - -.. sourcecode:: python - - from sahara.service.castellan import utils as key_manager - - key_manager.delete_secret(identifier) - -When storing secrets through this interface it is important to remember that -if an external key manager is being used, each stored secret creates an -entry in an external service. When you are finished using the secret it is -good practice to delete it, as not doing so may leave artifacts in those -external services. - -For more information on configuring sahara to use the OpenStack Key -Manager service, see :ref:`external_key_manager_usage`. diff --git a/doc/source/contributor/devstack.rst b/doc/source/contributor/devstack.rst deleted file mode 100644 index fe065aa4ae..0000000000 --- a/doc/source/contributor/devstack.rst +++ /dev/null @@ -1,181 +0,0 @@ -Setup DevStack -============== - -DevStack can be installed on Fedora, Ubuntu, and CentOS. For supported -versions see `DevStack documentation `_ - -We recommend that you install DevStack in a VM, rather than on your main -system. That way you may avoid contamination of your system. You may find -hypervisor and VM requirements in the next section. If you still want to -install DevStack on your baremetal system, just skip the next section and read -further. - - -Start VM and set up OS ----------------------- - -In order to run DevStack in a local VM, you need to start by installing -a guest with Ubuntu 14.04 server. Download an image file from -`Ubuntu's web site `_ and create -a new guest from it. Virtualization solution must support -nested virtualization. Without nested virtualization VMs running inside -the DevStack will be extremely slow lacking hardware acceleration, i.e. -you will run QEMU VMs without KVM. - -On Linux QEMU/KVM supports nested virtualization, on Mac OS - VMware Fusion. 
-VMware Fusion requires adjustments to run VM with fixed IP. You may find -instructions which can help :ref:`below `. - -Start a new VM with Ubuntu Server 14.04. Recommended settings: - -- Processor - at least 2 cores -- Memory - at least 8GB -- Hard Drive - at least 60GB - -When allocating CPUs and RAM to the DevStack, assess how big clusters you -want to run. A single Hadoop VM needs at least 1 cpu and 1G of RAM to run. -While it is possible for several VMs to share a single cpu core, remember -that they can't share the RAM. - -After you installed the VM, connect to it via SSH and proceed with the -instructions below. - - -Install DevStack ----------------- - -The instructions assume that you've decided to install DevStack into -Ubuntu 14.04 system. - -**Note:** Make sure to use bash, as other shells are not fully compatible -and may cause hard to debug problems. - -1. Clone DevStack: - -.. sourcecode:: console - - $ sudo apt-get install git-core - $ git clone https://opendev.org/openstack/devstack.git - -2. Create the file ``local.conf`` in devstack directory with the following - content: - -.. sourcecode:: bash - - [[local|localrc]] - ADMIN_PASSWORD=nova - MYSQL_PASSWORD=nova - RABBIT_PASSWORD=nova - SERVICE_PASSWORD=$ADMIN_PASSWORD - SERVICE_TOKEN=nova - - # Enable Swift - enable_service s-proxy s-object s-container s-account - - SWIFT_HASH=66a3d6b56c1f479c8b4e70ab5c2000f5 - SWIFT_REPLICAS=1 - SWIFT_DATA_DIR=$DEST/data - - # Force checkout prerequisites - # FORCE_PREREQ=1 - - # keystone is now configured by default to use PKI as the token format - # which produces huge tokens. - # set UUID as keystone token format which is much shorter and easier to - # work with. - KEYSTONE_TOKEN_FORMAT=UUID - - # Change the FLOATING_RANGE to whatever IPs VM is working in. - # In NAT mode it is the subnet VMware Fusion provides, in bridged mode - # it is your local network. But only use the top end of the network by - # using a /27 and starting at the 224 octet. - FLOATING_RANGE=192.168.55.224/27 - - # Set ``OFFLINE`` to ``True`` to configure ``stack.sh`` to run cleanly - # without Internet access. ``stack.sh`` must have been previously run - # with Internet access to install prerequisites and fetch repositories. - # OFFLINE=True - - # Enable sahara - enable_plugin sahara https://opendev.org/openstack/sahara - - # Enable heat - enable_plugin heat https://opendev.org/openstack/heat - -In cases where you need to specify a git refspec (branch, tag, or commit hash) -for the sahara in-tree devstack plugin (or sahara repo), it should be -appended to the git repo URL as follows: - -.. sourcecode:: bash - - enable_plugin sahara https://opendev.org/openstack/sahara - -3. Sahara can send notifications to Ceilometer, if Ceilometer is enabled. - If you want to enable Ceilometer add the following lines to the - ``local.conf`` file: - -.. sourcecode:: bash - - enable_plugin ceilometer https://opendev.org/openstack/ceilometer - -4. Start DevStack: - -.. sourcecode:: console - - $ ./stack.sh - -5. Once the previous step is finished Devstack will print a Horizon URL. - Navigate to this URL and login with login "admin" and password from - ``local.conf``. - -6. Congratulations! You have OpenStack running in your VM and you're ready to - launch VMs inside that VM. :) - - -Managing sahara in DevStack ---------------------------- - -If you install DevStack with sahara included you can rejoin screen with the -``screen -c stack-screenrc`` command and switch to the ``sahara`` tab. 
-Here you can manage the sahara service as other OpenStack services. -Sahara source code is located at ``$DEST/sahara`` which is usually -``/opt/stack/sahara``. - - -.. _fusion-fixed-ip: - -Setting fixed IP address for VMware Fusion VM ---------------------------------------------- - -1. Open file ``/Library/Preferences/VMware Fusion/vmnet8/dhcpd.conf`` - -2. There is a block named "subnet". It might look like this: - -.. sourcecode:: text - - subnet 192.168.55.0 netmask 255.255.255.0 { - range 192.168.55.128 192.168.55.254; - -3. You need to pick an IP address outside of that range. For example - - ``192.168.55.20`` - -4. Copy VM MAC address from VM settings->Network->Advanced - -5. Append the following block to file ``dhcpd.conf`` (don't forget to replace - ``VM_HOSTNAME`` and ``VM_MAC_ADDRESS`` with actual values): - -.. sourcecode:: text - - host VM_HOSTNAME { - hardware ethernet VM_MAC_ADDRESS; - fixed-address 192.168.55.20; - } - -6. Now quit all the VMware Fusion applications and restart vmnet: - -.. sourcecode:: console - - $ sudo /Applications/VMware\ Fusion.app/Contents/Library/vmnet-cli --stop - $ sudo /Applications/VMware\ Fusion.app/Contents/Library/vmnet-cli --start - -7. Now start your VM; it should have new fixed IP address. diff --git a/doc/source/contributor/gerrit.rst b/doc/source/contributor/gerrit.rst deleted file mode 100644 index a27cc3c65a..0000000000 --- a/doc/source/contributor/gerrit.rst +++ /dev/null @@ -1,14 +0,0 @@ -Code Reviews with Gerrit -======================== - -Sahara uses the `Gerrit`_ tool to review proposed code changes. The review -site is https://review.opendev.org. - -Gerrit is a complete replacement for Github pull requests. `All Github pull -requests to the Sahara repository will be ignored`. - -See `Development Workflow`_ for information about how to get -started using Gerrit. - -.. _Gerrit: http://code.google.com/p/gerrit -.. _Development Workflow: https://docs.openstack.org/infra/manual/developers.html#development-workflow diff --git a/doc/source/contributor/how-to-build-oozie.rst b/doc/source/contributor/how-to-build-oozie.rst deleted file mode 100644 index 9465824755..0000000000 --- a/doc/source/contributor/how-to-build-oozie.rst +++ /dev/null @@ -1,74 +0,0 @@ -How to build Oozie -================== - -.. note:: - - Apache does not make Oozie builds, so it has to be built manually. - -Download --------- - -* Download tarball from `Apache mirror `_ -* Unpack it with - - .. sourcecode:: console - - $ tar -xzvf oozie-4.3.1.tar.gz - -Hadoop Versions ---------------- - -To build Oozie the following command can be used: - -.. sourcecode:: console - - $ {oozie_dir}/bin/mkdistro.sh -DskipTests - -By default it builds against Hadoop 1.1.1. To built it with Hadoop version -2.x: - -* The hadoop-2 version should be changed in pom.xml. - This can be done manually or with the following command (you should - replace 2.x.x with your hadoop version): - - .. sourcecode:: console - - $ find . -name pom.xml | xargs sed -ri 's/2.3.0/2.x.x/' - -* The build command should be launched with the ``-P hadoop-2`` flag - -JDK Versions ------------- - -By default, the build configuration enforces that JDK 1.6.* is being used. - -There are 2 build properties that can be used to change the JDK version -requirements: - -* ``javaVersion`` specifies the version of the JDK used to compile (default - 1.6). - -* ``targetJavaVersion`` specifies the version of the generated bytecode - (default 1.6). 
- -For example, to specify JDK version 1.7, the build command should contain the -``-D javaVersion=1.7 -D targetJavaVersion=1.7`` flags. - - - -Build ------ - -To build Oozie with Hadoop 2.6.0 and JDK version 1.7, the following command -can be used: - -.. sourcecode:: console - - $ {oozie_dir}/bin/mkdistro.sh assembly:single -P hadoop-2 -D javaVersion=1.7 -D targetJavaVersion=1.7 -D skipTests - -Also, the pig version can be passed as a maven property with the flag -``-D pig.version=x.x.x``. - -You can find similar instructions to build oozie.tar.gz here: -http://oozie.apache.org/docs/4.3.1/DG_QuickStart.html#Building_Oozie - diff --git a/doc/source/contributor/image-gen.rst b/doc/source/contributor/image-gen.rst deleted file mode 100644 index 7e25ccfe39..0000000000 --- a/doc/source/contributor/image-gen.rst +++ /dev/null @@ -1,344 +0,0 @@ -Image Generation -================ - -As of Newton, Sahara supports the creation of image generation and image -validation tooling as part of the plugin. If implemented properly, this -feature will enable your plugin to: - -* Validate that images passed to it for use in cluster provisioning meet its - specifications. -* Provision images from "clean" (OS-only) images. -* Pack pre-populated images for registration in Glance and use by Sahara. - -All of these features can use the same image declaration, meaning that logic -for these three use cases can be maintained in one place. - -This guide will explain how to enable this feature for your plugin, as well as -how to write or modify the image generation manifests that this feature uses. - - -Image Generation CLI --------------------- - -The key user-facing interface to this feature is the CLI script -``sahara-image-pack``. This script will be installed with all other Sahara -binaries. - -The usage of the CLI script ``sahara-image-pack`` is documented in -the :ref:`sahara-image-pack-label` section of the user guide. - - -The Image Manifest ------------------- - -As you'll read in the next section, Sahara's image packing tools allow plugin -authors to use any toolchain they choose. However, Sahara does provide a -built-in image packing framework which is uniquely suited to OpenStack use -cases, as it is designed to run the same logic while pre-packing an image or -while preparing an instance to launch a cluster after it is spawned in -OpenStack. - -By convention, the image specification, and all the scripts that it calls, -should be located in the plugin's resources directory under a subdirectory -named "images". - -A sample specification is below; the example is reasonably silly in practice, -and is only designed to highlight the use of the currently available -validator types. We'll go through each piece of this specification, but the -full sample is presented for context. - -:: - - arguments: - java-distro: - description: The java distribution.
- default: openjdk - required: false - choices: - - oracle-java - - openjdk - - validators: - - os_case: - - redhat: - - package: nfs-utils - - debian: - - package: nfs-common - - argument_case: - argument_name: java-distro - cases: - openjdk: - - any: - - all: - - package: java-1.8.0-openjdk-devel - - argument_set: - argument_name: java-version - value: 1.8.0 - - all: - - package: java-1.7.0-openjdk-devel - - argument_set: - argument_name: java-version - value: 1.7.0 - oracle-java: - - script: install_oracle_java.sh - - script: setup_java.sh - - package: - - hadoop - - hadoop-libhdfs - - hadoop-native - - hadoop-pipes - - hadoop-sbin - - hadoop-lzo - - lzo - - lzo-devel - - hadoop-lzo-native - - -The Arguments Section ---------------------- - -First, the image specification should describe any arguments that may be used -to adjust properties of the image: - -:: - - arguments: # The section header - - java-distro: # The friendly name of the argument, and the name of the variable passed to scripts - description: The java distribution. # A friendly description to be used in help text - default: openjdk # A default value for the argument - required: false # Whether or not the argument is required - choices: # The argument value must match an element of this list - - oracle-java - - openjdk - -Specifications may contain any number of arguments, as declared above, by -adding more members to the list under the ``arguments`` key. - -The Validators Section ----------------------- - -This is where the logical flow of the image packing and validation process -is declared. A tiny example validator list is specified below. - -:: - - validators: - - package: nfs-utils - - script: setup_java.sh - -This is fairly straightforward: this specification will install the nfs-utils -package (or check that it's present) and then run the ``setup_java.sh`` script. - -All validators may be run in two modes: reconcile mode and test-only mode -(reconcile == false). If validators are run in reconcile mode, any image or -instance state which is not already true will be updated, if possible. If -validators are run in test-only mode, they will only test the image or -instance, and will raise an error if this fails. - -We'll now go over the types of validators that are currently available in -Sahara. This framework is made to easily allow new validators to be created -and old ones to be extended: if there's something you need, please do file a -wishlist bug or write and propose your own! - -Action validators ------------------ - -These validators take specific, concrete actions to assess or modify your -image or instance. - -The Package Validator -~~~~~~~~~~~~~~~~~~~~~ - -This validator type will install a package on the image, or validate that a -package is installed on the image. It can take several formats, as below: - -:: - - validators: - - package: hadoop - - package: - - hadoop-libhdfs - - nfs-utils: - version: 1.3.3-8 - -As you can see, a package declaration can consist of: - -* The package name as a string -* A list of packages, any of which may be: - * The package name as a string - * A dict with the package name as a key and a version property - -The Script Validator -~~~~~~~~~~~~~~~~~~~~ - -This validator will run a script on the image. 
It can take several formats -as well: - -:: - - validators: - - script: simple_script.sh # Runs this file - - script: - set_java_home: # The name of a script file - arguments: # Only the named environment arguments are passed, for clarity - - jdk-home - - jre-home - output: OUTPUT_VAR - - script: - store_nfs_version: # Because inline is set, this is just a friendly name - inline: rpm -q nfs-utils # Runs this text directly, rather than reading a file - output: nfs-version # Places the stdout of this script into an argument - # for future scripts to consume; if none exists, the - # argument is created - -Two variables are always available to scripts run under this framework: - -* ``distro``: The distro of the image, in case you want to switch on distro - within your script (rather than by using the os_case validator). -* ``test_only``: If this value equates to boolean false, then the script should - attempt to change the image or instance if it does not already meet the - specification. If this equates to boolean true, the script should exit with - a failure code if the image or instance does not already meet the - specification. - - -Flow Control Validators ------------------------ - -These validators are used to build more complex logic into your -specifications explicitly in the yaml layer, rather than by deferring -too much logic to scripts. - -The OS Case Validator -~~~~~~~~~~~~~~~~~~~~~ - -This validator runs different logic depending on which distribution of Linux -is being used in the guest. - -:: - - validators: - - os_case: # The contents are expressed as a list, not a dict, to preserve order - - fedora: # Only the first match runs, so put distros before families - - package: nfs_utils # The content of each case is a list of validators - - redhat: # Red Hat distros include fedora, centos, and rhel - - package: nfs-utils - - debian: # The major supported Debian distro in Sahara is ubuntu - - package: nfs-common - - -The Argument Case Validator -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -This validator runs different logic depending on the value of an argument. - -:: - - validators: - - argument_case: - argument_name: java-distro # The name of the argument - cases: # The cases are expressed as a dict, as only one can equal the argument's value - openjdk: - - script: setup-openjdk # The content of each case is a list of validators - oracle-java: - - script: setup-oracle-java - -The All Validator -~~~~~~~~~~~~~~~~~ - -This validator runs all the validators within it, as one logical block. If any -validators within it fail to validate or modify the image or instance, it will -fail. - -:: - - validators: - - all: - - package: nfs-utils - - script: setup-nfs.sh - -The Any Validator -~~~~~~~~~~~~~~~~~ - -This validator attempts to run each validator within it, until one succeeds, -and will report success if any do. If this is run in reconcile mode, it will -first try each validator in test-only mode, and will succeed without -making changes if any succeed (in the case below, if openjdk 1.7.0 were -already installed, the validator would succeed and would not install 1.8.0.) - -:: - - validators: - - any: # This validator will try to install openjdk-1.8.0, but it will settle for 1.7.0 if that fails - - package: java-1.8.0-openjdk-devel - - package: java-1.7.0-openjdk-devel - -The Argument Set Validator -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You may find that you wish to store state in one place in the specification -for use in another. In this case, you can use this validator to set an -argument for future use. 
- -:: - - validators: - - argument_set: - argument_name: java-version - value: 1.7.0 - -SPI Methods ------------ - -In order to make this feature available for your plugin, you must -implement the following optional plugin SPI methods. - -When implementing these, you may choose to use your own framework of choice -(Packer for image packing, etc.) By doing so, you can ignore the entire -framework and specification language described above. However, you may -wish to instead use the abstraction we've provided (its ability to keep -logic in one place for both image packing and cluster validation is useful -in the OpenStack context.) We will, of course, focus on that framework here. - -:: - - def get_image_arguments(self, hadoop_version): - """Gets the argument set taken by the plugin's image generator""" - - def pack_image(self, hadoop_version, remote, - test_only=False, image_arguments=None): - """Packs an image for registration in Glance and use by Sahara""" - - def validate_images(self, cluster, test_only=False, image_arguments=None): - """Validates the image to be used by a cluster""" - -The validate_images method is called after Heat provisioning of your cluster, -but before cluster configuration. If the test_only keyword of this method is -set to True, the method should only test the instances without modification. -If it is set to False, the method should make any necessary changes (this can -be used to allow clusters to be spun up from clean, OS-only images.) This -method is expected to use an ssh remote to communicate with instances, as -per normal in Sahara. - -The pack_image method can be used to modify an image file (it is called by the -CLI above). This method expects an ImageRemote, which is essentially a -libguestfs handle to the disk image file, allowing commands to be run on the -image directly (though it could be any concretion that allows commands to be -run against the image.) - -By this means, the validators described above can execute the same logic in -the image packing, instance validation, and instance preparation cases with -the same degree of interactivity and logical control. - -In order to future-proof this document against possible changes, the doctext -of these methods will not be reproduced here, but they are documented very -fully in the sahara.plugins.provisioning abstraction. - -These abstractions can be found in the module sahara.plugins.images. -You will find that the framework has been built with extensibility and -abstraction in mind: you can overwrite validator types, add your own -without modifying any core sahara modules, declare hierarchies of resource -locations for shared resources, and more. These features are documented in -the sahara.plugins.images module itself (which has copious doctext,) and we -encourage you to explore and ask questions of the community if you are -curious or wish to build your own image generation tooling. diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst deleted file mode 100644 index 3c7d7e6420..0000000000 --- a/doc/source/contributor/index.rst +++ /dev/null @@ -1,30 +0,0 @@ -===================== -Developer Information -===================== - -Programming HowTos and Tutorials -================================ - -.. toctree:: - :maxdepth: 2 - - development-guidelines - development-environment - devstack - dashboard-dev-environment-guide - how-to-build-oozie - adding-database-migrations - testing - log-guidelines - apiv2 - image-gen - -Other Resources -=============== - -.. 
toctree:: - :maxdepth: 2 - - contributing - gerrit - jenkins diff --git a/doc/source/contributor/jenkins.rst b/doc/source/contributor/jenkins.rst deleted file mode 100644 index e8ccbcc54a..0000000000 --- a/doc/source/contributor/jenkins.rst +++ /dev/null @@ -1,41 +0,0 @@ -Continuous Integration with Jenkins -=================================== - -Each change made to Sahara core code is tested with unit and integration tests -and style checks using flake8. - -Unit tests and style checks are performed on public `OpenStack Zuul -`_ instance. - -Unit tests are checked using python 2.7. - -The result of those checks and Unit tests are represented as a vote of +1 or --1 in the *Verify* column in code reviews from the *Jenkins* user. - -Integration tests check CRUD operations for the Image Registry, Templates, and -Clusters. Also a test job is launched on a created Cluster to verify Hadoop -work. - -All integration tests are launched by `Jenkins -`_ on the internal Mirantis OpenStack -Lab. - -Jenkins keeps a pool of VMs to run tests in parallel. Even with the pool of VMs -integration testing may take a while. - -Jenkins is controlled for the most part by Zuul which determines what jobs are -run when. - -Zuul status is available at this address: `Zuul Status -`_. - -For more information see: `Sahara Hadoop Cluster CI -`_. - -The integration tests result is represented as a vote of +1 or -1 in the -*Verify* column in a code review from the *Sahara Hadoop Cluster CI* user. - -You can put *sahara-ci-recheck* in comment, if you want to recheck sahara-ci -jobs. Also, you can put *recheck* in comment, if you want to recheck both -Jenkins and sahara-ci jobs. Finally, you can put *reverify* in a comment, if -you only want to recheck Jenkins jobs. diff --git a/doc/source/contributor/log-guidelines.rst b/doc/source/contributor/log-guidelines.rst deleted file mode 100644 index 4086f73349..0000000000 --- a/doc/source/contributor/log-guidelines.rst +++ /dev/null @@ -1,34 +0,0 @@ - -Log Guidelines -============== - -Levels Guidelines ------------------ - -During the Kilo release cycle the sahara community defined the following -log levels: - -* Debug: Shows everything and is likely not suitable for normal production - operation due to the sheer size of logs generated (e.g. scripts executions, - process execution, etc.). -* Info: Usually indicates successful service start/stop, versions and such - non-error related data. This should include largely positive units of work - that are accomplished (e.g. service setup and configuration, cluster start, - job execution information). -* Warning: Indicates that there might be a systemic issue; - potential predictive failure notice (e.g. job execution failed). -* Error: An error has occurred and the administrator should research the error - information (e.g. cluster failed to start, plugin violations of operation). -* Critical: An error has occurred and the system might be unstable, anything - that eliminates part of sahara's intended functionalities; immediately get - administrator assistance (e.g. failed to access keystone/database, failed to - load plugin). - - -Formatting Guidelines ---------------------- - -Sahara uses string formatting defined in `PEP 3101`_ for logs. - - -.. 
_PEP 3101: https://www.python.org/dev/peps/pep-3101/ diff --git a/doc/source/contributor/testing.rst b/doc/source/contributor/testing.rst deleted file mode 100644 index 92700a7f11..0000000000 --- a/doc/source/contributor/testing.rst +++ /dev/null @@ -1,36 +0,0 @@ -Sahara Testing -============== - -We have a bunch of different tests for Sahara. - -Unit Tests -++++++++++ - -In most Sahara sub-repositories we have a directory that contains Python unit -tests, located at `_package_/tests/unit` or `_package_/tests`. - -Scenario integration tests -++++++++++++++++++++++++++ - -New scenario integration tests were implemented for Sahara. They are available -in the sahara-tests repository -(https://opendev.org/openstack/sahara-tests). - -Tempest tests -+++++++++++++ - -Sahara has a Tempest plugin in the sahara-tests repository covering all major -API features. - -Additional tests -++++++++++++++++ - -Additional tests reside in the sahara-tests repository (as above): - -* REST API tests checking to ensure that the Sahara REST API works. - The only parts that are not tested are cluster creation and EDP. - -* CLI tests check read-only operations using the Sahara CLI. - -For more information about these tests, please read -`Tempest Integration of Sahara `_.
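As a companion to the image-generation SPI documentation removed above, here is a minimal, hypothetical sketch of a plugin provider implementing the three optional methods with the signatures quoted there. The class names, the stand-in validator, and the package list are illustrative placeholders rather than code from the Sahara tree::

    # Illustrative sketch only: ExampleImageValidator and ExamplePluginProvider
    # are hypothetical names, not classes from the Sahara code base.


    class ExampleImageValidator(object):
        """Stands in for the validator framework in sahara.plugins.images."""

        def __init__(self, required_packages):
            self.required_packages = required_packages

        def validate(self, remote, test_only=False, image_arguments=None):
            # With test_only=True, only check the instance or image; with
            # test_only=False, also install anything that is missing.
            for package in self.required_packages:
                if test_only:
                    remote.execute_command('rpm -q %s' % package)
                else:
                    remote.execute_command('yum install -y %s' % package)


    class ExamplePluginProvider(object):
        """Sketch of the three optional image-generation SPI methods."""

        _validator = ExampleImageValidator(['hadoop', 'oozie'])

        def get_image_arguments(self, hadoop_version):
            """Gets the argument set taken by the plugin's image generator"""
            # Simplified placeholder for the real argument-set objects.
            return {'java-version': '1.7.0'}

        def pack_image(self, hadoop_version, remote,
                       test_only=False, image_arguments=None):
            """Packs an image for registration in Glance and use by Sahara"""
            # Here 'remote' wraps a libguestfs handle to the image file, so
            # the same validator logic runs directly against the disk image.
            self._validator.validate(remote, test_only=test_only,
                                     image_arguments=image_arguments)

        def validate_images(self, cluster, test_only=False,
                            image_arguments=None):
            """Validates the image to be used by a cluster"""
            # Called after Heat provisioning but before cluster configuration;
            # talks to each instance over an ssh remote, as usual in Sahara.
            for node_group in cluster.node_groups:
                for instance in node_group.instances:
                    with instance.remote() as remote:
                        self._validator.validate(
                            remote, test_only=test_only,
                            image_arguments=image_arguments)

Keeping the test-versus-modify switch in one validator object is what lets the image-packing, instance-validation, and instance-preparation paths share the same logic, which is the property the removed document emphasizes.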
diff --git a/doc/source/images/hadoop-cluster-example.jpg b/doc/source/images/hadoop-cluster-example.jpg deleted file mode 100644 index 70f9f675da9a3f958ef0dc934a4b72194802bd5e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 38120 [base85-encoded JPEG data omitted]
diff --git a/doc/source/images/openstack-interop.png b/doc/source/images/openstack-interop.png deleted file mode 100644 index c2fdaf706cd80dffbc250c9234f32dea239a750c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 37251 [base85-encoded PNG data omitted]
diff --git a/doc/source/images/sahara-architecture.svg b/doc/source/images/sahara-architecture.svg deleted file mode 100644 index 81147b4f19..0000000000 --- a/doc/source/images/sahara-architecture.svg +++ /dev/null @@ -1,1529 +0,0 @@ [1529 lines of SVG markup omitted]