From 6c5898813c8dcee811530e2485a3890daaed7985 Mon Sep 17 00:00:00 2001 From: David Lyle Date: Fri, 20 Nov 2015 19:05:25 -0700 Subject: [PATCH] Excising Sahara content from Horizon This plugin moves the current content from the horizon repo to this plugin repo. The code has been tested in a devstack install using the following steps: 1. packaging the plugin: "python setup.py sdist" 2. pip installing the tar.gz in the resulting dist directory 3. a. (temporary step) remove existing sahara enabled files from horizon "rm openstack_dashboard/enabled/_18*.py" b. finding the install location and copying the enabled files into it "cp sahara_dashboard/enabled/* /opt/stack/horizon/local/enabled" 4. in /opt/stack/horizon a. python manage.py collectstatic b. python manage.py compress 5. restarting the horizon server Additionally, you can run the unit tests by: ./run_tests.sh Note: added script to programmatically remove the old configuration files from the targeted horizon install, either in venv or system install. Known issues: 1. running tests locally emits missing neutron service messages. 2. plugin code for devstack needs to be added 3. README is inadequate 4. integration tests are still in horizon repo 5. local copy of run_tests is heavy weight, but a better solution is not available currently. 6. 
localization tooling and strings Change-Id: Icdce2d3e945e612d368556dd5cea1930194c7b67 --- LICENSE | 1 + MANIFEST.in | 4 + README.rst | 13 + manage.py | 2 +- run_tests.sh | 20 +- .../__init__.py | 0 sahara_dashboard/api/__init__.py | 5 + sahara_dashboard/api/sahara.py | 465 +++++++++++++ .../content}/__init__.py | 0 .../content/data_processing}/__init__.py | 0 .../cluster_templates/__init__.py | 0 .../cluster_templates/forms.py | 58 ++ .../cluster_templates/panel.py | 28 + .../cluster_templates/tables.py | 149 +++++ .../data_processing/cluster_templates/tabs.py | 76 +++ .../_configure_general_help.html | 22 + .../_create_general_help.html | 4 + .../_details.html | 55 ++ .../_nodegroups_details.html | 81 +++ .../_upload_file.html | 23 + .../cluster_node_groups_template.html | 159 +++++ .../cluster_templates.html | 63 ++ .../configure.html | 7 + .../create.html | 11 + .../upload_file.html | 7 + .../cluster_templates/tests.py | 167 +++++ .../data_processing/cluster_templates/urls.py | 43 ++ .../cluster_templates/views.py | 149 +++++ .../cluster_templates/workflows/__init__.py | 0 .../cluster_templates/workflows/copy.py | 99 +++ .../cluster_templates/workflows/create.py | 337 ++++++++++ .../cluster_templates/workflows/edit.py | 103 +++ .../data_processing/clusters/__init__.py | 0 .../content/data_processing/clusters/panel.py | 28 + .../data_processing/clusters/tables.py | 177 +++++ .../content/data_processing/clusters/tabs.py | 195 ++++++ .../_configure_general_help.html | 20 + .../_create_cluster.html | 22 + .../_create_general_help.html | 4 + .../data_processing.clusters/_details.html | 92 +++ .../data_processing.clusters/_event_log.html | 62 ++ .../_instances_details.html | 4 + .../_nodegroups_details.html | 82 +++ .../_rich_status.html | 6 + .../data_processing.clusters/clusters.html | 58 ++ .../data_processing.clusters/configure.html | 7 + .../data_processing.clusters/create.html | 7 + .../create_cluster.html | 7 + .../data_processing.clusters/scale.html | 7 + 
.../content/data_processing/clusters/tests.py | 93 +++ .../content/data_processing/clusters/urls.py | 40 ++ .../content/data_processing/clusters/views.py | 225 +++++++ .../clusters/workflows/__init__.py | 0 .../clusters/workflows/create.py | 258 ++++++++ .../clusters/workflows/scale.py | 172 +++++ .../data_image_registry/__init__.py | 0 .../data_image_registry/forms.py | 116 ++++ .../data_image_registry/panel.py | 28 + .../data_image_registry/tables.py | 83 +++ .../_edit_tags.html | 28 + .../_help.html | 21 + .../_list_tags.html | 5 + .../_register_image.html | 26 + .../_tag_form.html | 123 ++++ .../edit_tags.html | 7 + .../image_registry.html | 24 + .../register_image.html | 7 + .../data_image_registry/tests.py | 131 ++++ .../data_image_registry/urls.py | 33 + .../data_image_registry/views.py | 129 ++++ .../data_processing/data_plugins/__init__.py | 0 .../data_processing/data_plugins/panel.py | 28 + .../data_processing/data_plugins/tables.py | 40 ++ .../data_processing/data_plugins/tabs.py | 46 ++ .../_details.html | 20 + .../data_processing.data_plugins/plugins.html | 11 + .../data_processing/data_plugins/tests.py | 49 ++ .../data_processing/data_plugins/urls.py | 25 + .../data_processing/data_plugins/views.py | 49 ++ .../data_processing/data_sources/__init__.py | 0 .../data_processing/data_sources/panel.py | 28 + .../data_processing/data_sources/tables.py | 78 +++ .../data_processing/data_sources/tabs.py | 44 ++ .../_create_data_source_help.html | 15 + .../_details.html | 18 + .../data_processing.data_sources/create.html | 7 + .../data_sources.html | 11 + .../data_processing/data_sources/tests.py | 124 ++++ .../data_processing/data_sources/urls.py | 35 + .../data_processing/data_sources/views.py | 99 +++ .../data_sources/workflows/__init__.py | 0 .../data_sources/workflows/create.py | 121 ++++ .../data_sources/workflows/edit.py | 79 +++ .../data_processing/job_binaries/__init__.py | 0 .../data_processing/job_binaries/forms.py | 311 +++++++++ 
.../data_processing/job_binaries/panel.py | 28 + .../data_processing/job_binaries/tables.py | 98 +++ .../data_processing/job_binaries/tabs.py | 43 ++ .../data_processing.job_binaries/_create.html | 26 + .../_create_job_binary_help.html | 32 + .../_details.html | 17 + .../data_processing.job_binaries/create.html | 7 + .../job_binaries.html | 19 + .../data_processing/job_binaries/tests.py | 125 ++++ .../data_processing/job_binaries/urls.py | 38 ++ .../data_processing/job_binaries/views.py | 146 +++++ .../job_executions/__init__.py | 0 .../data_processing/job_executions/panel.py | 28 + .../data_processing/job_executions/tables.py | 220 +++++++ .../data_processing/job_executions/tabs.py | 81 +++ .../_details.html | 45 ++ .../job_executions.html | 63 ++ .../data_processing/job_executions/tests.py | 68 ++ .../data_processing/job_executions/urls.py | 35 + .../data_processing/job_executions/views.py | 83 +++ .../content/data_processing/jobs/__init__.py | 0 .../content/data_processing/jobs/panel.py | 28 + .../content/data_processing/jobs/tables.py | 116 ++++ .../content/data_processing/jobs/tabs.py | 43 ++ .../_create_job_help.html | 31 + .../_create_job_libs_help.html | 12 + .../data_processing.jobs/_details.html | 30 + .../_launch_job_configure_help.html | 6 + .../_launch_job_help.html | 15 + .../data_processing.jobs/config_template.html | 244 +++++++ .../data_processing.jobs/create.html | 7 + .../job_interface_arguments_template.html | 43 ++ .../templates/data_processing.jobs/jobs.html | 81 +++ .../data_processing.jobs/launch.html | 7 + .../library_template.html | 104 +++ .../content/data_processing/jobs/tests.py | 200 ++++++ .../content/data_processing/jobs/urls.py | 41 ++ .../content/data_processing/jobs/views.py | 135 ++++ .../jobs/workflows/__init__.py | 0 .../data_processing/jobs/workflows/create.py | 281 ++++++++ .../data_processing/jobs/workflows/launch.py | 609 ++++++++++++++++++ .../nodegroup_templates/__init__.py | 0 .../nodegroup_templates/panel.py | 28 + 
.../nodegroup_templates/tables.py | 110 ++++ .../nodegroup_templates/tabs.py | 100 +++ .../_configure_general_help.html | 23 + .../_create_general_help.html | 4 + .../_details.html | 94 +++ .../_fields_help.html | 60 ++ .../_service_confs.html | 23 + .../configure.html | 7 + .../create.html | 11 + .../nodegroup_templates.html | 81 +++ .../nodegroup_templates/tests.py | 323 ++++++++++ .../nodegroup_templates/urls.py | 43 ++ .../nodegroup_templates/views.py | 145 +++++ .../nodegroup_templates/workflows/__init__.py | 0 .../nodegroup_templates/workflows/copy.py | 117 ++++ .../nodegroup_templates/workflows/create.py | 520 +++++++++++++++ .../nodegroup_templates/workflows/edit.py | 109 ++++ .../data_processing.event_log.js | 145 +++++ ...data_processing.job_interface_arguments.js | 175 +++++ .../content/data_processing/utils/__init__.py | 0 .../data_processing/utils/anti_affinity.py | 67 ++ .../content/data_processing/utils/helpers.py | 134 ++++ .../data_processing/utils/neutron_support.py | 32 + .../data_processing/utils/workflow_helpers.py | 326 ++++++++++ .../data_processing/wizard/__init__.py | 0 .../content/data_processing/wizard/forms.py | 121 ++++ .../content/data_processing/wizard/panel.py | 28 + .../_job_type_select.html | 30 + .../_job_type_select_help.html | 7 + .../_plugin_select.html | 30 + .../_plugin_select_help.html | 5 + .../data_processing.wizard/cluster_guide.html | 163 +++++ .../job_type_select.html | 7 + .../data_processing.wizard/jobex_guide.html | 114 ++++ .../data_processing.wizard/plugin_select.html | 7 + .../data_processing.wizard/wizard.html | 56 ++ .../content/data_processing/wizard/tests.py | 59 ++ .../content/data_processing/wizard/urls.py | 41 ++ .../content/data_processing/wizard/views.py | 102 +++ .../_1810_data_processing_panel_group.py | 8 + .../_1815_data_processing_wizard_panel.py | 24 + .../_1820_data_processing_clusters_panel.py | 31 + ...25_data_processing_job_executions_panel.py | 24 + ...data_processing_cluster_templates_panel.py 
| 24 + ...ta_processing_nodegroup_templates_panel.py | 25 + .../_1840_data_processing_jobs_panel.py | 27 + ...1845_data_processing_job_binaries_panel.py | 24 + ...1850_data_processing_data_sources_panel.py | 24 + ...ta_processing_data_image_registry_panel.py | 24 + ...1860_data_processing_data_plugins_panel.py | 24 + sahara_dashboard/enabled/__init__.py | 0 sahara_dashboard/test/__init__.py | 0 sahara_dashboard/test/api_tests/__init__.py | 0 .../test/api_tests/sahara_tests.py | 48 ++ sahara_dashboard/test/helpers.py | 57 ++ .../test/settings.py | 6 +- sahara_dashboard/test/test_data/__init__.py | 0 .../test/test_data/keystone_data.py | 26 + .../test/test_data/sahara_data.py | 598 +++++++++++++++++ sahara_dashboard/test/test_data/utils.py | 53 ++ sahara_dashboard/test/urls.py | 20 + setup.cfg | 12 +- tools/clean_enabled_files.py | 45 ++ tools/install_venv.py | 154 +++++ tools/with_venv.sh | 7 + tox.ini | 2 +- 204 files changed, 13666 insertions(+), 19 deletions(-) create mode 100644 MANIFEST.in mode change 100644 => 100755 run_tests.sh rename {sahara-dashboard => sahara_dashboard}/__init__.py (100%) create mode 100644 sahara_dashboard/api/__init__.py create mode 100644 sahara_dashboard/api/sahara.py rename {sahara-dashboard/enabled => sahara_dashboard/content}/__init__.py (100%) rename {sahara-dashboard/test => sahara_dashboard/content/data_processing}/__init__.py (100%) create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/__init__.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/forms.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/panel.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/tables.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/tabs.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_configure_general_help.html create mode 
100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_create_general_help.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_details.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_nodegroups_details.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_upload_file.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_node_groups_template.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_templates.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/configure.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/create.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/upload_file.html create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/tests.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/urls.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/views.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/workflows/__init__.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/workflows/copy.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/workflows/create.py create mode 100644 sahara_dashboard/content/data_processing/cluster_templates/workflows/edit.py create mode 100644 
sahara_dashboard/content/data_processing/clusters/__init__.py create mode 100644 sahara_dashboard/content/data_processing/clusters/panel.py create mode 100644 sahara_dashboard/content/data_processing/clusters/tables.py create mode 100644 sahara_dashboard/content/data_processing/clusters/tabs.py create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_configure_general_help.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_cluster.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_general_help.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_details.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_instances_details.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_nodegroups_details.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_rich_status.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/clusters.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/configure.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create_cluster.html create mode 100644 sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/scale.html create mode 100644 sahara_dashboard/content/data_processing/clusters/tests.py create mode 100644 
sahara_dashboard/content/data_processing/clusters/urls.py create mode 100644 sahara_dashboard/content/data_processing/clusters/views.py create mode 100644 sahara_dashboard/content/data_processing/clusters/workflows/__init__.py create mode 100644 sahara_dashboard/content/data_processing/clusters/workflows/create.py create mode 100644 sahara_dashboard/content/data_processing/clusters/workflows/scale.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/__init__.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/forms.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/panel.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/tables.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_edit_tags.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_help.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_tag_form.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/tests.py 
create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/urls.py create mode 100644 sahara_dashboard/content/data_processing/data_image_registry/views.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/__init__.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/panel.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/tables.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/tabs.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html create mode 100644 sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html create mode 100644 sahara_dashboard/content/data_processing/data_plugins/tests.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/urls.py create mode 100644 sahara_dashboard/content/data_processing/data_plugins/views.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/__init__.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/panel.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/tables.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/tabs.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html create mode 100644 sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html create mode 100644 sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html create mode 100644 sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html create mode 100644 sahara_dashboard/content/data_processing/data_sources/tests.py create mode 100644 
sahara_dashboard/content/data_processing/data_sources/urls.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/views.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/workflows/__init__.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/workflows/create.py create mode 100644 sahara_dashboard/content/data_processing/data_sources/workflows/edit.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/__init__.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/forms.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/panel.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/tables.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/tabs.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html create mode 100644 sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create_job_binary_help.html create mode 100644 sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_details.html create mode 100644 sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/create.html create mode 100644 sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/job_binaries.html create mode 100644 sahara_dashboard/content/data_processing/job_binaries/tests.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/urls.py create mode 100644 sahara_dashboard/content/data_processing/job_binaries/views.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/__init__.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/panel.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/tables.py create mode 
100644 sahara_dashboard/content/data_processing/job_executions/tabs.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/_details.html create mode 100644 sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html create mode 100644 sahara_dashboard/content/data_processing/job_executions/tests.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/urls.py create mode 100644 sahara_dashboard/content/data_processing/job_executions/views.py create mode 100644 sahara_dashboard/content/data_processing/jobs/__init__.py create mode 100644 sahara_dashboard/content/data_processing/jobs/panel.py create mode 100644 sahara_dashboard/content/data_processing/jobs/tables.py create mode 100644 sahara_dashboard/content/data_processing/jobs/tabs.py create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_help.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_libs_help.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_configure_help.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_help.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/config_template.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/create.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/job_interface_arguments_template.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/jobs.html create mode 100644 
sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/launch.html create mode 100644 sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/library_template.html create mode 100644 sahara_dashboard/content/data_processing/jobs/tests.py create mode 100644 sahara_dashboard/content/data_processing/jobs/urls.py create mode 100644 sahara_dashboard/content/data_processing/jobs/views.py create mode 100644 sahara_dashboard/content/data_processing/jobs/workflows/__init__.py create mode 100644 sahara_dashboard/content/data_processing/jobs/workflows/create.py create mode 100644 sahara_dashboard/content/data_processing/jobs/workflows/launch.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/__init__.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/panel.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/tables.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_create_general_help.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_details.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_fields_help.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_service_confs.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/configure.html create mode 100644 
sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/create.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/nodegroup_templates.html create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/tests.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/urls.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/views.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/workflows/__init__.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/workflows/copy.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/workflows/create.py create mode 100644 sahara_dashboard/content/data_processing/nodegroup_templates/workflows/edit.py create mode 100644 sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js create mode 100644 sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.job_interface_arguments.js create mode 100644 sahara_dashboard/content/data_processing/utils/__init__.py create mode 100644 sahara_dashboard/content/data_processing/utils/anti_affinity.py create mode 100644 sahara_dashboard/content/data_processing/utils/helpers.py create mode 100644 sahara_dashboard/content/data_processing/utils/neutron_support.py create mode 100644 sahara_dashboard/content/data_processing/utils/workflow_helpers.py create mode 100644 sahara_dashboard/content/data_processing/wizard/__init__.py create mode 100644 sahara_dashboard/content/data_processing/wizard/forms.py create mode 100644 sahara_dashboard/content/data_processing/wizard/panel.py create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_job_type_select.html create mode 100644 
sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_job_type_select_help.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select_help.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html create mode 100644 sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html create mode 100644 sahara_dashboard/content/data_processing/wizard/tests.py create mode 100644 sahara_dashboard/content/data_processing/wizard/urls.py create mode 100644 sahara_dashboard/content/data_processing/wizard/views.py create mode 100644 sahara_dashboard/enabled/_1810_data_processing_panel_group.py create mode 100644 sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py create mode 100644 sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py create mode 100644 sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py create mode 100644 sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py create mode 100644 sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py create mode 100644 sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py create mode 100644 sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py create mode 100644 sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py create mode 100644 
sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py create mode 100644 sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py create mode 100644 sahara_dashboard/enabled/__init__.py create mode 100644 sahara_dashboard/test/__init__.py create mode 100644 sahara_dashboard/test/api_tests/__init__.py create mode 100644 sahara_dashboard/test/api_tests/sahara_tests.py create mode 100644 sahara_dashboard/test/helpers.py rename {sahara-dashboard => sahara_dashboard}/test/settings.py (97%) create mode 100644 sahara_dashboard/test/test_data/__init__.py create mode 100644 sahara_dashboard/test/test_data/keystone_data.py create mode 100644 sahara_dashboard/test/test_data/sahara_data.py create mode 100644 sahara_dashboard/test/test_data/utils.py create mode 100644 sahara_dashboard/test/urls.py create mode 100644 tools/clean_enabled_files.py create mode 100644 tools/install_venv.py create mode 100755 tools/with_venv.sh diff --git a/LICENSE b/LICENSE index 67db858..68c771a 100644 --- a/LICENSE +++ b/LICENSE @@ -173,3 +173,4 @@ defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..e12d4e1 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,4 @@ +recursive-include sahara_dashboard *.html *.scss *.js + +include AUTHORS +include ChangeLog diff --git a/README.rst b/README.rst index cf74118..6a55198 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,19 @@ OpenStack Dashboard plugin for Sahara project ============================================= +How to use: +----------- + +Use pip to install the package on the server running Horizon. Then either copy +or link the files in sahara_dashboard/enabled to +openstack_dashboard/local/enabled. This step will cause the Horizon service to +pick up the Sahara plugin when it starts. 
+ +To run unit tests: +------------------ + +./run_tests.sh + NOTE: ===== diff --git a/manage.py b/manage.py index c89c3ba..53c74c1 100755 --- a/manage.py +++ b/manage.py @@ -19,5 +19,5 @@ from django.core.management import execute_from_command_line if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", - "sahara-dashboard.test.settings") + "sahara_dashboard.test.settings") execute_from_command_line(sys.argv) diff --git a/run_tests.sh b/run_tests.sh old mode 100644 new mode 100755 index 1f6d0c1..e3bd777 --- a/run_tests.sh +++ b/run_tests.sh @@ -55,7 +55,7 @@ root=`pwd -P` venv=$root/.venv venv_env_version=$venv/environments with_venv=tools/with_venv.sh -included_dirs="sahara-dashboard" +included_dirs="sahara_dashboard" always_venv=0 backup_env=0 @@ -165,7 +165,7 @@ function warn_on_flake8_without_venv { function run_pep8 { echo "Running flake8 ..." warn_on_flake8_without_venv - DJANGO_SETTINGS_MODULE=sahara-dashboard.test.settings ${command_wrapper} flake8 + DJANGO_SETTINGS_MODULE=sahara_dashboard.test.settings ${command_wrapper} flake8 } function run_pep8_changed { @@ -178,13 +178,13 @@ function run_pep8_changed { files=$(git diff --name-only $base_commit | tr '\n' ' ') echo "Running flake8 on ${files}" warn_on_flake8_without_venv - diff -u --from-file /dev/null ${files} | DJANGO_SETTINGS_MODULE=sahara-dashboard.test.settings ${command_wrapper} flake8 --diff + diff -u --from-file /dev/null ${files} | DJANGO_SETTINGS_MODULE=sahara_dashboard.test.settings ${command_wrapper} flake8 --diff exit } function run_sphinx { echo "Building sphinx..." - DJANGO_SETTINGS_MODULE=sahara-dashboard.test.settings ${command_wrapper} python setup.py build_sphinx + DJANGO_SETTINGS_MODULE=sahara_dashboard.test.settings ${command_wrapper} python setup.py build_sphinx echo "Build complete." 
} @@ -322,6 +322,10 @@ function run_tests { export SELENIUM_HEADLESS=1 fi + # TODO(david-lyle) remove when configuration files for Sahara are not loaded + # by default in Horizon + ${command_wrapper} python tools/clean_enabled_files.py + if [ -z "$testargs" ]; then run_tests_all else @@ -335,8 +339,8 @@ function run_tests_subset { } function run_tests_all { - echo "Running Sahara-Dashboard application tests" - export NOSE_XUNIT_FILE=sahara-dashboard/nosetests.xml + echo "Running sahara_dashboard application tests" + export NOSE_XUNIT_FILE=sahara_dashboard/nosetests.xml if [ "$NOSE_WITH_HTML_OUTPUT" = '1' ]; then export NOSE_HTML_OUT_FILE='sahara_dashboard_nose_results.html' fi @@ -344,7 +348,7 @@ function run_tests_all { ${command_wrapper} python -m coverage.__main__ erase coverage_run="python -m coverage.__main__ run -p" fi - ${command_wrapper} ${coverage_run} $root/manage.py test sahara-dashboard --settings=sahara-dashboard.test.settings $testopts + ${command_wrapper} ${coverage_run} $root/manage.py test sahara_dashboard --settings=sahara_dashboard.test.settings $testopts # get results of the Horizon tests SAHARA_DASHBOARD_RESULT=$? 
@@ -545,4 +549,4 @@ if [ $runserver -eq 1 ]; then fi # Full test suite -run_tests || exit \ No newline at end of file +run_tests || exit diff --git a/sahara-dashboard/__init__.py b/sahara_dashboard/__init__.py similarity index 100% rename from sahara-dashboard/__init__.py rename to sahara_dashboard/__init__.py diff --git a/sahara_dashboard/api/__init__.py b/sahara_dashboard/api/__init__.py new file mode 100644 index 0000000..40a8703 --- /dev/null +++ b/sahara_dashboard/api/__init__.py @@ -0,0 +1,5 @@ +from sahara_dashboard.api import sahara + +__all__ = [ + "sahara" +] diff --git a/sahara_dashboard/api/sahara.py b/sahara_dashboard/api/sahara.py new file mode 100644 index 0000000..250882c --- /dev/null +++ b/sahara_dashboard/api/sahara.py @@ -0,0 +1,465 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.conf import settings + +from horizon import exceptions +from horizon.utils.memoized import memoized # noqa +from openstack_dashboard.api import base + +from saharaclient.api.base import APIException +from saharaclient import client as api_client + + +LOG = logging.getLogger(__name__) + +# "type" of Sahara service registered in keystone +SAHARA_SERVICE = 'data-processing' +# Sahara service_type registered in Juno +SAHARA_SERVICE_FALLBACK = 'data_processing' + +SAHARA_AUTO_IP_ALLOCATION_ENABLED = getattr( + settings, + 'SAHARA_AUTO_IP_ALLOCATION_ENABLED', + False) +VERSIONS = base.APIVersionManager( + SAHARA_SERVICE, + preferred_version=getattr(settings, + 'OPENSTACK_API_VERSIONS', + {}).get(SAHARA_SERVICE, 1.1)) +VERSIONS.load_supported_version(1.1, {"client": api_client, + "version": 1.1}) + + +def safe_call(func, *args, **kwargs): + """Call a function ignoring Not Found error + + This method is supposed to be used only for safe retrieving Sahara + objects. If the object is no longer available the None should be + returned. + + """ + + try: + return func(*args, **kwargs) + except APIException as e: + if e.error_code == 404: + return None # Not found. 
Exiting with None + raise # Other errors are not expected here + + +@memoized +def client(request): + try: + service_type = SAHARA_SERVICE + sahara_url = base.url_for(request, service_type) + except exceptions.ServiceCatalogException: + # if no endpoint found, fallback to the old service_type + service_type = SAHARA_SERVICE_FALLBACK + sahara_url = base.url_for(request, service_type) + + insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False) + cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None) + return api_client.Client(VERSIONS.get_active_version()["version"], + sahara_url=sahara_url, + service_type=service_type, + project_id=request.user.project_id, + input_auth_token=request.user.token.id, + insecure=insecure, + cacert=cacert) + + +def image_list(request, search_opts=None): + return client(request).images.list(search_opts=search_opts) + + +def image_get(request, image_id): + return client(request).images.get(id=image_id) + + +def image_unregister(request, image_id): + client(request).images.unregister_image(image_id=image_id) + + +def image_update(request, image_id, user_name, desc): + client(request).images.update_image(image_id=image_id, + user_name=user_name, + desc=desc) + + +def image_tags_update(request, image_id, image_tags): + client(request).images.update_tags(image_id=image_id, + new_tags=image_tags) + + +def plugin_list(request, search_opts=None): + return client(request).plugins.list(search_opts=search_opts) + + +def plugin_get(request, plugin_name): + return client(request).plugins.get(plugin_name=plugin_name) + + +def plugin_get_version_details(request, plugin_name, hadoop_version): + return client(request).plugins.get_version_details( + plugin_name=plugin_name, + hadoop_version=hadoop_version) + + +def plugin_convert_to_template(request, plugin_name, hadoop_version, + template_name, file_content): + return client(request).plugins.convert_to_cluster_template( + plugin_name=plugin_name, + hadoop_version=hadoop_version, + 
template_name=template_name, + filecontent=file_content) + + +def nodegroup_template_create(request, name, plugin_name, hadoop_version, + flavor_id, description=None, + volumes_per_node=None, volumes_size=None, + node_processes=None, node_configs=None, + floating_ip_pool=None, security_groups=None, + auto_security_group=False, + availability_zone=False, + volumes_availability_zone=False, + volume_type=None, + is_proxy_gateway=False, + volume_local_to_instance=False, + use_autoconfig=None): + return client(request).node_group_templates.create( + name=name, + plugin_name=plugin_name, + hadoop_version=hadoop_version, + flavor_id=flavor_id, + description=description, + volumes_per_node=volumes_per_node, + volumes_size=volumes_size, + node_processes=node_processes, + node_configs=node_configs, + floating_ip_pool=floating_ip_pool, + security_groups=security_groups, + auto_security_group=auto_security_group, + availability_zone=availability_zone, + volumes_availability_zone=volumes_availability_zone, + volume_type=volume_type, + is_proxy_gateway=is_proxy_gateway, + volume_local_to_instance=volume_local_to_instance, + use_autoconfig=use_autoconfig) + + +def nodegroup_template_list(request, search_opts=None): + return client(request).node_group_templates.list(search_opts=search_opts) + + +def nodegroup_template_get(request, ngt_id): + return client(request).node_group_templates.get(ng_template_id=ngt_id) + + +def nodegroup_template_find(request, **kwargs): + return client(request).node_group_templates.find(**kwargs) + + +def nodegroup_template_delete(request, ngt_id): + client(request).node_group_templates.delete(ng_template_id=ngt_id) + + +def nodegroup_template_update(request, ngt_id, name, plugin_name, + hadoop_version, flavor_id, + description=None, volumes_per_node=None, + volumes_size=None, node_processes=None, + node_configs=None, floating_ip_pool=None, + security_groups=None, auto_security_group=False, + availability_zone=False, + volumes_availability_zone=False, + 
volume_type=None, + is_proxy_gateway=False, + volume_local_to_instance=False, + use_autoconfig=None): + return client(request).node_group_templates.update( + ng_template_id=ngt_id, + name=name, + plugin_name=plugin_name, + hadoop_version=hadoop_version, + flavor_id=flavor_id, + description=description, + volumes_per_node=volumes_per_node, + volumes_size=volumes_size, + node_processes=node_processes, + node_configs=node_configs, + floating_ip_pool=floating_ip_pool, + security_groups=security_groups, + auto_security_group=auto_security_group, + availability_zone=availability_zone, + volumes_availability_zone=volumes_availability_zone, + volume_type=volume_type, + is_proxy_gateway=is_proxy_gateway, + volume_local_to_instance=volume_local_to_instance, + use_autoconfig=use_autoconfig) + + +def cluster_template_create(request, name, plugin_name, hadoop_version, + description=None, cluster_configs=None, + node_groups=None, anti_affinity=None, + net_id=None, use_autoconfig=None): + return client(request).cluster_templates.create( + name=name, + plugin_name=plugin_name, + hadoop_version=hadoop_version, + description=description, + cluster_configs=cluster_configs, + node_groups=node_groups, + anti_affinity=anti_affinity, + net_id=net_id, + use_autoconfig=use_autoconfig) + + +def cluster_template_list(request, search_opts=None): + return client(request).cluster_templates.list(search_opts=search_opts) + + +def cluster_template_get(request, ct_id): + return client(request).cluster_templates.get(cluster_template_id=ct_id) + + +def cluster_template_delete(request, ct_id): + client(request).cluster_templates.delete(cluster_template_id=ct_id) + + +def cluster_template_update(request, ct_id, name, plugin_name, + hadoop_version, description=None, + cluster_configs=None, node_groups=None, + anti_affinity=None, net_id=None, + use_autoconfig=None): + try: + template = client(request).cluster_templates.update( + cluster_template_id=ct_id, + name=name, + plugin_name=plugin_name, + 
hadoop_version=hadoop_version, + description=description, + cluster_configs=cluster_configs, + node_groups=node_groups, + anti_affinity=anti_affinity, + net_id=net_id, + use_autoconfig=use_autoconfig) + + except APIException as e: + raise exceptions.Conflict(e) + return template + + +def cluster_create(request, name, plugin_name, hadoop_version, + cluster_template_id=None, default_image_id=None, + is_transient=None, description=None, cluster_configs=None, + node_groups=None, user_keypair_id=None, anti_affinity=None, + net_id=None, count=None, use_autoconfig=None): + return client(request).clusters.create( + name=name, + plugin_name=plugin_name, + hadoop_version=hadoop_version, + cluster_template_id=cluster_template_id, + default_image_id=default_image_id, + is_transient=is_transient, + description=description, + cluster_configs=cluster_configs, + node_groups=node_groups, + user_keypair_id=user_keypair_id, + anti_affinity=anti_affinity, + net_id=net_id, + count=count, + use_autoconfig=use_autoconfig) + + +def cluster_scale(request, cluster_id, scale_object): + return client(request).clusters.scale( + cluster_id=cluster_id, + scale_object=scale_object) + + +def cluster_list(request, search_opts=None): + return client(request).clusters.list(search_opts=search_opts) + + +def cluster_get(request, cluster_id, show_progress=False): + return client(request).clusters.get( + cluster_id=cluster_id, + show_progress=show_progress) + + +def cluster_delete(request, cluster_id): + client(request).clusters.delete(cluster_id=cluster_id) + + +def data_source_create(request, name, description, ds_type, url, + credential_user=None, credential_pass=None): + return client(request).data_sources.create( + name=name, + description=description, + data_source_type=ds_type, + url=url, + credential_user=credential_user, + credential_pass=credential_pass) + + +def data_source_list(request, search_opts=None): + return client(request).data_sources.list(search_opts=search_opts) + + +def 
data_source_get(request, ds_id): + return client(request).data_sources.get(data_source_id=ds_id) + + +def data_source_delete(request, ds_id): + client(request).data_sources.delete(data_source_id=ds_id) + + +def data_source_update(request, ds_id, data): + return client(request).data_sources.update(ds_id, data) + + +def job_binary_create(request, name, url, description, extra): + return client(request).job_binaries.create( + name=name, + url=url, + description=description, + extra=extra) + + +def job_binary_list(request, search_opts=None): + return client(request).job_binaries.list(search_opts=search_opts) + + +def job_binary_get(request, jb_id): + return client(request).job_binaries.get(job_binary_id=jb_id) + + +def job_binary_delete(request, jb_id): + client(request).job_binaries.delete(job_binary_id=jb_id) + + +def job_binary_get_file(request, jb_id): + return client(request).job_binaries.get_file(job_binary_id=jb_id) + + +def job_binary_update(request, jb_id, data): + return client(request).job_binaries.update(jb_id, data) + + +def job_binary_internal_create(request, name, data): + return client(request).job_binary_internals.create( + name=name, + data=data) + + +def job_binary_internal_list(request, search_opts=None): + return client(request).job_binary_internals.list(search_opts=search_opts) + + +def job_binary_internal_get(request, jbi_id): + # The argument name looks wrong. This should be changed in the sahara + # client first and then updated here + return client(request).job_binary_internals.get(job_binary_id=jbi_id) + + +def job_binary_internal_delete(request, jbi_id): + # The argument name looks wrong. 
This should be changed in the sahara + # client first and then updated here + client(request).job_binary_internals.delete(job_binary_id=jbi_id) + + +def job_create(request, name, j_type, mains, libs, description, interface): + return client(request).jobs.create( + name=name, + type=j_type, + mains=mains, + libs=libs, + description=description, + interface=interface) + + +def job_list(request, search_opts=None): + return client(request).jobs.list(search_opts=search_opts) + + +def job_get(request, job_id): + return client(request).jobs.get(job_id=job_id) + + +def job_delete(request, job_id): + client(request).jobs.delete(job_id=job_id) + + +def job_get_configs(request, job_type): + return client(request).jobs.get_configs(job_type=job_type) + + +def job_execution_create(request, job_id, cluster_id, + input_id, output_id, configs, + interface): + return client(request).job_executions.create( + job_id=job_id, + cluster_id=cluster_id, + input_id=input_id, + output_id=output_id, + configs=configs, + interface=interface) + + +def _resolve_job_execution_names(job_execution, cluster=None, + job=None): + + job_execution.cluster_name = None + if cluster: + job_execution.cluster_name = cluster.name + + job_execution.job_name = None + if job: + job_execution.job_name = job.name + + return job_execution + + +def job_execution_list(request, search_opts=None): + job_execution_list = client(request).job_executions.list( + search_opts=search_opts) + job_dict = dict((j.id, j) for j in job_list(request)) + cluster_dict = dict((c.id, c) for c in cluster_list(request)) + + resolved_job_execution_list = [ + _resolve_job_execution_names( + job_execution, + cluster_dict.get(job_execution.cluster_id), + job_dict.get(job_execution.job_id)) + for job_execution in job_execution_list + ] + + return resolved_job_execution_list + + +def job_execution_get(request, jex_id): + jex = client(request).job_executions.get(obj_id=jex_id) + cluster = safe_call(client(request).clusters.get, jex.cluster_id) + 
job = safe_call(client(request).jobs.get, jex.job_id) + + return _resolve_job_execution_names(jex, cluster, job) + + +def job_execution_delete(request, jex_id): + client(request).job_executions.delete(obj_id=jex_id) + + +def job_types_list(request): + return client(request).job_types.list() diff --git a/sahara-dashboard/enabled/__init__.py b/sahara_dashboard/content/__init__.py similarity index 100% rename from sahara-dashboard/enabled/__init__.py rename to sahara_dashboard/content/__init__.py diff --git a/sahara-dashboard/test/__init__.py b/sahara_dashboard/content/data_processing/__init__.py similarity index 100% rename from sahara-dashboard/test/__init__.py rename to sahara_dashboard/content/data_processing/__init__.py diff --git a/sahara_dashboard/content/data_processing/cluster_templates/__init__.py b/sahara_dashboard/content/data_processing/cluster_templates/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/cluster_templates/forms.py b/sahara_dashboard/content/data_processing/cluster_templates/forms.py new file mode 100644 index 0000000..d0d77a1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/forms.py @@ -0,0 +1,58 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing. \ + utils import workflow_helpers + +LOG = logging.getLogger(__name__) + + +class UploadFileForm(forms.SelfHandlingForm, + workflow_helpers.PluginAndVersionMixin): + template_name = forms.CharField(max_length=80, + label=_("Cluster Template Name")) + + def __init__(self, request, *args, **kwargs): + super(UploadFileForm, self).__init__(request, *args, **kwargs) + + sahara = saharaclient.client(request) + self._generate_plugin_version_fields(sahara) + + self.fields['template_file'] = forms.FileField(label=_("Template")) + + def handle(self, request, data): + try: + # we can set a limit on file size, but should we? + filecontent = self.files['template_file'].read() + + plugin_name = data['plugin_name'] + hadoop_version = data.get(plugin_name + "_version") + + saharaclient.plugin_convert_to_template(request, + plugin_name, + hadoop_version, + data['template_name'], + filecontent) + return True + except Exception: + exceptions.handle(request, + _("Unable to upload cluster template file")) + return False diff --git a/sahara_dashboard/content/data_processing/cluster_templates/panel.py b/sahara_dashboard/content/data_processing/cluster_templates/panel.py new file mode 100644 index 0000000..94b9d40 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class ClusterTemplatesPanel(horizon.Panel): + name = _("Cluster Templates") + slug = 'data_processing.cluster_templates' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(ClusterTemplatesPanel) diff --git a/sahara_dashboard/content/data_processing/cluster_templates/tables.py b/sahara_dashboard/content/data_processing/cluster_templates/tables.py new file mode 100644 index 0000000..d880f80 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/tables.py @@ -0,0 +1,149 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.core import urlresolvers +from django.template import defaultfilters as filters +from django.utils import http +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import tables + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class ClusterTemplatesFilterAction(tables.FilterAction): + filter_type = "server" + filter_choices = (('name', _("Name"), True), + ('plugin_name', _("Plugin"), True), + ('hadoop_version', _("Version"), True), + ('description', _("Description"))) + + +class UploadFile(tables.LinkAction): + name = 'upload_file' + verbose_name = _("Upload Template") + url = 'horizon:project:data_processing.cluster_templates:upload_file' + classes = ("btn-launch", "ajax-modal") + icon = "upload" + + +class CreateCluster(tables.LinkAction): + name = "create cluster" + verbose_name = _("Launch Cluster") + url = "horizon:project:data_processing.clusters:configure-cluster" + classes = ("ajax-modal",) + icon = "plus" + + def get_link_url(self, datum): + base_url = urlresolvers.reverse(self.url) + + params = http.urlencode({"hadoop_version": datum.hadoop_version, + "plugin_name": datum.plugin_name, + "cluster_template_id": datum.id}) + return "?".join([base_url, params]) + + +class CopyTemplate(tables.LinkAction): + name = "copy" + verbose_name = _("Copy Template") + url = "horizon:project:data_processing.cluster_templates:copy" + classes = ("ajax-modal", ) + + +class EditTemplate(tables.LinkAction): + name = "edit" + verbose_name = _("Edit Template") + url = "horizon:project:data_processing.cluster_templates:edit" + classes = ("ajax-modal", ) + + +class DeleteTemplate(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Template", + u"Delete Templates", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Template", + 
u"Deleted Templates", + count + ) + + def delete(self, request, template_id): + saharaclient.cluster_template_delete(request, template_id) + + +class CreateClusterTemplate(tables.LinkAction): + name = "create" + verbose_name = _("Create Template") + url = ("horizon:project:data_processing.cluster_templates:" + "create-cluster-template") + classes = ("ajax-modal", "create-clustertemplate-btn") + icon = "plus" + + +class ConfigureClusterTemplate(tables.LinkAction): + name = "configure" + verbose_name = _("Configure Cluster Template") + url = ("horizon:project:data_processing.cluster_templates:" + "configure-cluster-template") + classes = ("ajax-modal", "configure-clustertemplate-btn") + icon = "plus" + attrs = {"style": "display: none"} + + +def render_node_groups(cluster_template): + node_groups = [node_group['name'] + ': ' + str(node_group['count']) + for node_group in cluster_template.node_groups] + return node_groups + + +class ClusterTemplatesTable(tables.DataTable): + name = tables.Column("name", + verbose_name=_("Name"), + link=("horizon:project:data_processing." 
+ "cluster_templates:details")) + plugin_name = tables.Column("plugin_name", + verbose_name=_("Plugin")) + hadoop_version = tables.Column("hadoop_version", + verbose_name=_("Version")) + node_groups = tables.Column(render_node_groups, + verbose_name=_("Node Groups"), + wrap_list=True, + filters=(filters.unordered_list,)) + description = tables.Column("description", + verbose_name=_("Description")) + + class Meta(object): + name = "cluster_templates" + verbose_name = _("Cluster Templates") + table_actions = (UploadFile, + CreateClusterTemplate, + ConfigureClusterTemplate, + DeleteTemplate, + ClusterTemplatesFilterAction,) + + row_actions = (CreateCluster, + EditTemplate, + CopyTemplate, + DeleteTemplate,) diff --git a/sahara_dashboard/content/data_processing/cluster_templates/tabs.py b/sahara_dashboard/content/data_processing/cluster_templates/tabs.py new file mode 100644 index 0000000..ba09e06 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/tabs.py @@ -0,0 +1,76 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tabs + +from openstack_dashboard.api import nova +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content. 
\ + data_processing.utils import workflow_helpers as helpers + + +LOG = logging.getLogger(__name__) + + +class GeneralTab(tabs.Tab): + name = _("General Info") + slug = "cluster_template_details_tab" + template_name = ( + "project/data_processing.cluster_templates/_details.html") + + def get_context_data(self, request): + template_id = self.tab_group.kwargs['template_id'] + try: + template = saharaclient.cluster_template_get(request, template_id) + except Exception as e: + template = {} + LOG.error("Unable to fetch cluster template details: %s" % str(e)) + return {"template": template} + + +class NodeGroupsTab(tabs.Tab): + name = _("Node Groups") + slug = "cluster_template_nodegroups_tab" + template_name = ( + "project/data_processing.cluster_templates/_nodegroups_details.html") + + def get_context_data(self, request): + template_id = self.tab_group.kwargs['template_id'] + try: + template = saharaclient.cluster_template_get(request, template_id) + for ng in template.node_groups: + if not ng["flavor_id"]: + continue + ng["flavor_name"] = ( + nova.flavor_get(request, ng["flavor_id"]).name) + ng["node_group_template"] = saharaclient.safe_call( + saharaclient.nodegroup_template_get, + request, ng.get("node_group_template_id", None)) + ng["security_groups_full"] = helpers.get_security_groups( + request, ng.get("security_groups")) + except Exception: + template = {} + exceptions.handle(request, + _("Unable to fetch node group details.")) + return {"template": template} + + +class ClusterTemplateDetailsTabs(tabs.TabGroup): + slug = "cluster_template_details" + tabs = (GeneralTab, NodeGroupsTab, ) + sticky = True diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_configure_general_help.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_configure_general_help.html new file mode 100644 index 0000000..36cf092 --- /dev/null +++ 
b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_configure_general_help.html @@ -0,0 +1,22 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}This Cluster Template will be created for:{% endblocktrans %} +
+ {% blocktrans %}Plugin{% endblocktrans %}: {{ plugin_name }} +
+ {% blocktrans %}Version{% endblocktrans %}: {{ hadoop_version }} +
+

+

+ {% blocktrans %}The Cluster Template object should specify Node Group Templates that will be used to build a Cluster. + You can add Node Groups using Node Group Templates on a "Node Groups" tab.{% endblocktrans %} +

+

+ {% blocktrans %}You may set cluster scoped configurations on corresponding tabs.{% endblocktrans %} +

+

+ {% blocktrans %}The Cluster Template object may specify a list of processes in anti-affinity group. + That means these processes may not be launched more than once on a single host.{% endblocktrans %} +

+
diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_create_general_help.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_create_general_help.html new file mode 100644 index 0000000..4266625 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_create_general_help.html @@ -0,0 +1,4 @@ +{% load i18n horizon %} +

+ {% blocktrans %}Select a plugin and version for a new Cluster template.{% endblocktrans %} +

diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_details.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_details.html new file mode 100644 index 0000000..63b0896 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_details.html @@ -0,0 +1,55 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Name" %}
+
{{ template.name }}
+
{% trans "ID" %}
+
{{ template.id }}
+
{% trans "Description" %}
+
{{ template.description|default:_("None") }}
+
+
+
{% trans "Plugin" %}
+
{{ template.plugin_name }}
+
{% trans "Version" %}
+
{{ template.hadoop_version }}
+
{% trans "Use auto-configuration" %}
+
{{ template.use_autoconfig }}
+
+
+
{% trans "Anti-affinity enabled for" %}
+ {% if template.anti_affinity %} +
+
    + {% for process in template.anti_affinity %} +
  • {{ process }}
  • + {% endfor %} +
+
+ {% else %} +
{% trans "no processes" %}
+ {% endif %} +
+
+
{% trans "Node Configurations" %}
+ {% if template.cluster_configs %} +
+ {% for service, service_conf in template.cluster_configs.items %} +

{{ service }}

+ {% if service_conf %} +
    + {% for conf_name, conf_value in service_conf.items %} +
  • {% blocktrans %}{{ conf_name }}: {{ conf_value }}{% endblocktrans %}
  • + {% endfor %} +
+ {% else %} +
{% trans "No configurations" %}
+ {% endif %} + {% endfor %} +
+ {% else %} +
{% trans "Cluster configurations are not specified" %}
+ {% endif %} +
+
diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_nodegroups_details.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_nodegroups_details.html new file mode 100644 index 0000000..0dbcae1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_nodegroups_details.html @@ -0,0 +1,81 @@ +{% load i18n sizeformat %} + +
+ {% for node_group in template.node_groups %} +
+

{% blocktrans with node_group_name=node_group.name %}Node Group: {{ node_group_name }}{% endblocktrans %}

+
{% trans "Nodes Count" %}
+
{{ node_group.count }}
+ +
{% trans "Flavor" %}
+
{{ node_group.flavor_id|default:_("Flavor is not specified") }}
+ +
{% trans "Template" %}
+ {% if node_group.node_group_template_id %} +
{{ node_group.node_group_template.name }}
+ {% else %} +
{% trans "Template not specified" %}
+ {% endif %} + + {% if node_group.availability_zone %} +
{% trans "Availability Zone" %}
+
{{ node_group.availability_zone }}
+ {% endif %} + +
{% trans "Use auto-configuration" %}
+
{{ node_group.use_autoconfig }}
+ +
{% trans "Proxy Gateway" %}
+
{{ node_group.is_proxy_gateway|yesno }}
+ +
{% trans "Auto Security Group" %}
+
{{ node_group.auto_security_group|yesno }}
+ +
{% trans "Security Groups" %}
+
+
    + {% for group in node_group.security_groups_full %} + {% if group.id %} +
  • {{ group.name }}
  • + {% else %} +
  • {{ group.name }}
  • + {% endif %} + {% endfor %} +
+
+ +
{% trans "Node Processes" %}
+ {% if node_group.node_processes %} +
+
    + {% for process in node_group.node_processes %} +
  • {{ process }}
  • + {% endfor %} +
+
+ {% else %} +
{% trans "Node processes are not specified" %}
+ {% endif %} + +
{% trans "Node Configurations" %}
+ {% if node_group.node_configs %} +
+ {% for service, service_conf in node_group.node_configs.items %} +
{{ service }}
+ {% if service_conf %} +
    + {% for conf_name, conf_value in service_conf.items %} +
  • {% blocktrans %}{{ conf_name }}: {{ conf_value }}{% endblocktrans %}
  • + {% endfor %} +
+ {% else %} +
{% trans "No configurations" %}
+ {% endif %} + {% endfor %} +
+ {% else %} +
{% trans "Node configurations are not specified" %}
+ {% endif %} +
+ {% endfor %} +
diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_upload_file.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_upload_file.html new file mode 100644 index 0000000..9ad4e9f --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/_upload_file.html @@ -0,0 +1,23 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}upload_file{% endblock %} +{% block form_action %}{% url 'horizon:project:data_processing.cluster_templates:upload_file' %}{% endblock %} +{% block form_attrs %}enctype="multipart/form-data"{% endblock %} + +{% block modal-header %}{% trans "Upload Template" %}{% endblock %} + +{% block modal-body %} +
+
+ {% include "horizon/common/_form_fields.html" %} +
+
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_node_groups_template.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_node_groups_template.html new file mode 100644 index 0000000..2a2b234 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_node_groups_template.html @@ -0,0 +1,159 @@ +{% load i18n %} + + + + + + + + + + + +
+
+ + + + + + +
+
+ diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_templates.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_templates.html new file mode 100644 index 0000000..a7263e4 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/cluster_templates.html @@ -0,0 +1,63 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ cluster_templates_table.render }} +
+ + + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/configure.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/configure.html new file mode 100644 index 0000000..72d3683 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/configure.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Create Cluster Template" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/create.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/create.html new file mode 100644 index 0000000..9d497c5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/create.html @@ -0,0 +1,11 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{{ name }}{% endblock %} + +{% block page_header %} + {% include "horizon/common/_page_header.html" with title=name %} +{% endblock page_header %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/upload_file.html b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/upload_file.html new file mode 100644 index 0000000..a694a68 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/templates/data_processing.cluster_templates/upload_file.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Upload Template" %}{% endblock %} + +{% block main %} + {%
include 'project/data_processing.cluster_templates/_upload_file.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/cluster_templates/tests.py b/sahara_dashboard/content/data_processing/cluster_templates/tests.py new file mode 100644 index 0000000..8ea15fd --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/tests.py @@ -0,0 +1,167 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import base64 +import copy + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from oslo_serialization import jsonutils +import six + +from openstack_dashboard import api as dash_api +from openstack_dashboard.test import helpers as test + +from sahara_dashboard import api + +INDEX_URL = reverse('horizon:project:data_processing.cluster_templates:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.cluster_templates:details', args=['id']) + + +class DataProcessingClusterTemplateTests(test.TestCase): + @test.create_stubs({api.sahara: ('cluster_template_list',)}) + def test_index(self): + api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.cluster_templates.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed(res, + 'project/data_processing.cluster_templates/' + 'cluster_templates.html') + self.assertContains(res, 'Cluster Templates') + self.assertContains(res, 'Name') + + @test.create_stubs({api.sahara: 
('cluster_template_get',), + dash_api.nova: ('flavor_get',)}) + def test_details(self): + flavor = self.flavors.first() + ct = self.cluster_templates.first() + dash_api.nova.flavor_get(IsA(http.HttpRequest), flavor.id) \ + .MultipleTimes().AndReturn(flavor) + api.sahara.cluster_template_get(IsA(http.HttpRequest), + IsA(six.text_type)) \ + .MultipleTimes().AndReturn(ct) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + + @test.create_stubs({api.sahara: ('cluster_template_get', + 'plugin_get_version_details', + 'nodegroup_template_find')}) + def test_copy(self): + ct = self.cluster_templates.first() + ngts = self.nodegroup_templates.list() + configs = self.plugins_configs.first() + api.sahara.cluster_template_get(IsA(http.HttpRequest), + ct.id) \ + .AndReturn(ct) + api.sahara.plugin_get_version_details(IsA(http.HttpRequest), + ct.plugin_name, + ct.hadoop_version) \ + .MultipleTimes().AndReturn(configs) + api.sahara.nodegroup_template_find(IsA(http.HttpRequest), + plugin_name=ct.plugin_name, + hadoop_version=ct.hadoop_version) \ + .MultipleTimes().AndReturn(ngts) + self.mox.ReplayAll() + + url = reverse('horizon:project:data_processing.cluster_templates:copy', + args=[ct.id]) + res = self.client.get(url) + workflow = res.context['workflow'] + step = workflow.get_step("generalconfigaction") + self.assertEqual(step.action['cluster_template_name'].field.initial, + ct.name + "-copy") + + @test.create_stubs({api.sahara: ('cluster_template_list', + 'cluster_template_delete')}) + def test_delete(self): + ct = self.cluster_templates.first() + api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.cluster_templates.list()) + api.sahara.cluster_template_delete(IsA(http.HttpRequest), ct.id) + self.mox.ReplayAll() + + form_data = {'action': 'cluster_templates__delete__%s' % ct.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + 
self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('cluster_template_get', + 'cluster_template_update', + 'plugin_get_version_details', + 'nodegroup_template_find')}) + def test_update(self): + ct = self.cluster_templates.first() + ngts = self.nodegroup_templates.list() + configs = self.plugins_configs.first() + new_name = "UpdatedName" + new_ct = copy.copy(ct) + new_ct.name = new_name + + api.sahara.cluster_template_get(IsA(http.HttpRequest), ct.id) \ + .AndReturn(ct) + api.sahara.plugin_get_version_details(IsA(http.HttpRequest), + ct.plugin_name, + ct.hadoop_version) \ + .MultipleTimes().AndReturn(configs) + api.sahara.nodegroup_template_find(IsA(http.HttpRequest), + plugin_name=ct.plugin_name, + hadoop_version=ct.hadoop_version) \ + .MultipleTimes().AndReturn(ngts) + api.sahara.cluster_template_update(request=IsA(http.HttpRequest), + ct_id=ct.id, + name=new_name, + plugin_name=ct.plugin_name, + hadoop_version=ct.hadoop_version, + description=ct.description, + cluster_configs=ct.cluster_configs, + node_groups=ct.node_groups, + anti_affinity=ct.anti_affinity, + use_autoconfig=False)\ + .AndReturn(new_ct) + self.mox.ReplayAll() + + url = reverse('horizon:project:data_processing.cluster_templates:edit', + args=[ct.id]) + + def serialize(obj): + return base64.urlsafe_b64encode(jsonutils.dump_as_bytes(obj)) + + res = self.client.post( + url, + {'ct_id': ct.id, + 'cluster_template_name': new_name, + 'plugin_name': ct.plugin_name, + 'hadoop_version': ct.hadoop_version, + 'description': ct.description, + 'hidden_configure_field': "", + 'template_id_0': ct.node_groups[0]['node_group_template_id'], + 'group_name_0': ct.node_groups[0]['name'], + 'count_0': 1, + 'serialized_0': serialize(ct.node_groups[0]), + 'template_id_1': ct.node_groups[1]['node_group_template_id'], + 'group_name_1': ct.node_groups[1]['name'], + 'count_1': 2, + 'serialized_1': serialize(ct.node_groups[1]), + 'forms_ids': "[0,1]", + 
'anti-affinity': ct.anti_affinity, + }) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) diff --git a/sahara_dashboard/content/data_processing/cluster_templates/urls.py b/sahara_dashboard/content/data_processing/cluster_templates/urls.py new file mode 100644 index 0000000..2b41bde --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/urls.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content. 
\ + data_processing.cluster_templates.views as views + + +urlpatterns = patterns('', + url(r'^$', views.ClusterTemplatesView.as_view(), + name='index'), + url(r'^$', views.ClusterTemplatesView.as_view(), + name='cluster-templates'), + url(r'^upload_file$', + views.UploadFileView.as_view(), + name='upload_file'), + url(r'^create-cluster-template$', + views.CreateClusterTemplateView.as_view(), + name='create-cluster-template'), + url(r'^configure-cluster-template$', + views.ConfigureClusterTemplateView.as_view(), + name='configure-cluster-template'), + url(r'^(?P[^/]+)$', + views.ClusterTemplateDetailsView.as_view(), + name='details'), + url(r'^(?P[^/]+)/copy$', + views.CopyClusterTemplateView.as_view(), + name='copy'), + url(r'^(?P[^/]+)/edit$', + views.EditClusterTemplateView.as_view(), + name='edit')) diff --git a/sahara_dashboard/content/data_processing/cluster_templates/views.py b/sahara_dashboard/content/data_processing/cluster_templates/views.py new file mode 100644 index 0000000..85c94ee --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/views.py @@ -0,0 +1,149 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.core.urlresolvers import reverse_lazy +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing. \ + cluster_templates import forms as cluster_forms +import sahara_dashboard.content.data_processing. \ + cluster_templates.tables as ct_tables +import sahara_dashboard.content.data_processing. \ + cluster_templates.tabs as _tabs +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.copy as copy_flow +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.create as create_flow +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.edit as edit_flow + +LOG = logging.getLogger(__name__) + + +class ClusterTemplatesView(tables.DataTableView): + table_class = ct_tables.ClusterTemplatesTable + template_name = ( + 'project/data_processing.cluster_templates/cluster_templates.html') + page_title = _("Cluster Templates") + + def get_data(self): + try: + search_opts = {} + filter = self.get_server_filter_info(self.request) + if filter['value'] and filter['field']: + search_opts = {filter['field']: filter['value']} + cluster_templates = saharaclient.cluster_template_list( + self.request, search_opts) + except Exception: + cluster_templates = [] + exceptions.handle(self.request, + _("Unable to fetch cluster template list")) + return cluster_templates + + +class ClusterTemplateDetailsView(tabs.TabView): + tab_group_class = _tabs.ClusterTemplateDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = "{{ template.name|default:template.id }}" + + @memoized.memoized_method + def get_object(self): + ct_id = 
self.kwargs["template_id"] + try: + return saharaclient.cluster_template_get(self.request, ct_id) + except Exception: + msg = _('Unable to retrieve details for ' + 'cluster template "%s".') % ct_id + redirect = reverse("horizon:project:data_processing." + "cluster_templates:cluster-templates") + exceptions.handle(self.request, msg, redirect=redirect) + + def get_context_data(self, **kwargs): + context = super(ClusterTemplateDetailsView, self)\ + .get_context_data(**kwargs) + context['template'] = self.get_object() + return context + + +class UploadFileView(forms.ModalFormView): + form_class = cluster_forms.UploadFileForm + template_name = ( + 'project/data_processing.cluster_templates/upload_file.html') + success_url = reverse_lazy( + 'horizon:project:data_processing.cluster_templates:index') + page_title = _("Upload Template") + + +class CreateClusterTemplateView(workflows.WorkflowView): + workflow_class = create_flow.CreateClusterTemplate + success_url = ("horizon:project:data_processing.cluster_templates" + ":create-cluster-template") + classes = ("ajax-modal",) + template_name = "project/data_processing.cluster_templates/create.html" + page_title = _("Create Cluster Template") + + +class ConfigureClusterTemplateView(workflows.WorkflowView): + workflow_class = create_flow.ConfigureClusterTemplate + success_url = "horizon:project:data_processing.cluster_templates" + template_name = "project/data_processing.cluster_templates/configure.html" + page_title = _("Configure Cluster Template") + + +class CopyClusterTemplateView(workflows.WorkflowView): + workflow_class = copy_flow.CopyClusterTemplate + success_url = "horizon:project:data_processing.cluster_templates" + template_name = "project/data_processing.cluster_templates/configure.html" + page_title = _("Copy Cluster Template") + + def get_context_data(self, **kwargs): + context = super(CopyClusterTemplateView, self)\ + .get_context_data(**kwargs) + + context["template_id"] = kwargs["template_id"] + return context 
+ + def get_object(self, *args, **kwargs): + if not hasattr(self, "_object"): + template_id = self.kwargs['template_id'] + try: + template = saharaclient.cluster_template_get(self.request, + template_id) + except Exception: + template = {} + exceptions.handle(self.request, + _("Unable to fetch cluster template.")) + self._object = template + return self._object + + def get_initial(self): + initial = super(CopyClusterTemplateView, self).get_initial() + initial['template_id'] = self.kwargs['template_id'] + return initial + + +class EditClusterTemplateView(CopyClusterTemplateView): + workflow_class = edit_flow.EditClusterTemplate + success_url = "horizon:project:data_processing.cluster_templates" + template_name = "project/data_processing.cluster_templates/configure.html" diff --git a/sahara_dashboard/content/data_processing/cluster_templates/workflows/__init__.py b/sahara_dashboard/content/data_processing/cluster_templates/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/cluster_templates/workflows/copy.py b/sahara_dashboard/content/data_processing/cluster_templates/workflows/copy.py new file mode 100644 index 0000000..537cbb8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/workflows/copy.py @@ -0,0 +1,99 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import json +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.create as create_flow +import sahara_dashboard.content.data_processing.utils. \ + workflow_helpers as wf_helpers + +LOG = logging.getLogger(__name__) + + +class CopyClusterTemplate(create_flow.ConfigureClusterTemplate): + success_message = _("Cluster Template copy %s created") + entry_point = "generalconfigaction" + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + self.cluster_template_id = context_seed["template_id"] + try: + self.template = saharaclient.cluster_template_get( + request, + self.cluster_template_id) + self._set_configs_to_copy(self.template.cluster_configs) + + request.GET = request.GET.copy() + request.GET.update({"plugin_name": self.template.plugin_name, + "hadoop_version": self.template.hadoop_version, + "aa_groups": self.template.anti_affinity}) + + super(CopyClusterTemplate, self).__init__(request, context_seed, + entry_point, *args, + **kwargs) + # Initialize node groups. + # TODO(rdopieralski) The same (or very similar) code appears + # multiple times in this dashboard. It should be refactored to + # a function. 
+ for step in self.steps: + if isinstance(step, create_flow.ConfigureNodegroups): + ng_action = step.action + template_ngs = self.template.node_groups + + if 'forms_ids' in request.POST: + continue + ng_action.groups = [] + for i, templ_ng in enumerate(template_ngs): + group_name = "group_name_%d" % i + template_id = "template_id_%d" % i + count = "count_%d" % i + serialized = "serialized_%d" % i + + # save the original node group with all its fields in + # case the template id is missing + serialized_val = base64.urlsafe_b64encode( + json.dumps(wf_helpers.clean_node_group(templ_ng))) + + ng = { + "name": templ_ng["name"], + "count": templ_ng["count"], + "id": i, + "deletable": "true", + "serialized": serialized_val + } + if "node_group_template_id" in templ_ng: + ng["template_id"] = templ_ng[ + "node_group_template_id"] + ng_action.groups.append(ng) + + wf_helpers.build_node_group_fields( + ng_action, group_name, template_id, count, + serialized) + + elif isinstance(step, create_flow.GeneralConfig): + fields = step.action.fields + fields["cluster_template_name"].initial = ( + self.template.name + "-copy") + fields['use_autoconfig'].initial = ( + self.template.use_autoconfig) + fields["description"].initial = self.template.description + except Exception: + exceptions.handle(request, + _("Unable to fetch template to copy.")) diff --git a/sahara_dashboard/content/data_processing/cluster_templates/workflows/create.py b/sahara_dashboard/content/data_processing/cluster_templates/workflows/create.py new file mode 100644 index 0000000..e1789e0 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/workflows/create.py @@ -0,0 +1,337 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import logging + +from django.utils.translation import ugettext_lazy as _ +from saharaclient.api import base as api_base + +from horizon import exceptions +from horizon import forms +from horizon import workflows +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing. \ + utils import helpers as helpers +from sahara_dashboard.content.data_processing. \ + utils import anti_affinity as aa +import sahara_dashboard.content.data_processing. \ + utils.workflow_helpers as whelpers + + +LOG = logging.getLogger(__name__) + + +class SelectPluginAction(workflows.Action): + hidden_create_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + def __init__(self, request, *args, **kwargs): + super(SelectPluginAction, self).__init__(request, *args, **kwargs) + + try: + plugins = saharaclient.plugin_list(request) + except Exception: + plugins = [] + exceptions.handle(request, + _("Unable to fetch plugin list.")) + plugin_choices = [(plugin.name, plugin.title) for plugin in plugins] + + self.fields["plugin_name"] = forms.ChoiceField( + label=_("Plugin name"), + choices=plugin_choices, + widget=forms.Select(attrs={"class": "plugin_name_choice"})) + + for plugin in plugins: + field_name = plugin.name + "_version" + choice_field = forms.ChoiceField( + label=_("Version"), + choices=[(version, version) for version in plugin.versions], + widget=forms.Select( + attrs={"class": "plugin_version_choice " + + field_name + "_choice"}) + ) + 
self.fields[field_name] = choice_field + + class Meta(object): + name = _("Select plugin and hadoop version for cluster template") + help_text_template = ("project/data_processing.cluster_templates/" + "_create_general_help.html") + + +class SelectPlugin(workflows.Step): + action_class = SelectPluginAction + + +class CreateClusterTemplate(workflows.Workflow): + slug = "create_cluster_template" + name = _("Create Cluster Template") + finalize_button_name = _("Next") + success_message = _("Created") + failure_message = _("Could not create") + success_url = "horizon:project:data_processing.cluster_templates:index" + default_steps = (SelectPlugin,) + + +class GeneralConfigAction(workflows.Action): + hidden_configure_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"})) + + hidden_to_delete_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_to_delete_field"})) + + cluster_template_name = forms.CharField(label=_("Template Name")) + + description = forms.CharField(label=_("Description"), + required=False, + widget=forms.Textarea(attrs={'rows': 4})) + + use_autoconfig = forms.BooleanField( + label=_("Auto-configure"), + help_text=_("If selected, instances of a cluster will be " + "automatically configured during creation. 
Otherwise you " + "should manually specify configuration values"), + required=False, + widget=forms.CheckboxInput(), + initial=True, + ) + + anti_affinity = aa.anti_affinity_field() + + def __init__(self, request, *args, **kwargs): + super(GeneralConfigAction, self).__init__(request, *args, **kwargs) + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + + self.fields["plugin_name"] = forms.CharField( + widget=forms.HiddenInput(), + initial=plugin + ) + self.fields["hadoop_version"] = forms.CharField( + widget=forms.HiddenInput(), + initial=hadoop_version + ) + + populate_anti_affinity_choices = aa.populate_anti_affinity_choices + + def get_help_text(self): + extra = dict() + plugin, hadoop_version = whelpers\ + .get_plugin_and_hadoop_version(self.request) + + extra["plugin_name"] = plugin + extra["hadoop_version"] = hadoop_version + return super(GeneralConfigAction, self).get_help_text(extra) + + def clean(self): + cleaned_data = super(GeneralConfigAction, self).clean() + if cleaned_data.get("hidden_configure_field", None) \ + == "create_nodegroup": + self._errors = dict() + return cleaned_data + + class Meta(object): + name = _("Details") + help_text_template = ("project/data_processing.cluster_templates/" + "_configure_general_help.html") + + +class GeneralConfig(workflows.Step): + action_class = GeneralConfigAction + contributes = ("hidden_configure_field", ) + + def contribute(self, data, context): + for k, v in data.items(): + context["general_" + k] = v + + post = self.workflow.request.POST + context['anti_affinity_info'] = post.getlist("anti_affinity") + return context + + +class ConfigureNodegroupsAction(workflows.Action): + hidden_nodegroups_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_nodegroups_field"})) + forms_ids = forms.CharField( + required=False, + widget=forms.HiddenInput()) + + def __init__(self, request, *args, **kwargs): + super(ConfigureNodegroupsAction, self). 
\ + __init__(request, *args, **kwargs) + + plugin = request.REQUEST.get("plugin_name") + version = request.REQUEST.get("hadoop_version") + if plugin and not version: + version_name = plugin + "_version" + version = request.REQUEST.get(version_name) + + if not plugin or not version: + self.templates = saharaclient.nodegroup_template_find(request) + else: + self.templates = saharaclient.nodegroup_template_find( + request, plugin_name=plugin, hadoop_version=version) + + deletable = request.REQUEST.get("deletable", dict()) + + request_source = None + if 'forms_ids' in request.POST: + request_source = request.POST + elif 'forms_ids' in request.REQUEST: + request_source = request.REQUEST + if request_source: + self.groups = [] + for id in json.loads(request_source['forms_ids']): + group_name = "group_name_" + str(id) + template_id = "template_id_" + str(id) + count = "count_" + str(id) + serialized = "serialized_" + str(id) + self.groups.append({"name": request_source[group_name], + "template_id": request_source[template_id], + "count": request_source[count], + "id": id, + "deletable": deletable.get( + request_source[group_name], "true"), + "serialized": request_source[serialized]}) + + whelpers.build_node_group_fields(self, + group_name, + template_id, + count, + serialized) + + def clean(self): + cleaned_data = super(ConfigureNodegroupsAction, self).clean() + if cleaned_data.get("hidden_nodegroups_field", None) \ + == "create_nodegroup": + self._errors = dict() + return cleaned_data + + class Meta(object): + name = _("Node Groups") + + +class ConfigureNodegroups(workflows.Step): + action_class = ConfigureNodegroupsAction + contributes = ("hidden_nodegroups_field", ) + template_name = ("project/data_processing.cluster_templates/" + "cluster_node_groups_template.html") + + def contribute(self, data, context): + for k, v in data.items(): + context["ng_" + k] = v + return context + + +class ConfigureClusterTemplate(whelpers.ServiceParametersWorkflow, + 
whelpers.StatusFormatMixin): + slug = "configure_cluster_template" + name = _("Create Cluster Template") + finalize_button_name = _("Create") + success_message = _("Created Cluster Template %s") + name_property = "general_cluster_template_name" + success_url = "horizon:project:data_processing.cluster_templates:index" + default_steps = (GeneralConfig, + ConfigureNodegroups) + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + ConfigureClusterTemplate._cls_registry = set([]) + + hlps = helpers.Helpers(request) + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + general_parameters = hlps.get_cluster_general_configs( + plugin, + hadoop_version) + service_parameters = hlps.get_targeted_cluster_configs( + plugin, + hadoop_version) + + self._populate_tabs(general_parameters, service_parameters) + + super(ConfigureClusterTemplate, self).__init__(request, + context_seed, + entry_point, + *args, **kwargs) + + def is_valid(self): + steps_valid = True + for step in self.steps: + if not step.action.is_valid(): + steps_valid = False + step.has_errors = True + errors_fields = list(step.action.errors.keys()) + step.action.errors_fields = errors_fields + if not steps_valid: + return steps_valid + return self.validate(self.context) + + def handle(self, request, context): + try: + node_groups = [] + configs_dict = whelpers.parse_configs_from_context(context, + self.defaults) + + ids = json.loads(context['ng_forms_ids']) + for id in ids: + name = context['ng_group_name_' + str(id)] + template_id = context['ng_template_id_' + str(id)] + count = context['ng_count_' + str(id)] + + raw_ng = context.get("ng_serialized_" + str(id)) + + if raw_ng and raw_ng != 'null': + ng = json.loads(base64.urlsafe_b64decode(str(raw_ng))) + else: + ng = dict() + ng["name"] = name + ng["count"] = count + if template_id and template_id != u'None': + ng["node_group_template_id"] = template_id + node_groups.append(ng) + + plugin, hadoop_version = 
whelpers.\ + get_plugin_and_hadoop_version(request) + + # TODO(nkonovalov): Fix client to support default_image_id + saharaclient.cluster_template_create( + request, + context["general_cluster_template_name"], + plugin, + hadoop_version, + context["general_description"], + configs_dict, + node_groups, + context["anti_affinity_info"], + use_autoconfig=context['general_use_autoconfig'] + ) + + hlps = helpers.Helpers(request) + if hlps.is_from_guide(): + request.session["guide_cluster_template_name"] = ( + context["general_cluster_template_name"]) + self.success_url = ( + "horizon:project:data_processing.wizard:cluster_guide") + return True + except api_base.APIException as e: + self.error_description = str(e) + return False + except Exception: + exceptions.handle(request, + _("Cluster template creation failed")) + return False diff --git a/sahara_dashboard/content/data_processing/cluster_templates/workflows/edit.py b/sahara_dashboard/content/data_processing/cluster_templates/workflows/edit.py new file mode 100644 index 0000000..1e4a7f5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/cluster_templates/workflows/edit.py @@ -0,0 +1,103 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. 
\ + cluster_templates.workflows.create as create_flow +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.copy as copy_flow +import sahara_dashboard.content.data_processing. \ + utils.workflow_helpers as whelpers + +LOG = logging.getLogger(__name__) + + +class EditClusterTemplate(copy_flow.CopyClusterTemplate): + success_message = _("Cluster Template %s updated") + entry_point = "generalconfigaction" + finalize_button_name = _("Update") + name = _("Edit Cluster Template") + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + try: + super(EditClusterTemplate, self).__init__(request, context_seed, + entry_point, *args, + **kwargs) + + for step in self.steps: + if isinstance(step, create_flow.GeneralConfig): + fields = step.action.fields + fields["cluster_template_name"].initial = ( + self.template.name) + fields["cluster_template_id"] = forms.CharField( + widget=forms.HiddenInput(), + initial=self.cluster_template_id) + except Exception: + exceptions.handle(request, + _("Unable to fetch template to edit.")) + + def handle(self, request, context): + try: + node_groups = [] + configs_dict = whelpers.parse_configs_from_context(context, + self.defaults) + ids = json.loads(context['ng_forms_ids']) + for id in ids: + name = context['ng_group_name_' + str(id)] + template_id = context['ng_template_id_' + str(id)] + count = context['ng_count_' + str(id)] + + raw_ng = context.get("ng_serialized_" + str(id)) + + if raw_ng and raw_ng != 'null': + ng = json.loads(base64.urlsafe_b64decode(str(raw_ng))) + else: + ng = dict() + ng["name"] = name + ng["count"] = count + if template_id and template_id != u'None': + ng["node_group_template_id"] = template_id + node_groups.append(ng) + + plugin, hadoop_version = whelpers. 
\ + get_plugin_and_hadoop_version(request) + + saharaclient.cluster_template_update( + request=request, + ct_id=self.cluster_template_id, + name=context["general_cluster_template_name"], + plugin_name=plugin, + hadoop_version=hadoop_version, + description=context["general_description"], + cluster_configs=configs_dict, + node_groups=node_groups, + anti_affinity=context["anti_affinity_info"], + use_autoconfig=context['general_use_autoconfig'] + ) + return True + except exceptions.Conflict as e: + self.error_description = str(e) + return False + except Exception: + exceptions.handle(request, + _("Cluster template update failed")) + return False diff --git a/sahara_dashboard/content/data_processing/clusters/__init__.py b/sahara_dashboard/content/data_processing/clusters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/clusters/panel.py b/sahara_dashboard/content/data_processing/clusters/panel.py new file mode 100644 index 0000000..ff45f86 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class ClustersPanel(horizon.Panel): + name = _("Clusters") + slug = 'data_processing.clusters' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(ClustersPanel) diff --git a/sahara_dashboard/content/data_processing/clusters/tables.py b/sahara_dashboard/content/data_processing/clusters/tables.py new file mode 100644 index 0000000..340959e --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/tables.py @@ -0,0 +1,177 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.http import Http404 # noqa +from django.template.loader import render_to_string +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import messages +from horizon import tables +from horizon.tables import base as tables_base + +from sahara_dashboard.api import sahara as saharaclient + +from saharaclient.api import base as api_base + + +LOG = logging.getLogger(__name__) + + +class ClustersFilterAction(tables.FilterAction): + filter_type = "server" + filter_choices = (('name', _("Name"), True), + ('status', _("Status"), True)) + + +class ClusterGuide(tables.LinkAction): + name = "cluster_guide" + verbose_name = _("Cluster Creation Guide") + url = "horizon:project:data_processing.wizard:cluster_guide" + + +class CreateCluster(tables.LinkAction): + name = "create" + verbose_name = _("Launch Cluster") + url = "horizon:project:data_processing.clusters:create-cluster" + classes = ("ajax-modal",) + icon = "plus" + + +class ScaleCluster(tables.LinkAction): + name = "scale" + verbose_name = _("Scale Cluster") + url = "horizon:project:data_processing.clusters:scale" + classes = ("ajax-modal", "btn-edit") + + def allowed(self, request, cluster=None): + return cluster.status == "Active" + + +class DeleteCluster(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Cluster", + u"Delete Clusters", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Cluster", + u"Deleted Clusters", + count + ) + + def delete(self, request, obj_id): + saharaclient.cluster_delete(request, obj_id) + + +class UpdateRow(tables.Row): + ajax = True + + def get_data(self, request, instance_id): + try: + return saharaclient.cluster_get(request, instance_id) + except api_base.APIException as e: + if e.error_code == 404: + raise Http404 + else: + messages.error(request, + _("Unable to update row")) + + +def 
get_instances_count(cluster): + return sum([len(ng["instances"]) + for ng in cluster.node_groups]) + + +class RichErrorCell(tables_base.Cell): + @property + def status(self): + # The error cell values becomes quite complex and cannot be handled + # correctly with STATUS_CHOICES. Handling that explicitly. + status = self.datum.status.lower() + if status == "error": + return False + elif status == "active": + return True + + return None + + +def get_rich_status_info(cluster): + return { + "status": cluster.status, + "status_description": cluster.status_description + } + + +def rich_status_filter(status_dict): + # Render the status "as is" if no description is provided. + if not status_dict["status_description"]: + return status_dict["status"] + + # Error is rendered with a template containing an error description. + return render_to_string( + "project/data_processing.clusters/_rich_status.html", status_dict) + + +class ConfigureCluster(tables.LinkAction): + name = "configure" + verbose_name = _("Configure Cluster") + url = "horizon:project:data_processing.clusters:configure-cluster" + classes = ("ajax-modal", "configure-cluster-btn") + icon = "plus" + attrs = {"style": "display: none"} + + +class ClustersTable(tables.DataTable): + + name = tables.Column("name", + verbose_name=_("Name"), + link=("horizon:project:data_processing." + "clusters:details")) + + plugin = tables.Column("plugin_name", + verbose_name=_("Plugin")) + + version = tables.Column("hadoop_version", + verbose_name=_("Version")) + + # Status field need the whole cluster object to build the rich status. 
+ status = tables.Column(get_rich_status_info, + verbose_name=_("Status"), + status=True, + filters=(rich_status_filter,)) + + instances_count = tables.Column(get_instances_count, + verbose_name=_("Instances Count")) + + class Meta(object): + name = "clusters" + verbose_name = _("Clusters") + row_class = UpdateRow + cell_class = RichErrorCell + status_columns = ["status"] + table_actions = (ClusterGuide, + CreateCluster, + ConfigureCluster, + DeleteCluster, + ClustersFilterAction) + row_actions = (ScaleCluster, + DeleteCluster,) diff --git a/sahara_dashboard/content/data_processing/clusters/tabs.py b/sahara_dashboard/content/data_processing/clusters/tabs.py new file mode 100644 index 0000000..c2eed15 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/tabs.py @@ -0,0 +1,195 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tables +from horizon import tabs + +from sahara_dashboard.content.data_processing.utils \ + import workflow_helpers as helpers + +from openstack_dashboard.api import glance +from openstack_dashboard.api import network +from openstack_dashboard.api import neutron +from openstack_dashboard.api import nova + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class GeneralTab(tabs.Tab): + name = _("General Info") + slug = "cluster_details_tab" + template_name = "project/data_processing.clusters/_details.html" + + def get_context_data(self, request): + cluster_id = self.tab_group.kwargs['cluster_id'] + cluster_info = {} + try: + sahara = saharaclient.client(request) + cluster = sahara.clusters.get(cluster_id) + + for info_key, info_val in cluster.info.items(): + for key, val in info_val.items(): + if str(val).startswith(('http://', 'https://')): + cluster.info[info_key][key] = build_link(val) + + base_image = glance.image_get(request, + cluster.default_image_id) + + if getattr(cluster, 'cluster_template_id', None): + cluster_template = saharaclient.safe_call( + sahara.cluster_templates.get, + cluster.cluster_template_id) + else: + cluster_template = None + + if getattr(cluster, 'neutron_management_network', None): + net_id = cluster.neutron_management_network + network = neutron.network_get(request, net_id) + net_name = network.name_or_id + else: + net_name = None + + cluster_info.update({"cluster": cluster, + "base_image": base_image, + "cluster_template": cluster_template, + "network": net_name}) + except Exception as e: + LOG.error("Unable to fetch cluster details: %s" % str(e)) + + return cluster_info + + +def build_link(url): + return "" + url + "" + + +class NodeGroupsTab(tabs.Tab): + name = _("Node Groups") + slug = "cluster_nodegroups_tab" + template_name = ( + 
"project/data_processing.clusters/_nodegroups_details.html") + + def get_context_data(self, request): + cluster_id = self.tab_group.kwargs['cluster_id'] + try: + sahara = saharaclient.client(request) + cluster = sahara.clusters.get(cluster_id) + for ng in cluster.node_groups: + if ng["flavor_id"]: + ng["flavor_name"] = ( + nova.flavor_get(request, ng["flavor_id"]).name) + if ng["floating_ip_pool"]: + ng["floating_ip_pool_name"] = ( + self._get_floating_ip_pool_name( + request, ng["floating_ip_pool"])) + + if ng.get("node_group_template_id", None): + ng["node_group_template"] = saharaclient.safe_call( + sahara.node_group_templates.get, + ng["node_group_template_id"]) + + ng["security_groups_full"] = helpers.get_security_groups( + request, ng["security_groups"]) + except Exception: + cluster = {} + exceptions.handle(request, + _("Unable to get node group details.")) + + return {"cluster": cluster} + + def _get_floating_ip_pool_name(self, request, pool_id): + pools = [pool for pool in network.floating_ip_pools_list( + request) if pool.id == pool_id] + + return pools[0].name if pools else pool_id + + +class Instance(object): + def __init__(self, name=None, id=None, internal_ip=None, + management_ip=None): + self.name = name + self.id = id + self.internal_ip = internal_ip + self.management_ip = management_ip + + +class InstancesTable(tables.DataTable): + name = tables.Column("name", + link="horizon:project:instances:detail", + verbose_name=_("Name")) + + internal_ip = tables.Column("internal_ip", + verbose_name=_("Internal IP")) + + management_ip = tables.Column("management_ip", + verbose_name=_("Management IP")) + + class Meta(object): + name = "cluster_instances" + verbose_name = _("Cluster Instances") + + +class InstancesTab(tabs.TableTab): + name = _("Instances") + slug = "cluster_instances_tab" + template_name = "project/data_processing.clusters/_instances_details.html" + table_classes = (InstancesTable, ) + + def get_cluster_instances_data(self): + cluster_id = 
self.tab_group.kwargs['cluster_id'] + + try: + sahara = saharaclient.client(self.request) + cluster = sahara.clusters.get(cluster_id) + + instances = [] + for ng in cluster.node_groups: + for instance in ng["instances"]: + instances.append(Instance( + name=instance["instance_name"], + id=instance["instance_id"], + internal_ip=instance.get("internal_ip", + "Not assigned"), + management_ip=instance.get("management_ip", + "Not assigned"))) + except Exception: + instances = [] + exceptions.handle(self.request, + _("Unable to fetch instance details.")) + return instances + + +class EventLogTab(tabs.Tab): + name = _("Cluster Events") + slug = "cluster_event_log" + template_name = "project/data_processing.clusters/_event_log.html" + + def get_context_data(self, request, **kwargs): + cluster_id = self.tab_group.kwargs['cluster_id'] + kwargs["cluster_id"] = cluster_id + kwargs['data_update_url'] = request.get_full_path() + + return kwargs + + +class ClusterDetailsTabs(tabs.TabGroup): + slug = "cluster_details" + tabs = (GeneralTab, NodeGroupsTab, InstancesTab, EventLogTab) + sticky = True diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_configure_general_help.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_configure_general_help.html new file mode 100644 index 0000000..158d811 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_configure_general_help.html @@ -0,0 +1,20 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}This Cluster will be started with:{% endblocktrans %} +
+ {% blocktrans %}Plugin{% endblocktrans %}: {{ plugin_name }} +
+ {% blocktrans %}Version{% endblocktrans %}: {{ hadoop_version }} +
+

+

+ {% blocktrans %}Cluster can be launched using existing Cluster Templates.{% endblocktrans %} +

+

+ {% blocktrans %}The Cluster object should specify OpenStack Image to boot instances for Cluster.{% endblocktrans %} +

+

+ {% blocktrans %}User has to choose a keypair to have access to clusters instances.{% endblocktrans %} +

+
diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_cluster.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_cluster.html new file mode 100644 index 0000000..e9b8062 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_cluster.html @@ -0,0 +1,22 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}create_cluster_form{% endblock %} +{% block form_action %}{% url 'horizon:project:data_processing.clusters:create' %}{% endblock %} + +{% block modal-header %}{% trans "Launch Cluster" %}{% endblock %} + +{% block modal-body %} +
+
+ {% include "horizon/common/_form_fields.html" %} +
+
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_general_help.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_general_help.html new file mode 100644 index 0000000..897d6cc --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_create_general_help.html @@ -0,0 +1,4 @@ +{% load i18n %} +

+ {% trans "Select a plugin and version for a new Cluster." %} +

diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_details.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_details.html new file mode 100644 index 0000000..7f7eeb4 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_details.html @@ -0,0 +1,92 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Name" %}
+
{{ cluster.name }}
+
{% trans "ID" %}
+
{{ cluster.id }}
+
{% trans "Description" %}
+
{{ cluster.description|default:_("None") }}
+
{% trans "Status" %}
+
{{ cluster.status }}
+
+ + {% if cluster.error_description %} +

{% trans "Error Details" %}

+

+ {{ cluster.error_description }} +

+ {% endif %} + +
+
{% trans "Plugin" %}
+
{{ cluster.plugin_name }}
+
{% trans "Version" %}
+
{{ cluster.hadoop_version }}
+
+
+
{% trans "Template" %}
+ {% if cluster_template %} +
{{ cluster_template.name }}
+ {% else %} +
{% trans "Template not specified" %}
+ {% endif %} +
{% trans "Base Image" %}
+
{{ base_image.name }}
+ {% if network %} +
{% trans "Neutron Management Network" %}
+
{{ network }}
+ {% endif %} +
{% trans "Keypair" %}
+
{{ cluster.user_keypair_id }}
+
{% trans "Use auto-configuration" %}
+
{{ cluster.use_autoconfig }}
+
+
+
{% trans "Anti-affinity enabled for" %}
+ {% if cluster.anti_affinity %} +
+
    + {% for process in cluster.anti_affinity %} +
  • {{ process }}
  • + {% endfor %} +
+
+ {% else %} +
{% trans "no processes" %}
+ {% endif %} +
+
+
{% trans "Node Configurations" %}
+ {% if cluster.cluster_configs %} +
+ {% for service, service_conf in cluster.cluster_configs.items %} +

{{ service }}

+ {% if service_conf %} +
    + {% for conf_name, conf_value in service_conf.items %} +
  • {% blocktrans %}{{ conf_name }}: {{ conf_value }}{% endblocktrans %}
  • + {% endfor %} +
+ {% else %} +
{% trans "No configurations" %}
+ {% endif %} + {% endfor %} +
+ {% else %} +
{% trans "Cluster configurations are not specified" %}
+ {% endif %} +
+ +
+ {% for info_key, info_val in cluster.info.items %} +
{{ info_key }}
+ {% for key, val in info_val.items %} +
+ {% autoescape off %}{% blocktrans %}{{ key }}: {{ val }}{% endblocktrans %}{% endautoescape %} +
+ {% endfor %} + {% endfor %} +
+
diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html new file mode 100644 index 0000000..74291eb --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html @@ -0,0 +1,62 @@ +{% load i18n %} + +

{% trans "Cluster provision steps" %}

+ + + + + + + + + + + + +
{% trans "Step Description" %}{% trans "Started at" %}{% trans "Duration" %}{% trans "Progress" %}{% trans "Status" %}
+ + + + diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_instances_details.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_instances_details.html new file mode 100644 index 0000000..5e94a33 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_instances_details.html @@ -0,0 +1,4 @@ +{% load i18n sizeformat %} +
+ {{ cluster_instances_table.render }} +
diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_nodegroups_details.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_nodegroups_details.html new file mode 100644 index 0000000..2f57f81 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_nodegroups_details.html @@ -0,0 +1,82 @@ +{% load i18n sizeformat %} + + +
+ {% for node_group in cluster.node_groups %} +
+

{% blocktrans with node_group_name=node_group.name %}Name: {{ node_group_name }}{% endblocktrans %}

+
{% trans "Number of Nodes" %}
+
{{ node_group.count }}
+ +
{% trans "Flavor" %}
+
{{ node_group.flavor_name|default:_("Flavor is not specified") }}
+ + {% if node_group.floating_ip_pool %} +
{% trans "Floating IP Pool" %}
+
{{ node_group.floating_ip_pool_name }}
+ {% endif %} + +
{% trans "Template" %}
+ {% if node_group.node_group_template_id %} +
{{ node_group.node_group_template.name }}
+ {% else %} +
{% trans "Template not specified" %}
+ {% endif %} + +
{% trans "Use auto-configuration" %}
+
{{ node_group.use_autoconfig }}
+ +
{% trans "Proxy Gateway" %}
+
{{ node_group.is_proxy_gateway|yesno }}
+ +
{% trans "Auto Security Group" %}
+
{{ node_group.auto_security_group|yesno }}
+ +
{% trans "Security Groups" %}
+
+
    + {% for group in node_group.security_groups_full %} + {% if group.id %} +
  • {{ group.name }}
  • + {% else %} +
  • {{ group.name }}
  • + {% endif %} + {% endfor %} +
+
+ +
{% trans "Node Processes" %}
+ {% if node_group.node_processes %} +
+
    + {% for process in node_group.node_processes %} +
  • {{ process }}
  • + {% endfor %} +
+
+ {% else %} +
{% trans "Node processes are not specified" %}
+ {% endif %} + +
{% trans "Node Configurations" %}
+ {% if node_group.node_configs %} +
+ {% for service, service_conf in node_group.node_configs.items %} +

{{ service }}

+ {% if service_conf %} +
    + {% for conf_name, conf_value in service_conf.items %} +
  • {% blocktrans %}{{ conf_name }}: {{ conf_value }}{% endblocktrans %}
  • + {% endfor %} +
+ {% else %} +
{% trans "No configurations" %}
+ {% endif %} + {% endfor %} +
+ {% else %} +
{% trans "Node configurations are not specified" %}
+ {% endif %} +
+ {% endfor %} +
diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_rich_status.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_rich_status.html new file mode 100644 index 0000000..45bd92c --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/_rich_status.html @@ -0,0 +1,6 @@ +{{ status }}  + diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/clusters.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/clusters.html new file mode 100644 index 0000000..2290427 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/clusters.html @@ -0,0 +1,58 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ clusters_table.render }} +
+ + + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/configure.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/configure.html new file mode 100644 index 0000000..00e0642 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/configure.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Launch Cluster" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create.html new file mode 100644 index 0000000..00e0642 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Launch Cluster" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create_cluster.html b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create_cluster.html new file mode 100644 index 0000000..6c04775 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/create_cluster.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Launch Cluster" %}{% endblock %} + +{% block main %} + {% include 'templates/data_processing.clusters/_create_cluster.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/scale.html 
b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/scale.html new file mode 100644 index 0000000..489557d --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/templates/data_processing.clusters/scale.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Scale Cluster" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/clusters/tests.py b/sahara_dashboard/content/data_processing/clusters/tests.py new file mode 100644 index 0000000..e6ca503 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/tests.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from oslo_serialization import jsonutils + +from openstack_dashboard.test import helpers as test + +from sahara_dashboard import api + + +INDEX_URL = reverse('horizon:project:data_processing.clusters:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.clusters:details', args=['id']) + + +class DataProcessingClusterTests(test.TestCase): + @test.create_stubs({api.sahara: ('cluster_list',)}) + def test_index(self): + api.sahara.cluster_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.clusters.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed( + res, 'project/data_processing.clusters/clusters.html') + self.assertContains(res, 'Clusters') + self.assertContains(res, 'Name') + + @test.create_stubs({api.sahara: ('cluster_template_list', 'image_list')}) + def test_launch_cluster_get_nodata(self): + api.sahara.cluster_template_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + api.sahara.image_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + self.mox.ReplayAll() + url = reverse( + 'horizon:project:data_processing.clusters:configure-cluster') + res = self.client.get("%s?plugin_name=shoes&hadoop_version=1.1" % url) + self.assertContains(res, "No Images Available") + self.assertContains(res, "No Templates Available") + + @test.create_stubs({api.sahara: ('cluster_get',)}) + def test_event_log_tab(self): + cluster = self.clusters.list()[-1] + api.sahara.cluster_get(IsA(http.HttpRequest), + "cl2", show_progress=True).AndReturn(cluster) + self.mox.ReplayAll() + + url = reverse( + 'horizon:project:data_processing.clusters:events', args=["cl2"]) + res = self.client.get(url) + data = jsonutils.loads(res.content) + + self.assertIn("provision_steps", data) + self.assertEqual(data["need_update"], False) + + step_0 = data["provision_steps"][0] + self.assertEqual(2, step_0["completed"]) + self.assertEqual(2, 
len(step_0["events"])) + for evt in step_0["events"]: + self.assertEqual(True, evt["successful"]) + + step_1 = data["provision_steps"][1] + self.assertEqual(3, step_1["completed"]) + self.assertEqual(0, len(step_1["events"])) + + @test.create_stubs({api.sahara: ('cluster_list', + 'cluster_delete')}) + def test_delete(self): + cluster = self.clusters.first() + api.sahara.cluster_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.clusters.list()) + api.sahara.cluster_delete(IsA(http.HttpRequest), cluster.id) + self.mox.ReplayAll() + + form_data = {'action': 'clusters__delete__%s' % cluster.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) diff --git a/sahara_dashboard/content/data_processing/clusters/urls.py b/sahara_dashboard/content/data_processing/clusters/urls.py new file mode 100644 index 0000000..4647d2e --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/urls.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content.data_processing.clusters.views as views + + +urlpatterns = patterns('', + url(r'^$', views.ClustersView.as_view(), + name='index'), + url(r'^$', views.ClustersView.as_view(), + name='clusters'), + url(r'^create-cluster$', + views.CreateClusterView.as_view(), + name='create-cluster'), + url(r'^configure-cluster$', + views.ConfigureClusterView.as_view(), + name='configure-cluster'), + url(r'^(?P[^/]+)$', + views.ClusterDetailsView.as_view(), + name='details'), + url(r'^(?P[^/]+)/events$', + views.ClusterEventsView.as_view(), + name='events'), + url(r'^(?P[^/]+)/scale$', + views.ScaleClusterView.as_view(), + name='scale')) diff --git a/sahara_dashboard/content/data_processing/clusters/views.py b/sahara_dashboard/content/data_processing/clusters/views.py new file mode 100644 index 0000000..d7db9cf --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/views.py @@ -0,0 +1,225 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from datetime import datetime +import json +import logging + +from django.http import HttpResponse +from django.utils.translation import ugettext as _ +from django.views.generic import base as django_base +import six + +from horizon import exceptions +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient + +import sahara_dashboard.content.data_processing.clusters. \ + tables as c_tables +import sahara_dashboard.content.data_processing.clusters. \ + tabs as _tabs +import sahara_dashboard.content.data_processing.clusters. \ + workflows.create as create_flow +import sahara_dashboard.content.data_processing.clusters. \ + workflows.scale as scale_flow +from saharaclient.api.base import APIException + +LOG = logging.getLogger(__name__) + + +class ClustersView(tables.DataTableView): + table_class = c_tables.ClustersTable + template_name = 'project/data_processing.clusters/clusters.html' + page_title = _("Clusters") + + def get_data(self): + try: + search_opts = {} + filter = self.get_server_filter_info(self.request) + if filter['value'] and filter['field']: + search_opts = {filter['field']: filter['value']} + clusters = saharaclient.cluster_list(self.request, search_opts) + except Exception: + clusters = [] + exceptions.handle(self.request, + _("Unable to fetch cluster list")) + return clusters + + +class ClusterDetailsView(tabs.TabView): + tab_group_class = _tabs.ClusterDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = "{{ cluster.name|default:cluster.id }}" + + @memoized.memoized_method + def get_object(self): + cl_id = self.kwargs["cluster_id"] + try: + return saharaclient.cluster_get(self.request, cl_id) + except Exception: + msg = _('Unable to retrieve details for cluster "%s".') % cl_id + redirect = reverse( + "horizon:project:data_processing.clusters:clusters") 
+ exceptions.handle(self.request, msg, redirect=redirect) + + def get_context_data(self, **kwargs): + context = super(ClusterDetailsView, self).get_context_data(**kwargs) + context['cluster'] = self.get_object() + return context + + +class ClusterEventsView(django_base.View): + + _date_format = "%Y-%m-%dT%H:%M:%S" + + @staticmethod + def _created_at_key(obj): + return datetime.strptime(obj["created_at"], + ClusterEventsView._date_format) + + def get(self, request, *args, **kwargs): + + cluster_id = kwargs.get("cluster_id") + + try: + cluster = saharaclient.cluster_get(request, cluster_id, + show_progress=True) + node_group_mapping = {} + for node_group in cluster.node_groups: + node_group_mapping[node_group["id"]] = node_group["name"] + + provision_steps = cluster.provision_progress + + # Sort by create time + provision_steps = sorted(provision_steps, + key=ClusterEventsView._created_at_key, + reverse=True) + + for step in provision_steps: + # Sort events of the steps also + step["events"] = sorted(step["events"], + key=ClusterEventsView._created_at_key, + reverse=True) + + successful_events_count = 0 + + for event in step["events"]: + if event["node_group_id"]: + event["node_group_name"] = node_group_mapping[ + event["node_group_id"]] + + event_result = _("Unknown") + if event["successful"] is True: + successful_events_count += 1 + event_result = _("Completed Successfully") + elif event["successful"] is False: + event_result = _("Failed") + + event["result"] = event_result + + if not event["event_info"]: + event["event_info"] = _("No info available") + + start_time = datetime.strptime(step["created_at"], + self._date_format) + end_time = datetime.now() + # Clear out microseconds. There is no need for that precision. 
+ end_time = end_time.replace(microsecond=0) + if step["successful"] is not None: + updated_at = step["updated_at"] + end_time = datetime.strptime(updated_at, + self._date_format) + step["duration"] = six.text_type(end_time - start_time) + + result = _("In progress") + step["completed"] = successful_events_count + + if step["successful"] is True: + step["completed"] = step["total"] + result = _("Completed Successfully") + elif step["successful"] is False: + result = _("Failed") + + step["result"] = result + + status = cluster.status.lower() + need_update = status not in ("active", "error") + except APIException: + # Cluster is not available. Returning empty event log. + need_update = False + provision_steps = [] + + context = {"provision_steps": provision_steps, + "need_update": need_update} + + return HttpResponse(json.dumps(context), + content_type='application/json') + + +class CreateClusterView(workflows.WorkflowView): + workflow_class = create_flow.CreateCluster + success_url = \ + "horizon:project:data_processing.clusters:create-cluster" + classes = ("ajax-modal",) + template_name = "project/data_processing.clusters/create.html" + page_title = _("Launch Cluster") + + +class ConfigureClusterView(workflows.WorkflowView): + workflow_class = create_flow.ConfigureCluster + success_url = "horizon:project:data_processing.clusters" + template_name = "project/data_processing.clusters/configure.html" + page_title = _("Configure Cluster") + + def get_initial(self): + initial = super(ConfigureClusterView, self).get_initial() + initial.update(self.kwargs) + return initial + + +class ScaleClusterView(workflows.WorkflowView): + workflow_class = scale_flow.ScaleCluster + success_url = "horizon:project:data_processing.clusters" + classes = ("ajax-modal",) + template_name = "project/data_processing.clusters/scale.html" + page_title = _("Scale Cluster") + + def get_context_data(self, **kwargs): + context = super(ScaleClusterView, self)\ + .get_context_data(**kwargs) + + 
context["cluster_id"] = kwargs["cluster_id"] + return context + + def get_object(self, *args, **kwargs): + if not hasattr(self, "_object"): + template_id = self.kwargs['cluster_id'] + try: + template = saharaclient.cluster_template_get(self.request, + template_id) + except Exception: + template = None + exceptions.handle(self.request, + _("Unable to fetch cluster template.")) + self._object = template + return self._object + + def get_initial(self): + initial = super(ScaleClusterView, self).get_initial() + initial.update({'cluster_id': self.kwargs['cluster_id']}) + return initial diff --git a/sahara_dashboard/content/data_processing/clusters/workflows/__init__.py b/sahara_dashboard/content/data_processing/clusters/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/clusters/workflows/create.py b/sahara_dashboard/content/data_processing/clusters/workflows/create.py new file mode 100644 index 0000000..76b6740 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/workflows/create.py @@ -0,0 +1,258 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from horizon import exceptions +from horizon import forms +from horizon import workflows + +from openstack_dashboard.api import nova + +from sahara_dashboard.content.data_processing.utils import neutron_support +import sahara_dashboard.content.data_processing.utils. 
\ + workflow_helpers as whelpers + +from django.utils.translation import ugettext_lazy as _ + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.create as t_flows + +from saharaclient.api import base as api_base + +import logging + +LOG = logging.getLogger(__name__) + +KEYPAIR_IMPORT_URL = "horizon:project:access_and_security:keypairs:import" +BASE_IMAGE_URL = "horizon:project:data_processing.data_image_registry:register" +TEMPLATE_UPLOAD_URL = ( + "horizon:project:data_processing.cluster_templates:upload_file") + + +class SelectPluginAction(t_flows.SelectPluginAction): + class Meta(object): + name = _("Select plugin and hadoop version for cluster") + help_text_template = ( + "project/data_processing.clusters/_create_general_help.html") + + +class SelectPlugin(t_flows.SelectPlugin): + action_class = SelectPluginAction + + +class CreateCluster(t_flows.CreateClusterTemplate): + slug = "create_cluster" + name = _("Launch Cluster") + success_url = "horizon:project:data_processing.cluster_templates:index" + default_steps = (SelectPlugin,) + + +class GeneralConfigAction(workflows.Action): + populate_neutron_management_network_choices = \ + neutron_support.populate_neutron_management_network_choices + + hidden_configure_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"})) + + hidden_to_delete_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_to_delete_field"})) + + cluster_name = forms.CharField(label=_("Cluster Name")) + + description = forms.CharField(label=_("Description"), + required=False, + widget=forms.Textarea(attrs={'rows': 4})) + cluster_template = forms.DynamicChoiceField(label=_("Cluster Template"), + initial=(None, "None"), + add_item_link= + TEMPLATE_UPLOAD_URL) + + cluster_count = forms.IntegerField(min_value=1, + label=_("Cluster Count"), + initial=1, + help_text=( + 
_("Number of clusters to launch."))) + + image = forms.DynamicChoiceField(label=_("Base Image"), + add_item_link=BASE_IMAGE_URL) + + keypair = forms.DynamicChoiceField( + label=_("Keypair"), + required=False, + help_text=_("Which keypair to use for authentication."), + add_item_link=KEYPAIR_IMPORT_URL) + + def __init__(self, request, *args, **kwargs): + super(GeneralConfigAction, self).__init__(request, *args, **kwargs) + + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + + if saharaclient.base.is_service_enabled(request, 'network'): + self.fields["neutron_management_network"] = forms.ChoiceField( + label=_("Neutron Management Network"), + choices=self.populate_neutron_management_network_choices( + request, {}) + ) + + self.fields["plugin_name"] = forms.CharField( + widget=forms.HiddenInput(), + initial=plugin + ) + self.fields["hadoop_version"] = forms.CharField( + widget=forms.HiddenInput(), + initial=hadoop_version + ) + + def populate_image_choices(self, request, context): + try: + all_images = saharaclient.image_list(request) + + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + + details = saharaclient.plugin_get_version_details(request, + plugin, + hadoop_version) + + choices = [(image.id, image.name) for image in all_images + if (set(details.required_image_tags). 
+ issubset(set(image.tags)))] + except Exception: + exceptions.handle(request, + _("Unable to fetch image choices.")) + choices = [] + if not choices: + choices.append(("", _("No Images Available"))) + + return choices + + def populate_keypair_choices(self, request, context): + try: + keypairs = nova.keypair_list(request) + except Exception: + keypairs = [] + exceptions.handle(request, + _("Unable to fetch keypair choices.")) + keypair_list = [(kp.name, kp.name) for kp in keypairs] + keypair_list.insert(0, ("", _("No keypair"))) + + return keypair_list + + def populate_cluster_template_choices(self, request, context): + templates = saharaclient.cluster_template_list(request) + + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + + choices = [(template.id, template.name) + for template in templates + if (template.hadoop_version == hadoop_version and + template.plugin_name == plugin)] + + if not choices: + choices.append(("", _("No Templates Available"))) + # cluster_template_id comes from cluster templates table, when + # Create Cluster from template is clicked there + selected_template_name = None + if request.REQUEST.get("cluster_template_name"): + selected_template_name = ( + request.REQUEST.get("cluster_template_name")) + if selected_template_name: + for template in templates: + if template.name == selected_template_name: + selected_template_id = template.id + break + else: + selected_template_id = ( + request.REQUEST.get("cluster_template_id", None)) + + for template in templates: + if template.id == selected_template_id: + self.fields['cluster_template'].initial = template.id + + return choices + + def get_help_text(self): + extra = dict() + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(self.request) + extra["plugin_name"] = plugin + extra["hadoop_version"] = hadoop_version + return super(GeneralConfigAction, self).get_help_text(extra) + + def clean(self): + cleaned_data = super(GeneralConfigAction, 
self).clean() + if cleaned_data.get("hidden_configure_field", None) \ + == "create_nodegroup": + self._errors = dict() + return cleaned_data + + class Meta(object): + name = _("Configure Cluster") + help_text_template = \ + ("project/data_processing.clusters/_configure_general_help.html") + + +class GeneralConfig(workflows.Step): + action_class = GeneralConfigAction + contributes = ("hidden_configure_field", ) + + def contribute(self, data, context): + for k, v in data.items(): + context["general_" + k] = v + + return context + + +class ConfigureCluster(whelpers.StatusFormatMixin, workflows.Workflow): + slug = "configure_cluster" + name = _("Launch Cluster") + finalize_button_name = _("Launch") + success_message = _("Launched Cluster %s") + name_property = "general_cluster_name" + success_url = "horizon:project:data_processing.clusters:index" + default_steps = (GeneralConfig, ) + + def handle(self, request, context): + try: + # TODO(nkonovalov) Implement AJAX Node Groups. + node_groups = None + + plugin, hadoop_version = whelpers.\ + get_plugin_and_hadoop_version(request) + + cluster_template_id = context["general_cluster_template"] or None + user_keypair = context["general_keypair"] or None + + saharaclient.cluster_create( + request, + context["general_cluster_name"], + plugin, hadoop_version, + cluster_template_id=cluster_template_id, + default_image_id=context["general_image"], + description=context["general_description"], + node_groups=node_groups, + user_keypair_id=user_keypair, + count=context['general_cluster_count'], + net_id=context.get("general_neutron_management_network", None)) + return True + except api_base.APIException as e: + self.error_description = str(e) + return False + except Exception: + exceptions.handle(request, + _('Unable to create the cluster')) + return False diff --git a/sahara_dashboard/content/data_processing/clusters/workflows/scale.py b/sahara_dashboard/content/data_processing/clusters/workflows/scale.py new file mode 100644 index 
0000000..f797615 --- /dev/null +++ b/sahara_dashboard/content/data_processing/clusters/workflows/scale.py @@ -0,0 +1,172 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.create as clt_create_flow +import sahara_dashboard.content.data_processing. 
\ + clusters.workflows.create as cl_create_flow +from sahara_dashboard.content.data_processing.utils import workflow_helpers + +from saharaclient.api import base as api_base + +LOG = logging.getLogger(__name__) + + +class NodeGroupsStep(clt_create_flow.ConfigureNodegroups): + pass + + +class ScaleCluster(cl_create_flow.ConfigureCluster, + workflow_helpers.StatusFormatMixin): + slug = "scale_cluster" + name = _("Scale Cluster") + finalize_button_name = _("Scale") + success_url = "horizon:project:data_processing.clusters:index" + default_steps = (NodeGroupsStep, ) + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + ScaleCluster._cls_registry = set([]) + + self.success_message = _("Scaled cluster successfully started.") + + cluster_id = context_seed["cluster_id"] + try: + cluster = saharaclient.cluster_get(request, cluster_id) + plugin = cluster.plugin_name + hadoop_version = cluster.hadoop_version + + # Initialize deletable node groups. + deletable = dict() + for group in cluster.node_groups: + deletable[group["name"]] = "false" + request.GET = request.GET.copy() + request.GET.update({ + "cluster_id": cluster_id, + "plugin_name": plugin, + "hadoop_version": hadoop_version, + "deletable": deletable + }) + + super(ScaleCluster, self).__init__(request, context_seed, + entry_point, *args, + **kwargs) + # Initialize node groups. 
+ for step in self.steps: + if not isinstance(step, clt_create_flow.ConfigureNodegroups): + continue + ng_action = step.action + template_ngs = cluster.node_groups + + if 'forms_ids' in request.POST: + continue + ng_action.groups = [] + for i, templ_ng in enumerate(template_ngs): + group_name = "group_name_%d" % i + template_id = "template_id_%d" % i + count = "count_%d" % i + serialized = "serialized_%d" % i + + serialized_val = base64.urlsafe_b64encode(json.dumps( + workflow_helpers.clean_node_group(templ_ng))) + + ng_action.groups.append({ + "name": templ_ng["name"], + "template_id": templ_ng["node_group_template_id"], + "count": templ_ng["count"], + "id": i, + "deletable": "false", + "serialized": serialized_val + }) + workflow_helpers.build_node_group_fields(ng_action, + group_name, + template_id, + count, + serialized) + except Exception: + exceptions.handle(request, + _("Unable to fetch cluster to scale")) + + def format_status_message(self, message): + # Scaling form requires special handling because it has no Cluster name + # in it's context + + error_description = getattr(self, 'error_description', None) + if error_description: + return error_description + else: + return self.success_message + + def handle(self, request, context): + cluster_id = request.GET["cluster_id"] + try: + cluster = saharaclient.cluster_get(request, cluster_id) + existing_node_groups = set([]) + for ng in cluster.node_groups: + existing_node_groups.add(ng["name"]) + + scale_object = dict() + + ids = json.loads(context["ng_forms_ids"]) + + for _id in ids: + name = context["ng_group_name_%s" % _id] + template_id = context["ng_template_id_%s" % _id] + count = context["ng_count_%s" % _id] + + if name not in existing_node_groups: + if "add_node_groups" not in scale_object: + scale_object["add_node_groups"] = [] + + scale_object["add_node_groups"].append( + {"name": name, + "node_group_template_id": template_id, + "count": int(count)}) + else: + old_count = None + for ng in 
cluster.node_groups: + if name == ng["name"]: + old_count = ng["count"] + break + + if old_count != count: + if "resize_node_groups" not in scale_object: + scale_object["resize_node_groups"] = [] + + scale_object["resize_node_groups"].append( + {"name": name, + "count": int(count)} + ) + except Exception: + scale_object = {} + exceptions.handle(request, + _("Unable to fetch cluster to scale.")) + + try: + saharaclient.cluster_scale(request, cluster_id, scale_object) + return True + except api_base.APIException as e: + self.error_description = str(e) + return False + except Exception: + exceptions.handle(request, + _("Scale cluster operation failed")) + return False diff --git a/sahara_dashboard/content/data_processing/data_image_registry/__init__.py b/sahara_dashboard/content/data_processing/data_image_registry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/data_image_registry/forms.py b/sahara_dashboard/content/data_processing/data_image_registry/forms.py new file mode 100644 index 0000000..82273c3 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/forms.py @@ -0,0 +1,116 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon import messages +from openstack_dashboard.api import glance +from sahara_dashboard.api import sahara as saharaclient + + +class ImageForm(forms.SelfHandlingForm): + image_id = forms.CharField(widget=forms.HiddenInput()) + tags_list = forms.CharField(widget=forms.HiddenInput()) + user_name = forms.CharField(max_length=80, label=_("User Name")) + description = forms.CharField(max_length=80, + label=_("Description"), + required=False, + widget=forms.Textarea(attrs={'rows': 4})) + + def handle(self, request, data): + try: + image_id = data['image_id'] + user_name = data['user_name'] + desc = data['description'] + saharaclient.image_update(request, image_id, user_name, desc) + + image_tags = json.loads(data["tags_list"]) + saharaclient.image_tags_update(request, image_id, image_tags) + updated_image = saharaclient.image_get(request, image_id) + messages.success(request, + _("Successfully updated image.")) + return updated_image + except Exception: + exceptions.handle(request, + _("Failed to update image.")) + return False + + +class EditTagsForm(ImageForm): + image_id = forms.CharField(widget=forms.HiddenInput()) + + +class RegisterImageForm(ImageForm): + image_id = forms.ChoiceField(label=_("Image")) + + def __init__(self, request, *args, **kwargs): + super(RegisterImageForm, self).__init__(request, *args, **kwargs) + self._populate_image_id_choices() + + def _populate_image_id_choices(self): + images = self._get_available_images(self.request) + choices = [(image.id, image.name) + for image in images + if image.properties.get("image_type", '') != "snapshot"] + if choices: + choices.insert(0, ("", _("Select Image"))) + else: + choices.insert(0, ("", _("No images available."))) + self.fields['image_id'].choices = choices + + def _get_images(self, request, filter): + try: + images, _more, _prev = ( + 
glance.image_list_detailed(request, filters=filter)) + except Exception: + images = [] + exceptions.handle(request, + _("Unable to retrieve images with filter %s.") % + filter) + return images + + def _get_public_images(self, request): + filter = {"is_public": True, + "status": "active"} + return self._get_images(request, filter) + + def _get_tenant_images(self, request): + filter = {"owner": request.user.tenant_id, + "status": "active"} + return self._get_images(request, filter) + + def _get_available_images(self, request): + + images = self._get_tenant_images(request) + if request.user.is_superuser: + images += self._get_public_images(request) + + final_images = [] + + try: + image_ids = set(img.id for img in saharaclient.image_list(request)) + except Exception: + image_ids = set() + exceptions.handle(request, + _("Unable to fetch available images.")) + + for image in images: + if (image not in final_images and + image.id not in image_ids and + image.container_format not in ('aki', 'ari')): + final_images.append(image) + return final_images diff --git a/sahara_dashboard/content/data_processing/data_image_registry/panel.py b/sahara_dashboard/content/data_processing/data_image_registry/panel.py new file mode 100644 index 0000000..541f7a1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class ImageRegistryPanel(horizon.Panel): + name = _("Image Registry") + slug = 'data_processing.data_image_registry' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(ImageRegistryPanel) diff --git a/sahara_dashboard/content/data_processing/data_image_registry/tables.py b/sahara_dashboard/content/data_processing/data_image_registry/tables.py new file mode 100644 index 0000000..68cdcd6 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/tables.py @@ -0,0 +1,83 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django import template +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import tables + +from sahara_dashboard.api import sahara as saharaclient + + +LOG = logging.getLogger(__name__) + + +class EditTagsAction(tables.LinkAction): + name = "edit_tags" + verbose_name = _("Edit Tags") + url = "horizon:project:data_processing.data_image_registry:edit_tags" + classes = ("ajax-modal",) + + +def tags_to_string(image): + template_name = ( + 'project/data_processing.data_image_registry/_list_tags.html') + context = {"image": image} + return template.loader.render_to_string(template_name, context) + + +class RegisterImage(tables.LinkAction): + name = "register" + verbose_name = _("Register Image") + url = "horizon:project:data_processing.data_image_registry:register" + classes = ("ajax-modal",) + icon = "plus" + + +class UnregisterImages(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Unregister Image", + u"Unregister Images", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Unregistered Image", + u"Unregistered Images", + count + ) + + def delete(self, request, obj_id): + saharaclient.image_unregister(request, obj_id) + + +class ImageRegistryTable(tables.DataTable): + name = tables.Column("name", + verbose_name=_("Image"), + link=("horizon:project:" + "images:images:detail")) + tags = tables.Column(tags_to_string, + verbose_name=_("Tags")) + + class Meta(object): + name = "image_registry" + verbose_name = _("Image Registry") + table_actions = (RegisterImage, UnregisterImages,) + row_actions = (EditTagsAction, UnregisterImages,) diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_edit_tags.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_edit_tags.html new 
file mode 100644 index 0000000..0e2a54a --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_edit_tags.html @@ -0,0 +1,28 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}edit_tags_form{% endblock %} +{% block form_action %}{% url 'horizon:project:data_processing.data_image_registry:edit_tags' image.id %}{% endblock %} + +{% block modal-header %}{% trans "Edit Image Tags" %}{% endblock %} + +{% block modal-body %} + + +
+
+ {% include "horizon/common/_form_fields.html" %} +
+ {% include 'project/data_processing.data_image_registry/_tag_form.html' %} +
+
+ {% include 'project/data_processing.data_image_registry/_help.html' %} +
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_help.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_help.html new file mode 100644 index 0000000..65a0615 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_help.html @@ -0,0 +1,21 @@ +{% load i18n %} +
+

{% blocktrans %}Image Registry tool:{% endblocktrans %}

+
+

+ {% blocktrans %}Image Registry is used to provide additional information about images for Data Processing.{% endblocktrans %} +

+

+ {% blocktrans %}Specified User Name will be used by Data Processing to apply configs and manage processes on instances.{% endblocktrans %} +

+

+ {% blocktrans %}Tags are used for filtering images suitable for each plugin and each Data Processing version. + To add required tags, select a plugin and a Data Processing version and click "Add plugin tags" button.{% endblocktrans %} +

+

+ {% blocktrans %}You may also add any custom tag.{% endblocktrans %} +

+

+ {% blocktrans %}Unnecessary tags may be removed by clicking a cross near tag's name.{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html new file mode 100644 index 0000000..4359c9d --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html @@ -0,0 +1,5 @@ +
    + {% for tag in image.tags %} +
  • {{ tag }}
  • + {% endfor %} +
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html new file mode 100644 index 0000000..70460f4 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html @@ -0,0 +1,26 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}register_image_form{% endblock %} +{% block form_action %}{% url 'horizon:project:data_processing.data_image_registry:register' %}{% endblock %} + +{% block modal-header %}{% trans "Register Image" %}{% endblock %} + +{% block modal-body %} +
+
+ {% include "horizon/common/_form_fields.html" %} + {% include 'project/data_processing.data_image_registry/_tag_form.html' %} +
+
+
+ {% include 'project/data_processing.data_image_registry/_help.html' %} +
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_tag_form.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_tag_form.html new file mode 100644 index 0000000..5c56821 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_tag_form.html @@ -0,0 +1,123 @@ +{% load i18n %} +
+
+
+
{% trans "Register tags required for the Plugin with specified Data Processing Version" %}
+ + {% trans "Plugin" %} + {% trans "Version" %} + + + + + + + {% for plugin, version_dict in plugins.items %} +
+ +
+ {% endfor %} +
+ + + +
+ + + + + + + + + +
+ + diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html new file mode 100644 index 0000000..28b616e --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Edit Image Tags" %}{% endblock %} + +{% block main %} + {% include 'project/data_processing.data_image_registry/_edit_tags.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html new file mode 100644 index 0000000..5d0acc7 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html @@ -0,0 +1,24 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ image_registry_table.render }} +
+ + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html new file mode 100644 index 0000000..c78d065 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Register Image" %}{% endblock %} + +{% block main %} + {% include 'project/data_processing.data_image_registry/_register_image.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_image_registry/tests.py b/sahara_dashboard/content/data_processing/data_image_registry/tests.py new file mode 100644 index 0000000..a0b40f8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/tests.py @@ -0,0 +1,131 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa + +from openstack_dashboard import api as dash_api +from openstack_dashboard.test import helpers as test + +from sahara_dashboard import api + +INDEX_URL = reverse( + 'horizon:project:data_processing.data_image_registry:index') +REGISTER_URL = reverse( + 'horizon:project:data_processing.data_image_registry:register') + + +class DataProcessingImageRegistryTests(test.TestCase): + @test.create_stubs({api.sahara: ('image_list',)}) + def test_index(self): + api.sahara.image_list(IsA(http.HttpRequest)) \ + .AndReturn(self.images.list()) + self.mox.ReplayAll() + + res = self.client.get(INDEX_URL) + + self.assertTemplateUsed( + res, + 'project/data_processing.data_image_registry/image_registry.html') + self.assertContains(res, 'Image Registry') + self.assertContains(res, 'Image') + self.assertContains(res, 'Tags') + + @test.create_stubs({api.sahara: ('image_get', + 'image_update', + 'image_tags_update', + 'image_list'), + dash_api.glance: ('image_list_detailed',)}) + def test_register(self): + image = self.images.first() + image_id = image.id + test_username = 'myusername' + test_description = 'mydescription' + api.sahara.image_get(IsA(http.HttpRequest), + image_id).MultipleTimes().AndReturn(image) + dash_api.glance.image_list_detailed(IsA(http.HttpRequest), + filters={'owner': self.user.id, + 'status': 'active'}) \ + .AndReturn((self.images.list(), False, False)) + api.sahara.image_update(IsA(http.HttpRequest), + image_id, + test_username, + test_description) \ + .AndReturn(True) + api.sahara.image_tags_update(IsA(http.HttpRequest), + image_id, + {}) \ + .AndReturn(True) + api.sahara.image_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + self.mox.ReplayAll() + + res = self.client.post( + REGISTER_URL, + {'image_id': image_id, + 'user_name': test_username, + 'description': test_description, + 'tags_list': '{}'}) + + self.assertNoFormErrors(res) + 
self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('image_list', + 'image_unregister')}) + def test_unregister(self): + image = self.images.first() + api.sahara.image_list(IsA(http.HttpRequest)) \ + .AndReturn(self.images.list()) + api.sahara.image_unregister(IsA(http.HttpRequest), image.id) + self.mox.ReplayAll() + + form_data = {'action': 'image_registry__delete__%s' % image.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('image_get', + 'image_update', + 'image_tags_update')}) + def test_edit_tags(self): + image = self.registered_images.first() + api.sahara.image_get(IsA(http.HttpRequest), + image.id).MultipleTimes().AndReturn(image) + api.sahara.image_update(IsA(http.HttpRequest), + image.id, + image.username, + image.description) \ + .AndReturn(True) + api.sahara.image_tags_update(IsA(http.HttpRequest), + image.id, + {"0": "mytag"}) \ + .AndReturn(True) + self.mox.ReplayAll() + + edit_tags_url = reverse( + 'horizon:project:data_processing.data_image_registry:edit_tags', + args=[image.id]) + res = self.client.post( + edit_tags_url, + {'image_id': image.id, + 'user_name': image.username, + 'description': image.description, + 'tags_list': '{"0": "mytag"}'}) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) diff --git a/sahara_dashboard/content/data_processing/data_image_registry/urls.py b/sahara_dashboard/content/data_processing/data_image_registry/urls.py new file mode 100644 index 0000000..5f62977 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/urls.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content. \ + data_processing.data_image_registry.views as views + + +urlpatterns = patterns('', + url(r'^$', views.ImageRegistryView.as_view(), + name='index'), + url(r'^$', views.ImageRegistryView.as_view(), + name='image_registry'), + url(r'^edit_tags/(?P<image_id>[^/]+)/$', + views.EditTagsView.as_view(), + name='edit_tags'), + url(r'^register/$', + views.RegisterImageView.as_view(), + name='register'), + ) diff --git a/sahara_dashboard/content/data_processing/data_image_registry/views.py b/sahara_dashboard/content/data_processing/data_image_registry/views.py new file mode 100644 index 0000000..2d26a02 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_image_registry/views.py @@ -0,0 +1,129 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import logging + +from django.core.urlresolvers import reverse_lazy +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon import tables +from horizon.utils import memoized + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content. \ + data_processing.data_image_registry.forms import EditTagsForm +from sahara_dashboard.content. \ + data_processing.data_image_registry.forms import RegisterImageForm +from sahara_dashboard.content. \ + data_processing.data_image_registry.tables import ImageRegistryTable + + +LOG = logging.getLogger(__name__) + + +class ImageRegistryView(tables.DataTableView): + table_class = ImageRegistryTable + template_name = ( + 'project/data_processing.data_image_registry/image_registry.html') + page_title = _("Image Registry") + + def get_data(self): + try: + images = saharaclient.image_list(self.request) + except Exception: + images = [] + msg = _('Unable to retrieve image list') + exceptions.handle(self.request, msg) + return images + + +def update_context_with_plugin_tags(request, context): + try: + plugins = saharaclient.plugin_list(request) + except Exception: + plugins = [] + msg = _("Unable to process plugin tags") + exceptions.handle(request, msg) + + plugins_object = dict() + for plugin in plugins: + plugins_object[plugin.name] = dict() + for version in plugin.versions: + try: + details = saharaclient. 
\ + plugin_get_version_details(request, + plugin.name, + version) + plugins_object[plugin.name][version] = ( + details.required_image_tags) + except Exception: + msg = _("Unable to process plugin tags") + exceptions.handle(request, msg) + + context["plugins"] = plugins_object + + +class EditTagsView(forms.ModalFormView): + form_class = EditTagsForm + template_name = ( + 'project/data_processing.data_image_registry/edit_tags.html') + success_url = reverse_lazy( + 'horizon:project:data_processing.data_image_registry:index') + page_title = _("Edit Image Tags") + + def get_context_data(self, **kwargs): + context = super(EditTagsView, self).get_context_data(**kwargs) + context['image'] = self.get_object() + update_context_with_plugin_tags(self.request, context) + return context + + @memoized.memoized_method + def get_object(self): + try: + image = saharaclient.image_get(self.request, + self.kwargs["image_id"]) + except Exception: + image = None + msg = _("Unable to fetch the image details") + exceptions.handle(self.request, msg) + return image + + def get_initial(self): + image = self.get_object() + + return {"image_id": image.id, + "tags_list": json.dumps(image.tags), + "user_name": image.username, + "description": image.description} + + +class RegisterImageView(forms.ModalFormView): + form_class = RegisterImageForm + template_name = ( + 'project/data_processing.data_image_registry/register_image.html') + success_url = reverse_lazy( + 'horizon:project:data_processing.data_image_registry:index') + page_title = _("Register Image") + + def get_context_data(self, **kwargs): + context = super(RegisterImageView, self).get_context_data(**kwargs) + update_context_with_plugin_tags(self.request, context) + return context + + def get_initial(self): + # need this initialization to allow registration + # of images without tags + return {"tags_list": json.dumps([])} diff --git a/sahara_dashboard/content/data_processing/data_plugins/__init__.py 
b/sahara_dashboard/content/data_processing/data_plugins/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/data_plugins/panel.py b/sahara_dashboard/content/data_processing/data_plugins/panel.py new file mode 100644 index 0000000..72cf7f6 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class PluginsPanel(horizon.Panel): + name = _("Plugins") + slug = 'data_processing.data_plugins' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(PluginsPanel) diff --git a/sahara_dashboard/content/data_processing/data_plugins/tables.py b/sahara_dashboard/content/data_processing/data_plugins/tables.py new file mode 100644 index 0000000..15e0116 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/tables.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.template import defaultfilters as filters +from django.utils.translation import ugettext_lazy as _ + +from horizon import tables + +LOG = logging.getLogger(__name__) + + +class PluginsTable(tables.DataTable): + title = tables.Column("title", + verbose_name=_("Title"), + link=("horizon:project:data_processing." + "data_plugins:details")) + + versions = tables.Column("versions", + verbose_name=_("Supported Versions"), + wrap_list=True, + filters=(filters.unordered_list,)) + + description = tables.Column("description", + verbose_name=_("Description")) + + class Meta(object): + name = "plugins" + verbose_name = _("Plugins") diff --git a/sahara_dashboard/content/data_processing/data_plugins/tabs.py b/sahara_dashboard/content/data_processing/data_plugins/tabs.py new file mode 100644 index 0000000..e309d07 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/tabs.py @@ -0,0 +1,46 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tabs +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class DetailsTab(tabs.Tab): + name = _("Details") + slug = "plugin_details_tab" + template_name = ("project/data_processing.data_plugins/_details.html") + + def get_context_data(self, request): + plugin_id = self.tab_group.kwargs['plugin_id'] + plugin = None + try: + plugin = saharaclient.plugin_get(request, plugin_id) + except Exception as e: + LOG.error("Unable to get plugin with plugin_id %s (%s)" % + (plugin_id, str(e))) + exceptions.handle(self.tab_group.request, + _('Unable to retrieve plugin.')) + return {"plugin": plugin} + + +class PluginDetailsTabs(tabs.TabGroup): + slug = "cluster_details" + tabs = (DetailsTab,) + sticky = True diff --git a/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html new file mode 100644 index 0000000..1082fbf --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html @@ -0,0 +1,20 @@ +{% load i18n %} + +
+
+
{% trans "Name" %}
+
{{ plugin.name }}
+
{% trans "Title" %}
+
{{ plugin.title }}
+
{% trans "Description" %}
+
{{ plugin.description }}
+
{% trans "Supported Versions" %}
+
+
    + {% for version in plugin.versions %} +
  • {{ version }}
  • + {% endfor %} +
+
+
+
diff --git a/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html new file mode 100644 index 0000000..ac74f8f --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html @@ -0,0 +1,11 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ plugins_table.render }} +
+ +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_plugins/tests.py b/sahara_dashboard/content/data_processing/data_plugins/tests.py new file mode 100644 index 0000000..14091d7 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/tests.py @@ -0,0 +1,49 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from openstack_dashboard.test import helpers as test +import six + +from sahara_dashboard import api + + +INDEX_URL = reverse( + 'horizon:project:data_processing.data_plugins:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.data_plugins:details', args=['id']) + + +class DataProcessingPluginsTests(test.TestCase): + @test.create_stubs({api.sahara: ('plugin_list',)}) + def test_index(self): + api.sahara.plugin_list(IsA(http.HttpRequest)) \ + .AndReturn(self.plugins.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed( + res, 'project/data_processing.data_plugins/plugins.html') + self.assertContains(res, 'vanilla') + self.assertContains(res, 'plugin') + + @test.create_stubs({api.sahara: ('plugin_get',)}) + def test_details(self): + api.sahara.plugin_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .AndReturn(self.plugins.list()[0]) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + 
self.assertContains(res, 'vanilla') + self.assertContains(res, 'plugin') diff --git a/sahara_dashboard/content/data_processing/data_plugins/urls.py b/sahara_dashboard/content/data_processing/data_plugins/urls.py new file mode 100644 index 0000000..60251b8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/urls.py @@ -0,0 +1,25 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.conf.urls import patterns +from django.conf.urls import url + +from sahara_dashboard.content.\ + data_processing.data_plugins import views + + +urlpatterns = patterns('', + url(r'^$', views.PluginsView.as_view(), name='index'), + url(r'^(?P<plugin_id>[^/]+)$', + views.PluginDetailsView.as_view(), name='details'), + ) diff --git a/sahara_dashboard/content/data_processing/data_plugins/views.py b/sahara_dashboard/content/data_processing/data_plugins/views.py new file mode 100644 index 0000000..eabf7e7 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_plugins/views.py @@ -0,0 +1,49 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tables +from horizon import tabs + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. \ + data_plugins.tables as p_tables +import sahara_dashboard.content.data_processing. \ + data_plugins.tabs as p_tabs + +LOG = logging.getLogger(__name__) + + +class PluginsView(tables.DataTableView): + table_class = p_tables.PluginsTable + template_name = 'project/data_processing.data_plugins/plugins.html' + page_title = _("Data Processing Plugins") + + def get_data(self): + try: + plugins = saharaclient.plugin_list(self.request) + except Exception: + plugins = [] + msg = _('Unable to retrieve data processing plugins.') + exceptions.handle(self.request, msg) + return plugins + + +class PluginDetailsView(tabs.TabView): + tab_group_class = p_tabs.PluginDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = _("Data Processing Plugin Details") diff --git a/sahara_dashboard/content/data_processing/data_sources/__init__.py b/sahara_dashboard/content/data_processing/data_sources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/data_sources/panel.py b/sahara_dashboard/content/data_processing/data_sources/panel.py new file mode 100644 index 0000000..7a265ef --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class DataSourcesPanel(horizon.Panel): + name = _("Data Sources") + slug = 'data_processing.data_sources' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(DataSourcesPanel) diff --git a/sahara_dashboard/content/data_processing/data_sources/tables.py b/sahara_dashboard/content/data_processing/data_sources/tables.py new file mode 100644 index 0000000..bf636ef --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/tables.py @@ -0,0 +1,78 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import tables + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class CreateDataSource(tables.LinkAction): + name = "create data source" + verbose_name = _("Create Data Source") + url = "horizon:project:data_processing.data_sources:create-data-source" + classes = ("ajax-modal",) + icon = "plus" + + +class DeleteDataSource(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Data Source", + u"Delete Data Sources", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Data Source", + u"Deleted Data Sources", + count + ) + + def delete(self, request, obj_id): + saharaclient.data_source_delete(request, obj_id) + + +class EditDataSource(tables.LinkAction): + name = "edit data source" + verbose_name = _("Edit Data Source") + url = "horizon:project:data_processing.data_sources:edit-data-source" + classes = ("ajax-modal",) + + +class DataSourcesTable(tables.DataTable): + name = tables.Column("name", + verbose_name=_("Name"), + link=("horizon:project:data_processing." 
+ "data_sources:details")) + type = tables.Column("type", + verbose_name=_("Type")) + description = tables.Column("description", + verbose_name=_("Description")) + + class Meta(object): + name = "data_sources" + verbose_name = _("Data Sources") + table_actions = (CreateDataSource, + DeleteDataSource) + row_actions = (DeleteDataSource, + EditDataSource,) diff --git a/sahara_dashboard/content/data_processing/data_sources/tabs.py b/sahara_dashboard/content/data_processing/data_sources/tabs.py new file mode 100644 index 0000000..0034f9e --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/tabs.py @@ -0,0 +1,44 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import tabs + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class GeneralTab(tabs.Tab): + name = _("General Info") + slug = "data_source_details_tab" + template_name = ("project/data_processing.data_sources/_details.html") + + def get_context_data(self, request): + data_source_id = self.tab_group.kwargs['data_source_id'] + try: + data_source = saharaclient.data_source_get(request, data_source_id) + except Exception as e: + data_source = {} + LOG.error("Unable to fetch data source details: %s" % str(e)) + + return {"data_source": data_source} + + +class DataSourceDetailsTabs(tabs.TabGroup): + slug = "data_source_details" + tabs = (GeneralTab,) + sticky = True diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html new file mode 100644 index 0000000..5f1610c --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html @@ -0,0 +1,15 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}Create a Data Source with a specified name.{% endblocktrans %} +

+

+ {% blocktrans %}Select the type of your Data Source.{% endblocktrans %} +

+

+ {% blocktrans %}You may need to enter the username and password for your Data Source.{% endblocktrans %} +

+

+ {% blocktrans %}You may also enter an optional description for your Data Source.{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html new file mode 100644 index 0000000..f06be54 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html @@ -0,0 +1,18 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Name" %}
+
{{ data_source.name }}
+
{% trans "ID" %}
+
{{ data_source.id }}
+
{% trans "Type" %}
+
{{ data_source.type }}
+
{% trans "URL" %}
+
{{ data_source.url }}
+
{% trans "Description" %}
+
{{ data_source.description|default:_("None") }}
+
{% trans "Create time" %}
+
{{ data_source.created_at }}
+
+
diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html new file mode 100644 index 0000000..00a7d0a --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Create Data Source" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html new file mode 100644 index 0000000..21b43bc --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html @@ -0,0 +1,11 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ data_sources_table.render }} +
+ +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/data_sources/tests.py b/sahara_dashboard/content/data_processing/data_sources/tests.py new file mode 100644 index 0000000..0a090e1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/tests.py @@ -0,0 +1,124 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from openstack_dashboard.test import helpers as test +import six + +from sahara_dashboard import api + +INDEX_URL = reverse('horizon:project:data_processing.data_sources:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.data_sources:details', args=['id']) +CREATE_URL = reverse( + 'horizon:project:data_processing.data_sources:create-data-source') +EDIT_URL = reverse( + 'horizon:project:data_processing.data_sources:edit-data-source', + args=['id']) + + +class DataProcessingDataSourceTests(test.TestCase): + @test.create_stubs({api.sahara: ('data_source_list',)}) + def test_index(self): + api.sahara.data_source_list(IsA(http.HttpRequest)) \ + .AndReturn(self.data_sources.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed( + res, 'project/data_processing.data_sources/data_sources.html') + self.assertContains(res, 'Data Sources') + self.assertContains(res, 'Name') + self.assertContains(res, 'sampleOutput') + self.assertContains(res, 'sampleOutput2') + + 
@test.create_stubs({api.sahara: ('data_source_get',)}) + def test_details(self): + api.sahara.data_source_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .MultipleTimes().AndReturn(self.data_sources.first()) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + self.assertContains(res, 'sampleOutput') + + @test.create_stubs({api.sahara: ('data_source_list', + 'data_source_delete')}) + def test_delete(self): + data_source = self.data_sources.first() + api.sahara.data_source_list(IsA(http.HttpRequest)) \ + .AndReturn(self.data_sources.list()) + api.sahara.data_source_delete(IsA(http.HttpRequest), data_source.id) + self.mox.ReplayAll() + + form_data = {'action': 'data_sources__delete__%s' % data_source.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('data_source_create',)}) + def test_create(self): + data_source = self.data_sources.first() + api.sahara.data_source_create(IsA(http.HttpRequest), + data_source.name, + data_source.description, + data_source.type, + data_source.url, + "", + "") \ + .AndReturn(self.data_sources.first()) + self.mox.ReplayAll() + form_data = { + 'data_source_url': data_source.url, + 'data_source_name': data_source.name, + 'data_source_description': data_source.description, + 'data_source_type': data_source.type + } + res = self.client.post(CREATE_URL, form_data) + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('data_source_update', + 'data_source_get',)}) + def test_edit(self): + data_source = self.data_sources.first() + api_data = { + 'url': data_source.url, + 'credentials': {'user': '', 'pass': ''}, + 'type': data_source.type, + 'name': data_source.name, + 'description': data_source.description + } + 
api.sahara.data_source_get(IsA(http.HttpRequest), + IsA(six.text_type)) \ + .AndReturn(self.data_sources.first()) + api.sahara.data_source_update(IsA(http.HttpRequest), + IsA(six.text_type), + api_data) \ + .AndReturn(self.data_sources.first()) + self.mox.ReplayAll() + + form_data = { + 'data_source_url': data_source.url, + 'data_source_name': data_source.name, + 'data_source_description': data_source.description, + 'data_source_type': data_source.type + } + res = self.client.post(EDIT_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) diff --git a/sahara_dashboard/content/data_processing/data_sources/urls.py b/sahara_dashboard/content/data_processing/data_sources/urls.py new file mode 100644 index 0000000..33bda03 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/urls.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content.data_processing. 
\ + data_sources.views as views + + +urlpatterns = patterns('', + url(r'^$', views.DataSourcesView.as_view(), + name='index'), + url(r'^$', views.DataSourcesView.as_view(), + name='data-sources'), + url(r'^create-data-source$', + views.CreateDataSourceView.as_view(), + name='create-data-source'), + url(r'^(?P<data_source_id>[^/]+)/edit$', + views.EditDataSourceView.as_view(), + name='edit-data-source'), + url(r'^(?P<data_source_id>[^/]+)$', + views.DataSourceDetailsView.as_view(), + name='details')) diff --git a/sahara_dashboard/content/data_processing/data_sources/views.py b/sahara_dashboard/content/data_processing/data_sources/views.py new file mode 100644 index 0000000..3a5ac9f --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/views.py @@ -0,0 +1,99 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient + +import sahara_dashboard.content.data_processing. \ + data_sources.tables as ds_tables +import sahara_dashboard.content.data_processing. \ + data_sources.tabs as _tabs +import sahara_dashboard.content.data_processing. \ + data_sources.workflows.create as create_flow +import sahara_dashboard.content.data_processing.
\ + data_sources.workflows.edit as edit_flow + +LOG = logging.getLogger(__name__) + + +class DataSourcesView(tables.DataTableView): + table_class = ds_tables.DataSourcesTable + template_name = 'project/data_processing.data_sources/data_sources.html' + page_title = _("Data Sources") + + def get_data(self): + try: + data_sources = saharaclient.data_source_list(self.request) + except Exception: + data_sources = [] + exceptions.handle(self.request, + _("Unable to fetch data sources.")) + return data_sources + + +class CreateDataSourceView(workflows.WorkflowView): + workflow_class = create_flow.CreateDataSource + success_url = \ + "horizon:project:data_processing.data-sources:create-data-source" + classes = ("ajax-modal",) + template_name = "project/data_processing.data_sources/create.html" + page_title = _("Create Data Source") + + +class EditDataSourceView(CreateDataSourceView): + workflow_class = edit_flow.EditDataSource + page_title = _("Edit Data Source") + + def get_context_data(self, **kwargs): + context = super(EditDataSourceView, self) \ + .get_context_data(**kwargs) + + context["data_source_id"] = kwargs["data_source_id"] + return context + + def get_initial(self): + initial = super(EditDataSourceView, self).get_initial() + initial['data_source_id'] = self.kwargs['data_source_id'] + return initial + + +class DataSourceDetailsView(tabs.TabView): + tab_group_class = _tabs.DataSourceDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = "{{ data_source.name|default:data_source.id }}" + + @memoized.memoized_method + def get_object(self): + ds_id = self.kwargs["data_source_id"] + try: + return saharaclient.data_source_get(self.request, ds_id) + except Exception: + msg = _('Unable to retrieve details for data source "%s".') % ds_id + redirect = reverse( + "horizon:project:data_processing.data_sources:data-sources") + exceptions.handle(self.request, msg, redirect=redirect) + + def get_context_data(self, **kwargs): + context = 
super(DataSourceDetailsView, self).get_context_data(**kwargs) + context['data_source'] = self.get_object() + return context diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/__init__.py b/sahara_dashboard/content/data_processing/data_sources/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/create.py b/sahara_dashboard/content/data_processing/data_sources/workflows/create.py new file mode 100644 index 0000000..ffd8fb9 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/workflows/create.py @@ -0,0 +1,121 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing \ + .utils import helpers + +LOG = logging.getLogger(__name__) + + +class GeneralConfigAction(workflows.Action): + data_source_name = forms.CharField(label=_("Name")) + + data_source_type = forms.ChoiceField( + label=_("Data Source Type"), + choices=[("swift", "Swift"), ("hdfs", "HDFS"), ("maprfs", "MapR FS")], + widget=forms.Select(attrs={ + "class": "switchable", + "data-slug": "ds_type" + })) + + data_source_url = forms.CharField(label=_("URL")) + + data_source_credential_user = forms.CharField( + label=_("Source username"), + required=False, + widget=forms.TextInput(attrs={ + "class": "switched", + "data-switch-on": "ds_type", + "data-ds_type-swift": _("Source username") + })) + + data_source_credential_pass = forms.CharField( + widget=forms.PasswordInput(attrs={ + 'class': 'switched', + 'data-switch-on': 'ds_type', + 'data-ds_type-swift': _("Source password"), + 'autocomplete': 'off' + }), + label=_("Source password"), + required=False) + + data_source_description = forms.CharField( + label=_("Description"), + required=False, + widget=forms.Textarea(attrs={'rows': 4})) + + def __init__(self, request, *args, **kwargs): + super(GeneralConfigAction, self).__init__(request, *args, **kwargs) + + class Meta(object): + name = _("Create Data Source") + help_text_template = ("project/data_processing.data_sources/" + "_create_data_source_help.html") + + +class GeneralConfig(workflows.Step): + action_class = GeneralConfigAction + + def contribute(self, data, context): + for k, v in data.items(): + context["general_" + k] = v + + context["source_url"] = context["general_data_source_url"] + + if context["general_data_source_type"] == "swift": + if not 
context["general_data_source_url"].startswith("swift://"): + context["source_url"] = "swift://{0}".format( + context["general_data_source_url"]) + + return context + + +class CreateDataSource(workflows.Workflow): + slug = "create_data_source" + name = _("Create Data Source") + finalize_button_name = _("Create") + success_message = _("Data source created") + failure_message = _("Could not create data source") + success_url = "horizon:project:data_processing.data_sources:index" + default_steps = (GeneralConfig, ) + + def handle(self, request, context): + try: + self.object = saharaclient.data_source_create( + request, + context["general_data_source_name"], + context["general_data_source_description"], + context["general_data_source_type"], + context["source_url"], + context.get("general_data_source_credential_user", None), + context.get("general_data_source_credential_pass", None)) + + hlps = helpers.Helpers(request) + if hlps.is_from_guide(): + request.session["guide_datasource_id"] = self.object.id + request.session["guide_datasource_name"] = self.object.name + self.success_url = ( + "horizon:project:data_processing.wizard:jobex_guide") + return True + except Exception: + exceptions.handle(request) + return False diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py b/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py new file mode 100644 index 0000000..4a11e90 --- /dev/null +++ b/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py @@ -0,0 +1,79 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing \ + .data_sources.workflows import create + +LOG = logging.getLogger(__name__) + + +class EditDataSource(create.CreateDataSource): + slug = "edit_data_source" + name = _("Edit Data Source") + finalize_button_name = _("Update") + success_message = _("Data source updated") + failure_message = _("Could not update data source") + success_url = "horizon:project:data_processing.data_sources:index" + default_steps = (create.GeneralConfig,) + + FIELD_MAP = { + "data_source_name": "name", + "data_source_type": "type", + "data_source_description": "description", + "data_source_url": "url", + "data_source_credential_user": None, + "data_source_credential_pass": None, + } + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + self.data_source_id = context_seed["data_source_id"] + data_source = saharaclient.data_source_get(request, + self.data_source_id) + super(EditDataSource, self).__init__(request, context_seed, + entry_point, *args, **kwargs) + for step in self.steps: + if isinstance(step, create.GeneralConfig): + fields = step.action.fields + for field in fields: + if self.FIELD_MAP[field]: + fields[field].initial = getattr(data_source, + self.FIELD_MAP[field], + None) + + def handle(self, request, context): + try: + update_data = { + "name": context["general_data_source_name"], + "description": context["general_data_source_description"], + "type": context["general_data_source_type"], + "url": context["source_url"], + "credentials": { + "user": context.get("general_data_source_credential_user", + None), + "pass": context.get("general_data_source_credential_pass", + None) + } + } + return saharaclient.data_source_update(request, + 
self.data_source_id, + update_data) + except Exception: + exceptions.handle(request) + return False diff --git a/sahara_dashboard/content/data_processing/job_binaries/__init__.py b/sahara_dashboard/content/data_processing/job_binaries/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/job_binaries/forms.py b/sahara_dashboard/content/data_processing/job_binaries/forms.py new file mode 100644 index 0000000..a0a012b --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/forms.py @@ -0,0 +1,311 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import uuid + +from django.forms import widgets +from django import template +from django.template import defaultfilters +from django.utils.encoding import force_text +from django.utils.safestring import mark_safe +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon import messages + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class LabeledInput(widgets.TextInput): + def render(self, name, values, attrs=None): + input = super(LabeledInput, self).render(name, values, attrs) + label = "<span id='%s'>%s</span>" %\ + ("id_%s_label" % name, + "swift://") + result = "%s%s" % (label, input) + return mark_safe(result) + + +class JobBinaryCreateForm(forms.SelfHandlingForm): + NEW_SCRIPT = "newscript" + UPLOAD_BIN = "uploadfile" + action_url = ('horizon:project:data_processing.' + 'job_binaries:create-job-binary') + + def __init__(self, request, *args, **kwargs): + super(JobBinaryCreateForm, self).__init__(request, *args, **kwargs) + + self.help_text_template = ("project/data_processing.job_binaries/" + "_create_job_binary_help.html") + + self.fields["job_binary_name"] = forms.CharField(label=_("Name")) + + self.fields["job_binary_type"] = forms.ChoiceField( + label=_("Storage type"), + widget=forms.Select( + attrs={ + 'class': 'switchable', + 'data-slug': 'jb_type' + })) + + self.fields["job_binary_url"] = forms.CharField( + label=_("URL"), + required=False, + widget=LabeledInput( + attrs={ + 'class': 'switched', + 'data-switch-on': 'jb_type', + 'data-jb_type-swift': _('URL') + })) + + self.fields["job_binary_internal"] = forms.ChoiceField( + label=_("Internal binary"), + required=False, + widget=forms.Select( + attrs={ + 'class': 'switched switchable', + 'data-slug': 'jb_internal', + 'data-switch-on': 'jb_type', + 'data-jb_type-internal-db': _('Internal Binary') + })) + + self.fields["job_binary_file"] = forms.FileField( + label=_("Upload
File"), + required=False, + widget=forms.ClearableFileInput( + attrs={ + 'class': 'switched', + 'data-switch-on': 'jb_internal', + 'data-jb_internal-uploadfile': _("Upload File") + })) + + self.fields["job_binary_script_name"] = forms.CharField( + label=_("Script name"), + required=False, + widget=forms.TextInput( + attrs={ + 'class': 'switched', + 'data-switch-on': 'jb_internal', + 'data-jb_internal-newscript': _("Script name") + })) + + self.fields["job_binary_script"] = forms.CharField( + label=_("Script text"), + required=False, + widget=forms.Textarea( + attrs={ + 'rows': 4, + 'class': 'switched', + 'data-switch-on': 'jb_internal', + 'data-jb_internal-newscript': _("Script text") + })) + + self.fields["job_binary_username"] = forms.CharField( + label=_("Username"), + required=False, + widget=forms.TextInput( + attrs={ + 'class': 'switched', + 'data-switch-on': 'jb_type', + 'data-jb_type-swift': _('Username') + })) + + self.fields["job_binary_password"] = forms.CharField( + label=_("Password"), + required=False, + widget=forms.PasswordInput( + attrs={ + 'autocomplete': 'off', + 'class': 'switched', + 'data-switch-on': 'jb_type', + 'data-jb_type-swift': _('Password') + })) + + self.fields["job_binary_description"] = ( + forms.CharField(label=_("Description"), + required=False, + widget=forms.Textarea())) + + self.fields["job_binary_type"].choices =\ + [("internal-db", "Internal database"), + ("swift", "Swift")] + + self.fields["job_binary_internal"].choices =\ + self.populate_job_binary_internal_choices(request) + + self.load_form_values() + + def load_form_values(self): + if "job_binary" in self.initial: + jb = self.initial["job_binary"] + for field in self.fields: + if self.FIELD_MAP[field]: + if field == "job_binary_url": + url = getattr(jb, self.FIELD_MAP[field], None) + (type, loc) = url.split("://") + self.fields['job_binary_type'].initial = type + self.fields[field].initial = loc + else: + self.fields[field].initial = ( + getattr(jb, 
self.FIELD_MAP[field], None)) + + def populate_job_binary_internal_choices(self, request): + try: + job_binaries = saharaclient.job_binary_internal_list(request) + except Exception: + exceptions.handle(request, + _("Failed to get list of internal binaries.")) + job_binaries = [] + + choices = [(job_binary.id, job_binary.name) + for job_binary in job_binaries] + choices.insert(0, (self.NEW_SCRIPT, '*Create a script')) + choices.insert(0, (self.UPLOAD_BIN, '*Upload a new file')) + + return choices + + def handle(self, request, context): + try: + extra = {} + bin_url = "%s://%s" % (context["job_binary_type"], + context["job_binary_url"]) + if(context["job_binary_type"] == "internal-db"): + bin_url = self.handle_internal(request, context) + elif(context["job_binary_type"] == "swift"): + extra = self.handle_swift(request, context) + + bin_object = saharaclient.job_binary_create( + request, + context["job_binary_name"], + bin_url, + context["job_binary_description"], + extra) + messages.success(request, "Successfully created job binary") + return bin_object + except Exception: + exceptions.handle(request, + _("Unable to create job binary")) + return False + + def get_help_text(self, extra_context=None): + text = "" + extra_context = extra_context or {} + if self.help_text_template: + tmpl = template.loader.get_template(self.help_text_template) + context = template.RequestContext(self.request, extra_context) + text += tmpl.render(context) + else: + text += defaultfilters.linebreaks(force_text(self.help_text)) + return defaultfilters.safe(text) + + class Meta(object): + name = _("Create Job Binary") + help_text_template = ("project/data_processing.job_binaries/" + "_create_job_binary_help.html") + + def handle_internal(self, request, context): + result = "" + + bin_id = context["job_binary_internal"] + if(bin_id == self.UPLOAD_BIN): + try: + result = saharaclient.job_binary_internal_create( + request, + self.get_unique_binary_name( + request, 
request.FILES["job_binary_file"].name), + request.FILES["job_binary_file"].read()) + bin_id = result.id + except Exception: + exceptions.handle(request, + _("Unable to upload job binary")) + return None + elif(bin_id == self.NEW_SCRIPT): + try: + result = saharaclient.job_binary_internal_create( + request, + self.get_unique_binary_name( + request, context["job_binary_script_name"]), + context["job_binary_script"]) + bin_id = result.id + except Exception: + exceptions.handle(request, + _("Unable to create job binary")) + return None + + return "internal-db://%s" % bin_id + + def handle_swift(self, request, context): + username = context["job_binary_username"] + password = context["job_binary_password"] + + extra = { + "user": username, + "password": password + } + return extra + + def get_unique_binary_name(self, request, base_name): + try: + internals = saharaclient.job_binary_internal_list(request) + except Exception: + internals = [] + exceptions.handle(request, + _("Failed to fetch internal binary list")) + names = [internal.name for internal in internals] + if base_name in names: + return "%s_%s" % (base_name, uuid.uuid1()) + return base_name + + +class JobBinaryEditForm(JobBinaryCreateForm): + FIELD_MAP = { + 'job_binary_description': 'description', + 'job_binary_file': None, + 'job_binary_internal': None, + 'job_binary_name': 'name', + 'job_binary_password': None, + 'job_binary_script': None, + 'job_binary_script_name': None, + 'job_binary_type': None, + 'job_binary_url': 'url', + 'job_binary_username': None, + } + + def handle(self, request, context): + try: + extra = {} + bin_url = "%s://%s" % (context["job_binary_type"], + context["job_binary_url"]) + if (context["job_binary_type"] == "swift"): + extra = self.handle_swift(request, context) + + update_data = { + "name": context["job_binary_name"], + "description": context["job_binary_description"], + "extra": extra, + "url": bin_url, + } + + bin_object = saharaclient.job_binary_update( + request, 
self.initial["job_binary"].id, update_data) + + messages.success(request, "Successfully updated job binary") + return bin_object + except Exception: + exceptions.handle(request, + _("Unable to update job binary")) + return False diff --git a/sahara_dashboard/content/data_processing/job_binaries/panel.py b/sahara_dashboard/content/data_processing/job_binaries/panel.py new file mode 100644 index 0000000..69eb98e --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class JobBinariesPanel(horizon.Panel): + name = _("Job Binaries") + slug = 'data_processing.job_binaries' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(JobBinariesPanel) diff --git a/sahara_dashboard/content/data_processing/job_binaries/tables.py b/sahara_dashboard/content/data_processing/job_binaries/tables.py new file mode 100644 index 0000000..8a30fbe --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/tables.py @@ -0,0 +1,98 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import tables + +from sahara_dashboard.api import sahara as saharaclient + +from saharaclient.api import base as api_base + + +LOG = logging.getLogger(__name__) + + +class CreateJobBinary(tables.LinkAction): + name = "create job binary" + verbose_name = _("Create Job Binary") + url = "horizon:project:data_processing.job_binaries:create-job-binary" + classes = ("ajax-modal",) + icon = "plus" + + +class DeleteJobBinary(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Job Binary", + u"Delete Job Binaries", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Job Binary", + u"Deleted Job Binaries", + count + ) + + def delete(self, request, obj_id): + jb = saharaclient.job_binary_get(request, obj_id) + (jb_type, jb_internal_id) = jb.url.split("://") + if jb_type == "internal-db": + try: + saharaclient.job_binary_internal_delete(request, + jb_internal_id) + except api_base.APIException: + # nothing to do for job-binary-internal if + # it does not exist. 
+ pass + + saharaclient.job_binary_delete(request, obj_id) + + +class DownloadJobBinary(tables.LinkAction): + name = "download job binary" + verbose_name = _("Download Job Binary") + url = "horizon:project:data_processing.job_binaries:download" + classes = ("btn-edit",) + + +class EditJobBinary(tables.LinkAction): + name = "edit job binary" + verbose_name = _("Edit Job Binary") + url = "horizon:project:data_processing.job_binaries:edit-job-binary" + classes = ("btn-edit", "ajax-modal",) + + +class JobBinariesTable(tables.DataTable): + name = tables.Column( + "name", + verbose_name=_("Name"), + link="horizon:project:data_processing.job_binaries:details") + type = tables.Column("url", + verbose_name=_("Url")) + description = tables.Column("description", + verbose_name=_("Description")) + + class Meta(object): + name = "job_binaries" + verbose_name = _("Job Binaries") + table_actions = (CreateJobBinary, + DeleteJobBinary) + row_actions = (DeleteJobBinary, DownloadJobBinary, EditJobBinary) diff --git a/sahara_dashboard/content/data_processing/job_binaries/tabs.py b/sahara_dashboard/content/data_processing/job_binaries/tabs.py new file mode 100644 index 0000000..bc462b8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/tabs.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import tabs + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class JobBinaryDetailsTab(tabs.Tab): + name = _("General Info") + slug = "job_binaries_details_tab" + template_name = ("project/data_processing.job_binaries/_details.html") + + def get_context_data(self, request): + job_binary_id = self.tab_group.kwargs['job_binary_id'] + try: + job_binary = saharaclient.job_binary_get(request, job_binary_id) + except Exception as e: + job_binary = {} + LOG.error("Unable to fetch job binary details: %s" % str(e)) + return {"job_binary": job_binary} + + +class JobBinaryDetailsTabs(tabs.TabGroup): + slug = "job_binary_details" + tabs = (JobBinaryDetailsTab,) + sticky = True diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html new file mode 100644 index 0000000..888c001 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html @@ -0,0 +1,26 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}create-job-binary{% endblock %} +{% block form_action %}{{ submit_url }}{% endblock %} +{% block form_attrs %}enctype="multipart/form-data"{% endblock %} + +{% block modal-header %}{{ page_title }}{% endblock %} + +{% block modal-body %} +
+
+ {% include "horizon/common/_form_fields.html" %} +
+
+
+ {{ form.get_help_text }} +
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create_job_binary_help.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create_job_binary_help.html new file mode 100644 index 0000000..fd2a877 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create_job_binary_help.html @@ -0,0 +1,32 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}Important: The name that you give your job binary will be the name used in your job execution. + If your binary requires a particular name or extension (ie: ".jar"), be sure to include it here.{% endblocktrans %} +

+

+ {% blocktrans %}Select the storage type for your job binary.{% endblocktrans %} +

    +
  • {% blocktrans %}Data Processing internal database{% endblocktrans %}
  • +
  • {% blocktrans %}Swift{% endblocktrans %}
  • +
+

+

+ {% blocktrans %}For Data Processing internal job binaries, you may choose from the following:{% endblocktrans %} +

    +
  • {% blocktrans %}Choose an existing file{% endblocktrans %}
  • +
  • {% blocktrans %}Upload a new file{% endblocktrans %}
  • +
  • {% blocktrans %}Create a script to be uploaded dynamically{% endblocktrans %}
+ +

+

+ {% blocktrans %}For Object Store job binaries, you must:{% endblocktrans %} +

    +
  • {% blocktrans %}Enter the URL for the file{% endblocktrans %}
  • +
  • {% blocktrans %}Enter the username and password required to access that file{% endblocktrans %}
  • +
+

+

+ {% blocktrans %}You may also enter an optional description for your job binary.{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_details.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_details.html new file mode 100644 index 0000000..b0e0933 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_details.html @@ -0,0 +1,17 @@ +{% load i18n %} + +
+
+
{% trans "Name" %}
+
{{ job_binary.name }}
+
{% trans "ID" %}
+
{{ job_binary.id }}
+
{% trans "URL" %}
+
{{ job_binary.url }}
+
{% trans "Description" %}
+
{{ job_binary.description|default:_("None") }}
+
{% trans "Create time" %}
+
{{ job_binary.created_at|parse_isotime }}
+
+ {% trans "Download job binary" %} +
diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/create.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/create.html new file mode 100644 index 0000000..f160212 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/create.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Create Job Binary" %}{% endblock %} + +{% block main %} + {% include 'project/data_processing.job_binaries/_create.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/job_binaries.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/job_binaries.html new file mode 100644 index 0000000..616d89c --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/job_binaries.html @@ -0,0 +1,19 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + + +
+ {{ job_binaries_table.render }} +
+ +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/job_binaries/tests.py b/sahara_dashboard/content/data_processing/job_binaries/tests.py new file mode 100644 index 0000000..b1752af --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/tests.py @@ -0,0 +1,125 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from openstack_dashboard.test import helpers as test +import six + +from sahara_dashboard import api + + +INDEX_URL = reverse('horizon:project:data_processing.job_binaries:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.job_binaries:details', args=['id']) +EDIT_URL = reverse('horizon:project:data_processing.job_binaries' + ':edit-job-binary', args=['id']) + + +class DataProcessingJobBinaryTests(test.TestCase): + @test.create_stubs({api.sahara: ('job_binary_list',)}) + def test_index(self): + api.sahara.job_binary_list(IsA(http.HttpRequest)) \ + .AndReturn(self.job_binaries.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed( + res, 'project/data_processing.job_binaries/job_binaries.html') + self.assertContains(res, 'Job Binaries') + self.assertContains(res, 'Name') + self.assertContains(res, 'example.pig') + + @test.create_stubs({api.sahara: ('job_binary_get',)}) + def test_details(self): + api.sahara.job_binary_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + 
.MultipleTimes().AndReturn(self.job_binaries.first()) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + + @test.create_stubs({api.sahara: ('job_binary_list', + 'job_binary_get', + 'job_binary_internal_delete', + 'job_binary_delete',)}) + def test_delete(self): + jb_list = (api.sahara.job_binary_list(IsA(http.HttpRequest)) + .AndReturn(self.job_binaries.list())) + api.sahara.job_binary_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .AndReturn(self.job_binaries.list()[0]) + api.sahara.job_binary_delete(IsA(http.HttpRequest), jb_list[0].id) + int_id = jb_list[0].url.split("//")[1] + api.sahara.job_binary_internal_delete(IsA(http.HttpRequest), int_id) + self.mox.ReplayAll() + form_data = {"action": "job_binaries__delete__%s" % jb_list[0].id} + res = self.client.post(INDEX_URL, form_data) + self.assertRedirectsNoFollow(res, INDEX_URL) + + @test.create_stubs({api.sahara: ('job_binary_get', + 'job_binary_get_file')}) + def test_download(self): + jb = api.sahara.job_binary_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .AndReturn(self.job_binaries.list()[0]) + api.sahara.job_binary_get_file(IsA(http.HttpRequest), jb.id) \ + .AndReturn("TEST FILE CONTENT") + self.mox.ReplayAll() + + context = {'job_binary_id': jb.id} + url = reverse('horizon:project:data_processing.job_binaries:download', + kwargs={'job_binary_id': jb.id}) + res = self.client.get(url, context) + self.assertTrue(res.has_header('content-disposition')) + + @test.create_stubs({api.sahara: ('job_binary_get', + 'job_binary_get_file')}) + def test_download_with_spaces(self): + jb = api.sahara.job_binary_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .AndReturn(self.job_binaries.list()[1]) + api.sahara.job_binary_get_file(IsA(http.HttpRequest), jb.id) \ + .AndReturn("MORE TEST FILE CONTENT") + self.mox.ReplayAll() + + context = {'job_binary_id': jb.id} + url = reverse('horizon:project:data_processing.job_binaries:download', + 
kwargs={'job_binary_id': jb.id}) + res = self.client.get(url, context) + self.assertEqual( + res.get('Content-Disposition'), + 'attachment; filename="%s"' % jb.name + ) + + @test.create_stubs({api.sahara: ('job_binary_get', + 'job_binary_update')}) + def test_update(self): + jb = api.sahara.job_binary_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .AndReturn(self.job_binaries.first()) + api.sahara.job_binary_update(IsA(http.HttpRequest), + IsA(str), + IsA(dict)) \ + .AndReturn(self.job_binaries.first()) + self.mox.ReplayAll() + + form_data = { + 'job_binary_url': jb.url, + 'job_binary_name': jb.name, + 'job_binary_description': jb.description, + 'job_binary_type': "internal-db", + 'job_binary_internal': "", + 'job_binary_file': "", + 'job_binary_password': "", + 'job_binary_username': "", + 'job_binary_script': "", + 'job_binary_script_name': "" + } + res = self.client.post(EDIT_URL, form_data) + self.assertNoFormErrors(res) diff --git a/sahara_dashboard/content/data_processing/job_binaries/urls.py b/sahara_dashboard/content/data_processing/job_binaries/urls.py new file mode 100644 index 0000000..d3c99f1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/urls.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content.data_processing. 
\ + job_binaries.views as views + + +urlpatterns = patterns('', + url(r'^$', views.JobBinariesView.as_view(), + name='index'), + url(r'^$', views.JobBinariesView.as_view(), + name='job-binaries'), + url(r'^create-job-binary$', + views.CreateJobBinaryView.as_view(), + name='create-job-binary'), + url(r'^(?P[^/]+)$', + views.JobBinaryDetailsView.as_view(), + name='details'), + url(r'^(?P[^/]+)/edit$', + views.EditJobBinaryView.as_view(), + name='edit-job-binary'), + url(r'^(?P[^/]+)/download/$', + views.DownloadJobBinaryView.as_view(), + name='download')) diff --git a/sahara_dashboard/content/data_processing/job_binaries/views.py b/sahara_dashboard/content/data_processing/job_binaries/views.py new file mode 100644 index 0000000..57ccbf6 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_binaries/views.py @@ -0,0 +1,146 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.core.urlresolvers import reverse +from django.core.urlresolvers import reverse_lazy +from django import http +from django.utils.translation import ugettext_lazy as _ +import django.views + +from horizon import exceptions +from horizon import forms +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa + +from sahara_dashboard.api import sahara as saharaclient + +from sahara_dashboard.content.data_processing.utils \ + import helpers +import sahara_dashboard.content.data_processing. \ + job_binaries.forms as job_binary_forms +from sahara_dashboard.content.data_processing.job_binaries \ + import tables as jb_tables +import sahara_dashboard.content.data_processing. \ + job_binaries.tabs as _tabs + + +LOG = logging.getLogger(__name__) + + +class JobBinariesView(tables.DataTableView): + table_class = jb_tables.JobBinariesTable + template_name = 'project/data_processing.job_binaries/job_binaries.html' + page_title = _("Job Binaries") + + def get_data(self): + try: + job_binaries = saharaclient.job_binary_list(self.request) + except Exception: + job_binaries = [] + exceptions.handle(self.request, + _("Unable to fetch job binary list.")) + return job_binaries + + +class CreateJobBinaryView(forms.ModalFormView): + form_class = job_binary_forms.JobBinaryCreateForm + success_url = reverse_lazy( + 'horizon:project:data_processing.job_binaries:index') + classes = ("ajax-modal",) + template_name = "project/data_processing.job_binaries/create.html" + page_title = _("Create Job Binary") + submit_url = ('horizon:project:data_processing.' 
+ 'job_binaries:create-job-binary') + submit_label = _("Create") + + def get_success_url(self): + hlps = helpers.Helpers(self.request) + if hlps.is_from_guide(): + self.success_url = reverse_lazy( + "horizon:project:data_processing.wizard:jobex_guide") + return self.success_url + + def get_context_data(self, **kwargs): + context = super(CreateJobBinaryView, self).get_context_data(**kwargs) + context['submit_url'] = reverse(self.submit_url, kwargs=self.kwargs) + return context + + +class EditJobBinaryView(CreateJobBinaryView): + form_class = job_binary_forms.JobBinaryEditForm + page_title = _("Edit Job Binary") + submit_url = ('horizon:project:data_processing.' + 'job_binaries:edit-job-binary') + submit_label = _("Update") + + @memoized.memoized_method + def get_object(self): + jb_id = self.kwargs["job_binary_id"] + try: + return saharaclient.job_binary_get(self.request, jb_id) + except Exception: + msg = _('Unable to retrieve job binary "%s".') % jb_id + redirect = reverse( + "horizon:project:data_processing.job_binaries:job-binaries") + exceptions.handle(self.request, msg, redirect=redirect) + + def get_initial(self): + initial = super(EditJobBinaryView, self).get_initial() + initial['job_binary_id'] = self.kwargs['job_binary_id'] + initial['job_binary'] = self.get_object() + return initial + + +class JobBinaryDetailsView(tabs.TabView): + tab_group_class = _tabs.JobBinaryDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = "{{ job_binary.name|default:job_binary.id }}" + + @memoized.memoized_method + def get_object(self): + jb_id = self.kwargs["job_binary_id"] + try: + return saharaclient.job_binary_get(self.request, jb_id) + except Exception: + msg = _('Unable to retrieve details for job binary "%s".') % jb_id + redirect = reverse( + "horizon:project:data_processing.job_binaries:job-binaries") + exceptions.handle(self.request, msg, redirect=redirect) + + def get_context_data(self, **kwargs): + context = super(JobBinaryDetailsView, 
self).get_context_data(**kwargs) + context['job_binary'] = self.get_object() + return context + + +class DownloadJobBinaryView(django.views.generic.View): + def get(self, request, job_binary_id=None): + try: + jb = saharaclient.job_binary_get(request, job_binary_id) + data = saharaclient.job_binary_get_file(request, job_binary_id) + except Exception: + redirect = reverse( + 'horizon:project:data_processing.job_binaries:index') + exceptions.handle(self.request, + _('Unable to fetch job binary: %(exc)s'), + redirect=redirect) + response = http.HttpResponse(content_type='application/binary') + response['Content-Disposition'] = ( + 'attachment; filename="%s"' % jb.name) + response.write(data) + response['Content-Length'] = str(len(data)) + return response diff --git a/sahara_dashboard/content/data_processing/job_executions/__init__.py b/sahara_dashboard/content/data_processing/job_executions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/job_executions/panel.py b/sahara_dashboard/content/data_processing/job_executions/panel.py new file mode 100644 index 0000000..6e1755e --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from django.utils.translation import ugettext_lazy as _ + +import horizon + +from openstack_dashboard.dashboards.project import dashboard + + +class JobExecutionsPanel(horizon.Panel): + name = _("Jobs") + slug = 'data_processing.job_executions' + permissions = (('openstack.services.data-processing', + 'openstack.services.data_processing'),) + + +dashboard.Project.register(JobExecutionsPanel) diff --git a/sahara_dashboard/content/data_processing/job_executions/tables.py b/sahara_dashboard/content/data_processing/job_executions/tables.py new file mode 100644 index 0000000..8372583 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/tables.py @@ -0,0 +1,220 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.core.urlresolvers import reverse +from django.http import Http404 # noqa +from django.utils import http +from django.utils.translation import pgettext_lazy +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from saharaclient.api import base as api_base + +from horizon import messages +from horizon import tables + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing. 
\ + jobs import tables as j_t + +LOG = logging.getLogger(__name__) + + +class JobExecutionsFilterAction(tables.FilterAction): + filter_type = "server" + filter_choices = (('id', _("ID"), True), + ('job', _("Job"), True), + ('cluster', _("Cluster"), True), + ('status', _("Status"), True)) + + +class JobExecutionGuide(tables.LinkAction): + name = "jobex_guide" + verbose_name = _("Job Guide") + url = "horizon:project:data_processing.wizard:jobex_guide" + + +class DeleteJobExecution(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Job", + u"Delete Jobs", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Job", + u"Deleted Jobs", + count + ) + + def delete(self, request, obj_id): + saharaclient.job_execution_delete(request, obj_id) + + +class ReLaunchJobExistingCluster(j_t.ChoosePlugin): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Launch Job", + u"Launch Jobs", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Launched Job", + u"Launched Jobs", + count + ) + + name = "relaunch-job-existing" + verbose_name = _("Relaunch On Existing Cluster") + url = "horizon:project:data_processing.jobs:launch-job" + classes = ('ajax-modal', 'btn-launch') + + def get_link_url(self, datum): + base_url = reverse(self.url) + params = http.urlencode({'job_id': datum.job_id, + 'job_execution_id': datum.id}) + return "?".join([base_url, params]) + + +class ReLaunchJobNewCluster(ReLaunchJobExistingCluster): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Launch Job", + u"Launch Jobs", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Launched Job", + u"Launched Jobs", + count + ) + + name = "relaunch-job-new" + verbose_name = _("Relaunch On New Cluster") + url = "horizon:project:data_processing.jobs:choose-plugin" + classes = ('ajax-modal', 'btn-launch') + + +class 
UpdateRow(tables.Row): + ajax = True + + def get_data(self, request, job_execution_id): + try: + return saharaclient.job_execution_get(request, job_execution_id) + except api_base.APIException as e: + if e.error_code == 404: + raise Http404 + else: + messages.error(request, _("Unable to update row")) + + +class JobExecutionsTable(tables.DataTable): + class StatusColumn(tables.Column): + def get_raw_data(self, datum): + return datum.info['status'] + + class JobNameColumn(tables.Column): + @staticmethod + def link(job_execution): + if job_execution.job_name: + return reverse("horizon:project:data_processing.jobs:details", + args=(http.urlquote(job_execution.job_id),)) + else: + # No link should be generated for a deleted Job. + return None + + def get_data(self, job_execution): + return job_execution.job_name or _("Not available") + + class ClusterNameColumn(tables.Column): + + @staticmethod + def link(job_execution): + if job_execution.cluster_name: + return reverse( + "horizon:project:data_processing.clusters:details", + args=(http.urlquote(job_execution.cluster_id),)) + else: + # No link should be generated for a deleted Cluster. + return None + + def get_data(self, job_execution): + return job_execution.cluster_name or _("Not available") + + STATUS_CHOICES = ( + ("DONEWITHERROR", False), + ("FAILED", False), + ("KILLED", False), + ("SUCCEEDED", True), + ) + STATUS_DISPLAY_CHOICES = ( + ("DONEWITHERROR", pgettext_lazy("Current status of a Job", + u"Done with Error")), + ("FAILED", pgettext_lazy("Current status of a Job", + u"Failed")), + ("KILLED", pgettext_lazy("Current status of a Job", + u"Killed")), + ("SUCCEEDED", pgettext_lazy("Current status of a Job", + u"Succeeded")), + ) + + name = tables.Column("id", + verbose_name=_("ID"), + display_choices=(("id", "ID"), + ("name", pgettext_lazy("Name")),), + link=("horizon:project:data_processing." 
+ "job_executions:details")) + job_name = JobNameColumn( + "job_name", + verbose_name=_("Job Template"), + link=JobNameColumn.link) + + cluster_name = ClusterNameColumn( + "cluster_name", + verbose_name=_("Cluster"), + link=ClusterNameColumn.link) + + status = StatusColumn("info", + status=True, + status_choices=STATUS_CHOICES, + display_choices=STATUS_DISPLAY_CHOICES, + verbose_name=_("Status")) + + def get_object_display(self, datum): + return datum.id + + class Meta(object): + name = "job_executions" + row_class = UpdateRow + status_columns = ["status"] + verbose_name = _("Jobs") + table_actions = [JobExecutionGuide, + DeleteJobExecution, + JobExecutionsFilterAction] + row_actions = [DeleteJobExecution, + ReLaunchJobExistingCluster, + ReLaunchJobNewCluster] diff --git a/sahara_dashboard/content/data_processing/job_executions/tabs.py b/sahara_dashboard/content/data_processing/job_executions/tabs.py new file mode 100644 index 0000000..8051ab5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/tabs.py @@ -0,0 +1,81 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import tabs + +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class GeneralTab(tabs.Tab): + name = _("General Info") + slug = "job_execution_tab" + template_name = ("project/data_processing.job_executions/_details.html") + + def get_context_data(self, request): + jex_id = self.tab_group.kwargs['job_execution_id'] + try: + job_execution = saharaclient.job_execution_get(request, jex_id) + except Exception as e: + job_execution = {} + LOG.error("Unable to fetch job details: %s" % str(e)) + return {"job_execution": job_execution} + object_names = self.get_object_names(job_execution, + request) + object_names['input_url'] = job_execution.data_source_urls.get( + job_execution.input_id) + object_names['output_url'] = job_execution.data_source_urls.get( + job_execution.output_id) + + return {"job_execution": job_execution, + "object_names": object_names} + + def get_object_names(self, job_ex, request): + object_names = {} + obj_names_map = {'input_name': {'obj': 'data_source_get', + 'obj_id': job_ex.input_id}, + 'output_name': {'obj': 'data_source_get', + 'obj_id': job_ex.output_id}, + 'cluster_name': {'obj': 'cluster_get', + 'obj_id': job_ex.cluster_id}, + 'job_name': {'obj': 'job_get', + 'obj_id': job_ex.job_id}} + for item in obj_names_map: + object_names[item] = ( + self.get_object_name(obj_names_map[item]['obj_id'], + obj_names_map[item]['obj'], + request)) + + return object_names + + def get_object_name(self, obj_id, sahara_obj, request): + object_name = None + try: + s_func = getattr(saharaclient, sahara_obj) + obj = s_func(request, obj_id) + object_name = obj.name + except Exception as e: + LOG.warn("Unable to get name for %s with object_id %s (%s)" % + (sahara_obj, obj_id, str(e))) + return object_name + + +class JobExecutionDetailsTabs(tabs.TabGroup): + slug = "job_execution_details" + tabs = (GeneralTab,) + sticky = True 
diff --git a/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/_details.html b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/_details.html new file mode 100644 index 0000000..82bcf63 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/_details.html @@ -0,0 +1,45 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Status" %}
+
{{ job_execution.info.status }}
+
{% trans "ID" %}
+
{{ job_execution.id }}
+
{% trans "Job Template" %}
+
{{ object_names.job_name }}
+ {% if job_execution.input_id %} +
{% trans "Input Data Source" %}
+
{{ object_names.input_name }} ({{ object_names.input_url }})
+ {% endif %} + {% if job_execution.output_id %} +
{% trans "Output Data Source" %}
+
{{ object_names.output_name }} ({{ object_names.output_url }})
+ {% endif %} +
{% trans "Cluster" %}
+
{{ object_names.cluster_name }}
+
{% trans "Last Updated" %}
+
{{ job_execution.updated_at }}
+
{% trans "Started" context "Start time" %}
+
{{ job_execution.start_time }}
+
{% trans "Ended" context "End time" %}
+
{{ job_execution.end_time }}
+
{% trans "Return Code" %}
+
{{ job_execution.return_code }}
+
{% trans "Oozie Job ID" %}
+
{{ job_execution.oozie_job_id }}
+
{% trans "Created" context "Created time" %}
+
{{ job_execution.created_at }}
+
{% trans "Job Configuration" %}
+
{% for group, vals in job_execution.job_configs.iteritems %} +
  • {% blocktrans %}{{ group }}:{% endblocktrans %} + {% if group == "args" %} +
      {% for val in vals %}
    • {{ val }}
    • {% endfor %}
    + {% else %} +
      {% for key, val in vals.iteritems %}
    • {{ key }} = {{ val }}
    • {% endfor %}
    + {% endif %} +
+ {% endfor %} +
+
+
diff --git a/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html new file mode 100644 index 0000000..436624c --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html @@ -0,0 +1,63 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ job_executions_table.render }} +
+ + + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/job_executions/tests.py b/sahara_dashboard/content/data_processing/job_executions/tests.py new file mode 100644 index 0000000..ae43936 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/tests.py @@ -0,0 +1,68 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IsA # noqa +from openstack_dashboard.test import helpers as test +import six + +from sahara_dashboard import api + + +INDEX_URL = reverse('horizon:project:data_processing.job_executions:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.job_executions:details', args=['id']) + + +class DataProcessingJobExecutionTests(test.TestCase): + @test.create_stubs({api.sahara: ('job_execution_list',)}) + def test_index(self): + api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.job_executions.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertEqual( + "cluster-1", + res.context_data["job_executions_table"].data[0].cluster_name) + self.assertEqual( + "job-1", + res.context_data["job_executions_table"].data[0].job_name) + self.assertTemplateUsed( + res, 'project/data_processing.job_executions/job_executions.html') + self.assertContains(res, 'Jobs') + + @test.create_stubs({api.sahara: ('job_execution_get',)}) + def test_details(self): + 
api.sahara.job_execution_get(IsA(http.HttpRequest), IsA(six.text_type)) \ + .MultipleTimes().AndReturn(self.job_executions.first()) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + self.assertContains(res, 'RUNNING') + + @test.create_stubs({api.sahara: ('job_execution_list', + 'job_execution_delete')}) + def test_delete(self): + job_exec = self.job_executions.first() + api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.job_executions.list()) + api.sahara.job_execution_delete(IsA(http.HttpRequest), job_exec.id) + self.mox.ReplayAll() + + form_data = {'action': 'job_executions__delete__%s' % job_exec.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) diff --git a/sahara_dashboard/content/data_processing/job_executions/urls.py b/sahara_dashboard/content/data_processing/job_executions/urls.py new file mode 100644 index 0000000..0feffe0 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/urls.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content.data_processing. \ + job_executions.views as views +from sahara_dashboard.content.data_processing. 
\ + jobs import views as job_views + + +urlpatterns = patterns('', + url(r'^$', views.JobExecutionsView.as_view(), + name='index'), + url(r'^$', views.JobExecutionsView.as_view(), + name='job-executions'), + url(r'^launch-job$', + job_views.LaunchJobView.as_view()), + url(r'^launch-job-new-cluster$', + job_views.LaunchJobNewClusterView.as_view()), + url(r'^(?P[^/]+)$', + views.JobExecutionDetailsView.as_view(), + name='details')) diff --git a/sahara_dashboard/content/data_processing/job_executions/views.py b/sahara_dashboard/content/data_processing/job_executions/views.py new file mode 100644 index 0000000..671abb1 --- /dev/null +++ b/sahara_dashboard/content/data_processing/job_executions/views.py @@ -0,0 +1,83 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa + +from sahara_dashboard.api import sahara as saharaclient + +from sahara_dashboard.content.data_processing.job_executions \ + import tables as je_tables +import sahara_dashboard.content.data_processing. 
class JobExecutionsView(tables.DataTableView):
    """Table view listing Sahara job executions (the "Jobs" panel)."""

    # Filter fields that live on related objects rather than on the job
    # execution record itself; they must be translated to dotted lookups.
    SEARCH_MAPPING = {"cluster": "cluster.name",
                      "job": "job.name"}

    table_class = je_tables.JobExecutionsTable
    template_name = (
        'project/data_processing.job_executions/job_executions.html')
    page_title = _("Jobs")

    def get_data(self):
        """Fetch job executions, honouring any server-side table filter.

        Returns an empty list (after reporting the error to the user)
        when the Sahara API call fails.
        """
        try:
            search_opts = {}
            # Renamed from "filter" so the builtin is not shadowed.
            filter_info = self.get_server_filter_info(self.request)
            if filter_info['value'] and filter_info['field']:
                if filter_info['field'] in self.SEARCH_MAPPING:
                    # Cluster and job names live in other database tables,
                    # so map the field to its dotted search key.
                    search_opts = {
                        self.SEARCH_MAPPING[filter_info['field']]:
                            filter_info['value']}
                else:
                    search_opts = {
                        filter_info['field']: filter_info['value']}
            jobs = saharaclient.job_execution_list(self.request, search_opts)
        except Exception:
            jobs = []
            exceptions.handle(self.request,
                              _("Unable to fetch job executions."))
        return jobs


class JobExecutionDetailsView(tabs.TabView):
    """Tabbed detail view for a single job execution."""

    tab_group_class = _tabs.JobExecutionDetailsTabs
    template_name = 'horizon/common/_detail.html'
    page_title = "{{ job_execution.name|default:job_execution.id }}"

    @memoized.memoized_method
    def get_object(self):
        """Return the job execution, redirecting to the list on failure."""
        jex_id = self.kwargs["job_execution_id"]
        try:
            return saharaclient.job_execution_get(self.request, jex_id)
        except Exception:
            msg = _('Unable to retrieve details for job "%s".') % jex_id
            redirect = reverse("horizon:project:data_processing."
                               "job_executions:job-executions")
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_context_data(self, **kwargs):
        context = super(JobExecutionDetailsView, self)\
            .get_context_data(**kwargs)
        context['job_execution'] = self.get_object()
        return context
from django.utils.translation import ugettext_lazy as _

import horizon

from openstack_dashboard.dashboards.project import dashboard


class JobsPanel(horizon.Panel):
    """Project-dashboard panel that lists Sahara job templates."""

    # Display name shown in the dashboard navigation.
    name = _("Job Templates")
    slug = 'data_processing.jobs'
    # Tuple-of-tuples: the inner pair means either service-type spelling
    # satisfies the permission check — presumably because deployments have
    # registered "data-processing" both ways; verify against the catalog.
    permissions = (('openstack.services.data-processing',
                    'openstack.services.data_processing'),)


# Attach the panel to the Project dashboard at import time.
dashboard.Project.register(JobsPanel)
import logging

from django.core import urlresolvers
from django.utils import http
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy

from horizon import tables

from sahara_dashboard.api import sahara as saharaclient

LOG = logging.getLogger(__name__)


class JobsFilterAction(tables.FilterAction):
    """Server-side filter for the job-template table."""

    filter_type = "server"
    filter_choices = (('name', _("Name"), True),
                      ('type', _("Type"), True),
                      ('description', _("Description"), True))


class CreateJob(tables.LinkAction):
    """Open the create-job-template workflow in a modal."""

    # NOTE(review): action names are conventionally slugs; "create job"
    # (with a space) is kept unchanged for compatibility with any CSS or
    # tests that reference it.
    name = "create job"
    verbose_name = _("Create Job Template")
    url = "horizon:project:data_processing.jobs:create-job"
    classes = ("ajax-modal", "create_job_class")
    icon = "plus"


class DeleteJob(tables.DeleteAction):
    """Batch-delete the selected job templates via the Sahara API."""

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Job Template",
            u"Delete Job Templates",
            count
        )

    @staticmethod
    def action_past(count):
        # Bug fix: plural form read "Deleted Jobs Templates".
        return ungettext_lazy(
            u"Deleted Job Template",
            u"Deleted Job Templates",
            count
        )

    def delete(self, request, obj_id):
        saharaclient.job_delete(request, obj_id)


class LaunchJobExistingCluster(tables.LinkAction):
    """Row action: launch the template on an already-running cluster."""

    name = "launch-job-existing"
    verbose_name = _("Launch On Existing Cluster")
    url = "horizon:project:data_processing.jobs:launch-job"
    classes = ('ajax-modal', 'btn-launch')

    def get_link_url(self, datum):
        # Pass the job template id to the launch form via the query string.
        base_url = urlresolvers.reverse(self.url)
        params = http.urlencode({"job_id": datum.id})
        return "?".join([base_url, params])


class LaunchJobNewCluster(tables.LinkAction):
    """Row action: launch the template on a freshly provisioned cluster."""

    name = "launch-job-new"
    verbose_name = _("Launch On New Cluster")
    url = "horizon:project:data_processing.jobs:launch-job-new-cluster"
    classes = ('ajax-modal', 'btn-launch')

    def get_link_url(self, datum):
        base_url = urlresolvers.reverse(self.url)
        params = http.urlencode({"job_id": datum.id})
        return "?".join([base_url, params])


class ChoosePlugin(tables.LinkAction):
    """Row action: pick a plugin/version before launching on a new cluster."""

    # NOTE(review): this duplicates LaunchJobNewCluster's action name.  The
    # two actions are never used in the same table here, but renaming to
    # "choose-plugin" should be considered; kept as-is to avoid breaking
    # external references.
    name = "launch-job-new"
    verbose_name = _("Launch On New Cluster")
    url = "horizon:project:data_processing.jobs:choose-plugin"
    classes = ('ajax-modal', 'btn-launch')

    def get_link_url(self, datum):
        base_url = urlresolvers.reverse(self.url)
        params = http.urlencode({"job_id": datum.id})
        return "?".join([base_url, params])


class JobsTable(tables.DataTable):
    """Table of Sahara job templates with launch/delete row actions."""

    name = tables.Column("name",
                         verbose_name=_("Name"),
                         link="horizon:project:data_processing.jobs:details")
    type = tables.Column("type",
                         verbose_name=_("Type"))
    description = tables.Column("description",
                                verbose_name=_("Description"))

    class Meta(object):
        name = "jobs"
        verbose_name = _("Job Templates")
        table_actions = (CreateJob, DeleteJob, JobsFilterAction,)
        row_actions = (LaunchJobExistingCluster, ChoosePlugin, DeleteJob,)
import logging

from django.utils.translation import ugettext_lazy as _

from horizon import tabs

from sahara_dashboard.api import sahara as saharaclient

LOG = logging.getLogger(__name__)


class GeneralTab(tabs.Tab):
    """General-info tab on the job template detail page."""

    name = _("General Info")
    slug = "job_details_tab"
    template_name = ("project/data_processing.jobs/_details.html")

    def get_context_data(self, request):
        """Look up the job template for this tab group.

        Best-effort: on any API failure an empty mapping is returned so
        the template renders an empty detail pane instead of erroring.
        """
        job_id = self.tab_group.kwargs['job_id']
        try:
            job = saharaclient.job_get(request, job_id)
        except Exception as e:
            job = {}
            # Lazy %-style args: the message is only formatted if the
            # record is actually emitted.
            LOG.error("Unable to fetch job template details: %s", e)
        return {"job": job}


class JobDetailsTabs(tabs.TabGroup):
    """Tab group shown on the job template detail page."""

    slug = "job_details"
    tabs = (GeneralTab,)
    sticky = True
+

+ {% blocktrans %}Create a job template with a specified name.{% endblocktrans %} +

+

+ {% blocktrans %}Select the type of your job:{% endblocktrans %} +

    +
  • {% blocktrans %}Pig{% endblocktrans %}
  • +
  • {% blocktrans %}Hive{% endblocktrans %}
  • +
  • {% blocktrans %}Spark{% endblocktrans %}
  • +
  • {% blocktrans %}Storm{% endblocktrans %}
  • +
  • {% blocktrans %}MapReduce{% endblocktrans %}
  • +
  • {% blocktrans %}Java Action{% endblocktrans %}
  • +
  • {% blocktrans %}Shell Action{% endblocktrans %}
  • +
+

+

+ {% blocktrans %}Choose or create your main binary. Additional libraries can be added from the "Libs" tab.{% endblocktrans %} +

+

+ {% blocktrans %}For Spark and Shell jobs, only a main is required, "libs" are optional.{% endblocktrans %} +

+

+ {% blocktrans %}For MapReduce or Java Action jobs, "mains" are not applicable. You are required to add one + or more "libs" for these jobs.{% endblocktrans %} +

+

+ {% blocktrans %}You may also enter an optional description for your job template.{% endblocktrans %} +

+
diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_libs_help.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_libs_help.html new file mode 100644 index 0000000..97b0f93 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_libs_help.html @@ -0,0 +1,12 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}Add libraries to your job template.{% endblocktrans %} +

+

+ {% blocktrans %}Choose from the list of binaries and click "choose" to add the library to your job template. This can be repeated for additional libraries.{% endblocktrans %} +

+

+ {% blocktrans %}For Shell Action jobs, any required files beyond the main script may be added as "libraries".{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html new file mode 100644 index 0000000..723e649 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html @@ -0,0 +1,30 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Name" %}
+
{{ job.name }}
+
{% trans "ID" %}
+
{{ job.id }}
+
{% trans "Type" %}
+
{{ job.type }}
+
{% trans "Description" %}
+
{{ job.description|default:_("None") }}
+
{% trans "Mains" %}
+ {% for main in job.mains %} +
{{ main.name }}
+ {% empty %} +
{% trans "None" %}
+ {% endfor %} +
{% trans "Libs" %}
+ {% for lib in job.libs %} +
{{ lib.name }}
+ {% empty %} +
{% trans "None" %}
+ {% endfor %} +
{% trans "Created time" %}
+
{{ job.created_at|parse_isotime }}
+
{% trans "Updated time" %}
+
{{ job.updated_at|parse_isotime|default:_("Never") }}
+
+
diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_configure_help.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_configure_help.html new file mode 100644 index 0000000..2eb3aea --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_configure_help.html @@ -0,0 +1,6 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}Enter any custom configuration required for your job's execution.{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_help.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_help.html new file mode 100644 index 0000000..6b0820c --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_launch_job_help.html @@ -0,0 +1,15 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}Launch the given job template on a cluster.{% endblocktrans %} +

+

+ {% blocktrans %}Choose the cluster to use for the job.{% endblocktrans %} +

+

+ {% blocktrans %}Choose the Input Data Source (n/a for Java and Shell jobs).{% endblocktrans %} +

+

+ {% blocktrans %}Choose the Output Data Source (n/a for Java and Shell jobs).{% endblocktrans %} +

+
\ No newline at end of file diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/config_template.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/config_template.html new file mode 100644 index 0000000..f4fd6dd --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/config_template.html @@ -0,0 +1,244 @@ +{% load i18n %} + + + + + + + +{% include "horizon/common/_form_fields.html" %} + + + + + + + + +
+
+ +
+
+ +
+
+ +
+
diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/create.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/create.html new file mode 100644 index 0000000..bc29905 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/create.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Create Job Template" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/job_interface_arguments_template.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/job_interface_arguments_template.html new file mode 100644 index 0000000..db141d6 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/job_interface_arguments_template.html @@ -0,0 +1,43 @@ +{% load i18n %} + + + + + + + + + + +
+
+
+
+ diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/jobs.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/jobs.html new file mode 100644 index 0000000..39ed81f --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/jobs.html @@ -0,0 +1,81 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+ {{ jobs_table.render }} +
+ + + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/launch.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/launch.html new file mode 100644 index 0000000..0f4aa19 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/launch.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Launch Job" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/library_template.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/library_template.html new file mode 100644 index 0000000..a3a6b3c --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/library_template.html @@ -0,0 +1,104 @@ +{% load i18n %} + + +
+ {% include "horizon/common/_form_fields.html" %} + +
+
+ {{ step.get_help_text }} +
+ +
+
+ + + + + + + + +
+
from django.core.urlresolvers import reverse
from django import http

from mox3.mox import IsA  # noqa
from openstack_dashboard.test import helpers as test
import six

from sahara_dashboard import api


INDEX_URL = reverse('horizon:project:data_processing.jobs:index')
DETAILS_URL = reverse(
    'horizon:project:data_processing.jobs:details', args=['id'])


class DataProcessingJobTests(test.TestCase):
    """Mox-based view tests for the job templates ("jobs") panel.

    Each test stubs the relevant ``api.sahara`` calls in the exact order
    the view is expected to invoke them, replays, then drives the view
    through the Django test client.
    """

    @test.create_stubs({api.sahara: ('job_list',)})
    def test_index(self):
        # Listing view renders the jobs table template with headers.
        api.sahara.job_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.jobs.list())
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res,
                                'project/data_processing.jobs/jobs.html')
        self.assertContains(res, 'Job Templates')
        self.assertContains(res, 'Name')

    @test.create_stubs({api.sahara: ('job_get',)})
    def test_details(self):
        # Detail view may fetch the job more than once -> MultipleTimes().
        api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
            .MultipleTimes().AndReturn(self.jobs.first())
        self.mox.ReplayAll()
        res = self.client.get(DETAILS_URL)
        self.assertTemplateUsed(res, 'horizon/common/_detail.html')
        self.assertContains(res, 'pigjob')

    @test.create_stubs({api.sahara: ('job_binary_list',
                                     'job_create',
                                     'job_types_list')})
    def test_create(self):
        # job_binary_list is expected twice — presumably once for mains
        # and once for libs; confirm against the create workflow.
        api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
        api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
        api.sahara.job_create(IsA(http.HttpRequest),
                              'test', 'Pig', [], [], 'test create',
                              interface=[])
        api.sahara.job_types_list(IsA(http.HttpRequest)) \
            .AndReturn(self.job_types.list())
        self.mox.ReplayAll()
        form_data = {'job_name': 'test',
                     'job_type': 'pig',
                     'lib_binaries': [],
                     'lib_ids': '[]',
                     'job_description': 'test create',
                     'hidden_arguments_field': [],
                     'argument_ids': '[]'}
        url = reverse('horizon:project:data_processing.jobs:create-job')
        res = self.client.post(url, form_data)

        self.assertNoFormErrors(res)

    @test.create_stubs({api.sahara: ('job_binary_list',
                                     'job_create',
                                     'job_types_list')})
    def test_create_with_interface(self):
        # Same as test_create, but with two EDP interface arguments posted
        # via the argument_*_<n> form fields; the expected job_create call
        # pins the parsed interface list.
        api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
        api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
        api.sahara.job_create(IsA(http.HttpRequest),
                              'test_interface', 'Pig', [], [], 'test create',
                              interface=[
                                  {
                                      "name": "argument",
                                      "description": None,
                                      "mapping_type": "args",
                                      "location": "0",
                                      "value_type": "number",
                                      "required": True,
                                      "default": None
                                  },
                                  {
                                      "name": "config",
                                      "description": "Really great config",
                                      "mapping_type": "configs",
                                      "location": "edp.important.config",
                                      "value_type": "string",
                                      "required": False,
                                      "default": "A value"
                                  }])
        api.sahara.job_types_list(IsA(http.HttpRequest)) \
            .AndReturn(self.job_types.list())
        self.mox.ReplayAll()
        # Note: no 'argument_required_1' key — an absent checkbox means
        # the second argument is not required.
        form_data = {'job_name': 'test_interface',
                     'job_type': 'pig',
                     'lib_binaries': [],
                     'lib_ids': '[]',
                     'job_description': 'test create',
                     'hidden_arguments_field': [],
                     'argument_ids': '["0", "1"]',
                     'argument_id_0': '0',
                     'argument_name_0': 'argument',
                     'argument_description_0': '',
                     'argument_mapping_type_0': 'args',
                     'argument_location_0': '0',
                     'argument_value_type_0': 'number',
                     'argument_required_0': True,
                     'argument_default_value_0': '',
                     'argument_id_1': '1',
                     'argument_name_1': 'config',
                     'argument_description_1': 'Really great config',
                     'argument_mapping_type_1': 'configs',
                     'argument_location_1': 'edp.important.config',
                     'argument_value_type_1': 'string',
                     'argument_default_value_1': 'A value'}
        url = reverse('horizon:project:data_processing.jobs:create-job')
        res = self.client.post(url, form_data)

        self.assertNoFormErrors(res)

    @test.create_stubs({api.sahara: ('job_list',
                                     'job_delete')})
    def test_delete(self):
        # Table delete action posts "jobs__delete__<id>" back to the index.
        job = self.jobs.first()
        api.sahara.job_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.jobs.list())
        api.sahara.job_delete(IsA(http.HttpRequest), job.id)
        self.mox.ReplayAll()

        form_data = {'action': 'jobs__delete__%s' % job.id}
        res = self.client.post(INDEX_URL, form_data)

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)

    @test.create_stubs({api.sahara: ('job_execution_create',
                                     'job_get',
                                     'job_get_configs',
                                     'job_list',
                                     'cluster_list',
                                     'data_source_list')})
    def test_launch(self):
        # Launch workflow: fetch the job and its configs, populate the
        # cluster/data-source choices, then create the job execution.
        job = self.jobs.first()
        job_execution = self.job_executions.first()
        cluster = self.clusters.first()
        input_ds = self.data_sources.first()
        output_ds = self.data_sources.first()
        api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
            .AndReturn(job)
        api.sahara.job_get_configs(IsA(http.HttpRequest), job.type) \
            .AndReturn(job)
        api.sahara.cluster_list(IsA(http.HttpRequest)) \
            .AndReturn(self.clusters.list())
        api.sahara.data_source_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(self.data_sources.list())
        api.sahara.job_list(IsA(http.HttpRequest)) \
            .AndReturn(self.jobs.list())
        api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
            .AndReturn(job)
        api.sahara.job_execution_create(IsA(http.HttpRequest),
                                        IsA(six.text_type),
                                        IsA(six.text_type),
                                        IsA(six.text_type),
                                        IsA(six.text_type),
                                        IsA(dict),
                                        IsA(dict)).AndReturn(job_execution)
        self.mox.ReplayAll()

        url = reverse('horizon:project:data_processing.jobs:launch-job')
        form_data = {
            'job': self.jobs.first().id,
            'cluster': cluster.id,
            'job_input': input_ds.id,
            'job_output': output_ds.id,
            'config': {},
            'argument_ids': '{}',
            'adapt_oozie': 'on',
            'adapt_swift_spark': 'on',
            'hbase_common_lib': 'on',
            'java_opts': '',
            'job_args_array': [[], []],
            'job_configs': [{}, {}],
            'job_params': [{}, {}],
            'job_type': 'Pig',
            'streaming_mapper': '',
            'streaming_reducer': ''
        }

        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)
# URL routes for the Job Templates panel.
urlpatterns = patterns('',
                       url(r'^$', views.JobsView.as_view(),
                           name='index'),
                       # Legacy alias for the same listing view.
                       url(r'^$', views.JobsView.as_view(),
                           name='jobs'),
                       url(r'^create-job$',
                           views.CreateJobView.as_view(),
                           name='create-job'),
                       url(r'^launch-job$',
                           views.LaunchJobView.as_view(),
                           name='launch-job'),
                       url(r'^launch-job-new-cluster$',
                           views.LaunchJobNewClusterView.as_view(),
                           name='launch-job-new-cluster'),
                       url(r'^choose-plugin$',
                           views.ChoosePluginView.as_view(),
                           name='choose-plugin'),
                       # Bug fix: the capture group must be named so the
                       # detail view can read kwargs["job_id"]; the
                       # nameless form "(?P[^/]+)" is invalid regex.
                       url(r'^(?P<job_id>[^/]+)$',
                           views.JobDetailsView.as_view(),
                           name='details'))
import json
import logging

from django import http
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import tables
from horizon import tabs
from horizon.utils import memoized
from horizon.utils.urlresolvers import reverse  # noqa
from horizon import workflows

from sahara_dashboard.api import sahara as saharaclient

import sahara_dashboard.content.data_processing.jobs.tables \
    as _tables
import sahara_dashboard.content.data_processing.jobs.tabs \
    as _tabs
import sahara_dashboard.content.data_processing.jobs. \
    workflows.create as create_flow
import sahara_dashboard.content.data_processing.jobs. \
    workflows.launch as launch_flow

LOG = logging.getLogger(__name__)


class JobsView(tables.DataTableView):
    """Table view listing Sahara job templates."""

    table_class = _tables.JobsTable
    template_name = 'project/data_processing.jobs/jobs.html'
    page_title = _("Job Templates")

    def get_data(self):
        """Fetch job templates, honouring any server-side table filter.

        Returns an empty list (after reporting the error to the user)
        when the Sahara API call fails.
        """
        try:
            search_opts = {}
            # Renamed from "filter" so the builtin is not shadowed.
            filter_info = self.get_server_filter_info(self.request)
            if filter_info['value'] and filter_info['field']:
                search_opts = {filter_info['field']: filter_info['value']}
            jobs = saharaclient.job_list(self.request, search_opts)
        except Exception:
            jobs = []
            exceptions.handle(self.request,
                              _("Unable to fetch jobs."))

        # Present templates oldest-first for a stable ordering.
        jobs = sorted(jobs, key=lambda job: job.created_at)
        return jobs


class CreateJobView(workflows.WorkflowView):
    """Modal workflow for creating a new job template."""

    workflow_class = create_flow.CreateJob
    success_url = "horizon:project:data_processing.jobs:create-job"
    classes = ("ajax-modal",)
    template_name = "project/data_processing.jobs/create.html"
    page_title = _("Create Job Template")


class JobDetailsView(tabs.TabView):
    """Tabbed detail view for a single job template."""

    tab_group_class = _tabs.JobDetailsTabs
    template_name = 'horizon/common/_detail.html'
    page_title = "{{ job.name|default:job.id }}"

    @memoized.memoized_method
    def get_object(self):
        """Return the job template, redirecting to the list on failure."""
        j_id = self.kwargs["job_id"]
        try:
            return saharaclient.job_get(self.request, j_id)
        except Exception:
            msg = _('Unable to retrieve details for job template "%s".') % j_id
            redirect = reverse(
                "horizon:project:data_processing.jobs:jobs")
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_context_data(self, **kwargs):
        context = super(JobDetailsView, self).get_context_data(**kwargs)
        context['job'] = self.get_object()
        return context


class LaunchJobView(workflows.WorkflowView):
    """Workflow for launching a job template on an existing cluster."""

    workflow_class = launch_flow.LaunchJob
    success_url = "horizon:project:data_processing.jobs"
    classes = ("ajax-modal",)
    template_name = "project/data_processing.jobs/launch.html"
    page_title = _("Launch Job")

    def get(self, request, *args, **kwargs):
        if request.is_ajax():
            # AJAX probe used by the launch form to look up a job's type.
            # NOTE(review): request.REQUEST is deprecated in newer Django;
            # kept for compatibility with the release this targets.
            if request.REQUEST.get("json", None):
                job_id = request.REQUEST.get("job_id")
                job_type = saharaclient.job_get(request, job_id).type
                return http.HttpResponse(json.dumps({"job_type": job_type}),
                                         content_type='application/json')
        # Bug fix: unpack args/kwargs instead of passing the args tuple
        # and kwargs dict as two positional arguments.
        return super(LaunchJobView, self).get(request, *args, **kwargs)


class LaunchJobNewClusterView(workflows.WorkflowView):
    """Workflow for launching a job template on a freshly created cluster."""

    workflow_class = launch_flow.LaunchJobNewCluster
    success_url = "horizon:project:data_processing.jobs"
    classes = ("ajax-modal",)
    template_name = "project/data_processing.jobs/launch.html"
    page_title = _("Launch Job")


class ChoosePluginView(workflows.WorkflowView):
    """Workflow for choosing the plugin/version of the new cluster."""

    workflow_class = launch_flow.ChosePluginVersion
    success_url = "horizon:project:data_processing.jobs"
    classes = ("ajax-modal",)
    template_name = "project/data_processing.jobs/launch.html"
    page_title = _("Launch Job")
b/sahara_dashboard/content/data_processing/jobs/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/jobs/workflows/create.py b/sahara_dashboard/content/data_processing/jobs/workflows/create.py new file mode 100644 index 0000000..364fc58 --- /dev/null +++ b/sahara_dashboard/content/data_processing/jobs/workflows/create.py @@ -0,0 +1,281 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms +from horizon.forms import fields +from horizon import workflows + +from sahara_dashboard.content.data_processing \ + .utils import helpers +import sahara_dashboard.content.data_processing \ + .utils.workflow_helpers as whelpers +from sahara_dashboard.api import sahara as saharaclient + + +LOG = logging.getLogger(__name__) + +JOB_BINARY_CREATE_URL = ("horizon:project:data_processing.job_binaries" + ":create-job-binary") + + +class AdditionalLibsAction(workflows.Action): + + lib_binaries = forms.DynamicChoiceField( + label=_("Choose libraries"), + required=False, + add_item_link=JOB_BINARY_CREATE_URL, + widget=forms.Select( + attrs={ + 'class': 'switched', + 'data-switch-on': 'jobtype', + 'data-jobtype-pig': _("Choose libraries"), + 'data-jobtype-hive': _("Choose libraries"), + 'data-jobtype-shell': _("Choose additional files"), + 'data-jobtype-spark': _("Choose 
class GeneralConfigAction(workflows.Action):
    """First step of the job-template workflow: name, type and main binary."""

    job_name = forms.CharField(label=_("Name"))

    # 'switchable'/'data-slug' drive Horizon's JS form switching: other
    # fields relabel/hide themselves based on the selected job type.
    job_type = forms.ChoiceField(
        label=_("Job Type"),
        widget=forms.Select(attrs={
            'class': 'switchable',
            'data-slug': 'jobtype'
        }))

    main_binary = forms.DynamicChoiceField(
        label=_("Choose a main binary"),
        required=False,
        help_text=_("Choose the binary which "
                    "should be used in this Job."),
        add_item_link=JOB_BINARY_CREATE_URL,
        widget=fields.DynamicSelectWidget(
            attrs={
                'class': 'switched',
                'data-switch-on': 'jobtype',
                'data-jobtype-pig': _("Choose a main binary"),
                'data-jobtype-hive': _("Choose a main binary"),
                'data-jobtype-shell': _("Choose a shell script"),
                'data-jobtype-spark': _("Choose a main binary"),
                'data-jobtype-storm': _("Choose a main binary"),
                'data-jobtype-mapreduce.streaming': _("Choose a main binary")
            }))

    job_description = forms.CharField(label=_("Description"),
                                      required=False,
                                      widget=forms.Textarea(attrs={'rows': 4}))

    def __init__(self, request, context, *args, **kwargs):
        super(GeneralConfigAction,
              self).__init__(request, context, *args, **kwargs)
        # BUG FIX: request.REQUEST was deprecated in Django 1.7 and removed
        # in Django 1.9.  REQUEST searched POST first, then GET, so the
        # explicit lookup below preserves the old precedence.
        guide_job_type = (request.POST.get("guide_job_type") or
                          request.GET.get("guide_job_type"))
        if guide_job_type:
            self.fields["job_type"].initial = guide_job_type.lower()

    def populate_job_type_choices(self, request, context):
        """Offer only the job types both Sahara and this UI know about."""
        choices = []
        choices_list = saharaclient.job_types_list(request)

        for choice in choices_list:
            job_type = choice.name.lower()
            if job_type in helpers.JOB_TYPE_MAP:
                choices.append((job_type, helpers.JOB_TYPE_MAP[job_type][0]))
        return choices

    def populate_main_binary_choices(self, request, context):
        """List all job binaries, with an explicit 'not selected' entry."""
        job_binaries = saharaclient.job_binary_list(request)

        choices = [(job_binary.id, job_binary.name)
                   for job_binary in job_binaries]
        choices.insert(0, ('', _("-- not selected --")))
        return choices

    def clean(self):
        # NOTE(review): super(workflows.Action, self) deliberately skips
        # workflows.Action.clean() and calls the next class in the MRO; the
        # same pattern appears elsewhere in this module -- confirm intent
        # before "fixing" it to super(GeneralConfigAction, self).
        cleaned_data = super(workflows.Action, self).clean()
        job_type = cleaned_data.get("job_type", "")

        # Java and MapReduce jobs have no main binary -- only libraries.
        if job_type in ["Java", "MapReduce"]:
            cleaned_data['main_binary'] = None

        return cleaned_data

    class Meta(object):
        name = _("Create Job Template")
        help_text_template = (
            "project/data_processing.jobs/_create_job_help.html")
class ConfigureArguments(workflows.Step):
    """Workflow step that collects the job's interface arguments."""

    action_class = ConfigureInterfaceArgumentsAction
    contributes = ("hidden_arguments_field", )
    template_name = ("project/data_processing.jobs/"
                     "job_interface_arguments_template.html")

    def contribute(self, data, context):
        # Pass every posted field straight through to the workflow context.
        for k, v in data.items():
            context[k] = v
        return context


class GeneralConfig(workflows.Step):
    """Workflow step for the job template's general settings."""

    action_class = GeneralConfigAction
    contributes = ("job_name", "job_type", "job_description", "main_binary")

    def contribute(self, data, context):
        for k, v in data.items():
            if k == "job_type":
                # Map the UI's lowercase type key to the Sahara API name.
                context[k] = helpers.JOB_TYPE_MAP[v][1]
            else:
                context[k] = v
        return context


class ConfigureLibs(workflows.Step):
    """Workflow step that records the chosen supporting libraries."""

    action_class = AdditionalLibsAction
    template_name = "project/data_processing.jobs/library_template.html"

    def contribute(self, data, context):
        chosen_libs = json.loads(data.get("lib_ids", '[]'))
        # Record selection order: lib_0, lib_1, ... index the chosen libs.
        for index, library in enumerate(chosen_libs):
            context["lib_%s" % index] = library
        return context


class CreateJob(workflows.Workflow):
    """Workflow that creates a Sahara job template."""

    slug = "create_job"
    name = _("Create Job Template")
    finalize_button_name = _("Create")
    success_message = _("Job created")
    failure_message = _("Could not create job template")
    success_url = "horizon:project:data_processing.jobs:index"
    default_steps = (GeneralConfig, ConfigureLibs, ConfigureArguments)

    def handle(self, request, context):
        """Create the job template; returns True on success."""
        main_locations = []

        # BUG FIX: iterating context.keys() made the library order depend
        # on dict ordering.  Sort the "lib_<index>" keys numerically so
        # libraries are sent in the order the user picked them; the digit
        # guard keeps unrelated "lib_*" keys (e.g. "lib_ids") out.
        lib_keys = sorted(
            (k for k in context
             if k.startswith('lib_') and k.split('_', 1)[1].isdigit()),
            key=lambda k: int(k.split('_', 1)[1]))
        lib_locations = [context[k] for k in lib_keys]

        if context.get("main_binary", None):
            main_locations.append(context["main_binary"])

        # Rebuild the interface argument descriptors from the per-argument
        # fields contributed by ConfigureArguments.
        argument_ids = json.loads(context['argument_ids'])
        interface = [
            {
                "name": context['argument_name_' + str(arg_id)],
                "description": (context['argument_description_' + str(arg_id)]
                                or None),
                "mapping_type": context['argument_mapping_type_' +
                                        str(arg_id)],
                "location": context['argument_location_' + str(arg_id)],
                "value_type": context['argument_value_type_' + str(arg_id)],
                "required": context['argument_required_' + str(arg_id)],
                "default": (context['argument_default_value_' + str(arg_id)]
                            or None)
            } for arg_id in argument_ids
        ]

        try:
            job = saharaclient.job_create(
                request,
                context["job_name"],
                context["job_type"],
                main_locations,
                lib_locations,
                context["job_description"],
                interface=interface)

            hlps = helpers.Helpers(request)
            if hlps.is_from_guide():
                # Remember the new job so the guided wizard can continue.
                request.session["guide_job_id"] = job.id
                request.session["guide_job_type"] = context["job_type"]
                request.session["guide_job_name"] = context["job_name"]
                self.success_url = (
                    "horizon:project:data_processing.wizard:jobex_guide")
            return True
        except Exception:
            exceptions.handle(request)
            return False
+ +import json +import logging + +from django.utils.translation import ugettext_lazy as _ +import six + +from horizon import exceptions +from horizon import forms +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. \ + cluster_templates.workflows.create as t_flows +import sahara_dashboard.content.data_processing. \ + clusters.workflows.create as c_flow +import sahara_dashboard.content.data_processing. \ + utils.workflow_helpers as whelpers + + +LOG = logging.getLogger(__name__) + +DATA_SOURCE_CREATE_URL = ("horizon:project:data_processing.data_sources" + ":create-data-source") + + +class JobExecutionGeneralConfigAction(workflows.Action): + job_input = forms.DynamicChoiceField( + label=_("Input"), + initial=(None, "None"), + add_item_link=DATA_SOURCE_CREATE_URL, + required=False) + + job_output = forms.DynamicChoiceField( + label=_("Output"), + initial=(None, "None"), + add_item_link=DATA_SOURCE_CREATE_URL, + required=False) + + def __init__(self, request, *args, **kwargs): + super(JobExecutionGeneralConfigAction, self).__init__(request, + *args, + **kwargs) + + if request.REQUEST.get("job_id", None) is None: + self.fields["job"] = forms.ChoiceField( + label=_("Job")) + self.fields["job"].choices = self.populate_job_choices(request) + else: + self.fields["job"] = forms.CharField( + widget=forms.HiddenInput(), + initial=request.REQUEST.get("job_id", None)) + + def populate_job_input_choices(self, request, context): + return self.get_data_source_choices(request, context) + + def populate_job_output_choices(self, request, context): + return self.get_data_source_choices(request, context) + + def get_data_source_choices(self, request, context): + try: + data_sources = saharaclient.data_source_list(request) + except Exception: + data_sources = [] + exceptions.handle(request, + _("Unable to fetch data sources.")) + + choices = [(data_source.id, data_source.name) + for data_source in 
data_sources] + choices.insert(0, (None, 'None')) + + return choices + + def populate_job_choices(self, request): + try: + jobs = saharaclient.job_list(request) + except Exception: + jobs = [] + exceptions.handle(request, + _("Unable to fetch jobs.")) + + choices = [(job.id, job.name) + for job in jobs] + + return choices + + class Meta(object): + name = _("Job") + help_text_template = ( + "project/data_processing.jobs/_launch_job_help.html") + + +class JobExecutionExistingGeneralConfigAction(JobExecutionGeneralConfigAction): + cluster = forms.ChoiceField( + label=_("Cluster"), + initial=(None, "None"), + widget=forms.Select(attrs={"class": "cluster_choice"})) + + def populate_cluster_choices(self, request, context): + try: + clusters = saharaclient.cluster_list(request) + except Exception: + clusters = [] + exceptions.handle(request, + _("Unable to fetch clusters.")) + + choices = [(cluster.id, cluster.name) + for cluster in clusters] + + return choices + + class Meta(object): + name = _("Job") + help_text_template = ( + "project/data_processing.jobs/_launch_job_help.html") + + +def _merge_interface_with_configs(interface, job_configs): + interface_by_mapping = {(arg['mapping_type'], arg['location']): arg + for arg in interface} + mapped_types = ("configs", "params") + mapped_configs = { + (mapping_type, key): value for mapping_type in mapped_types + for key, value in job_configs.get(mapping_type, {}).items() + } + for index, arg in enumerate(job_configs.get('args', [])): + mapped_configs['args', str(index)] = arg + free_arguments, interface_arguments = {}, {} + for mapping, value in mapped_configs.items(): + if mapping in interface_by_mapping: + arg = interface_by_mapping[mapping] + interface_arguments[arg['id']] = value + else: + free_arguments[mapping] = value + configs = {"configs": {}, "params": {}, "args": {}} + for mapping, value in free_arguments.items(): + mapping_type, location = mapping + configs[mapping_type][location] = value + configs["args"] = [ + 
value for key, value in sorted(configs["args"].items(), + key=lambda x: int(x[0]))] + return configs, interface_arguments + + +class JobConfigAction(workflows.Action): + MAIN_CLASS = "edp.java.main_class" + JAVA_OPTS = "edp.java.java_opts" + EDP_MAPPER = "edp.streaming.mapper" + EDP_REDUCER = "edp.streaming.reducer" + EDP_PREFIX = "edp." + EDP_HBASE_COMMON_LIB = "edp.hbase_common_lib" + EDP_ADAPT_FOR_OOZIE = "edp.java.adapt_for_oozie" + EDP_ADAPT_SPARK_SWIFT = "edp.spark.adapt_for_swift" + + property_name = forms.ChoiceField( + required=False, + ) + + job_configs = forms.CharField( + required=False, + widget=forms.HiddenInput()) + + job_params = forms.CharField( + required=False, + widget=forms.HiddenInput()) + + job_args_array = forms.CharField( + required=False, + widget=forms.HiddenInput()) + + job_type = forms.CharField( + required=False, + widget=forms.HiddenInput()) + + main_class = forms.CharField(label=_("Main Class"), + required=False) + + java_opts = forms.CharField(label=_("Java Opts"), + required=False) + + streaming_mapper = forms.CharField(label=_("Mapper")) + + streaming_reducer = forms.CharField(label=_("Reducer")) + + hbase_common_lib = forms.BooleanField( + label=_("Use HBase Common library"), + help_text=_("Run HBase EDP Jobs with common HBase library on HDFS"), + required=False, initial=True) + + adapt_oozie = forms.BooleanField( + label=_("Adapt For Oozie"), + help_text=_("Automatically modify the Hadoop configuration" + " so that job config values are set and so that" + " Oozie will handle exit codes correctly."), + required=False, initial=True) + + adapt_spark_swift = forms.BooleanField( + label=_("Enable Swift Paths"), + help_text=_("Modify the configuration so that swift URLs can " + "be dereferenced through HDFS at runtime."), + required=False, initial=True) + + def __init__(self, request, *args, **kwargs): + super(JobConfigAction, self).__init__(request, *args, **kwargs) + job_ex_id = request.REQUEST.get("job_execution_id") + if job_ex_id 
is not None: + job_ex = saharaclient.job_execution_get(request, job_ex_id) + job = saharaclient.job_get(request, job_ex.job_id) + job_configs, interface_args = _merge_interface_with_configs( + job.interface, job_ex.job_configs) + edp_configs = {} + + if 'configs' in job_configs: + configs, edp_configs = ( + self.clean_edp_configs(job_configs['configs'])) + self.fields['job_configs'].initial = ( + json.dumps(configs)) + + if 'params' in job_configs: + self.fields['job_params'].initial = ( + json.dumps(job_configs['params'])) + + if 'args' in job_configs: + self.fields['job_args_array'].initial = ( + json.dumps(job_configs['args'])) + + if self.MAIN_CLASS in edp_configs: + self.fields['main_class'].initial = ( + edp_configs[self.MAIN_CLASS]) + if self.JAVA_OPTS in edp_configs: + self.fields['java_opts'].initial = ( + edp_configs[self.JAVA_OPTS]) + + if self.EDP_MAPPER in edp_configs: + self.fields['streaming_mapper'].initial = ( + edp_configs[self.EDP_MAPPER]) + if self.EDP_REDUCER in edp_configs: + self.fields['streaming_reducer'].initial = ( + edp_configs[self.EDP_REDUCER]) + if self.EDP_HBASE_COMMON_LIB in edp_configs: + self.fields['hbase_common_lib'].initial = ( + edp_configs[self.EDP_HBASE_COMMON_LIB]) + if self.EDP_ADAPT_FOR_OOZIE in edp_configs: + self.fields['adapt_oozie'].initial = ( + edp_configs[self.EDP_ADAPT_FOR_OOZIE]) + if self.EDP_ADAPT_SPARK_SWIFT in edp_configs: + self.fields['adapt_spark_swift'].initial = ( + edp_configs[self.EDP_ADAPT_SPARK_SWIFT]) + + def clean(self): + cleaned_data = super(workflows.Action, self).clean() + job_type = cleaned_data.get("job_type", None) + + if job_type != "MapReduce.Streaming": + if "streaming_mapper" in self._errors: + del self._errors["streaming_mapper"] + if "streaming_reducer" in self._errors: + del self._errors["streaming_reducer"] + + return cleaned_data + + def populate_property_name_choices(self, request, context): + job_id = request.REQUEST.get("job_id") or request.REQUEST.get("job") + job_type = 
saharaclient.job_get(request, job_id).type + job_configs = ( + saharaclient.job_get_configs(request, job_type).job_config) + choices = [(param['value'], param['name']) + for param in job_configs['configs']] + return choices + + def clean_edp_configs(self, configs): + edp_configs = {} + for key, value in six.iteritems(configs): + if key.startswith(self.EDP_PREFIX): + edp_configs[key] = value + for rmkey in edp_configs.keys(): + # remove all configs handled via other controls + # so they do not show up in the free entry inputs + if rmkey in [self.EDP_HBASE_COMMON_LIB, + self.EDP_MAPPER, + self.EDP_REDUCER, + self.MAIN_CLASS, + self.JAVA_OPTS, + self.EDP_ADAPT_FOR_OOZIE, + self.EDP_ADAPT_SPARK_SWIFT]: + del configs[rmkey] + return (configs, edp_configs) + + class Meta(object): + name = _("Configure") + help_text_template = ( + "project/data_processing.jobs/_launch_job_configure_help.html") + + +class JobExecutionGeneralConfig(workflows.Step): + action_class = JobExecutionGeneralConfigAction + + def contribute(self, data, context): + for k, v in data.items(): + if k in ["job_input", "job_output"]: + context["job_general_" + k] = None if (v in [None, ""]) else v + else: + context["job_general_" + k] = v + + return context + + +class JobExecutionExistingGeneralConfig(workflows.Step): + action_class = JobExecutionExistingGeneralConfigAction + + def contribute(self, data, context): + for k, v in data.items(): + if k in ["job_input", "job_output"]: + context["job_general_" + k] = None if (v in [None, ""]) else v + else: + context["job_general_" + k] = v + + return context + + +class JobConfig(workflows.Step): + action_class = JobConfigAction + template_name = 'project/data_processing.jobs/config_template.html' + + def contribute(self, data, context): + job_config = self.clean_configs( + json.loads(data.get("job_configs", '{}'))) + job_params = self.clean_configs( + json.loads(data.get("job_params", '{}'))) + job_args_array = self.clean_configs( + 
json.loads(data.get("job_args_array", '[]'))) + job_type = data.get("job_type", '') + + context["job_type"] = job_type + context["job_config"] = {"configs": job_config} + context["job_config"]["args"] = job_args_array + + if job_type in ["Java", "Spark", "Storm"]: + context["job_config"]["configs"][JobConfigAction.MAIN_CLASS] = ( + data.get("main_class", "")) + context["job_config"]["configs"][JobConfigAction.JAVA_OPTS] = ( + data.get("java_opts", "")) + context["job_config"]["configs"][ + JobConfigAction.EDP_HBASE_COMMON_LIB] = ( + data.get("hbase_common_lib", True)) + if job_type == "Java": + context["job_config"]["configs"][ + JobConfigAction.EDP_ADAPT_FOR_OOZIE] = ( + data.get("adapt_oozie", True)) + if job_type == "Spark": + context["job_config"]["configs"][ + JobConfigAction.EDP_ADAPT_SPARK_SWIFT] = ( + data.get("adapt_spark_swift", True)) + elif job_type == "MapReduce.Streaming": + context["job_config"]["configs"][JobConfigAction.EDP_MAPPER] = ( + data.get("streaming_mapper", "")) + context["job_config"]["configs"][JobConfigAction.EDP_REDUCER] = ( + data.get("streaming_reducer", "")) + else: + context["job_config"]["params"] = job_params + + return context + + @staticmethod + def clean_configs(configs): + cleaned_conf = None + if isinstance(configs, dict): + cleaned_conf = dict([(k.strip(), v.strip()) + for k, v in configs.items() + if len(v.strip()) > 0 and len(k.strip()) > 0]) + elif isinstance(configs, list): + cleaned_conf = list([v.strip() for v in configs + if len(v.strip()) > 0]) + return cleaned_conf + + +class NewClusterConfigAction(c_flow.GeneralConfigAction): + persist_cluster = forms.BooleanField( + label=_("Persist cluster after job exit"), + required=False) + + class Meta(object): + name = _("Configure Cluster") + help_text_template = ( + "project/data_processing.clusters/_configure_general_help.html") + + +class ClusterGeneralConfig(workflows.Step): + action_class = NewClusterConfigAction + contributes = ("hidden_configure_field", ) + + def 
class JobExecutionInterfaceConfigAction(workflows.Action):
    """Collects values for the job's declared interface arguments."""

    def __init__(self, request, *args, **kwargs):
        super(JobExecutionInterfaceConfigAction, self).__init__(
            request, *args, **kwargs)
        job_id = (request.GET.get("job_id")
                  or request.POST.get("job"))
        job = saharaclient.job_get(request, job_id)
        interface = job.interface or []
        interface_args = {}

        # BUG FIX: request.REQUEST was deprecated in Django 1.7 and removed
        # in Django 1.9.  REQUEST searched POST first, then GET, so the
        # explicit lookup below preserves the old precedence.
        job_ex_id = (request.POST.get("job_execution_id") or
                     request.GET.get("job_execution_id"))
        if job_ex_id is not None:
            # Relaunch case: pre-fill arguments from the previous execution.
            job_ex = saharaclient.job_execution_get(request, job_ex_id)
            job = saharaclient.job_get(request, job_ex.job_id)
            job_configs, interface_args = _merge_interface_with_configs(
                job.interface, job_ex.job_configs)

        # One text field per declared argument, pre-filled with the stored
        # value (relaunch) or the argument's default.
        for argument in interface:
            field = forms.CharField(
                required=argument.get('required'),
                label=argument['name'],
                initial=(interface_args.get(argument['id']) or
                         argument.get('default')),
                help_text=argument.get('description'),
                widget=forms.TextInput()
            )
            self.fields['argument_%s' % argument['id']] = field
        # Hidden id->name map so handle() can recover the argument names.
        self.fields['argument_ids'] = forms.CharField(
            initial=json.dumps({argument['id']: argument['name']
                                for argument in interface}),
            widget=forms.HiddenInput()
        )

    def clean(self):
        cleaned_data = super(JobExecutionInterfaceConfigAction, self).clean()
        return cleaned_data

    class Meta(object):
        name = _("Interface Arguments")


class JobExecutionInterfaceConfig(workflows.Step):
    """Workflow step wrapping JobExecutionInterfaceConfigAction."""

    action_class = JobExecutionInterfaceConfigAction

    def contribute(self, data, context):
        # Pass every posted field straight through to the workflow context.
        for k, v in data.items():
            context[k] = v
        return context
JobConfig, + JobExecutionInterfaceConfig) + + def handle(self, request, context): + argument_ids = json.loads(context['argument_ids']) + interface = {name: context["argument_" + str(arg_id)] + for arg_id, name in argument_ids.items()} + + saharaclient.job_execution_create( + request, + context["job_general_job"], + context["job_general_cluster"], + context["job_general_job_input"], + context["job_general_job_output"], + context["job_config"], + interface) + return True + + +class SelectHadoopPluginAction(t_flows.SelectPluginAction): + def __init__(self, request, *args, **kwargs): + super(SelectHadoopPluginAction, self).__init__(request, + *args, + **kwargs) + self.fields["job_id"] = forms.ChoiceField( + label=_("Plugin name"), + initial=request.GET.get("job_id") or request.POST.get("job_id"), + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + self.fields["job_configs"] = forms.ChoiceField( + label=_("Job configs"), + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + self.fields["job_args"] = forms.ChoiceField( + label=_("Job args"), + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + self.fields["job_params"] = forms.ChoiceField( + label=_("Job params"), + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + job_ex_id = request.REQUEST.get("job_execution_id") + if job_ex_id is not None: + self.fields["job_execution_id"] = forms.ChoiceField( + label=_("Job Execution ID"), + initial=job_ex_id, + widget=forms.HiddenInput( + attrs={"class": "hidden_create_field"})) + + job_configs = ( + saharaclient.job_execution_get(request, + job_ex_id).job_configs) + + if "configs" in job_configs: + self.fields["job_configs"].initial = ( + json.dumps(job_configs["configs"])) + if "params" in job_configs: + self.fields["job_params"].initial = ( + json.dumps(job_configs["params"])) + if "args" in job_configs: + self.fields["job_args"].initial = ( + json.dumps(job_configs["args"])) + + class Meta(object): + 
class SelectHadoopPlugin(workflows.Step):
    """Single step wrapping the plugin/version selection action."""

    action_class = SelectHadoopPluginAction


class ChosePluginVersion(workflows.Workflow):
    """Preliminary workflow: pick the plugin and Hadoop version before
    configuring a brand-new cluster for the job.
    """

    # BUG FIX: slug was misspelled "lunch_job".
    # NOTE(review): the slug is used as the workflow's identifier in the
    # rendered form -- confirm nothing references the old spelling.
    slug = "launch_job"
    name = _("Launch Job")
    finalize_button_name = _("Create")
    success_message = _("Created")
    failure_message = _("Could not create")
    success_url = "horizon:project:data_processing.cluster_templates:index"
    default_steps = (SelectHadoopPlugin,)
saharaclient.job_execution_create( + request, + context["job_general_job"], + cluster.id, + context["job_general_job_input"], + context["job_general_job_output"], + context["job_config"], + interface) + except Exception: + exceptions.handle(request, + _("Unable to launch job.")) + return False + return True diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/__init__.py b/sahara_dashboard/content/data_processing/nodegroup_templates/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py b/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py new file mode 100644 index 0000000..76d2cf8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from django.utils.translation import ugettext_lazy as _

import horizon

from openstack_dashboard.dashboards.project import dashboard


class NodegroupTemplatesPanel(horizon.Panel):
    # Project-dashboard panel for Sahara node group templates.
    name = _("Node Group Templates")
    slug = 'data_processing.nodegroup_templates'
    # NOTE(review): the nested tuple lists both historical spellings of the
    # Sahara service type ("data-processing" vs "data_processing");
    # presumably either permission grants access -- confirm against
    # Horizon's permission handling.
    permissions = (('openstack.services.data-processing',
                    'openstack.services.data_processing'),)


# Register the panel at import time so Horizon discovers it.
dashboard.Project.register(NodegroupTemplatesPanel)
+ +import logging + +from django.template import defaultfilters as filters +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ungettext_lazy + +from horizon import tables +from sahara_dashboard.api import sahara as saharaclient + +LOG = logging.getLogger(__name__) + + +class NodeGroupTemplatesFilterAction(tables.FilterAction): + filter_type = "server" + filter_choices = (('name', _("Name"), True), + ('plugin_name', _("Plugin"), True), + ('hadoop_version', _("Version"), True)) + + +class CreateNodegroupTemplate(tables.LinkAction): + name = "create" + verbose_name = _("Create Template") + url = ("horizon:project:data_processing.nodegroup_templates:" + "create-nodegroup-template") + classes = ("ajax-modal", "create-nodegrouptemplate-btn") + icon = "plus" + + +class ConfigureNodegroupTemplate(tables.LinkAction): + name = "configure" + verbose_name = _("Configure Template") + url = ("horizon:project:data_processing.nodegroup_templates:" + "configure-nodegroup-template") + classes = ("ajax-modal", "configure-nodegrouptemplate-btn") + icon = "plus" + attrs = {"style": "display: none"} + + +class CopyTemplate(tables.LinkAction): + name = "copy" + verbose_name = _("Copy Template") + url = "horizon:project:data_processing.nodegroup_templates:copy" + classes = ("ajax-modal", ) + + +class EditTemplate(tables.LinkAction): + name = "edit" + verbose_name = _("Edit Template") + url = "horizon:project:data_processing.nodegroup_templates:edit" + classes = ("ajax-modal", ) + + +class DeleteTemplate(tables.DeleteAction): + @staticmethod + def action_present(count): + return ungettext_lazy( + u"Delete Template", + u"Delete Templates", + count + ) + + @staticmethod + def action_past(count): + return ungettext_lazy( + u"Deleted Template", + u"Deleted Templates", + count + ) + + def delete(self, request, template_id): + saharaclient.nodegroup_template_delete(request, template_id) + + +class NodegroupTemplatesTable(tables.DataTable): + name = 
tables.Column( + "name", + verbose_name=_("Name"), + link="horizon:project:data_processing.nodegroup_templates:details") + plugin_name = tables.Column("plugin_name", + verbose_name=_("Plugin")) + hadoop_version = tables.Column("hadoop_version", + verbose_name=_("Version")) + node_processes = tables.Column("node_processes", + verbose_name=_("Node Processes"), + wrap_list=True, + filters=(filters.unordered_list,)) + + class Meta(object): + name = "nodegroup_templates" + verbose_name = _("Node Group Templates") + table_actions = (CreateNodegroupTemplate, + ConfigureNodegroupTemplate, + DeleteTemplate, + NodeGroupTemplatesFilterAction,) + row_actions = (EditTemplate, + CopyTemplate, + DeleteTemplate) diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py b/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py new file mode 100644 index 0000000..68d8569 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py @@ -0,0 +1,100 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tabs + +from openstack_dashboard.api import network +from openstack_dashboard.api import nova +from sahara_dashboard.api import sahara as saharaclient + + +from sahara_dashboard.content. 
\
    data_processing.utils import workflow_helpers as helpers

LOG = logging.getLogger(__name__)


class GeneralTab(tabs.Tab):
    # "General Info" tab of the node group template detail page.
    name = _("General Info")
    slug = "nodegroup_template_details_tab"
    template_name = (
        "project/data_processing.nodegroup_templates/_details.html")

    def get_context_data(self, request):
        """Build the template context: template, flavor, pool, sec groups."""
        template_id = self.tab_group.kwargs['template_id']
        try:
            template = saharaclient.nodegroup_template_get(
                request, template_id)
        except Exception as e:
            template = {}
            LOG.error(
                "Unable to fetch node group template details: %s" % str(e))
            # Bail out early: the lookups below dereference template
            # attributes and would fail on the empty dict.
            return {"template": template}

        try:
            flavor = nova.flavor_get(request, template.flavor_id)
        except Exception:
            flavor = {}
            exceptions.handle(request,
                              _("Unable to fetch flavor for template."))

        # Resolve the floating IP pool id to a human-readable name, if set.
        floating_ip_pool_name = None
        if template.floating_ip_pool:
            try:
                floating_ip_pool_name = self._get_floating_ip_pool_name(
                    request, template.floating_ip_pool)
            except Exception:
                exceptions.handle(request,
                                  _("Unable to fetch floating ip pools."))

        security_groups = helpers.get_security_groups(
            request, template.security_groups)

        return {"template": template, "flavor": flavor,
                "floating_ip_pool_name": floating_ip_pool_name,
                "security_groups": security_groups}

    def _get_floating_ip_pool_name(self, request, pool_id):
        """Return the pool's name, or the raw id if it cannot be found."""
        pools = [pool for pool in network.floating_ip_pools_list(
            request) if pool.id == pool_id]

        return pools[0].name if pools else pool_id


class ConfigsTab(tabs.Tab):
    # "Service Configurations" tab of the node group template detail page.
    name = _("Service Configurations")
    slug = "nodegroup_template_service_configs_tab"
    template_name = (
        "project/data_processing.nodegroup_templates/_service_confs.html")

    def get_context_data(self, request):
        """Fetch the template; on failure log and render an empty dict."""
        template_id = self.tab_group.kwargs['template_id']
        try:
            template = saharaclient.nodegroup_template_get(
                request, template_id)
        except Exception as e:
            template = {}
            LOG.error(
                "Unable to fetch node group template details: %s" % str(e))
        return {"template":
template} + + +class NodegroupTemplateDetailsTabs(tabs.TabGroup): + slug = "nodegroup_template_details" + tabs = (GeneralTab, ConfigsTab, ) + sticky = True diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html new file mode 100644 index 0000000..97e1cb8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html @@ -0,0 +1,23 @@ +{% load i18n horizon %} +
+

+ {% blocktrans %}This Node Group Template will be created for:{% endblocktrans %} +
+ {% blocktrans %}Plugin{% endblocktrans %}: {{ plugin_name }} +
+ {% blocktrans %}Version{% endblocktrans %}: {{ hadoop_version }} +
+

+

+ {% blocktrans %}The Node Group Template object specifies the processes + that will be launched on each instance. Check one or more processes. + When processes are selected, you may set node scoped + configurations on corresponding tabs.{% endblocktrans %} +

+

+ {% blocktrans %}You must choose a flavor to determine the size (VCPUs, memory and storage) of all launched VMs.{% endblocktrans %} +

+

+ {% blocktrans %}Data Processing provides different storage location options. You may choose Ephemeral Drive or a Cinder Volume to be attached to instances.{% endblocktrans %} +

+
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_create_general_help.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_create_general_help.html new file mode 100644 index 0000000..019a115 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_create_general_help.html @@ -0,0 +1,4 @@ +{% load i18n horizon %} +

+ {% blocktrans %}Select a plugin and version for the new Node Group template.{% endblocktrans %} +

diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_details.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_details.html new file mode 100644 index 0000000..fe92f18 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_details.html @@ -0,0 +1,94 @@ +{% load i18n sizeformat %} + +
+
+
{% trans "Name" %}
+
{{ template.name }}
+
{% trans "ID" %}
+
{{ template.id }}
+
{% trans "Description" %}
+
{{ template.description|default:_("None") }}
+
+
+
{% trans "Flavor" %}
+
{{ flavor.name }}
+
+ {% if template.availability_zone %} +
+
{% trans "Availability Zone" %}
+
{{ template.availability_zone }}
+
+ {% endif %} + {% if template.floating_ip_pool %} +
+
{% trans "Floating IP Pool" %}
+
{{ floating_ip_pool_name }}
+
+ {% endif %} +
+
{% trans "Plugin" %}
+
{{ template.plugin_name }}
+
{% trans "Version" %}
+
{{ template.hadoop_version }}
+
+ +
+
{% trans "Use auto-configuration" %}
+
{{ template.use_autoconfig }}
+
+
+
{% trans "Proxy Gateway" %}
+
{{ template.is_proxy_gateway }}
+
+ +
+
{% trans "Auto Security Group" %}
+
{{ template.auto_security_group }}
+
+ +
+
{% trans "Security Groups" %}
+
+
    + {% for group in security_groups %} + {% if group.id %} +
  • {{ group.name }}
  • + {% else %} +
  • {{ group.name }}
  • + {% endif %} + {% endfor %} +
+
+
+ +
+
{% trans "Node Processes" %}
+
+
    + {% for process in template.node_processes %} +
  • {{ process }}
  • + {% endfor %} +
+
+
+
+

{% trans "HDFS placement" %}

+ {% if template.volumes_per_node %} +
{% trans "Cinder volumes" %}
+
{% trans "Volumes per node" %}
+
{{ template.volumes_per_node }}
+
{% trans "Volumes size" %}
+
{{ template.volumes_size }}
+
{% trans "Volumes type" %}
+
{{ template.volume_type }}
+
{% trans "Volumes local to instance" %}
+
{{ template.volume_local_to_instance }}
+ {% if template.volumes_availability_zone %} +
{% trans "Volumes Availability Zone" %}
+
{{ template.volumes_availability_zone }}
+ {% endif %} + {% else %} +
{% trans "Ephemeral drive" %}
+ {% endif %} +
+
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_fields_help.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_fields_help.html new file mode 100644 index 0000000..22dca2f --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_fields_help.html @@ -0,0 +1,60 @@ +{% load i18n %} +
+ + + +
+ diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_service_confs.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_service_confs.html new file mode 100644 index 0000000..8f37a9b --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_service_confs.html @@ -0,0 +1,23 @@ +{% load i18n sizeformat %} +

{% trans "Service Configurations" %}

+
+
+ {% for service, config in template.node_configs.items %} +
{{ service }}
+
+ {% if config %} +
    + {% for conf_name, conf_val in config.items %} +
  • + {% blocktrans %}{{ conf_name }}: {{ conf_val }}{% endblocktrans %} +
  • + {% endfor %} +
+ {% else %} +
{% trans "No configurations" %}
+ {% endif %} +
+ {% endfor %} +
+ +
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/configure.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/configure.html new file mode 100644 index 0000000..cecfe0e --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/configure.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Create Node Group Template" %}{% endblock %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/create.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/create.html new file mode 100644 index 0000000..9d497c5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/create.html @@ -0,0 +1,11 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{{ name }}{% endblock %} + +{% block page_header %} + {% include "horizon/common/_page_header.html" with title={{ name }} %} +{% endblock page_header %} + +{% block main %} + {% include 'horizon/common/_workflow.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/nodegroup_templates.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/nodegroup_templates.html new file mode 100644 index 0000000..7cdc049 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/nodegroup_templates.html @@ -0,0 +1,81 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% 
endblock %} + +{% block main %} + +
+ {{ nodegroup_templates_table.render }} +
+ + + +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/tests.py b/sahara_dashboard/content/data_processing/nodegroup_templates/tests.py new file mode 100644 index 0000000..cd70855 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/tests.py @@ -0,0 +1,323 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from django.core.urlresolvers import reverse +from django import http + +from mox3.mox import IgnoreArg # noqa +from mox3.mox import IsA # noqa +import six + +from openstack_dashboard import api as dash_api +from sahara_dashboard import api +from sahara_dashboard.content.data_processing.utils \ + import workflow_helpers +from sahara_dashboard.content.data_processing.\ + nodegroup_templates.workflows import create as create_workflow +from openstack_dashboard.test import helpers as test + + +INDEX_URL = reverse( + 'horizon:project:data_processing.nodegroup_templates:index') +DETAILS_URL = reverse( + 'horizon:project:data_processing.nodegroup_templates:details', + args=['id']) +CREATE_URL = reverse( + 'horizon:project:data_processing.nodegroup_templates:' + + 'configure-nodegroup-template') + + +class DataProcessingNodeGroupTests(test.TestCase): + def _setup_copy_test(self): + ngt = self.nodegroup_templates.first() + configs = self.plugins_configs.first() + dash_api.cinder.extension_supported(IsA(http.HttpRequest), + 'AvailabilityZones') \ + .AndReturn(True) + 
dash_api.cinder.availability_zone_list(IsA(http.HttpRequest))\ + .AndReturn(self.availability_zones.list()) + dash_api.cinder.volume_type_list(IsA(http.HttpRequest))\ + .AndReturn([]) + api.sahara.nodegroup_template_get(IsA(http.HttpRequest), + ngt.id) \ + .AndReturn(ngt) + api.sahara.plugin_get_version_details(IsA(http.HttpRequest), + ngt.plugin_name, + ngt.hadoop_version) \ + .MultipleTimes().AndReturn(configs) + dash_api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + dash_api.network.security_group_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + + self.mox.ReplayAll() + + url = reverse( + 'horizon:project:data_processing.nodegroup_templates:copy', + args=[ngt.id]) + res = self.client.get(url) + + return ngt, configs, res + + @test.create_stubs({api.sahara: ('nodegroup_template_list',)}) + def test_index(self): + api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.nodegroup_templates.list()) + self.mox.ReplayAll() + res = self.client.get(INDEX_URL) + self.assertTemplateUsed(res, + 'project/data_processing.nodegroup_templates/' + 'nodegroup_templates.html') + self.assertContains(res, 'Node Group Templates') + self.assertContains(res, 'Name') + self.assertContains(res, 'Plugin') + + @test.create_stubs({api.sahara: ('nodegroup_template_get',), + dash_api.nova: ('flavor_get',)}) + def test_details(self): + flavor = self.flavors.first() + ngt = self.nodegroup_templates.first() + dash_api.nova.flavor_get(IsA(http.HttpRequest), flavor.id) \ + .AndReturn(flavor) + api.sahara.nodegroup_template_get(IsA(http.HttpRequest), + IsA(six.text_type)) \ + .MultipleTimes().AndReturn(ngt) + self.mox.ReplayAll() + res = self.client.get(DETAILS_URL) + self.assertTemplateUsed(res, 'horizon/common/_detail.html') + self.assertContains(res, 'sample-template') + + @test.create_stubs({api.sahara: ('nodegroup_template_list', + 'nodegroup_template_delete')}) + def test_delete(self): + ngt = self.nodegroup_templates.first() + 
api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \ + .AndReturn(self.nodegroup_templates.list()) + api.sahara.nodegroup_template_delete(IsA(http.HttpRequest), ngt.id) + self.mox.ReplayAll() + + form_data = {'action': 'nodegroup_templates__delete__%s' % ngt.id} + res = self.client.post(INDEX_URL, form_data) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('nodegroup_template_get', + 'plugin_get_version_details'), + dash_api.network: ('floating_ip_pools_list', + 'security_group_list'), + dash_api.cinder: ('extension_supported', + 'availability_zone_list', + 'volume_type_list')}) + def test_copy(self): + ngt, configs, res = self._setup_copy_test() + workflow = res.context['workflow'] + step = workflow.get_step("generalconfigaction") + self.assertEqual(step.action['nodegroup_name'].field.initial, + ngt.name + "-copy") + + @test.create_stubs({api.sahara: ('client', + 'nodegroup_template_create', + 'plugin_get_version_details'), + dash_api.network: ('floating_ip_pools_list', + 'security_group_list'), + dash_api.nova: ('flavor_list',), + dash_api.cinder: ('extension_supported', + 'availability_zone_list', + 'volume_type_list')}) + def test_create(self): + flavor = self.flavors.first() + ngt = self.nodegroup_templates.first() + configs = self.plugins_configs.first() + new_name = ngt.name + '-new' + self.mox.StubOutWithMock( + workflow_helpers, 'parse_configs_from_context') + + dash_api.cinder.extension_supported(IsA(http.HttpRequest), + 'AvailabilityZones') \ + .AndReturn(True) + dash_api.cinder.availability_zone_list(IsA(http.HttpRequest))\ + .AndReturn(self.availability_zones.list()) + dash_api.cinder.volume_type_list(IsA(http.HttpRequest))\ + .AndReturn([]) + dash_api.nova.flavor_list(IsA(http.HttpRequest)).AndReturn([flavor]) + api.sahara.plugin_get_version_details(IsA(http.HttpRequest), + ngt.plugin_name, + ngt.hadoop_version) \ + 
.MultipleTimes().AndReturn(configs) + dash_api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + dash_api.network.security_group_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + workflow_helpers.parse_configs_from_context( + IgnoreArg(), IgnoreArg()).AndReturn({}) + api.sahara.nodegroup_template_create( + IsA(http.HttpRequest), + **{'name': new_name, + 'plugin_name': ngt.plugin_name, + 'hadoop_version': ngt.hadoop_version, + 'description': ngt.description, + 'flavor_id': flavor.id, + 'volumes_per_node': None, + 'volumes_size': None, + 'volume_type': None, + 'volume_local_to_instance': False, + 'volumes_availability_zone': None, + 'node_processes': ['namenode'], + 'node_configs': {}, + 'floating_ip_pool': None, + 'security_groups': [], + 'auto_security_group': True, + 'availability_zone': None, + 'is_proxy_gateway': False, + 'use_autoconfig': True}) \ + .AndReturn(True) + + self.mox.ReplayAll() + + res = self.client.post( + CREATE_URL, + {'nodegroup_name': new_name, + 'plugin_name': ngt.plugin_name, + ngt.plugin_name + '_version': '1.2.1', + 'hadoop_version': ngt.hadoop_version, + 'description': ngt.description, + 'flavor': flavor.id, + 'availability_zone': None, + 'storage': 'ephemeral_drive', + 'volumes_per_node': 0, + 'volumes_size': 0, + 'volume_type': None, + 'volume_local_to_instance': False, + 'volumes_availability_zone': None, + 'floating_ip_pool': None, + 'security_autogroup': True, + 'processes': 'HDFS:namenode', + 'use_autoconfig': True}) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('client', + 'nodegroup_template_create', + 'nodegroup_template_update', + 'nodegroup_template_get', + 'plugin_get_version_details'), + dash_api.network: ('floating_ip_pools_list', + 'security_group_list'), + dash_api.nova: ('flavor_list',), + dash_api.cinder: ('extension_supported', + 'availability_zone_list', + 'volume_type_list')}) + def 
test_update(self): + flavor = self.flavors.first() + ngt = self.nodegroup_templates.first() + configs = self.plugins_configs.first() + new_name = ngt.name + '-updated' + UPDATE_URL = reverse( + 'horizon:project:data_processing.nodegroup_templates:edit', + kwargs={'template_id': ngt.id}) + self.mox.StubOutWithMock( + workflow_helpers, 'parse_configs_from_context') + + dash_api.cinder.extension_supported(IsA(http.HttpRequest), + 'AvailabilityZones') \ + .AndReturn(True) + dash_api.cinder.availability_zone_list(IsA(http.HttpRequest)) \ + .AndReturn(self.availability_zones.list()) + dash_api.cinder.volume_type_list(IsA(http.HttpRequest))\ + .AndReturn([]) + dash_api.nova.flavor_list(IsA(http.HttpRequest)).AndReturn([flavor]) + api.sahara.plugin_get_version_details(IsA(http.HttpRequest), + ngt.plugin_name, + ngt.hadoop_version) \ + .MultipleTimes().AndReturn(configs) + dash_api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + dash_api.network.security_group_list(IsA(http.HttpRequest)) \ + .AndReturn([]) + workflow_helpers.parse_configs_from_context( + IgnoreArg(), IgnoreArg()).AndReturn({}) + api.sahara.nodegroup_template_get(IsA(http.HttpRequest), + ngt.id) \ + .AndReturn(ngt) + api.sahara.nodegroup_template_update( + request=IsA(http.HttpRequest), + ngt_id=ngt.id, + name=new_name, + plugin_name=ngt.plugin_name, + hadoop_version=ngt.hadoop_version, + flavor_id=flavor.id, + description=ngt.description, + volumes_per_node=None, + volumes_size=None, + volume_type=None, + volume_local_to_instance=False, + volumes_availability_zone=None, + node_processes=['namenode'], + node_configs={}, + floating_ip_pool=None, + security_groups=[], + auto_security_group=True, + availability_zone=None, + use_autoconfig=True, + is_proxy_gateway=False).AndReturn(True) + + self.mox.ReplayAll() + + res = self.client.post( + UPDATE_URL, + {'ng_id': ngt.id, + 'nodegroup_name': new_name, + 'plugin_name': ngt.plugin_name, + ngt.plugin_name + '_version': '1.2.1', + 
'hadoop_version': ngt.hadoop_version, + 'description': ngt.description, + 'flavor': flavor.id, + 'availability_zone': None, + 'storage': 'ephemeral_drive', + 'volumes_per_node': 0, + 'volumes_size': 0, + 'volume_type': None, + 'volume_local_to_instance': False, + 'volumes_availability_zone': None, + 'floating_ip_pool': None, + 'is_proxy_gateway': False, + 'security_autogroup': True, + 'processes': 'HDFS:namenode', + 'use_autoconfig': True}) + + self.assertNoFormErrors(res) + self.assertRedirectsNoFollow(res, INDEX_URL) + self.assertMessageCount(success=1) + + @test.create_stubs({api.sahara: ('nodegroup_template_get', + 'plugin_get_version_details'), + dash_api.network: ('floating_ip_pools_list', + 'security_group_list'), + dash_api.cinder: ('extension_supported', + 'availability_zone_list', + 'volume_type_list')}) + def test_workflow_steps(self): + # since the copy workflow is the child of create workflow + # it's better to test create workflow through copy workflow + ngt, configs, res = self._setup_copy_test() + workflow = res.context['workflow'] + expected_instances = [ + create_workflow.GeneralConfig, + create_workflow.SelectNodeProcesses, + create_workflow.SecurityConfig + ] + for expected, observed in zip(expected_instances, workflow.steps): + self.assertIsInstance(observed, expected) diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/urls.py b/sahara_dashboard/content/data_processing/nodegroup_templates/urls.py new file mode 100644 index 0000000..07229d5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/urls.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from django.conf.urls import patterns +from django.conf.urls import url + +import sahara_dashboard.content. \ + data_processing.nodegroup_templates.views as views + + +urlpatterns = patterns('sahara.nodegroup_templates.views', + url(r'^$', views.NodegroupTemplatesView.as_view(), + name='index'), + url(r'^nodegroup-templates$', + views.NodegroupTemplatesView.as_view(), + name='nodegroup-templates'), + url(r'^create-nodegroup-template$', + views.CreateNodegroupTemplateView.as_view(), + name='create-nodegroup-template'), + url(r'^configure-nodegroup-template$', + views.ConfigureNodegroupTemplateView.as_view(), + name='configure-nodegroup-template'), + url(r'^(?P[^/]+)$', + views.NodegroupTemplateDetailsView.as_view(), + name='details'), + url(r'^(?P[^/]+)/copy$', + views.CopyNodegroupTemplateView.as_view(), + name='copy'), + url(r'^(?P[^/]+)/edit$', + views.EditNodegroupTemplateView.as_view(), + name='edit') + ) diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/views.py b/sahara_dashboard/content/data_processing/nodegroup_templates/views.py new file mode 100644 index 0000000..67150a3 --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/views.py @@ -0,0 +1,145 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import tables +from horizon import tabs +from horizon.utils import memoized +from horizon.utils.urlresolvers import reverse # noqa +from horizon import workflows + +from sahara_dashboard.api import sahara as saharaclient + +import sahara_dashboard.content.data_processing. \ + nodegroup_templates.tables as _tables +import sahara_dashboard.content.data_processing. \ + nodegroup_templates.tabs as _tabs +import sahara_dashboard.content.data_processing. \ + nodegroup_templates.workflows.copy as copy_flow +import sahara_dashboard.content.data_processing. \ + nodegroup_templates.workflows.create as create_flow +import sahara_dashboard.content.data_processing. 
\ + nodegroup_templates.workflows.edit as edit_flow + +LOG = logging.getLogger(__name__) + + +class NodegroupTemplatesView(tables.DataTableView): + table_class = _tables.NodegroupTemplatesTable + template_name = ( + 'project/data_processing.nodegroup_templates/nodegroup_templates.html') + page_title = _("Node Group Templates") + + def get_data(self): + try: + search_opts = {} + filter = self.get_server_filter_info(self.request) + if filter['value'] and filter['field']: + search_opts = {filter['field']: filter['value']} + data = saharaclient.nodegroup_template_list(self.request, + search_opts) + except Exception: + data = [] + exceptions.handle(self.request, + _("Unable to fetch node group template list.")) + return data + + +class NodegroupTemplateDetailsView(tabs.TabView): + tab_group_class = _tabs.NodegroupTemplateDetailsTabs + template_name = 'horizon/common/_detail.html' + page_title = "{{ template.name|default:template.id }}" + + @memoized.memoized_method + def get_object(self): + ngt_id = self.kwargs["template_id"] + try: + return saharaclient.nodegroup_template_get(self.request, ngt_id) + except Exception: + msg = _('Unable to retrieve details for ' + 'node group template "%s".') % ngt_id + redirect = reverse("horizon:project:data_processing." 
+ "nodegroup_templates:nodegroup-templates") + exceptions.handle(self.request, msg, redirect=redirect) + + def get_context_data(self, **kwargs): + context = super(NodegroupTemplateDetailsView, self)\ + .get_context_data(**kwargs) + context['template'] = self.get_object() + return context + + +class CreateNodegroupTemplateView(workflows.WorkflowView): + workflow_class = create_flow.CreateNodegroupTemplate + success_url = ( + "horizon:project:data_processing.nodegroup_templates:" + "create-nodegroup-template") + classes = ("ajax-modal",) + template_name = "project/data_processing.nodegroup_templates/create.html" + page_title = _("Create Node Group Template") + + +class ConfigureNodegroupTemplateView(workflows.WorkflowView): + workflow_class = create_flow.ConfigureNodegroupTemplate + success_url = "horizon:project:data_processing.nodegroup_templates" + template_name = ( + "project/data_processing.nodegroup_templates/configure.html") + page_title = _("Create Node Group Template") + + def get_initial(self): + initial = super(ConfigureNodegroupTemplateView, self).get_initial() + initial.update(self.kwargs) + return initial + + +class CopyNodegroupTemplateView(workflows.WorkflowView): + workflow_class = copy_flow.CopyNodegroupTemplate + success_url = "horizon:project:data_processing.nodegroup_templates" + template_name = ( + "project/data_processing.nodegroup_templates/configure.html") + + def get_context_data(self, **kwargs): + context = super(CopyNodegroupTemplateView, self)\ + .get_context_data(**kwargs) + + context["template_id"] = kwargs["template_id"] + return context + + def get_object(self, *args, **kwargs): + if not hasattr(self, "_object"): + template_id = self.kwargs['template_id'] + try: + template = saharaclient.nodegroup_template_get(self.request, + template_id) + except Exception: + template = None + exceptions.handle(self.request, + _("Unable to fetch template object.")) + self._object = template + return self._object + + def get_initial(self): + initial = 
super(CopyNodegroupTemplateView, self).get_initial() + initial['template_id'] = self.kwargs['template_id'] + return initial + + +class EditNodegroupTemplateView(CopyNodegroupTemplateView): + workflow_class = edit_flow.EditNodegroupTemplate + success_url = "horizon:project:data_processing.nodegroup_templates" + template_name = ( + "project/data_processing.nodegroup_templates/configure.html") diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/__init__.py b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/copy.py b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/copy.py new file mode 100644 index 0000000..635475c --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/copy.py @@ -0,0 +1,117 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions + +from sahara_dashboard.api import sahara as saharaclient + +import sahara_dashboard.content.data_processing. 
\ + nodegroup_templates.workflows.create as create_flow + +LOG = logging.getLogger(__name__) + + +class CopyNodegroupTemplate(create_flow.ConfigureNodegroupTemplate): + success_message = _("Node Group Template copy %s created") + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + self.template_id = context_seed["template_id"] + self.template = saharaclient.nodegroup_template_get(request, + self.template_id) + self._set_configs_to_copy(self.template.node_configs) + + plugin = self.template.plugin_name + hadoop_version = self.template.hadoop_version + + request.GET = request.GET.copy() + request.GET.update( + {"plugin_name": plugin, "hadoop_version": hadoop_version}) + + super(CopyNodegroupTemplate, self).__init__(request, context_seed, + entry_point, *args, + **kwargs) + + g_fields = None + snp_fields = None + s_fields = None + for step in self.steps: + if isinstance(step, create_flow.GeneralConfig): + g_fields = step.action.fields + if isinstance(step, create_flow.SecurityConfig): + s_fields = step.action.fields + if isinstance(step, create_flow.SelectNodeProcesses): + snp_fields = step.action.fields + + g_fields["nodegroup_name"].initial = self.template.name + "-copy" + g_fields["description"].initial = self.template.description + g_fields["flavor"].initial = self.template.flavor_id + + if hasattr(self.template, "availability_zone"): + g_fields["availability_zone"].initial = ( + self.template.availability_zone) + + if hasattr(self.template, "volumes_availability_zone"): + g_fields["volumes_availability_zone"].initial = \ + self.template.volumes_availability_zone + + storage = "cinder_volume" if self.template.volumes_per_node > 0 \ + else "ephemeral_drive" + volumes_per_node = self.template.volumes_per_node + volumes_size = self.template.volumes_size + volume_type = self.template.volume_type + volume_local_to_instance = self.template.volume_local_to_instance + g_fields["storage"].initial = storage + g_fields["volumes_per_node"].initial = 
volumes_per_node + g_fields["volumes_size"].initial = volumes_size + g_fields["volumes_availability_zone"].initial = \ + self.template.volumes_availability_zone + g_fields['volume_type'].initial = volume_type + g_fields['volume_local_to_instance'].initial = volume_local_to_instance + g_fields["proxygateway"].initial = self.template.is_proxy_gateway + g_fields["use_autoconfig"].initial = self.template.use_autoconfig + + if self.template.floating_ip_pool: + g_fields['floating_ip_pool'].initial = ( + self.template.floating_ip_pool) + + s_fields["security_autogroup"].initial = ( + self.template.auto_security_group) + + if self.template.security_groups: + s_fields["security_groups"].initial = dict( + [(sg, sg) for sg in self.template.security_groups]) + + processes_dict = dict() + try: + plugin_details = saharaclient.plugin_get_version_details( + request, + plugin, + hadoop_version) + plugin_node_processes = plugin_details.node_processes + except Exception: + plugin_node_processes = dict() + exceptions.handle(request, + _("Unable to fetch plugin details.")) + for process in self.template.node_processes: + # need to know the service + _service = None + for service, processes in plugin_node_processes.items(): + if process in processes: + _service = service + break + processes_dict["%s:%s" % (_service, process)] = process + snp_fields["processes"].initial = processes_dict diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/create.py b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/create.py new file mode 100644 index 0000000..903ad3e --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/create.py @@ -0,0 +1,520 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools +import logging +import uuid + +from django.utils import encoding +from django.utils import html +from django.utils import safestring +from django.utils.translation import ugettext_lazy as _ + +from saharaclient.api import base as api_base + +from horizon import exceptions +from horizon import forms +from horizon import workflows +from openstack_dashboard.api import cinder +from openstack_dashboard.api import network +from openstack_dashboard.dashboards.project.instances \ + import utils as nova_utils +from openstack_dashboard.dashboards.project.volumes \ + import utils as cinder_utils + +from sahara_dashboard.api import sahara as saharaclient +from sahara_dashboard.content.data_processing.utils \ + import helpers +from sahara_dashboard.content.data_processing.utils \ + import workflow_helpers + +LOG = logging.getLogger(__name__) + + +class GeneralConfigAction(workflows.Action): + nodegroup_name = forms.CharField(label=_("Template Name")) + + description = forms.CharField(label=_("Description"), + required=False, + widget=forms.Textarea(attrs={'rows': 4})) + + flavor = forms.ChoiceField(label=_("OpenStack Flavor")) + + availability_zone = forms.ChoiceField( + label=_("Availability Zone"), + help_text=_("Launch instances in this availability zone."), + required=False, + widget=forms.Select(attrs={"class": "availability_zone_field"}) + ) + + storage = forms.ChoiceField( + label=_("Storage location"), + help_text=_("Choose a storage location"), + choices=[("ephemeral_drive", "Ephemeral Drive"), + ("cinder_volume", "Cinder Volume")], + 
widget=forms.Select(attrs={ + "class": "storage_field switchable", + 'data-slug': 'storage_loc' + })) + + volumes_per_node = forms.IntegerField( + label=_("Volumes per node"), + required=False, + initial=1, + widget=forms.TextInput(attrs={ + "class": "volume_per_node_field switched", + "data-switch-on": "storage_loc", + "data-storage_loc-cinder_volume": _('Volumes per node') + }) + ) + + volumes_size = forms.IntegerField( + label=_("Volumes size (GB)"), + required=False, + initial=10, + widget=forms.TextInput(attrs={ + "class": "volume_size_field switched", + "data-switch-on": "storage_loc", + "data-storage_loc-cinder_volume": _('Volumes size (GB)') + }) + ) + + volume_type = forms.ChoiceField( + label=_("Volumes type"), + required=False, + widget=forms.Select(attrs={ + "class": "volume_type_field switched", + "data-switch-on": "storage_loc", + "data-storage_loc-cinder_volume": _('Volumes type') + }) + ) + + volume_local_to_instance = forms.BooleanField( + label=_("Volume local to instance"), + required=False, + help_text=_("Instance and attached volumes will be created on the " + "same physical host"), + widget=forms.CheckboxInput(attrs={ + "class": "volume_local_to_instance_field switched", + "data-switch-on": "storage_loc", + "data-storage_loc-cinder_volume": _('Volume local to instance') + }) + ) + + volumes_availability_zone = forms.ChoiceField( + label=_("Volumes Availability Zone"), + help_text=_("Create volumes in this availability zone."), + required=False, + widget=forms.Select(attrs={ + "class": "volumes_availability_zone_field switched", + "data-switch-on": "storage_loc", + "data-storage_loc-cinder_volume": _('Volumes Availability Zone') + }) + ) + + hidden_configure_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"})) + + def __init__(self, request, *args, **kwargs): + super(GeneralConfigAction, self).__init__(request, *args, **kwargs) + + hlps = helpers.Helpers(request) + + plugin, 
hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(request)) + + if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED: + pools = network.floating_ip_pools_list(request) + pool_choices = [(pool.id, pool.name) for pool in pools] + pool_choices.insert(0, (None, "Do not assign floating IPs")) + + self.fields['floating_ip_pool'] = forms.ChoiceField( + label=_("Floating IP Pool"), + choices=pool_choices, + required=False) + + self.fields["use_autoconfig"] = forms.BooleanField( + label=_("Auto-configure"), + help_text=_("If selected, instances of a node group will be " + "automatically configured during cluster " + "creation. Otherwise you should manually specify " + "configuration values."), + required=False, + widget=forms.CheckboxInput(), + initial=True, + ) + + self.fields["proxygateway"] = forms.BooleanField( + label=_("Proxy Gateway"), + widget=forms.CheckboxInput(), + help_text=_("Sahara will use instances of this node group to " + "access other cluster instances."), + required=False) + + self.fields["plugin_name"] = forms.CharField( + widget=forms.HiddenInput(), + initial=plugin + ) + self.fields["hadoop_version"] = forms.CharField( + widget=forms.HiddenInput(), + initial=hadoop_version + ) + node_parameters = hlps.get_general_node_group_configs(plugin, + hadoop_version) + for param in node_parameters: + self.fields[param.name] = workflow_helpers.build_control(param) + + if request.REQUEST.get("guide_template_type"): + self.fields["guide_template_type"] = forms.CharField( + required=False, + widget=forms.HiddenInput(), + initial=request.REQUEST.get("guide_template_type")) + + try: + volume_types = cinder.volume_type_list(request) + except Exception: + exceptions.handle(request, + _("Unable to get volume type list.")) + + self.fields['volume_type'].choices = [(None, _("No volume type"))] + \ + [(type.name, type.name) + for type in volume_types] + + def populate_flavor_choices(self, request, context): + flavors = nova_utils.flavor_list(request) + 
if flavors: + return nova_utils.sort_flavor_list(request, flavors) + return [] + + def populate_availability_zone_choices(self, request, context): + # The default is None, i.e. not specifying any availability zone + az_list = [(None, _('No availability zone specified'))] + az_list.extend([(az.zoneName, az.zoneName) + for az in nova_utils.availability_zone_list(request) + if az.zoneState['available']]) + return az_list + + def populate_volumes_availability_zone_choices(self, request, context): + az_list = [(None, _('No availability zone specified'))] + az_list.extend([(az.zoneName, az.zoneName) + for az in cinder_utils.availability_zone_list(request) + if az.zoneState['available']]) + return az_list + + def get_help_text(self): + extra = dict() + plugin, hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(self.request)) + extra["plugin_name"] = plugin + extra["hadoop_version"] = hadoop_version + return super(GeneralConfigAction, self).get_help_text(extra) + + class Meta(object): + name = _("Configure Node Group Template") + help_text_template = ( + "project/data_processing.nodegroup_templates" + "/_configure_general_help.html") + + +class SecurityConfigAction(workflows.Action): + def __init__(self, request, *args, **kwargs): + super(SecurityConfigAction, self).__init__(request, *args, **kwargs) + + self.fields["security_autogroup"] = forms.BooleanField( + label=_("Auto Security Group"), + widget=forms.CheckboxInput(), + help_text=_("Create security group for this Node Group."), + required=False, + initial=True) + + try: + groups = network.security_group_list(request) + except Exception: + exceptions.handle(request, + _("Unable to get security group list.")) + raise + + security_group_list = [(sg.id, sg.name) for sg in groups] + self.fields["security_groups"] = forms.MultipleChoiceField( + label=_("Security Groups"), + widget=forms.CheckboxSelectMultiple(), + help_text=_("Launch instances in these security groups."), + choices=security_group_list, + 
required=False) + + class Meta(object): + name = _("Security") + help_text = _("Control access to instances of the node group.") + + +class CheckboxSelectMultiple(forms.CheckboxSelectMultiple): + def render(self, name, value, attrs=None, choices=()): + if value is None: + value = [] + has_id = attrs and 'id' in attrs + final_attrs = self.build_attrs(attrs, name=name) + output = [] + initial_service = uuid.uuid4() + str_values = set([encoding.force_text(v) for v in value]) + for i, (option_value, option_label) in enumerate( + itertools.chain(self.choices, choices)): + current_service = option_value.split(':')[0] + if current_service != initial_service: + if i > 0: + output.append("") + service_description = _("%s processes: ") % current_service + service_description = html.conditional_escape( + encoding.force_text(service_description)) + output.append( + "".format(service_description)) + initial_service = current_service + output.append(encoding.force_text("
    ")) + if has_id: + final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i)) + label_for = ' for="%s"' % final_attrs['id'] + else: + label_for = '' + + cb = forms.CheckboxInput( + final_attrs, check_test=lambda value: value in str_values) + option_value = encoding.force_text(option_value) + rendered_cb = cb.render(name, option_value) + option_label = html.conditional_escape( + encoding.force_text(option_label)) + output.append( + '
  • {1} {2}
  • '.format( + label_for, rendered_cb, option_label)) + output.append('
') + return safestring.mark_safe('\n'.join(output)) + + +class SelectNodeProcessesAction(workflows.Action): + def __init__(self, request, *args, **kwargs): + super(SelectNodeProcessesAction, self).__init__( + request, *args, **kwargs) + + plugin, hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(request)) + node_processes = {} + try: + version_details = saharaclient.plugin_get_version_details( + request, plugin, hadoop_version) + node_processes = version_details.node_processes + except Exception: + exceptions.handle(request, + _("Unable to generate process choices.")) + process_choices = [] + for service, processes in node_processes.items(): + for process in processes: + choice_label = str(service) + ":" + str(process) + process_choices.append((choice_label, process)) + + self.fields["processes"] = forms.MultipleChoiceField( + label=_("Select Node Group Processes"), + widget=CheckboxSelectMultiple(), + choices=process_choices, + required=True) + + class Meta(object): + name = _("Node Processes") + help_text = _("Select node processes for the node group") + + +class GeneralConfig(workflows.Step): + action_class = GeneralConfigAction + contributes = ("general_nodegroup_name", ) + + def contribute(self, data, context): + for k, v in data.items(): + if "hidden" in k: + continue + context["general_" + k] = v if v != "None" else None + return context + + +class SecurityConfig(workflows.Step): + action_class = SecurityConfigAction + contributes = ("security_autogroup", "security_groups") + + +class SelectNodeProcesses(workflows.Step): + action_class = SelectNodeProcessesAction + + def contribute(self, data, context): + post = self.workflow.request.POST + context['general_processes'] = post.getlist('processes') + return context + + +class ConfigureNodegroupTemplate(workflow_helpers.ServiceParametersWorkflow, + workflow_helpers.StatusFormatMixin): + slug = "configure_nodegroup_template" + name = _("Create Node Group Template") + finalize_button_name = 
_("Create") + success_message = _("Created Node Group Template %s") + name_property = "general_nodegroup_name" + success_url = "horizon:project:data_processing.nodegroup_templates:index" + default_steps = (GeneralConfig, SelectNodeProcesses, SecurityConfig) + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + hlps = helpers.Helpers(request) + + plugin, hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(request)) + + general_parameters = hlps.get_general_node_group_configs( + plugin, + hadoop_version) + service_parameters = hlps.get_targeted_node_group_configs( + plugin, + hadoop_version) + + self._populate_tabs(general_parameters, service_parameters) + + super(ConfigureNodegroupTemplate, self).__init__(request, + context_seed, + entry_point, + *args, **kwargs) + + def is_valid(self): + missing = self.depends_on - set(self.context.keys()) + if missing: + raise exceptions.WorkflowValidationError( + "Unable to complete the workflow. The values %s are " + "required but not present." 
% ", ".join(missing)) + checked_steps = [] + + if "general_processes" in self.context: + checked_steps = self.context["general_processes"] + enabled_services = set([]) + for process_name in checked_steps: + enabled_services.add(str(process_name).split(":")[0]) + + steps_valid = True + for step in self.steps: + process_name = str(getattr(step, "process_name", None)) + if process_name not in enabled_services and \ + not isinstance(step, GeneralConfig): + continue + if not step.action.is_valid(): + steps_valid = False + step.has_errors = True + if not steps_valid: + return steps_valid + return self.validate(self.context) + + def handle(self, request, context): + try: + processes = [] + for service_process in context["general_processes"]: + processes.append(str(service_process).split(":")[1]) + + configs_dict = ( + workflow_helpers.parse_configs_from_context( + context, self.defaults)) + + plugin, hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(request)) + + volumes_per_node = None + volumes_size = None + volumes_availability_zone = None + volume_type = None + volume_local_to_instance = False + + if context["general_storage"] == "cinder_volume": + volumes_per_node = context["general_volumes_per_node"] + volumes_size = context["general_volumes_size"] + volumes_availability_zone = \ + context["general_volumes_availability_zone"] + volume_type = context["general_volume_type"] + volume_local_to_instance = \ + context["general_volume_local_to_instance"] + + ngt = saharaclient.nodegroup_template_create( + request, + name=context["general_nodegroup_name"], + plugin_name=plugin, + hadoop_version=hadoop_version, + description=context["general_description"], + flavor_id=context["general_flavor"], + volumes_per_node=volumes_per_node, + volumes_size=volumes_size, + volumes_availability_zone=volumes_availability_zone, + volume_type=volume_type, + volume_local_to_instance=volume_local_to_instance, + node_processes=processes, + node_configs=configs_dict, + 
floating_ip_pool=context.get("general_floating_ip_pool"), + security_groups=context["security_groups"], + auto_security_group=context["security_autogroup"], + is_proxy_gateway=context["general_proxygateway"], + availability_zone=context["general_availability_zone"], + use_autoconfig=context['general_use_autoconfig']) + + hlps = helpers.Helpers(request) + if hlps.is_from_guide(): + guide_type = context["general_guide_template_type"] + request.session[guide_type + "_name"] = ( + context["general_nodegroup_name"]) + request.session[guide_type + "_id"] = ngt.id + self.success_url = ( + "horizon:project:data_processing.wizard:cluster_guide") + + return True + except api_base.APIException as e: + self.error_description = str(e) + return False + except Exception: + exceptions.handle(request) + + +class SelectPluginAction(workflows.Action, + workflow_helpers.PluginAndVersionMixin): + hidden_create_field = forms.CharField( + required=False, + widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) + + def __init__(self, request, *args, **kwargs): + super(SelectPluginAction, self).__init__(request, *args, **kwargs) + + sahara = saharaclient.client(request) + self._generate_plugin_version_fields(sahara) + + class Meta(object): + name = _("Select plugin and hadoop version") + help_text_template = ("project/data_processing.nodegroup_templates" + "/_create_general_help.html") + + +class SelectPlugin(workflows.Step): + action_class = SelectPluginAction + contributes = ("plugin_name", "hadoop_version") + + def contribute(self, data, context): + context = super(SelectPlugin, self).contribute(data, context) + context["plugin_name"] = data.get('plugin_name', None) + context["hadoop_version"] = \ + data.get(context["plugin_name"] + "_version", None) + return context + + +class CreateNodegroupTemplate(workflows.Workflow): + slug = "create_nodegroup_template" + name = _("Create Node Group Template") + finalize_button_name = _("Next") + success_message = _("Created") + 
failure_message = _("Could not create") + success_url = "horizon:project:data_processing.nodegroup_templates:index" + default_steps = (SelectPlugin,) diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/edit.py b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/edit.py new file mode 100644 index 0000000..2cebbcc --- /dev/null +++ b/sahara_dashboard/content/data_processing/nodegroup_templates/workflows/edit.py @@ -0,0 +1,109 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ +from saharaclient.api import base as api_base + +from horizon import exceptions +from horizon import forms + +from sahara_dashboard.api import sahara as saharaclient + +import sahara_dashboard.content.data_processing. \ + nodegroup_templates.workflows.create as create_flow +import sahara_dashboard.content.data_processing. 
\ + nodegroup_templates.workflows.copy as copy_flow +from sahara_dashboard.content.data_processing.utils \ + import workflow_helpers + +LOG = logging.getLogger(__name__) + + +class EditNodegroupTemplate(copy_flow.CopyNodegroupTemplate): + success_message = _("Node Group Template %s updated") + finalize_button_name = _("Update") + name = _("Edit Node Group Template") + + def __init__(self, request, context_seed, entry_point, *args, **kwargs): + super(EditNodegroupTemplate, self).__init__(request, context_seed, + entry_point, *args, + **kwargs) + + for step in self.steps: + if not isinstance(step, create_flow.GeneralConfig): + continue + fields = step.action.fields + fields["nodegroup_name"].initial = self.template.name + + fields["template_id"] = forms.CharField( + widget=forms.HiddenInput(), + initial=self.template_id + ) + + def handle(self, request, context): + try: + processes = [] + for service_process in context["general_processes"]: + processes.append(str(service_process).split(":")[1]) + + configs_dict = ( + workflow_helpers.parse_configs_from_context( + context, self.defaults)) + + plugin, hadoop_version = ( + workflow_helpers.get_plugin_and_hadoop_version(request)) + + volumes_per_node = None + volumes_size = None + volumes_availability_zone = None + volume_type = None + volume_local_to_instance = False + + if context["general_storage"] == "cinder_volume": + volumes_per_node = context["general_volumes_per_node"] + volumes_size = context["general_volumes_size"] + volume_type = context["general_volume_type"] + volume_local_to_instance = \ + context["general_volume_local_to_instance"] + volumes_availability_zone = \ + context["general_volumes_availability_zone"] + + saharaclient.nodegroup_template_update( + request=request, + ngt_id=self.template_id, + name=context["general_nodegroup_name"], + plugin_name=plugin, + hadoop_version=hadoop_version, + flavor_id=context["general_flavor"], + description=context["general_description"], + 
volumes_per_node=volumes_per_node, + volumes_size=volumes_size, + volume_type=volume_type, + volume_local_to_instance=volume_local_to_instance, + volumes_availability_zone=volumes_availability_zone, + node_processes=processes, + node_configs=configs_dict, + floating_ip_pool=context.get("general_floating_ip_pool"), + security_groups=context["security_groups"], + auto_security_group=context["security_autogroup"], + availability_zone=context["general_availability_zone"], + use_autoconfig=context['general_use_autoconfig'], + is_proxy_gateway=context["general_proxygateway"]) + return True + except api_base.APIException as e: + self.error_description = str(e.message) + return False + except Exception: + exceptions.handle(request) diff --git a/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js b/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js new file mode 100644 index 0000000..fbb3acd --- /dev/null +++ b/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js @@ -0,0 +1,145 @@ +horizon.event_log = { + cluster_id: null, + data_update_url: null, + cached_data: null, + modal_step_id: null, + + fetch_update_events: function() { + var url = this.data_update_url + "/events"; + $.get(url).done(function (data) { + horizon.event_log.cached_data = data; + horizon.event_log.update_view(data); + horizon.event_log.schedule_next_update(data); + }).fail(function() { + // Event log is not available for some reason. 
+ horizon.alert("error", gettext("Event log is not available.")); + }); + }, + + update_view: function (data) { + this.update_step_rows(data.provision_steps); + this.update_events_rows(data); + }, + + update_step_rows: function (steps) { + // Clear steps + $("#steps_body").find("tr").remove(); + + $(steps).each(function (i, step) { + horizon.event_log.create_step_row(step); + }); + }, + + create_step_row: function (step) { + var step_row_template = "" + + "" + + "%step_descr%" + + "%started_at%" + + "%duration%" + + "%progress%" + + "%result% " + + "" + + gettext('Show events') + "" + + "" + + ""; + + + var started_at = new Date(step.created_at).toString(); + var progress = "" + step.completed + " / " + step.total; + var description = step.step_type + "
" + step.step_name; + + var row = step_row_template + .replace(/%step_id%/g, step.id) + .replace(/%step_descr%/g, description) + .replace(/%started_at%/g, started_at) + .replace(/%duration%/g, step.duration) + .replace(/%progress%/g, progress) + .replace(/%result%/g, step.result); + + $("#steps_body").append(row); + if (step.successful === true) { + $("#" + step.id + "_show_events_btn").hide(); + } + }, + + update_events_rows: function(data) { + if (!this.modal_step_id) { + return; + } + var current_step = null; + $(data.provision_steps).each(function (i, step) { + if (step.id === horizon.event_log.modal_step_id) { + current_step = step; + } + }); + + var header = current_step.step_type + "
" + current_step.step_name; + $("#events_modal_header").html(header); + + // Clear events + this.clear_events(); + this.clear_modal_status(); + + if (current_step.successful === true) { + this.mark_modal_as_successful(); + return; + } + var events = current_step.events; + $(events).each(function (i, event) { + event.step_name = current_step.step_name; + }); + + $(events).each(function (i, event) { + horizon.event_log.create_event_row(event); + }); + + }, + + clear_events: function() { + $("#events_body").find("tr").remove(); + }, + + clear_modal_status: function() { + $("#modal_status_marker").text(""); + }, + + mark_modal_as_successful: function() { + $("#modal_status_marker").text(gettext( + "The step has completed successfully. No events to display.")); + }, + + create_event_row: function(event) { + var step_row_template = "" + + "" + + "%node_group_name%" + + "%instance%" + + "%time%" + + "%info%" + + "%result%" + + ""; + + var event_time = new Date(event.created_at).toString(); + + var row = step_row_template + .replace(/%event_id%/g, event.id) + .replace(/%node_group_name%/g, event.node_group_name) + .replace(/%instance%/g, event.instance_name) + .replace(/%time%/g, event_time) + .replace(/%info%/g, event.event_info) + .replace(/%result%/g, event.result); + + $("#events_body").append(row); + }, + + schedule_next_update: function(data) { + // 2-3 sec delay so that if there are multiple tabs polling the backed + // the requests are spread in time + var delay = 2000 + Math.floor((Math.random() * 1000) + 1); + + if (data.need_update) { + setTimeout(function () { + horizon.event_log.fetch_update_events(); + }, delay); + } + } +}; diff --git a/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.job_interface_arguments.js b/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.job_interface_arguments.js new file mode 100644 index 0000000..18b90ac --- /dev/null +++ 
b/sahara_dashboard/content/data_processing/static/dashboard/project/data_processing/data_processing.job_interface_arguments.js @@ -0,0 +1,175 @@ +horizon.job_interface_arguments = { + + argument_template: '' + + '
' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '
' + + '' + + '' + + '' + + '' + + '' + + '' + + '
' + + '
', + + job_interface: null, + argument_ids: null, + value_type: null, + add_argument_button: null, + value_type_default: null, + + current_value_type: function() { + return this.value_type.find("option:selected").html(); + }, + + mark_argument_element_as_wrong: function(id) { + $("#" + id).addClass("error"); + }, + + get_next_argument_id: function() { + var max = -1; + $(".argument-form").each(function () { + max = Math.max(max, parseInt($(this).attr("id_attr"))); + }); + return max + 1; + }, + + set_argument_ids: function() { + var ids = []; + $(".argument-form").each(function () { + var id = parseInt($(this).attr("id_attr")); + if (!isNaN(id)) { + ids.push(id); + } + }); + this.argument_ids.val(JSON.stringify(ids)); + }, + + add_argument_node: function(id, name, description, mapping_type, location, value_type, required, default_value) { + var tmp = this.argument_template. + replace(/\$id/g, id). + replace(/\$name/g, name). + replace(/\$description/g, description). + replace(/\$mapping_type/g, mapping_type). + replace(/\$location/g, location). + replace(/\$value_type/g, value_type). + replace(/\$required/g, required). + replace(/\$default_value/g, default_value); + this.job_interface.find("div:last").after(tmp); + this.job_interface.show(); + this.set_argument_ids(); + }, + + add_interface_argument: function() { + var value_type = this.current_value_type(); + if (value_type === this.value_type_default) { + return; + } + this.add_argument_node(this.get_next_argument_id(), "", "", "args", "", value_type, true, ""); + $(".count-field").change(); + }, + + delete_interface_argument: function(el) { + $(el).closest("div").remove(); + var id = this.get_next_argument_id(); + if (id === 0) { + this.job_interface.hide(); + } + this.set_argument_ids(); + }, + + init_arguments: function() { + // This line enables tooltips on this tab to properly display their help text. 
+ $("body").tooltip({selector: ".help-icon"}); + this.job_interface = $("#job_interface_arguments"); + this.argument_ids = $("#argument_ids"); + this.value_type = $("#value_type"); + this.add_argument_button = $("#add_argument_button"); + this.value_type_default = this.current_value_type(); + this.value_type.change(function () { + if (horizon.job_interface_arguments.current_value_type() === this.value_type_default) { + horizon.job_interface_arguments.add_argument_button.addClass("disabled"); + } else { + horizon.job_interface_arguments.add_argument_button.removeClass("disabled"); + } + }); + this.job_interface.hide(); + } +}; diff --git a/sahara_dashboard/content/data_processing/utils/__init__.py b/sahara_dashboard/content/data_processing/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/content/data_processing/utils/anti_affinity.py b/sahara_dashboard/content/data_processing/utils/anti_affinity.py new file mode 100644 index 0000000..699cddb --- /dev/null +++ b/sahara_dashboard/content/data_processing/utils/anti_affinity.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from django.utils.translation import ugettext_lazy as _ + +from horizon import exceptions +from horizon import forms + +from sahara_dashboard.api import sahara as saharaclient +import sahara_dashboard.content.data_processing. 
\ + utils.workflow_helpers as whelpers + + +LOG = logging.getLogger(__name__) + + +def anti_affinity_field(): + return forms.MultipleChoiceField( + label=_("Use anti-affinity groups for: "), + required=False, + help_text=_("Use anti-affinity groups for processes"), + widget=forms.CheckboxSelectMultiple() + ) + + +def populate_anti_affinity_choices(self, request, context): + try: + sahara = saharaclient.client(request) + plugin, version = whelpers.get_plugin_and_hadoop_version(request) + + version_details = sahara.plugins.get_version_details(plugin, version) + process_choices = [] + for processes in version_details.node_processes.values(): + for process in processes: + process_choices.append((process, process)) + + cluster_template_id = request.REQUEST.get("cluster_template_id", None) + if cluster_template_id is None: + selected_processes = request.REQUEST.get("aa_groups", []) + else: + cluster_template = ( + sahara.cluster_templates.get(cluster_template_id)) + selected_processes = cluster_template.anti_affinity + + checked_dict = dict() + + for process in selected_processes: + checked_dict[process] = process + + self.fields['anti_affinity'].initial = checked_dict + except Exception: + process_choices = [] + exceptions.handle(request, + _("Unable to populate anti-affinity processes.")) + return process_choices diff --git a/sahara_dashboard/content/data_processing/utils/helpers.py b/sahara_dashboard/content/data_processing/utils/helpers.py new file mode 100644 index 0000000..501e560 --- /dev/null +++ b/sahara_dashboard/content/data_processing/utils/helpers.py @@ -0,0 +1,134 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.utils.translation import ugettext_lazy as _ + +import sahara_dashboard.content.data_processing. \ + utils.workflow_helpers as work_helpers + +from sahara_dashboard.api import sahara as saharaclient + + +class Helpers(object): + def __init__(self, request): + self.request = request + + def _get_node_processes(self, plugin): + processes = [] + for proc_lst in plugin.node_processes.values(): + processes += proc_lst + + return [(proc_name, proc_name) for proc_name in processes] + + def get_node_processes(self, plugin_name, hadoop_version): + plugin = saharaclient.plugin_get_version_details(self.request, + plugin_name, + hadoop_version) + + return self._get_node_processes(plugin) + + def _extract_parameters(self, configs, scope, applicable_target): + parameters = [] + for config in configs: + if (config['scope'] == scope and + config['applicable_target'] == applicable_target): + + parameters.append(work_helpers.Parameter(config)) + + return parameters + + def get_cluster_general_configs(self, plugin_name, hadoop_version): + plugin = saharaclient.plugin_get_version_details(self.request, + plugin_name, + hadoop_version) + + return self._extract_parameters(plugin.configs, 'cluster', "general") + + def get_general_node_group_configs(self, plugin_name, hadoop_version): + plugin = saharaclient.plugin_get_version_details(self.request, + plugin_name, + hadoop_version) + + return self._extract_parameters(plugin.configs, 'node', 'general') + + def get_targeted_node_group_configs(self, plugin_name, hadoop_version): + plugin = 
saharaclient.plugin_get_version_details(self.request, + plugin_name, + hadoop_version) + + parameters = {} + + for service in plugin.node_processes.keys(): + parameters[service] = self._extract_parameters(plugin.configs, + 'node', service) + + return parameters + + def get_targeted_cluster_configs(self, plugin_name, hadoop_version): + plugin = saharaclient.plugin_get_version_details(self.request, + plugin_name, + hadoop_version) + + parameters = {} + + for service in plugin.node_processes.keys(): + parameters[service] = self._extract_parameters(plugin.configs, + 'cluster', service) + + return parameters + + def is_from_guide(self): + referer = self.request.environ.get("HTTP_REFERER") + if referer and "/wizard/" in referer: + return True + return False + + def reset_guide(self): + try: + self.request.session.update( + {"plugin_name": None, + "plugin_version": None, + "master_name": None, + "master_id": None, + "worker_name": None, + "worker_id": None, + "guide_cluster_template_name": None}) + except Exception: + return False + return True + + def reset_job_guide(self): + try: + self.request.session.update( + {"guide_job_type": None, + "guide_job_name": None, + "guide_job_id": None, + "guide_datasource_id": None, + "guide_datasource_name": None, }) + except Exception: + return False + return True + +# Map needed because switchable fields need lower case +# and our server is expecting upper case. We will be +# using the 0 index as the display name and the 1 index +# as the value to pass to the server. 
JOB_TYPE_MAP = {"pig": [_("Pig"), "Pig"],
                "hive": [_("Hive"), "Hive"],
                "spark": [_("Spark"), "Spark"],
                "storm": [_("Storm"), "Storm"],
                "mapreduce": [_("MapReduce"), "MapReduce"],
                "mapreduce.streaming": [_("Streaming MapReduce"),
                                        "MapReduce.Streaming"],
                "java": [_("Java"), "Java"],
                # lower-case key -> [translated display label,
                #                    exact job type string sent to sahara]
                "shell": [_("Shell"), "Shell"]}
def populate_neutron_management_network_choices(self, request, context):
    """Return (id, display-name) choices for the tenant's networks.

    Bound as a method on a workflow action (hence ``self``).  On any
    API failure the error is surfaced via ``exceptions.handle`` and an
    empty choice list is returned so the form still renders.
    """
    try:
        # Use the ``request`` parameter consistently; the original
        # mixed ``self.request.user.tenant_id`` with ``request`` in the
        # API call below, which is confusing (they refer to the same
        # request when bound as a workflow-action method).
        tenant_id = request.user.tenant_id
        networks = neutron.network_list_for_tenant(request, tenant_id)
        network_list = [(net.id, net.name_or_id)
                        for net in networks]
    except Exception:
        network_list = []
        exceptions.handle(request,
                          _('Unable to retrieve networks.'))
    return network_list
import logging

from django.utils.translation import ugettext_lazy as _

import six

from horizon import forms
from horizon import workflows

from openstack_dashboard.api import network

LOG = logging.getLogger(__name__)


class Parameter(object):
    """View-side wrapper around one sahara plugin configuration entry.

    Normalizes the raw config dict returned by the sahara API into the
    attributes that build_control() needs to build a form field.
    """

    def __init__(self, config):
        self.name = config['name']
        self.description = config.get('description', "No description")
        # sahara reports "is_optional"; forms want the inverse, "required".
        self.required = not config['is_optional']
        self.default_value = config.get('default_value', None)
        self.initial_value = self.default_value
        self.param_type = config['config_type']
        # Missing priority defaults to 2; presumably the less prominent
        # tier in the rendered form -- TODO confirm against the widget JS.
        self.priority = int(config.get('priority', 2))
        self.choices = config.get('config_values', None)


def build_control(parameter):
    """Return a django form field matching ``parameter.param_type``.

    Supported types: "string", "int", "bool", "dropdown".  Any other
    type falls through every branch and yields None.
    """
    attrs = {"priority": parameter.priority,
             "placeholder": parameter.default_value}
    if parameter.param_type == "string":
        return forms.CharField(
            widget=forms.TextInput(attrs=attrs),
            label=parameter.name,
            # A string param with a server-side default never has to be
            # filled in by the user, even if sahara marks it required.
            required=(parameter.required and
                      parameter.default_value is None),
            help_text=parameter.description,
            initial=parameter.initial_value)

    if parameter.param_type == "int":
        return forms.IntegerField(
            widget=forms.TextInput(attrs=attrs),
            label=parameter.name,
            required=parameter.required,
            help_text=parameter.description,
            initial=parameter.initial_value)

    elif parameter.param_type == "bool":
        return forms.BooleanField(
            widget=forms.CheckboxInput(attrs=attrs),
            label=parameter.name,
            # Unchecked checkboxes post nothing, so required=True would
            # make a False value impossible to submit.
            required=False,
            initial=parameter.initial_value,
            help_text=parameter.description)

    elif parameter.param_type == "dropdown":
        return forms.ChoiceField(
            widget=forms.Select(attrs=attrs),
            label=parameter.name,
            required=parameter.required,
            choices=parameter.choices,
            help_text=parameter.description)


def _create_step_action(name, title, parameters, advanced_fields=None,
                        service=None):
    """Dynamically build a workflow Step (and its Action) for *service*.

    One form field named "CONF:<service>:<param name>" is created per
    parameter; ``advanced_fields`` are (name, field) pairs added
    verbatim to the action class.
    """
    class_fields = {}
    contributes_field = ()
    for param in parameters:
        field_name = "CONF:" + service + ":" + param.name
        contributes_field += (field_name,)
        class_fields[field_name] = build_control(param)

    if advanced_fields is not None:
        for ad_field_name, ad_field_value in advanced_fields:
            class_fields[ad_field_name] = ad_field_value

    # NOTE(review): this help template path still points at the
    # pre-split horizon location -- verify it resolves after the move
    # into sahara_dashboard.
    action_meta = type('Meta', (object, ),
                       dict(help_text_template=("project"
                                                "/data_processing."
                                                "nodegroup_templates/"
                                                "_fields_help.html")))

    class_fields['Meta'] = action_meta
    action = type(str(title),
                  (workflows.Action,),
                  class_fields)

    step_meta = type('Meta', (object,), dict(name=title))
    step = type(str(name),
                (workflows.Step, ),
                dict(name=name,
                     process_name=name,
                     action_class=action,
                     contributes=contributes_field,
                     Meta=step_meta))

    return step


def build_node_group_fields(action, name, template, count, serialized=None):
    """Attach the standard node-group fields to *action* (in place).

    Only the name field is user-visible; template, count and the
    serialized blob travel as hidden inputs.
    """
    action.fields[name] = forms.CharField(
        label=_("Name"),
        widget=forms.TextInput())

    action.fields[template] = forms.CharField(
        label=_("Node group cluster"),
        widget=forms.HiddenInput())

    action.fields[count] = forms.IntegerField(
        label=_("Count"),
        min_value=0,
        widget=forms.HiddenInput())
    action.fields[serialized] = forms.CharField(
        widget=forms.HiddenInput())


def build_interface_argument_fields(
        action, name, description, mapping_type, location, value_type,
        required, default_value):
    """Attach the job-interface argument fields to *action* (in place)."""
    action.fields[name] = forms.CharField(
        label=_("Name"),
        widget=forms.TextInput(),
        required=True)
    action.fields[description] = forms.CharField(
        label=_("Description"),
        widget=forms.TextInput(),
        required=False)
    action.fields[mapping_type] = forms.ChoiceField(
        label=_("Mapping Type"),
        widget=forms.Select(),
        required=True,
        choices=[("args", _("Positional Argument")),
                 ("configs", _("Configuration Value")),
                 ("params", _("Named Parameter"))])
    action.fields[location] = forms.CharField(
        label=_("Location"),
        widget=forms.TextInput(),
        required=True)
    action.fields[value_type] = forms.ChoiceField(
        label=_("Value Type"),
        widget=forms.Select(),
        required=True,
        choices=[("string", _("String")),
                 ("number", _("Number")),
                 ("data_source", _("Data Source"))])
    action.fields[required] = forms.BooleanField(
        widget=forms.CheckboxInput(),
        label=_("Required"),
        required=False,
        initial=True)
    action.fields[default_value] = forms.CharField(
        label=_("Default Value"),
        widget=forms.TextInput(),
        required=False)


def parse_configs_from_context(context, defaults):
    """Collect "CONF:<service>:<config>" context keys into a nested dict.

    Values that are None or textually equal to the plugin default are
    dropped, so only real overrides are sent on to sahara.
    """
    configs_dict = dict()
    for key, val in context.items():
        if str(key).startswith("CONF"):
            key_split = str(key).split(":")
            service = key_split[1]
            config = key_split[2]
            if service not in configs_dict:
                configs_dict[service] = dict()
            if val is None:
                continue
            # Compare as text so e.g. int 1 matches default "1".
            if six.text_type(defaults[service][config]) == six.text_type(val):
                continue
            configs_dict[service][config] = val
    return configs_dict


def get_security_groups(request, security_group_ids):
    """Resolve security-group ids to API objects.

    Ids that cannot be resolved are logged and represented by a plain
    {'name': id} dict, so the result may mix objects and dicts.
    """
    security_groups = []
    for group in security_group_ids or []:
        try:
            security_groups.append(network.security_group_get(
                request, group))
        except Exception:
            LOG.info(_('Unable to retrieve security group %(group)s.') %
                     {'group': group})
            security_groups.append({'name': group})

    return security_groups


def get_plugin_and_hadoop_version(request):
    """Extract (plugin_name, hadoop_version) from the request, if present.

    NOTE(review): ``request.REQUEST`` is deprecated since Django 1.7
    and removed in 1.9 -- switch to GET/POST when the minimum Django
    version moves.
    """
    plugin_name = None
    hadoop_version = None
    if request.REQUEST.get("plugin_name"):
        plugin_name = request.REQUEST["plugin_name"]
        hadoop_version = request.REQUEST["hadoop_version"]
    return (plugin_name, hadoop_version)


def clean_node_group(node_group):
    """Return a copy of *node_group* suitable for resubmission to sahara.

    Falsy values and the server-managed bookkeeping keys are stripped.
    """
    node_group_copy = dict((key, value)
                           for key, value in node_group.items() if value)

    for key in ["id", "created_at", "updated_at"]:
        if key in node_group_copy:
            node_group_copy.pop(key)

    return node_group_copy


class PluginAndVersionMixin(object):
    """Adds linked plugin/version choice fields to a form."""

    def _generate_plugin_version_fields(self, sahara):
        plugins = sahara.plugins.list()
        plugin_choices = [(plugin.name, plugin.title) for plugin in plugins]

        self.fields["plugin_name"] = forms.ChoiceField(
            label=_("Plugin Name"),
            choices=plugin_choices,
            widget=forms.Select(attrs={"class": "plugin_name_choice"}))

        # One version <select> per plugin; the CSS classes are hooks for
        # client-side JS to show only the select for the chosen plugin.
        for plugin in plugins:
            field_name = plugin.name + "_version"
            choice_field = forms.ChoiceField(
                label=_("Version"),
                choices=[(version, version) for version in plugin.versions],
                widget=forms.Select(
                    attrs={"class": "plugin_version_choice "
                                    + field_name + "_choice"})
            )
            self.fields[field_name] = choice_field


class PatchedDynamicWorkflow(workflows.Workflow):
    """Overrides Workflow to fix its issues."""

    def _ensure_dynamic_exist(self):
        # Lazily create the list: subclasses may register steps from
        # their constructor before this class's __init__ runs.
        if not hasattr(self, 'dynamic_steps'):
            self.dynamic_steps = list()

    def _register_step(self, step):
        # Use that method instead of 'register' to register step.
        # Note that a step could be registered in descendant class constructor
        # only before this class constructor is invoked.
        self._ensure_dynamic_exist()
        self.dynamic_steps.append(step)

    def _order_steps(self):
        # overrides method of Workflow
        # crutch to fix https://bugs.launchpad.net/horizon/+bug/1196717
        # and another not filed issue that dynamic creation of tabs is
        # not thread safe
        self._ensure_dynamic_exist()

        self._registry = dict([(step, step(self))
                               for step in self.dynamic_steps])

        return list(self.default_steps) + self.dynamic_steps


class ServiceParametersWorkflow(PatchedDynamicWorkflow):
    """Base class for Workflows having services tabs with parameters."""

    def _populate_tabs(self, general_parameters, service_parameters):
        # Populates tabs for 'general' and service parameters
        # Also populates defaults and initial values
        self.defaults = dict()

        self._init_step('general', 'General Parameters', general_parameters)

        for service, parameters in service_parameters.items():
            self._init_step(service, service + ' Parameters', parameters)

    def _init_step(self, service, title, parameters):
        # Services with no parameters get no tab at all.
        if not parameters:
            return

        self._populate_initial_values(service, parameters)

        step = _create_step_action(service, title=title,
                                   parameters=parameters,
                                   service=service)

        self.defaults[service] = dict()
        for param in parameters:
            self.defaults[service][param.name] = param.default_value

        self._register_step(step)

    def _set_configs_to_copy(self, configs):
        # Stash configs so _populate_initial_values can pre-fill fields
        # (used when copying/editing an existing template).
        self.configs_to_copy = configs

    def _populate_initial_values(self, service, parameters):
        if not hasattr(self, 'configs_to_copy'):
            return

        configs = self.configs_to_copy

        for param in parameters:
            if (service in configs and
                    param.name in configs[service]):
                param.initial_value = configs[service][param.name]


class StatusFormatMixin(workflows.Workflow):
    """Formats the workflow's final status message.

    If the subclass set ``error_description`` it is returned verbatim;
    otherwise the message template is interpolated with the context
    value named by the subclass's ``name_property``.
    """

    def __init__(self, request, context_seed, entry_point, *args, **kwargs):
        # Pure pass-through constructor, kept for explicitness.
        super(StatusFormatMixin, self).__init__(request,
                                                context_seed,
                                                entry_point,
                                                *args,
                                                **kwargs)

    def format_status_message(self, message):
        error_description = getattr(self, 'error_description', None)

        if error_description:
            return error_description
        else:
            return message % self.context[self.name_property]
# See the License for the specific language governing permissions and
# limitations under the License.

from django import template
from django.template import defaultfilters
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import forms
from horizon import messages

from sahara_dashboard.api import sahara as saharaclient
from sahara_dashboard.content.data_processing.utils \
    import helpers


class ChoosePluginForm(forms.SelfHandlingForm):
    """Wizard step 1 form: pick the data-processing plugin and version.

    Stores the selection in the session ("plugin_name" /
    "plugin_version") after resetting any previous guide state.
    """

    def __init__(self, request, *args, **kwargs):
        super(ChoosePluginForm, self).__init__(request, *args, **kwargs)
        self._generate_plugin_version_fields(request)
        self.help_text_template = ("project/data_processing.wizard/"
                                   "_plugin_select_help.html")

    def handle(self, request, context):
        try:
            hlps = helpers.Helpers(request)
            # Clear any stale guide state before recording the new choice.
            hlps.reset_guide()
            plugin_name = context["plugin_name"]
            request.session["plugin_name"] = plugin_name
            # The version field is named "<plugin>_version" (see below).
            request.session["plugin_version"] = (
                context[plugin_name + "_version"])
            messages.success(request, _("Cluster type chosen"))
            return True
        except Exception:
            exceptions.handle(request,
                              _("Unable to set cluster type"))
            return False

    def _generate_plugin_version_fields(self, request):
        sahara = saharaclient.client(request)
        plugins = sahara.plugins.list()
        plugin_choices = [(plugin.name, plugin.title) for plugin in plugins]

        # Horizon "switchable"/"switched" widget classes: only the
        # version select matching the chosen plugin is shown.
        self.fields["plugin_name"] = forms.ChoiceField(
            label=_("Plugin Name"),
            choices=plugin_choices,
            widget=forms.Select(attrs={"class": "switchable",
                                       "data-slug": "plugin"}))

        for plugin in plugins:
            field_name = plugin.name + "_version"
            choice_field = forms.ChoiceField(
                label=_("Version"),
                required=False,
                choices=[(version, version) for version in plugin.versions],
                widget=forms.Select(
                    attrs={"class": "switched",
                           "data-switch-on": "plugin",
                           "data-plugin-" + plugin.name: plugin.title})
            )
            self.fields[field_name] = choice_field

    def get_help_text(self, extra_context=None):
        """Render the help template (or fall back to plain help_text)."""
        text = ""
        extra_context = extra_context or {}
        if self.help_text_template:
            tmpl = template.loader.get_template(self.help_text_template)
            context = template.RequestContext(self.request, extra_context)
            text += tmpl.render(context)
        else:
            text += defaultfilters.linebreaks(force_text(self.help_text))
        return defaultfilters.safe(text)

    class Meta(object):
        name = _("Choose plugin type and version")


class ChooseJobTypeForm(forms.SelfHandlingForm):
    """Wizard form: pick the job type for the guided job execution."""

    guide_job_type = forms.ChoiceField(
        label=_("Job Type"),
        widget=forms.Select())

    def __init__(self, request, *args, **kwargs):
        super(ChooseJobTypeForm, self).__init__(request, *args, **kwargs)
        self.help_text_template = ("project/data_processing.wizard/"
                                   "_job_type_select_help.html")

        self.fields["guide_job_type"].choices = \
            self.populate_guide_job_type_choices()

    def populate_guide_job_type_choices(self):
        # (map key, translated display label) pairs from the shared map.
        choices = [(x, helpers.JOB_TYPE_MAP[x][0])
                   for x in helpers.JOB_TYPE_MAP]
        return choices

    def handle(self, request, context):
        try:
            hlps = helpers.Helpers(request)
            job_type = context["guide_job_type"]
            # Only reset the guide when the type actually changed.
            # Note: the session stores the *display* label (index 0),
            # not the server-side job type string.
            if force_text(request.session.get("guide_job_type")) != (
                    force_text(helpers.JOB_TYPE_MAP[job_type][0])):
                hlps.reset_job_guide()
                request.session["guide_job_type"] = (
                    helpers.JOB_TYPE_MAP[job_type][0])
            messages.success(request, _("Job type chosen"))
            return True
        except Exception:
            exceptions.handle(request,
                              _("Unable to set job type"))
            return False
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

import horizon

from openstack_dashboard.dashboards.project import dashboard


class WizardPanel(horizon.Panel):
    """The "Guides" panel: entry point for the data-processing wizards."""
    name = _("Guides")
    slug = 'data_processing.wizard'
    # Nested tuple lists both historical spellings of the service name;
    # presumably horizon accepts either one -- TODO confirm against
    # horizon's permission handling.
    permissions = (('openstack.services.data-processing',
                    'openstack.services.data_processing'),)


# Module import side effect: attach the panel to the Project dashboard.
dashboard.Project.register(WizardPanel)
+
+ {% include "horizon/common/_form_fields.html" %} +
+
+
+ {{ form.get_help_text }} +
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_job_type_select_help.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_job_type_select_help.html new file mode 100644 index 0000000..b7aa613 --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_job_type_select_help.html @@ -0,0 +1,7 @@ +{% load i18n horizon %} +

+ {% blocktrans %}Select which type of job that you want to run. + This choice will dictate which steps are required to successfully + execute your job. + {% endblocktrans %} +

diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select.html new file mode 100644 index 0000000..a385de5 --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select.html @@ -0,0 +1,30 @@ +{% extends "horizon/common/_modal_form.html" %} + + +{% load i18n %} + +{% block form_id %}create-job-binary{% endblock %} +{% block form_action %} + {% url 'horizon:project:data_processing.wizard:plugin_select' %} +{% endblock %} +{% block form_attrs %}enctype="multipart/form-data"{% endblock %} + +{% block modal-header %}{% trans "Choose plugin and version" %}{% endblock %} + +{% block modal-body %} +
+
+ {% include "horizon/common/_form_fields.html" %} +
+
+
+ {{ form.get_help_text }} +
+{% endblock %} + +{% block modal-footer %} + + {% trans "Cancel" %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select_help.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select_help.html new file mode 100644 index 0000000..f51043f --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/_plugin_select_help.html @@ -0,0 +1,5 @@ +{% load i18n horizon %} +

+ {% blocktrans %}Select which plugin and version that you + want to use to create your cluster.{% endblocktrans %} +

diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html new file mode 100644 index 0000000..a7409c8 --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html @@ -0,0 +1,163 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+
    +
  1. +
    +
    {% blocktrans %}The first step is to determine which type of + cluster you want to run. You may have several choices + available depending on the configuration of your system. + Click on "choose plugin" to bring up the list of data + processing plugins. There you will be able to choose the + data processing plugin along with the version number. + Choosing this up front will allow the rest of the cluster + creation steps to focus only on options that are pertinent + to your desired cluster type.{% endblocktrans %} +
    + {% trans "Choose plugin" %} +
    {% trans "Current choice:" %} + {% if request.session.plugin_name and request.session.plugin_version %} + + {% trans "Plugin:" %} + {{ request.session.plugin_name }} + {% trans "Version:" %} + {{ request.session.plugin_version }} + + {% else %} + + {% trans "No plugin chosen" %} + + {% endif %} +
    +
    +
  2. +
    +
  3. +
    +
    {% blocktrans %}Next, you need to define the different + types of machines in your cluster. This is done by + defining a Node Group Template for each type of + machine. A very common case is where you + need to have one or more machines running a "master" + set of processes while another set of machines need + to be running the "worker" processes. Here, + you will define the Node Group Template for your + "master" node(s). + {% endblocktrans %} +
    + +
    {% trans "Current choice:" %} + {% if request.session.master_name %} + + {% trans "Master Node Group Template:" %} + {{ request.session.master_name }} + + {% else %} + + {% trans "No Master Node Group Template Created" %} + + {% endif %} +
    +
    +
  4. +
    +
  5. +
    +
    {% blocktrans %}Repeat the Node Group Template + creation process, but this time you are creating + your "worker" Node Group Template.{% endblocktrans %} +
    + +
    {% trans "Current choice:" %} + {% if request.session.worker_name %} + + {% trans "Worker Node Group Template:" %} + {{ request.session.worker_name }} + + {% else %} + + {% trans "No Worker Node Group Template Created" %} + + {% endif %} +
    +
    +
  6. +
    +
  7. +
    +
    {% blocktrans %}Now you need to set the layout of your + cluster. By + creating a Cluster Template, you will be choosing the + number of instances of each Node Group Template that + will appear in your cluster. Additionally, + you will have a chance to set any cluster-specific + configuration items in the additional tabs on the + create Cluster Template form.{% endblocktrans %} +
    + +
    {% trans "Current choice:" %} + {% if request.session.guide_cluster_template_name %} + + {% trans "Worker Node Group Template:" %} + {{ request.session.guide_cluster_template_name }} + + {% else %} + + {% trans "No Cluster Template Created" %} + + {% endif %} +
    +
    +
  8. +
    +
  9. +
    +
    {% blocktrans %}You are now ready to + launch your cluster. When you click on the link + below, you will need to give your cluster a name, + choose the Cluster Template to use and choose which + image to use to build your instances. After you + click on "Create", your instances will begin to + spawn. Your cluster should be operational in a few + minutes.{% endblocktrans %} +
    + +
    +
  10. +
+ +
+ +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html new file mode 100644 index 0000000..7897a6c --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Choose job type" %}{% endblock %} + +{% block main %} + {% include 'project/data_processing.wizard/_job_type_select.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html new file mode 100644 index 0000000..70af347 --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html @@ -0,0 +1,114 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+
    +
  1. +
    +
    + {% blocktrans %}First, select which type of job that + you want to run. This choice will determine which + other steps are required + {% endblocktrans %} +
    + +
    {% trans "Current type:" %} + {% if request.session.guide_job_type %} + + {{ request.session.guide_job_type}} + + {% else %} + + {% trans "No type chosen" %} + + {% endif %} +
    +
    +
  2. +
    + {% if request.session.guide_job_type %} + {% if view.show_data_sources %} +
  3. +
    +
    {% blocktrans %}Data Sources are what your + job uses for input and output. Depending on the type + of job you will be running, you may need to define one + or more data sources. You can create multiple data + sources by repeating this step. + {% endblocktrans %} +
    + +
    +
  4. +
    + {% endif %} +
  5. +
    +
    {% blocktrans %}Define your Job Template. + This is where you choose the type of job that you + want to run (Pig, Java Action, Spark, etc) and choose + or upload the files necessary to run it. The inputs + and outputs will be defined later. + {% endblocktrans %} +
    + +
    {% trans "Job template:" %} + {% if request.session.guide_job_name %} + + {{ request.session.guide_job_name }} + + {% else %} + + {% trans "No job template created" %} + + {% endif %} +
    +
    +
  6. +
    +
  7. +
    +
    {% blocktrans %}Launch your job. When + launching, you may need to choose your input and + output data sources. This is where you would also + add any special configuration values, parameters, + or arguments that you need to pass along + to your job. + {% endblocktrans %} +
    + +
    +
  8. + {% endif %} +
+ +
+ +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html new file mode 100644 index 0000000..4af7eeb --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html @@ -0,0 +1,7 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Choose plugin and version" %}{% endblock %} + +{% block main %} + {% include 'project/data_processing.wizard/_plugin_select.html' %} +{% endblock %} diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html new file mode 100644 index 0000000..555aa23 --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html @@ -0,0 +1,56 @@ +{% extends 'base.html' %} +{% load i18n %} +{% block title %}{% trans "Data Processing" %}{% endblock %} + +{% block main %} + +
+
    +
  • +
    +
    {% blocktrans %} + Each of the Data Processing frameworks require a cluster of machines + in order to do the work they are assigned. A cluster is + formed by creating a set of Node Group Templates, combining + those into a Cluster Template and then launching a Cluster. + You can do each of those steps manually, or you can follow + this guide to help take you through the steps of + Cluster creation. + {% endblocktrans %} +
    + +
    +
  • +
  • +
    +
    {% blocktrans %} + In order to run a Data Processing job, you need to make + the files for your program available to the + Data Processing system, define where the input and output + need to go and create a Job Template that describes + how to run your job. Each of those steps can be done + manually or you can follow this guide to help take you + through the steps to run a job on an existing cluster. + {% endblocktrans %} +
    + +
    +
  • +
+
from django.core.urlresolvers import reverse

from openstack_dashboard.test import helpers as test


# URLs under test.  The reset URLs carry a truthy path component that
# the reset views check before clearing the guide session state.
INDEX_URL = reverse(
    'horizon:project:data_processing.wizard:index')
CLUSTER_GUIDE_URL = reverse(
    'horizon:project:data_processing.wizard:cluster_guide')
CLUSTER_GUIDE_RESET_URL = reverse(
    'horizon:project:data_processing.wizard:reset_cluster_guide',
    kwargs={"reset_cluster_guide": "true"})
JOB_GUIDE_URL = reverse(
    'horizon:project:data_processing.wizard:jobex_guide')
JOB_GUIDE_RESET_URL = reverse(
    'horizon:project:data_processing.wizard:reset_jobex_guide',
    kwargs={"reset_jobex_guide": "true"})


class DataProcessingClusterGuideTests(test.TestCase):
    """Smoke tests for the wizard pages.

    Each page should render its template (with a recognizable heading),
    and the reset endpoints should redirect back to their guide page.
    """

    def test_index(self):
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(
            res, 'project/data_processing.wizard/wizard.html')
        self.assertContains(res, 'Data Processing Guides')
        self.assertContains(res, 'Cluster Creation Guide')

    def test_cluster_guide(self):
        res = self.client.get(CLUSTER_GUIDE_URL)
        self.assertTemplateUsed(
            res, 'project/data_processing.wizard/cluster_guide.html')
        self.assertContains(res, 'Guided Cluster Creation')
        self.assertContains(res, 'Current choice')

    def test_cluster_guide_reset(self):
        res = self.client.get(CLUSTER_GUIDE_RESET_URL)
        self.assertRedirectsNoFollow(res, CLUSTER_GUIDE_URL)

    def test_jobex_guide(self):
        res = self.client.get(JOB_GUIDE_URL)
        self.assertTemplateUsed(
            res, 'project/data_processing.wizard/jobex_guide.html')
        self.assertContains(res, 'Guided Job Execution')

    def test_jobex_guide_reset(self):
        res = self.client.get(JOB_GUIDE_RESET_URL)
        self.assertRedirectsNoFollow(res, JOB_GUIDE_URL)
\ + data_processing.wizard import views + + +urlpatterns = patterns('', + url(r'^$', views.WizardView.as_view(), name='index'), + url(r'^cluster_guide$', + views.ClusterGuideView.as_view(), + name='cluster_guide'), + url(r'^cluster_guide/(?P<reset_cluster_guide>[^/]+)/$', + views.ResetClusterGuideView.as_view(), + name='reset_cluster_guide'), + url(r'^jobex_guide$', + views.JobExecutionGuideView.as_view(), + name='jobex_guide'), + url(r'^jobex_guide/(?P<reset_jobex_guide>[^/]+)/$', + views.ResetJobExGuideView.as_view(), + name='reset_jobex_guide'), + url(r'^plugin_select$', + views.PluginSelectView.as_view(), + name='plugin_select'), + url(r'^job_type_select$', + views.JobTypeSelectView.as_view(), + name='job_type_select'), + ) diff --git a/sahara_dashboard/content/data_processing/wizard/views.py b/sahara_dashboard/content/data_processing/wizard/views.py new file mode 100644 index 0000000..3c753ca --- /dev/null +++ b/sahara_dashboard/content/data_processing/wizard/views.py @@ -0,0 +1,102 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging + +from django.core.urlresolvers import reverse_lazy +from django import http +from django.utils.translation import ugettext_lazy as _ +from django.views import generic + +from horizon import exceptions +from horizon import forms +from horizon import views as horizon_views + +from sahara_dashboard.content.data_processing.utils \ + import helpers +import sahara_dashboard.content.data_processing.wizard \ + .forms as wizforms + + +LOG = logging.getLogger(__name__) + + +class WizardView(horizon_views.APIView): + template_name = 'project/data_processing.wizard/wizard.html' + page_title = _("Data Processing Guides") + + def get_data(self, request, context, *args, **kwargs): + try: + context["test"] = "test data" + except Exception: + msg = _('Unable to show guides') + exceptions.handle(self.request, msg) + return context + + +class ClusterGuideView(horizon_views.APIView): + template_name = 'project/data_processing.wizard/cluster_guide.html' + page_title = _("Guided Cluster Creation") + + +class ResetClusterGuideView(generic.RedirectView): + pattern_name = 'horizon:project:data_processing.wizard:cluster_guide' + permanent = True + + def get(self, request, *args, **kwargs): + if kwargs["reset_cluster_guide"]: + hlps = helpers.Helpers(request) + hlps.reset_guide() + return http.HttpResponseRedirect(reverse_lazy(self.pattern_name)) + + +class JobExecutionGuideView(horizon_views.APIView): + template_name = 'project/data_processing.wizard/jobex_guide.html' + page_title = _("Guided Job Execution") + + def show_data_sources(self): + try: + if self.request.session["guide_job_type"] in ["Spark", "Storm", + "Java"]: + return False + return True + except Exception: + return True + + +class ResetJobExGuideView(generic.RedirectView): + pattern_name = 'horizon:project:data_processing.wizard:jobex_guide' + permanent = True + + def get(self, request, *args, **kwargs): + if kwargs["reset_jobex_guide"]: + hlps = helpers.Helpers(request) + hlps.reset_job_guide() + return 
http.HttpResponseRedirect(reverse_lazy(self.pattern_name)) + + +class PluginSelectView(forms.ModalFormView): + form_class = wizforms.ChoosePluginForm + success_url = reverse_lazy( + 'horizon:project:data_processing.wizard:cluster_guide') + classes = ("ajax-modal") + template_name = "project/data_processing.wizard/plugin_select.html" + page_title = _("Choose plugin and version") + + +class JobTypeSelectView(forms.ModalFormView): + form_class = wizforms.ChooseJobTypeForm + success_url = reverse_lazy( + 'horizon:project:data_processing.wizard:jobex_guide') + classes = ("ajax-modal") + template_name = "project/data_processing.wizard/job_type_select.html" + page_title = _("Choose job type") diff --git a/sahara_dashboard/enabled/_1810_data_processing_panel_group.py b/sahara_dashboard/enabled/_1810_data_processing_panel_group.py new file mode 100644 index 0000000..1cb03bc --- /dev/null +++ b/sahara_dashboard/enabled/_1810_data_processing_panel_group.py @@ -0,0 +1,8 @@ +from django.utils.translation import ugettext_lazy as _ + +# The slug of the panel group to be added to HORIZON_CONFIG. Required. +PANEL_GROUP = 'data_processing' +# The display name of the PANEL_GROUP. Required. +PANEL_GROUP_NAME = _('Data Processing') +# The slug of the dashboard the PANEL_GROUP associated with. Required. +PANEL_GROUP_DASHBOARD = 'project' diff --git a/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py b/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py new file mode 100644 index 0000000..642586e --- /dev/null +++ b/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.wizard' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.wizard.panel.WizardPanel') diff --git a/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py b/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py new file mode 100644 index 0000000..c0e4b56 --- /dev/null +++ b/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.clusters' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. 
+PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.clusters.panel.ClustersPanel') + +ADD_INSTALLED_APPS = \ + ["sahara_dashboard.content.data_processing", ] + +ADD_JS_FILES = [ + 'dashboard/project/data_processing/data_processing.event_log.js' +] diff --git a/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py b/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py new file mode 100644 index 0000000..04cb5de --- /dev/null +++ b/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.job_executions' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' 
+ 'content.data_processing.job_executions.panel.JobExecutionsPanel') diff --git a/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py b/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py new file mode 100644 index 0000000..a8877d0 --- /dev/null +++ b/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.cluster_templates' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.cluster_templates.panel.ClusterTemplatesPanel') diff --git a/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py b/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py new file mode 100644 index 0000000..ae14fbf --- /dev/null +++ b/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py @@ -0,0 +1,25 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.nodegroup_templates' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.nodegroup_templates.panel.' + 'NodegroupTemplatesPanel') diff --git a/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py b/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py new file mode 100644 index 0000000..7ec47bf --- /dev/null +++ b/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py @@ -0,0 +1,27 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.jobs' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. 
+PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = ('sahara_dashboard.content.data_processing.jobs.panel.JobsPanel') + +ADD_JS_FILES = [ + 'dashboard/project/data_processing/' + 'data_processing.job_interface_arguments.js' +] diff --git a/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py b/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py new file mode 100644 index 0000000..d330c62 --- /dev/null +++ b/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.job_binaries' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' 
+ 'content.data_processing.job_binaries.panel.JobBinariesPanel') diff --git a/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py b/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py new file mode 100644 index 0000000..66476c2 --- /dev/null +++ b/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.data_sources' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.data_sources.panel.DataSourcesPanel') diff --git a/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py b/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py new file mode 100644 index 0000000..d375523 --- /dev/null +++ b/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.data_image_registry' +# The slug of the dashboard the PANEL associated with. Required. +PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.data_image_registry.panel.ImageRegistryPanel') diff --git a/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py b/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py new file mode 100644 index 0000000..45773c3 --- /dev/null +++ b/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# The slug of the panel to be added to HORIZON_CONFIG. Required. +PANEL = 'data_processing.data_plugins' +# The slug of the dashboard the PANEL associated with. Required. 
+PANEL_DASHBOARD = 'project' +# The slug of the panel group the PANEL is associated with. +PANEL_GROUP = 'data_processing' + +# Python panel class of the PANEL to be added. +ADD_PANEL = \ + ('sahara_dashboard.' + 'content.data_processing.data_plugins.panel.PluginsPanel') diff --git a/sahara_dashboard/enabled/__init__.py b/sahara_dashboard/enabled/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/test/__init__.py b/sahara_dashboard/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/test/api_tests/__init__.py b/sahara_dashboard/test/api_tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/test/api_tests/sahara_tests.py b/sahara_dashboard/test/api_tests/sahara_tests.py new file mode 100644 index 0000000..9a1c75b --- /dev/null +++ b/sahara_dashboard/test/api_tests/sahara_tests.py @@ -0,0 +1,48 @@ +# Copyright 2015, Telles Nobrega +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from sahara_dashboard import api +from sahara_dashboard.test import helpers as test + + +class SaharaApiTest(test.SaharaAPITestCase): + # + # Cluster + # + def test_cluster_create_count(self): + saharaclient = self.stub_saharaclient() + saharaclient.clusters = self.mox.CreateMockAnything() + saharaclient.clusters.create(anti_affinity=None, + cluster_configs=None, + cluster_template_id=None, + count=2, + use_autoconfig=None, + default_image_id=None, + description=None, + hadoop_version='1.0.0', + is_transient=None, + name='name', + net_id=None, + node_groups=None, + plugin_name='fake_plugin', + user_keypair_id=None) \ + .AndReturn({"Clusters": ['cluster1', 'cluster2']}) + self.mox.ReplayAll() + ret_val = api.sahara.cluster_create(self.request, + 'name', + 'fake_plugin', + '1.0.0', + count=2) + + self.assertEqual(2, len(ret_val['Clusters'])) diff --git a/sahara_dashboard/test/helpers.py b/sahara_dashboard/test/helpers.py new file mode 100644 index 0000000..abc017b --- /dev/null +++ b/sahara_dashboard/test/helpers.py @@ -0,0 +1,57 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from saharaclient import client as sahara_client + +from openstack_dashboard.test import helpers + +from sahara_dashboard import api +from sahara_dashboard.test.test_data import utils + + +def create_stubs(stubs_to_create={}): + return helpers.create_stubs(stubs_to_create) + + +class SaharaTestsMixin(object): + def _setup_test_data(self): + super(SaharaTestsMixin, self)._setup_test_data() + utils.load_test_data(self) + + +class TestCase(SaharaTestsMixin, helpers.TestCase): + pass + + +class BaseAdminViewTests(SaharaTestsMixin, helpers.TestCase): + pass + + +class SaharaAPITestCase(helpers.APITestCase): + + def setUp(self): + super(SaharaAPITestCase, self).setUp() + + self._original_saharaclient = api.sahara.client + api.sahara.client = lambda request: self.stub_saharaclient() + + def tearDown(self): + super(SaharaAPITestCase, self).tearDown() + + api.sahara.client = self._original_saharaclient + + def stub_saharaclient(self): + if not hasattr(self, "saharaclient"): + self.mox.StubOutWithMock(sahara_client, 'Client') + self.saharaclient = self.mox.CreateMock(sahara_client.Client) + return self.saharaclient diff --git a/sahara-dashboard/test/settings.py b/sahara_dashboard/test/settings.py similarity index 97% rename from sahara-dashboard/test/settings.py rename to sahara_dashboard/test/settings.py index 28dc23d..0a08eee 100644 --- a/sahara-dashboard/test/settings.py +++ b/sahara_dashboard/test/settings.py @@ -33,7 +33,7 @@ STATIC_URL = '/static/' SECRET_KEY = secret_key.generate_or_read_from_file( os.path.join(TEST_DIR, '.secret_key_store')) -ROOT_URLCONF = 'sahara-dashboard.test.urls' +ROOT_URLCONF = 'sahara_dashboard.test.urls' TEMPLATE_DIRS = ( os.path.join(TEST_DIR, 'templates'), ) @@ -80,7 +80,7 @@ from openstack_dashboard.utils import settings dashboard_module_names = [ 'openstack_dashboard.enabled', 'openstack_dashboard.local.enabled', - 'sahara-dashboard.enabled', + 'sahara_dashboard.enabled', ] dashboard_modules = [] # All dashboards must be enabled for 
the namespace to get registered, which is @@ -149,7 +149,7 @@ LOGGING['loggers']['selenium'] = { 'propagate': False, } -LOGGING['loggers']['sahara-dashboard'] = { +LOGGING['loggers']['sahara_dashboard'] = { 'handlers': ['test'], 'propagate': False, } diff --git a/sahara_dashboard/test/test_data/__init__.py b/sahara_dashboard/test/test_data/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sahara_dashboard/test/test_data/keystone_data.py b/sahara_dashboard/test/test_data/keystone_data.py new file mode 100644 index 0000000..0ad5761 --- /dev/null +++ b/sahara_dashboard/test/test_data/keystone_data.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +def data(TEST): + + # Add sahara to the keystone data + TEST.service_catalog.append( + {"type": "data-processing", + "name": "Sahara", + "endpoints_links": [], + "endpoints": [ + {"region": "RegionOne", + "adminURL": "http://admin.sahara.example.com:8386/v1.1", + "publicURL": "http://public.sahara.example.com:8386/v1.1", + "internalURL": "http://int.sahara.example.com:8386/v1.1"}]} + ) diff --git a/sahara_dashboard/test/test_data/sahara_data.py b/sahara_dashboard/test/test_data/sahara_data.py new file mode 100644 index 0000000..2a870e5 --- /dev/null +++ b/sahara_dashboard/test/test_data/sahara_data.py @@ -0,0 +1,598 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import copy + +from openstack_dashboard.test.test_data import utils + +from saharaclient.api import cluster_templates +from saharaclient.api import clusters +from saharaclient.api import data_sources +from saharaclient.api import job_binaries +from saharaclient.api import job_executions +from saharaclient.api import job_types +from saharaclient.api import jobs +from saharaclient.api import node_group_templates +from saharaclient.api import plugins + + +def data(TEST): + TEST.plugins = utils.TestDataContainer() + TEST.plugins_configs = utils.TestDataContainer() + TEST.nodegroup_templates = utils.TestDataContainer() + TEST.cluster_templates = utils.TestDataContainer() + TEST.clusters = utils.TestDataContainer() + TEST.data_sources = utils.TestDataContainer() + TEST.job_binaries = utils.TestDataContainer() + TEST.jobs = utils.TestDataContainer() + TEST.job_executions = utils.TestDataContainer() + TEST.registered_images = copy.copy(TEST.images) + TEST.job_types = utils.TestDataContainer() + + plugin1_dict = { + "description": "vanilla plugin", + "name": "vanilla", + "title": "Vanilla Apache Hadoop", + "versions": ["2.3.0", "1.2.1"] + } + + plugin1 = plugins.Plugin(plugins.PluginManager(None), plugin1_dict) + + TEST.plugins.add(plugin1) + + plugin_config1_dict = { + "node_processes": { + "HDFS": [ + "namenode", + "datanode", + "secondarynamenode" + ], + "MapReduce": [ + "tasktracker", + "jobtracker" + ] + }, + "description": "This plugin provides an ability to launch vanilla " + "Apache Hadoop cluster without any management " + "consoles.", + "versions": [ + 
"1.2.1" + ], + "required_image_tags": [ + "vanilla", + "1.2.1" + ], + "configs": [ + { + "default_value": "/tmp/hadoop-${user.name}", + "name": "hadoop.tmp.dir", + "priority": 2, + "config_type": "string", + "applicable_target": "HDFS", + "is_optional": True, + "scope": "node", + "description": "A base for other temporary directories." + }, + { + "default_value": True, + "name": "hadoop.native.lib", + "priority": 2, + "config_type": "bool", + "applicable_target": "HDFS", + "is_optional": True, + "scope": "node", + "description": "Should native hadoop libraries, if present, " + "be used." + }, + ], + "title": "Vanilla Apache Hadoop", + "name": "vanilla" + } + + TEST.plugins_configs.add(plugins.Plugin(plugins.PluginManager(None), + plugin_config1_dict)) + + # Nodegroup_Templates. + ngt1_dict = { + "created_at": "2014-06-04 14:01:03.701243", + "description": None, + "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "availability_zone": None, + "floating_ip_pool": None, + "auto_security_group": True, + "hadoop_version": "1.2.1", + "id": "c166dfcc-9cc7-4b48-adc9-f0946169bb36", + "image_id": None, + "name": "sample-template", + "node_configs": {}, + "node_processes": [ + "namenode", + "jobtracker", + "secondarynamenode", + "hiveserver", + "oozie" + ], + "plugin_name": "vanilla", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None, + "volume_mount_prefix": "/volumes/disk", + "volumes_per_node": 0, + "volumes_size": 0, + "volume_type": None, + "volume_local_to_instance": False, + "security_groups": [], + "volumes_availability_zone": None, + "is_proxy_gateway": False, + "use_autoconfig": True, + } + + ngt1 = node_group_templates.NodeGroupTemplate( + node_group_templates.NodeGroupTemplateManager(None), ngt1_dict) + + TEST.nodegroup_templates.add(ngt1) + + # Cluster_templates. 
+ ct1_dict = { + "anti_affinity": [], + "cluster_configs": {}, + "created_at": "2014-06-04 14:01:06.460711", + "default_image_id": None, + "description": "Sample description", + "hadoop_version": "1.2.1", + "id": "a2c3743f-31a2-4919-8d02-792138a87a98", + "name": "sample-cluster-template", + "neutron_management_network": None, + "use_autoconfig": True, + "node_groups": [ + { + "count": 1, + "created_at": "2014-06-04 14:01:06.462512", + "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "floating_ip_pool": None, + "image_id": None, + "name": "master", + "node_configs": {}, + "node_group_template_id": "c166dfcc-9cc7-4b48-adc9", + "node_processes": [ + "namenode", + "jobtracker", + "secondarynamenode", + "hiveserver", + "oozie" + ], + "updated_at": None, + "volume_mount_prefix": "/volumes/disk", + "volumes_per_node": 0, + "volumes_size": 0, + "volume_type": None, + "volume_local_to_instance": False, + "volumes_availability_zone": None, + "use_autoconfig": True, + "is_proxy_gateway": False, + }, + { + "count": 2, + "created_at": "2014-06-04 14:01:06.463214", + "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "floating_ip_pool": None, + "image_id": None, + "name": "workers", + "node_configs": {}, + "node_group_template_id": "4eb5504c-94c9-4049-a440", + "node_processes": [ + "datanode", + "tasktracker" + ], + "updated_at": None, + "volume_mount_prefix": "/volumes/disk", + "volumes_per_node": 0, + "volumes_size": 0, + "volume_type": None, + "volume_local_to_instance": False, + "volumes_availability_zone": None, + "use_autoconfig": True, + "is_proxy_gateway": False + } + ], + "plugin_name": "vanilla", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None + } + + ct1 = cluster_templates.ClusterTemplate( + cluster_templates.ClusterTemplateManager(None), ct1_dict) + TEST.cluster_templates.add(ct1) + + # Clusters. 
+ cluster1_dict = { + "anti_affinity": [], + "cluster_configs": {}, + "cluster_template_id": "a2c3743f-31a2-4919-8d02-792138a87a98", + "created_at": "2014-06-04 20:02:14.051328", + "default_image_id": "9eb4643c-dca8-4ea7-92d2-b773f88a8dc6", + "description": "", + "hadoop_version": "1.2.1", + "id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533", + "info": {}, + "is_transient": False, + "management_public_key": "fakekey", + "name": "cercluster", + "neutron_management_network": None, + "use_autoconfig": True, + "node_groups": [ + { + "count": 1, + "created_at": "2014-06-04 20:02:14.053153", + "flavor_id": "0", + "floating_ip_pool": None, + "image_id": None, + "instances": [ + { + "created_at": "2014-06-04 20:02:14.834529", + "id": "c3b8004b-7063-4b99-a082-820cdc6e961c", + "instance_id": "a45f5495-4a10-4f17-8fae", + "instance_name": "cercluster-master-001", + "internal_ip": None, + "management_ip": None, + "updated_at": None, + "volumes": [] + } + ], + "name": "master", + "node_configs": {}, + "node_group_template_id": "c166dfcc-9cc7-4b48-adc9", + "node_processes": [ + "namenode", + "jobtracker", + "secondarynamenode", + "hiveserver", + "oozie" + ], + "updated_at": "2014-06-04 20:02:14.841760", + "volume_mount_prefix": "/volumes/disk", + "volumes_per_node": 0, + "volumes_size": 0, + "volume_type": None, + "volume_local_to_instance": False, + "security_groups": [], + "volumes_availability_zone": None, + "id": "ng1", + "use_autoconfig": True, + "is_proxy_gateway": False + }, + { + "count": 2, + "created_at": "2014-06-04 20:02:14.053849", + "flavor_id": "0", + "floating_ip_pool": None, + "image_id": None, + "instances": [ + { + "created_at": "2014-06-04 20:02:15.097655", + "id": "6a8ae0b1-bb28-4de2-bfbb-bdd3fd2d72b2", + "instance_id": "38bf8168-fb30-483f-8d52", + "instance_name": "cercluster-workers-001", + "internal_ip": None, + "management_ip": None, + "updated_at": None, + "volumes": [] + }, + { + "created_at": "2014-06-04 20:02:15.344515", + "id": 
"17b98ed3-a776-467a-90cf-9f46a841790b", + "instance_id": "85606938-8e53-46a5-a50b", + "instance_name": "cercluster-workers-002", + "internal_ip": None, + "management_ip": None, + "updated_at": None, + "volumes": [] + } + ], + "name": "workers", + "node_configs": {}, + "node_group_template_id": "4eb5504c-94c9-4049-a440", + "node_processes": [ + "datanode", + "tasktracker" + ], + "updated_at": "2014-06-04 20:02:15.355745", + "volume_mount_prefix": "/volumes/disk", + "volumes_per_node": 0, + "volumes_size": 0, + "volume_type": None, + "volume_local_to_instance": False, + "security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"], + "volumes_availability_zone": None, + "id": "ng2", + "use_autoconfig": True, + "is_proxy_gateway": False + } + ], + "plugin_name": "vanilla", + "status": "Active", + "status_description": "", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "trust_id": None, + "updated_at": "2014-06-04 20:02:15.446087", + "user_keypair_id": "stackboxkp" + } + + cluster1 = clusters.Cluster( + clusters.ClusterManager(None), cluster1_dict) + TEST.clusters.add(cluster1) + + cluster2_dict = copy.deepcopy(cluster1_dict) + cluster2_dict.update({ + "id": "cl2", + "name": "cl2_name", + "provision_progress": [ + { + "created_at": "2015-03-27T15:51:54", + "updated_at": "2015-03-27T15:59:34", + "step_name": "first_step", + "step_type": "some_type", + "successful": True, + "events": [], + "total": 3 + }, + { + "created_at": "2015-03-27T16:01:54", + "updated_at": "2015-03-27T16:10:22", + "step_name": "second_step", + "step_type": "some_other_type", + "successful": None, + "events": [ + { + "id": "evt1", + "created_at": "2015-03-27T16:01:22", + "node_group_id": "ng1", + "instance_name": "cercluster-master-001", + "successful": True, + "event_info": None + }, + { + "id": "evt2", + "created_at": "2015-03-27T16:04:51", + "node_group_id": "ng2", + "instance_name": "cercluster-workers-001", + "successful": True, + "event_info": None + } + ], + "total": 3 + } + ] + }) + + 
cluster2 = clusters.Cluster( + clusters.ClusterManager(None), cluster2_dict) + TEST.clusters.add(cluster2) + + # Data Sources. + data_source1_dict = { + "created_at": "2014-06-04 14:01:10.371562", + "description": "sample output", + "id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede", + "name": "sampleOutput", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "type": "swift", + "updated_at": None, + "url": "swift://example.sahara/output" + } + + data_source2_dict = { + "created_at": "2014-06-05 15:01:12.331361", + "description": "second sample output", + "id": "ab3413-adfb-bba2-123456785675", + "name": "sampleOutput2", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "type": "hdfs", + "updated_at": None, + "url": "hdfs://example.sahara/output" + } + + data_source1 = data_sources.DataSources( + data_sources.DataSourceManager(None), data_source1_dict) + data_source2 = data_sources.DataSources( + data_sources.DataSourceManager(None), data_source2_dict) + TEST.data_sources.add(data_source1) + TEST.data_sources.add(data_source2) + + # Job Binaries. + job_binary1_dict = { + "created_at": "2014-06-05 18:15:15.581285", + "description": "", + "id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7", + "name": "example.pig", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None, + "url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d" + } + + job_binary2_dict = { + "created_at": "2014-10-10 13:12:15.583631", + "description": "Test for spaces in name", + "id": "abcdef56-1234-abcd-abcd-defabcdaedcb", + "name": "example with spaces.pig", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None, + "url": "internal-db://abcdef56-1234-abcd-abcd-defabcdaedcb" + } + + job_binary1 = job_binaries.JobBinaries( + job_binaries.JobBinariesManager(None), job_binary1_dict) + job_binary2 = job_binaries.JobBinaries( + job_binaries.JobBinariesManager(None), job_binary2_dict) + + TEST.job_binaries.add(job_binary1) + TEST.job_binaries.add(job_binary2) + + # Jobs. 
+ job1_dict = { + "created_at": "2014-06-05 19:23:59.637165", + "description": "", + "id": "a077b851-46be-4ad7-93c3-2d83894546ef", + "libs": [ + { + "created_at": "2014-06-05 19:23:42.742057", + "description": "", + "id": "ab140807-59f8-4235-b4f2-e03daf946256", + "name": "udf.jar", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None, + "url": "internal-db://d186e2bb-df93-47eb-8c0e-ce21e7ecb78b" + } + ], + "mains": [ + { + "created_at": "2014-06-05 18:15:15.581285", + "description": "", + "id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7", + "name": "example.pig", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": None, + "url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d" + } + ], + "interface": [], + "name": "pigjob", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "type": "Pig", + "updated_at": None, + "job_config": {"configs": {}} + } + + job1 = jobs.Job(jobs.JobsManager(None), job1_dict) + TEST.jobs.add(job1) + + # Job Executions. + jobex1_dict = { + "cluster_id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533", + "created_at": "2014-06-05 20:03:06.195937", + "end_time": None, + "id": "4b6c1cbf-c713-49d3-8025-808a87c514a6", + "info": { + "acl": None, + "actions": [ + { + "consoleUrl": "-", + "cred": "None", + "data": None, + "endTime": "Thu,05 Jun 2014 20:03:32 GMT", + "errorCode": None, + "errorMessage": None, + "externalChildIDs": None, + "externalId": "-", + "externalStatus": "OK", + "id": "0000000-140604200538581-oozie-hado-W@:start:", + "name": ":start:", + "retries": 0, + "startTime": "Thu,05 Jun 2014 20:03:32 GMT", + "stats": None, + "status": "OK", + "toString": "Action name[:start:] status[OK]", + "trackerUri": "-", + "transition": "job-node", + "type": ":START:" + }, + { + "consoleUrl": "fake://console.url", + "cred": "None", + "data": None, + "endTime": None, + "errorCode": None, + "errorMessage": None, + "externalChildIDs": None, + "externalId": "job_201406042004_0001", + "externalStatus": "RUNNING", + "id": 
"0000000-140604200538581-oozie-hado-W@job-node", + "name": "job-node", + "retries": 0, + "startTime": "Thu,05 Jun 2014 20:03:33 GMT", + "stats": None, + "status": "RUNNING", + "toString": "Action name[job-node] status[RUNNING]", + "trackerUri": "cercluster-master-001:8021", + "transition": None, + "type": "pig" + } + ], + "appName": "job-wf", + "appPath": "hdfs://fakepath/workflow.xml", + "conf": "fakeconfig", + "consoleUrl": "fake://consoleURL", + "createdTime": "Thu,05 Jun 2014 20:03:32 GMT", + "endTime": None, + "externalId": None, + "group": None, + "id": "0000000-140604200538581-oozie-hado-W", + "lastModTime": "Thu,05 Jun 2014 20:03:35 GMT", + "parentId": None, + "run": 0, + "startTime": "Thu,05 Jun 2014 20:03:32 GMT", + "status": "RUNNING", + "toString": "Workflow ...status[RUNNING]", + "user": "hadoop" + }, + "input_id": "85884883-3083-49eb-b442-71dd3734d02c", + "job_configs": { + "args": [], + "configs": {}, + "params": {} + }, + "interface": {}, + "job_id": "a077b851-46be-4ad7-93c3-2d83894546ef", + "oozie_job_id": "0000000-140604200538581-oozie-hado-W", + "output_id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede", + "progress": None, + "return_code": None, + "start_time": "2014-06-05T16:03:32", + "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df", + "updated_at": "2014-06-05 20:03:46.438248", + "cluster_name_set": True, + "job_name_set": True, + "cluster_name": "cluster-1", + "job_name": "job-1", + "data_source_urls": { + "85884883-3083-49eb-b442-71dd3734d02c": "swift://a.sahara/input", + "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede": "hdfs://a.sahara/output" + } + } + + jobex1 = job_executions.JobExecution( + job_executions.JobExecutionsManager(None), jobex1_dict) + TEST.job_executions.add(jobex1) + + augmented_image = TEST.registered_images.first() + augmented_image.tags = {} + augmented_image.username = 'myusername' + augmented_image.description = 'mydescription' + + job_type1_dict = { + "name": "Pig", + "plugins": [ + { + "description": "Fake description", + 
"versions": { + "2.6.0": { + }, + "1.2.1": { + } + }, + "name": "vanilla", + "title": "Vanilla Apache Hadoop" + }, + ] + } + job_types1 = job_types.JobType( + job_types.JobTypesManager(None), job_type1_dict) + TEST.job_types.add(job_types1) diff --git a/sahara_dashboard/test/test_data/utils.py b/sahara_dashboard/test/test_data/utils.py new file mode 100644 index 0000000..e92e61a --- /dev/null +++ b/sahara_dashboard/test/test_data/utils.py @@ -0,0 +1,53 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from openstack_dashboard.test.test_data import utils + + +def load_test_data(load_onto=None): + from openstack_dashboard.test.test_data import ceilometer_data + from openstack_dashboard.test.test_data import cinder_data + from openstack_dashboard.test.test_data import exceptions + from openstack_dashboard.test.test_data import glance_data + from openstack_dashboard.test.test_data import heat_data + from openstack_dashboard.test.test_data import keystone_data + from openstack_dashboard.test.test_data import neutron_data + from openstack_dashboard.test.test_data import nova_data + from openstack_dashboard.test.test_data import swift_data + from openstack_dashboard.test.test_data import trove_data + + from sahara_dashboard.test.test_data import keystone_data \ + as sahara_keystone_data + from sahara_dashboard.test.test_data import sahara_data + + # The order of these loaders matters, some depend on others. 
+ loaders = ( + exceptions.data, + keystone_data.data, + glance_data.data, + nova_data.data, + cinder_data.data, + neutron_data.data, + swift_data.data, + heat_data.data, + ceilometer_data.data, + trove_data.data, + sahara_data.data, + sahara_keystone_data.data, + ) + if load_onto: + for data_func in loaders: + data_func(load_onto) + return load_onto + else: + return utils.TestData(*loaders) diff --git a/sahara_dashboard/test/urls.py b/sahara_dashboard/test/urls.py new file mode 100644 index 0000000..9bef20f --- /dev/null +++ b/sahara_dashboard/test/urls.py @@ -0,0 +1,20 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from django.conf import urls +import openstack_dashboard.urls + +urlpatterns = urls.patterns( + '', + urls.url(r'', urls.include(openstack_dashboard.urls)) +) diff --git a/setup.cfg b/setup.cfg index e62b72c..d471440 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,7 +22,7 @@ classifier = [files] packages = - sahara-dashboard + sahara_dashboard [build_sphinx] source-dir = doc/source @@ -33,15 +33,15 @@ all_files = 1 upload-dir = doc/build/html [compile_catalog] -directory = sahara-dashboard/locale +directory = sahara_dashboard/locale domain = sahara-dashboard [update_catalog] -domain = manila-ui -output_dir = manila_ui/locale -input_file = manila_ui/locale/manila-ui.pot +domain = sahara-dashboard +output_dir = sahara_dashboard/locale +input_file = sahara_dashboard/locale/sahara_dashboard.pot [extract_messages] keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg -output_file = manila_ui/locale/manila-ui.pot +output_file = sahara_dashboard/locale/sahara_dashboard.pot diff --git a/tools/clean_enabled_files.py b/tools/clean_enabled_files.py new file mode 100644 index 0000000..25d384c --- /dev/null +++ b/tools/clean_enabled_files.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This file is temporarily needed to allow the conversion from integrated +# Sahara content in Horizon to plugin based content. Horizon currently defines +# the same module name data_processing and imports it by default. 
This utility +# removes the configuration files that are responsible for importing the old +# version of the module. Only Sahara content configuration files are effected +# in Horizon. + +import os + +from openstack_dashboard import enabled as local_enabled + +from sahara_dashboard import enabled + +ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +WITH_VENV = os.path.join(ROOT, 'tools', 'with_venv.sh') + +def main(): + src_path = os.path.dirname(enabled.__file__) + dest_path = os.path.dirname(local_enabled.__file__) + + src_files = os.listdir(src_path) + for file in src_files: + # skip the __init__.py or bad things happen + if file == "__init__.py": + continue + + file_path = os.path.join(dest_path, file) + if os.path.isfile(file_path): + print ("removing ", file_path) + os.remove(file_path) + +if __name__ == '__main__': + main() diff --git a/tools/install_venv.py b/tools/install_venv.py new file mode 100644 index 0000000..8550e2c --- /dev/null +++ b/tools/install_venv.py @@ -0,0 +1,154 @@ +# Copyright 2012 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# All Rights Reserved. +# +# Copyright 2012 OpenStack, LLC +# +# Copyright 2012 Nebula, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Installation script for the OpenStack Dashboard development virtualenv. 
+""" + +import os +import subprocess +import sys + + +ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +VENV = os.path.join(ROOT, '.venv') +WITH_VENV = os.path.join(ROOT, 'tools', 'with_venv.sh') +PIP_REQUIRES = os.path.join(ROOT, 'requirements.txt') +TEST_REQUIRES = os.path.join(ROOT, 'test-requirements.txt') + + +def die(message, *args): + print >> sys.stderr, message % args + sys.exit(1) + + +def run_command(cmd, redirect_output=True, check_exit_code=True, cwd=ROOT, + die_message=None): + """ + Runs a command in an out-of-process shell, returning the + output of that command. Working directory is ROOT. + """ + if redirect_output: + stdout = subprocess.PIPE + else: + stdout = None + + proc = subprocess.Popen(cmd, cwd=cwd, stdout=stdout) + output = proc.communicate()[0] + if check_exit_code and proc.returncode != 0: + if die_message is None: + die('Command "%s" failed.\n%s', ' '.join(cmd), output) + else: + die(die_message) + return output + + +HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'], + check_exit_code=False).strip()) +HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'], + check_exit_code=False).strip()) + + +def check_dependencies(): + """Make sure virtualenv is in the path.""" + + print 'Checking dependencies...' + if not HAS_VIRTUALENV: + print 'Virtual environment not found.' + # Try installing it via easy_install... + if HAS_EASY_INSTALL: + print 'Installing virtualenv via easy_install...', + run_command(['easy_install', 'virtualenv'], + die_message='easy_install failed to install virtualenv' + '\ndevelopment requires virtualenv, please' + ' install it using your favorite tool') + if not run_command(['which', 'virtualenv']): + die('ERROR: virtualenv not found in path.\n\ndevelopment ' + ' requires virtualenv, please install it using your' + ' favorite package management tool and ensure' + ' virtualenv is in your path') + print 'virtualenv installation done.' 
+ else: + die('easy_install not found.\n\nInstall easy_install' + ' (python-setuptools in ubuntu) or virtualenv by hand,' + ' then rerun.') + print 'dependency check done.' + + +def create_virtualenv(venv=VENV): + """Creates the virtual environment and installs PIP only into the + virtual environment + """ + print 'Creating venv...', + run_command(['virtualenv', '-q', '--no-site-packages', VENV]) + print 'done.' + print 'Installing pip in virtualenv...', + if not run_command([WITH_VENV, 'easy_install', 'pip']).strip(): + die("Failed to install pip.") + print 'done.' + print 'Installing distribute in virtualenv...' + pip_install('distribute>=0.6.24') + print 'done.' + + +def pip_install(*args): + args = [WITH_VENV, 'pip', 'install', '--upgrade'] + list(args) + run_command(args, redirect_output=False) + + +def install_dependencies(venv=VENV): + print "Installing dependencies..." + print "(This may take several minutes, don't panic)" + pip_install('-r', TEST_REQUIRES) + pip_install('-r', PIP_REQUIRES) + + # Tell the virtual env how to "import dashboard" + py = 'python%d.%d' % (sys.version_info[0], sys.version_info[1]) + pthfile = os.path.join(venv, "lib", py, "site-packages", "dashboard.pth") + f = open(pthfile, 'w') + f.write("%s\n" % ROOT) + + +def install_horizon(): + print 'Installing horizon module in development mode...' + run_command([WITH_VENV, 'python', 'setup.py', 'develop'], cwd=ROOT) + + +def print_summary(): + summary = """ +Horizon development environment setup is complete. 
+ +To activate the virtualenv for the extent of your current shell session you +can run: + +$ source .venv/bin/activate +""" + print summary + + +def main(): + check_dependencies() + create_virtualenv() + install_dependencies() + install_horizon() + print_summary() + +if __name__ == '__main__': + main() diff --git a/tools/with_venv.sh b/tools/with_venv.sh new file mode 100755 index 0000000..7303990 --- /dev/null +++ b/tools/with_venv.sh @@ -0,0 +1,7 @@ +#!/bin/bash +TOOLS_PATH=${TOOLS_PATH:-$(dirname $0)} +VENV_PATH=${VENV_PATH:-${TOOLS_PATH}} +VENV_DIR=${VENV_NAME:-/../.venv} +TOOLS=${TOOLS_PATH} +VENV=${VENV:-${VENV_PATH}/${VENV_DIR}} +source ${VENV}/bin/activate && "$@" diff --git a/tox.ini b/tox.ini index 61f6151..1234f8f 100644 --- a/tox.ini +++ b/tox.ini @@ -13,7 +13,7 @@ deps = -r{toxinidir}/requirements.txt commands = /bin/bash run_tests.sh -N --no-pep8 {posargs} [testenv:py27] -setenv = DJANGO_SETTINGS_MODULE=manila_ui.test.settings +setenv = DJANGO_SETTINGS_MODULE=sahara_dashboard.test.settings [testenv:pep8] commands = flake8