From 8da7fef2c016df9a73c3edb21cd9bc236613d976 Mon Sep 17 00:00:00 2001
From: iberezovskiy
Date: Tue, 11 Mar 2014 16:21:50 +0400
Subject: [PATCH] Rename Savanna to Sahara

Implements: blueprint savanna-renaming-extra

Change-Id: I5db8125aee11e617b5aa3b16eb7691b11e3d7281
---
 HACKING.rst                                       |  8 ++++----
 README.rst                                        | 10 +++++-----
 edp-examples/edp-wordcount/README.rst             | 14 +++++++-------
 edp-examples/edp-wordcount/wordcount/workflow.xml |  8 ++++----
 setup.cfg                                         |  8 ++++----
 5 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/HACKING.rst b/HACKING.rst
index e94865e..6568acd 100644
--- a/HACKING.rst
+++ b/HACKING.rst
@@ -1,12 +1,12 @@
-Savanna Style Commandments
-==========================
+Sahara Style Commandments
+=========================
 
 - Step 1: Read the OpenStack Style Commandments
   http://docs.openstack.org/developer/hacking/
 - Step 2: Read on
 
-Savanna Specific Commandments
------------------------------
+Sahara Specific Commandments
+----------------------------
 
 None so far
 
diff --git a/README.rst b/README.rst
index 6e92544..76ece32 100644
--- a/README.rst
+++ b/README.rst
@@ -1,9 +1,9 @@
-Savanna-extra project
-=====================
+Sahara-extra project
+====================
 
-Savanna-extra is place for Savanna components not included into the main `Savanna repository `_
+Sahara-extra is place for Sahara components not included into the main `Sahara repository `_
 
 Here is the list of components:
 
-* Sources for Swift filesystem implementation for Hadoop: https://github.com/stackforge/savanna-extra/blob/master/hadoop-swiftfs/README.rst
-* `Diskimage-builder `_ elements moved to the new repo: https://github.com/stackforge/savanna-image-elements
+* Sources for Swift filesystem implementation for Hadoop: https://github.com/openstack/sahara-extra/blob/master/hadoop-swiftfs/README.rst
+* `Diskimage-builder `_ elements moved to the new repo: https://github.com/openstack/sahara-image-elements
diff --git a/edp-examples/edp-wordcount/README.rst b/edp-examples/edp-wordcount/README.rst
index 5d586f9..d043e6e 100644
--- a/edp-examples/edp-wordcount/README.rst
+++ b/edp-examples/edp-wordcount/README.rst
@@ -43,8 +43,8 @@ To run this example from Oozie, you will need to modify the ``job.properties`` f
 to specify the correct ``jobTracker`` and ``nameNode`` addresses for your cluster.
 
 You will also need to modify the ``workflow.xml`` file to contain the correct input
-and output paths. These paths may be Savanna swift urls or hdfs paths. If swift
-urls are used, set the ``fs.swift.service.savanna.username`` and ``fs.swift.service.savanna.password``
+and output paths. These paths may be Sahara swift urls or hdfs paths. If swift
+urls are used, set the ``fs.swift.service.sahara.username`` and ``fs.swift.service.sahara.password``
 properties in the ``<configuration>`` section.
 
 1) Upload the ``wordcount`` directory to hdfs
@@ -55,12 +55,12 @@ properties in the ``<configuration>`` section.
 
 ``$ oozie job -oozie http://oozie_server:port/oozie -config wordcount/job.properties -run``
 
-3) Don't forget to create your swift input path! A Savanna swift url looks like *swift://container.savanna/object*
+3) Don't forget to create your swift input path! A Sahara swift url looks like *swift://container.sahara/object*
 
-Running from the Savanna UI
+Running from the Sahara UI
 ===========================
 
-Running the WordCount example from the Savanna UI is very similar to running a Pig, Hive,
+Running the WordCount example from the Sahara UI is very similar to running a Pig, Hive,
 or MapReduce job.
 
 1) Create a job binary that points to the ``edp-wordcount.jar`` file
@@ -69,8 +69,8 @@ or MapReduce job.
 
    a) Add the input and output paths to ``args``
 
-   b) If swift input or output paths are used, set the ``fs.swift.service.savanna.username`` and ``fs.swift.service.savanna.password``
+   b) If swift input or output paths are used, set the ``fs.swift.service.sahara.username`` and ``fs.swift.service.sahara.password``
       configuration values
 
-   c) The Savanna UI will prompt for the required ``main_class`` value and the optional ``java_opts`` value
+   c) The Sahara UI will prompt for the required ``main_class`` value and the optional ``java_opts`` value
 
diff --git a/edp-examples/edp-wordcount/wordcount/workflow.xml b/edp-examples/edp-wordcount/wordcount/workflow.xml
index 2c0195a..844687d 100644
--- a/edp-examples/edp-wordcount/wordcount/workflow.xml
+++ b/edp-examples/edp-wordcount/wordcount/workflow.xml
@@ -27,17 +27,17 @@
                     <value>${queueName}</value>
                 </property>
                 <property>
-                    <name>fs.swift.service.savanna.username</name>
+                    <name>fs.swift.service.sahara.username</name>
                     <value>swiftuser</value>
                 </property>
                 <property>
-                    <name>fs.swift.service.savanna.password</name>
+                    <name>fs.swift.service.sahara.password</name>
                     <value>swiftpassword</value>
                 </property>
             </configuration>
             <main-class>org.apache.hadoop.examples.WordCount</main-class>
-            <arg>swift://user.savanna/input</arg>
-            <arg>swift://user.savanna/output</arg>
+            <arg>swift://user.sahara/input</arg>
+            <arg>swift://user.sahara/output</arg>
diff --git a/setup.cfg b/setup.cfg
index 6e08cad..19cd925 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,7 +1,7 @@
 [metadata]
-name = savanna-extra
+name = sahara-extra
 version = 2014.1
-summary = Extras for Savanna: elements, hadoop-swiftfs
+summary = Extras for Sahara: hadoop-swiftfs
 description-file = README.rst
 license = Apache Software License
 classifiers =
@@ -11,11 +11,11 @@ classifiers =
     Operating System :: POSIX :: Linux
 author = OpenStack
 author-email = openstack-dev@lists.openstack.org
-home-page = https://savanna.readthedocs.org
+home-page = https://sahara.readthedocs.org
 
 [files]
 data_files =
-    share/savanna-elements = elements/*
+    share/sahara-elements = elements/*
 
 [global]
 setup-hooks = pbr.hooks.setup_hook
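
The README hunk above tells the reader to edit ``job.properties`` with the cluster's ``jobTracker`` and ``nameNode`` addresses before submitting the workflow with ``oozie job``. As a rough illustration only (the file itself is not touched by this patch, and the host names, ports, and HDFS path below are hypothetical placeholders rather than values from the repository), a minimal ``job.properties`` for this example might look like::

    # Hypothetical addresses -- replace with the JobTracker and NameNode of your cluster
    nameNode=hdfs://namenode.example.com:8020
    jobTracker=jobtracker.example.com:8021
    queueName=default
    # Assumed HDFS location of the wordcount directory uploaded in step 1
    oozie.wf.application.path=${nameNode}/user/${user.name}/wordcount

These keys match the ``${jobTracker}``, ``${nameNode}``, and ``${queueName}`` parameters that ``workflow.xml`` references; the swift credentials themselves are set inside the workflow's ``<configuration>`` section, as shown in the hunk above.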