Replaced or removed Savanna words in comments

Partially implements: blueprint savanna-renaming-service

Change-Id: I36914f7648a12c470a20509fb53467c40b483b7c
parent 21bb2866b9
commit 5b2c58afaf
@@ -24,7 +24,7 @@ from eventlet import wsgi
 from oslo.config import cfg
 
 
-# If ../savanna/__init__.py exists, add ../ to Python search path, so that
+# If ../sahara/__init__.py exists, add ../ to Python search path, so that
 # it will override what happens to be installed in /usr/(local/)lib/python...
 possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                 os.pardir,
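For context, the comment in this hunk describes the usual OpenStack bin-script idiom for preferring a local checkout over an installed package. A minimal sketch of the full pattern follows; the final sys.path.insert step is inferred from the idiom, not quoted from the file.

# Minimal sketch: if the tree containing this script also contains the
# package, put that directory first on sys.path so the local copy wins.
import os
import sys

possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'sahara', '__init__.py')):
    sys.path.insert(0, possible_topdir)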
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Handles database requests from other savanna services."""
+"""Handles database requests from other Sahara services."""
 
 import copy
 
@@ -57,7 +57,7 @@ def _apply_defaults(values, defaults):
 class ConductorManager(db_base.Base):
     """This class aimed to conduct things.
 
-    The methods in the base API for savanna-conductor are various proxy
+    The methods in the base API for sahara-conductor are various proxy
     operations that allows other services to get specific work done without
     locally accessing the database.
 
@@ -223,7 +223,7 @@ class JobBinary(object):
     tenant_id
     name
     description
-    url - URLs may be the following: savanna-db://URL, internal-swift://,
+    url - URLs may be the following: internal-db://URL, internal-swift://,
           external-swift://
     extra - extra may contain not only user-password but e.g. auth-token
     """
@@ -99,7 +99,7 @@ def parse_configs(argv=None, conf_files=None):
         CONF(ARGV, project='savanna', version=version_string,
              default_config_files=conf_files)
     except cfg.RequiredOptError as roe:
-        # TODO(slukjanov): replace RuntimeError with Savanna-specific exception
+        # TODO(slukjanov): replace RuntimeError with concrete exception
         raise RuntimeError("Option '%s' is required for config group "
                            "'%s'" % (roe.opt_name, roe.group.name))
     validate_configs()
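For context, the except branch in this hunk comes from oslo.config: a required option that ends up with no value makes CONF() raise cfg.RequiredOptError while parsing its sources. A minimal sketch, with an illustrative option name and group that are not taken from this commit:

# Minimal sketch of the cfg.RequiredOptError path; option and group names
# below are examples only.
from oslo.config import cfg

CONF = cfg.CONF
CONF.register_opts([cfg.StrOpt('cluster_name', required=True)],
                   group='demo')

try:
    # No CLI args and no config files supply 'cluster_name', so parsing
    # raises RequiredOptError.
    CONF([], project='sahara')
except cfg.RequiredOptError as roe:
    raise RuntimeError("Option '%s' is required for config group "
                       "'%s'" % (roe.opt_name, roe.group.name))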
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 """
-DB abstraction for Savanna
+DB abstraction for Sahara
 """
 
 from savanna.db.api import *  # noqa
@@ -15,8 +15,8 @@
 
 """Defines interface for DB access.
 
-Functions in this module are imported into the savanna.db namespace. Call these
-functions from savanna.db namespace, not the savanna.db.api namespace.
+Functions in this module are imported into the sahara.db namespace. Call these
+functions from sahara.db namespace, not the sahara.db.api namespace.
 
 All functions in this module return objects that implement a dictionary-like
 interface.
@@ -27,7 +27,7 @@ interface.
 `sqlalchemy` is the only supported backend right now.
 
 :sql_connection: string specifying the sqlalchemy connection to use, like:
-                 `sqlite:///var/lib/savanna/savanna.sqlite`.
+                 `sqlite:///var/lib/sahara/sahara.sqlite`.
 
 """
 
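For context, the sql_connection value documented in this hunk is an ordinary SQLAlchemy database URL. A minimal sketch of how such a URL maps onto an engine; the sqlite path here is illustrative, not taken from the tree:

# Minimal sketch: open an engine from a sql_connection-style URL and run a
# trivial query.
import sqlalchemy

engine = sqlalchemy.create_engine('sqlite:///sahara.sqlite')
with engine.connect() as conn:
    result = conn.execute(sqlalchemy.text('SELECT 1'))
    print(result.scalar())  # -> 1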
@@ -104,7 +104,7 @@ def main():
     )
     config.set_main_option('script_location',
                            'savanna.db.migration:alembic_migrations')
-    # attach the Savanna conf to the Alembic conf
+    # attach the Sahara conf to the Alembic conf
     config.savanna_config = CONF
 
     CONF(project='savanna')
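For context, once script_location points at the migration package, the same Alembic Config object can drive migrations programmatically. A minimal sketch under that assumption; the upgrade call and target revision are illustrative, and the database URL is expected to be supplied by the migration environment (env.py), not by this snippet:

# Minimal sketch: build an Alembic config and apply all migrations.
from alembic import command
from alembic.config import Config as AlembicConfig

config = AlembicConfig()
config.set_main_option('script_location',
                       'savanna.db.migration:alembic_migrations')
# env.py in the migration package is expected to provide the DB URL,
# e.g. from the attached project configuration.
command.upgrade(config, 'head')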
@@ -20,7 +20,7 @@ from savanna.openstack.common.db.sqlalchemy import models as oslo_models
 
 
 class _SaharaBase(oslo_models.ModelBase, oslo_models.TimestampMixin):
-    """Base class for all Savanna SQLAlchemy DB Models."""
+    """Base class for all SQLAlchemy DB Models."""
 
     def to_dict(self):
         """sqlalchemy based automatic to_dict method."""
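For context, the "automatic to_dict" named in this hunk is the usual column-walking conversion on a declarative SQLAlchemy model. A minimal sketch of that pattern; the model and column names are illustrative, and this is not the body from the tree:

# Minimal sketch: convert a mapped row to a plain dict by iterating columns.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Example(Base):
    __tablename__ = 'example'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(80))

    def to_dict(self):
        # Copy the current value of every mapped column.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}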
@@ -83,7 +83,7 @@ CONF.register_opts(opts)
 def make_app():
     """App builder (wsgi)
 
-    Entry point for Savanna REST API server
+    Entry point for Sahara REST API server
     """
     app = flask.Flask('savanna.api')
 
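For context, make_app() builds a plain Flask WSGI application. A minimal sketch of how such an app is typically served with eventlet's wsgi module, which matches the `from eventlet import wsgi` import visible at the top of this diff; the app name, route, and port are illustrative values:

# Minimal sketch: serve a Flask app with eventlet's WSGI server.
import eventlet
from eventlet import wsgi
import flask

app = flask.Flask('example.api')  # app name is illustrative

@app.route('/healthcheck')
def healthcheck():
    return 'OK'

if __name__ == '__main__':
    # eventlet.listen opens the socket; wsgi.server drives the WSGI app.
    wsgi.server(eventlet.listen(('0.0.0.0', 8386)), app)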
@@ -152,7 +152,7 @@ def make_app():
 
 def _get_infrastructure_engine():
     """That should import and return one of
-    savanna.service.instances*.py modules
+    sahara.service.instances*.py modules
     """
 
     LOG.info("Loading '%s' infrastructure engine" %
@@ -24,8 +24,8 @@ Initializing:
   Example::
 
     session.set_defaults(
-        sql_connection="sqlite:///var/lib/savanna/sqlite.db",
-        sqlite_db="/var/lib/savanna/sqlite.db")
+        sql_connection="sqlite:///var/lib/sahara/sqlite.db",
+        sqlite_db="/var/lib/sahara/sqlite.db")
 
 Recommended ways to use sessions within this framework:
 
@@ -837,7 +837,7 @@ def _patch_mysqldb_with_stacktrace_comments():
             # db/api is just a wrapper around db/sqlalchemy/api
             if filename.endswith('db/api.py'):
                 continue
-            # only trace inside savanna
+            # only trace inside sahara
             index = filename.rfind('savanna')
             if index == -1:
                 continue
@@ -19,7 +19,7 @@ gettext for openstack-common modules.
 
 Usual usage in an openstack.common module:
 
-    from savanna.openstack.common.gettextutils import _
+    from sahara.openstack.common.gettextutils import _
 """
 
 import copy
@@ -72,7 +72,7 @@ class AmbariPlugin(p.ProvisioningPluginBase):
     def _get_servers(self, cluster):
         servers = []
         if hasattr(cluster, 'node_groups') and cluster.node_groups is not None:
-            # code for a savanna cluster object
+            # code for a cluster object
             for node_group in cluster.node_groups:
                 servers += node_group.instances
         else:
@@ -232,7 +232,7 @@ class AmbariPlugin(p.ProvisioningPluginBase):
         LOG.info('Using "{0}" as admin user for scaling of cluster'
                  .format(ambari_info.user))
 
-    # SAVANNA PLUGIN SPI METHODS:
+    # PLUGIN SPI METHODS:
     def get_versions(self):
         return self.version_factory.get_versions()
 
@@ -243,7 +243,7 @@ class AmbariPlugin(p.ProvisioningPluginBase):
         handler = self.version_factory.get_version_handler(hadoop_version)
         return handler.get_config_items()
 
-    # cluster name argument supports the non-savanna cluster creation mode
+    # cluster name argument supports the non-sahara cluster creation mode
     def start_cluster(self, cluster):
         client = self.version_factory.get_version_handler(
             cluster.hadoop_version).get_ambari_client()
@@ -27,7 +27,7 @@ def get(ctx, session_id):
 
 
 def wait(ctx, session_id):
-    #TODO(lazarev) add check on savanna cluster state (exit on delete)
+    #TODO(alazarev) add check on Hadoop cluster state (exit on delete)
    #TODO(alazarev) make configurable (bug #1262897)
     timeout = 4*60*60  # 4 hours
     cur_time = 0
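For context, the hard-coded four-hour value in this hunk is the bound on a polling loop. A minimal sketch of that pattern; check_status and the sleep interval are placeholders, not code from the tree:

# Minimal sketch: poll a status callable until it succeeds or time runs out.
import time

def wait_for(check_status, timeout=4 * 60 * 60, interval=5):
    """Return True once check_status() is truthy, False on timeout."""
    cur_time = 0
    while cur_time < timeout:
        if check_status():
            return True
        time.sleep(interval)
        cur_time += interval
    return False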
@@ -160,7 +160,7 @@ class Config(resources.BaseResource):
 
 
 class UserInput(object):
-    """Value provided by the Savanna user for a specific config entry."""
+    """Value provided by the user for a specific config entry."""
 
     def __init__(self, config, value):
         self.config = config
@@ -20,7 +20,7 @@ conductor = c.API
 
 
 def get_raw_data(context, job_binary):
-    # url example: 'savanna-db://JobBinaryInternal-UUID
+    # url example: 'internal-db://JobBinaryInternal-UUID
     binary_internal_id = job_binary.url[len("savanna-db://"):]
     return conductor.job_binary_internal_get_raw_data(context,
                                                       binary_internal_id)
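For context, the URL in the comment above carries the JobBinaryInternal id after the scheme prefix. A minimal sketch of that prefix handling; the helper name and explicit scheme check are illustrative, whereas the function in the hunk simply slices with a hard-coded prefix length:

# Minimal sketch: strip the scheme prefix from a job binary URL.
def extract_internal_id(url, scheme='internal-db://'):
    """Return the JobBinaryInternal id embedded in the URL."""
    if not url.startswith(scheme):
        raise ValueError('unexpected job binary URL: %s' % url)
    return url[len(scheme):]

# e.g. extract_internal_id('internal-db://1234-abcd') -> '1234-abcd'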
@@ -212,14 +212,14 @@ def get_possible_job_config(job_type):
         return {'job_config': {'configs': [], 'args': []}}
 
     if edp.compare_job_type(job_type, 'MapReduce', 'Pig'):
-        #TODO(nmakhotkin) Savanna should return config based on specific plugin
+        #TODO(nmakhotkin) Here we should return config based on specific plugin
         cfg = xmlutils.load_hadoop_xml_defaults(
             'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
         if edp.compare_job_type(job_type, 'MapReduce'):
             cfg += xmlutils.load_hadoop_xml_defaults(
                 'service/edp/resources/mapred-job-config.xml')
     elif edp.compare_job_type(job_type, 'Hive'):
-        #TODO(nmakhotkin) Savanna should return config based on specific plugin
+        #TODO(nmakhotkin) Here we should return config based on specific plugin
         cfg = xmlutils.load_hadoop_xml_defaults(
             'plugins/vanilla/v1_2_1/resources/hive-default.xml')
 
@@ -449,11 +449,11 @@ class ITestCase(unittest2.TestCase):
         )
         # If plugin_config.IMAGE_ID, plugin_config.IMAGE_NAME and
         # plugin_config.IMAGE_TAG are None then image is chosen
-        # by tag "savanna_i_tests". If image has tag "savanna_i_tests"
+        # by tag "sahara_i_tests". If image has tag "sahara_i_tests"
         # (at the same time image ID, image name and image tag were not
         # specified in configuration file of integration tests) then return
         # its ID and username. Found image will be chosen as image for tests.
-        # If image with tag "savanna_i_tests" not found then handle error
+        # If image with tag "sahara_i_tests" not found then handle error
         for image in images:
             if (image.metadata.get('_savanna_tag_savanna_i_tests')) and (
                     image.metadata.get('_savanna_tag_'
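For context, the comments in this hunk describe selecting a test image purely by metadata tag. A minimal sketch of that selection; the helper is illustrative and only the tag key string mirrors what is visible in the hunk:

# Minimal sketch: pick the first image whose metadata carries the
# integration-test tag.  find_tagged_image is not a function from the suite.
def find_tagged_image(images, tag_key='_savanna_tag_savanna_i_tests'):
    """Return the first image tagged for integration tests, or None."""
    for image in images:
        metadata = getattr(image, 'metadata', None) or {}
        if metadata.get(tag_key):
            return image
    return None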
@@ -182,10 +182,10 @@ class BaseMigrationTestCase(unittest2.TestCase):
         self.DEFAULT_CONFIG_FILE = os.path.join(
             os.path.dirname(__file__),
             'test_migrations.conf')
-        # Test machines can set the SAVANNA_TEST_MIGRATIONS_CONF variable
+        # Test machines can set the SAHARA_TEST_MIGRATIONS_CONF variable
         # to override the location of the config file for migration testing
         self.CONFIG_FILE_PATH = os.environ.get(
-            'SAVANNA_TEST_MIGRATIONS_CONF',
+            'SAHARA_TEST_MIGRATIONS_CONF',
             self.DEFAULT_CONFIG_FILE)
 
         self.ALEMBIC_CONFIG = alembic_config.Config(
@@ -62,7 +62,7 @@ class TestHeat(unittest2.TestCase):
 
 class TestClusterTemplate(base.SaharaWithDbTestCase):
     """This test checks valid structure of Resources
-    section in Heat templates generated by Savanna:
+    section in generated Heat templates:
     1. It checks templates generation with different OpenStack
     network installations: Neutron, NovaNetwork with floating Ip auto
     assignment set to True or False.
@@ -30,7 +30,7 @@ from six.moves import cStringIO as StringIO
 # Note(maoy): E1103 is error code related to partial type inference
 ignore_codes = ["E1103"]
 # Note(maoy): the error message is the pattern of E0202. It should be ignored
-# for savanna.tests modules
+# for sahara.tests modules
 ignore_messages = ["An attribute affected in savanna.tests"]
 # We ignore all errors in openstack.common because it should be checked
 # elsewhere.