Rename lib/ceph to lib/charms_ceph

The new python3-ceph-common deb package (introduced in Ceph Octopus)
adds a new ceph directory (a parent package in Python terms) at
/usr/lib/python3/dist-packages/ceph/. This conflicts with
charm-ceph-osd/lib/ceph/. For example, with the current import of
ceph.utils in hooks/ceph_hooks.py, Python resolves the ceph package to
/usr/lib/python3/dist-packages/ceph/, finds no utils.py there, and
stops searching rather than falling back to the charm's lib/ceph/.
Therefore, rename lib/ceph to lib/charms_ceph to avoid the conflict.
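
Illustration only (not part of the change): a minimal Python sketch of
the shadowing, assuming python3-ceph-common is installed and the charm
has appended its lib/ directory to sys.path as the hooks do.

    import importlib.util
    import sys

    sys.path.append('lib')  # how the hooks expose the bundled packages

    # 'ceph' resolves to the system package first, because
    # /usr/lib/python3/dist-packages precedes the appended 'lib' entry.
    spec = importlib.util.find_spec('ceph')
    print(spec.origin)  # e.g. /usr/lib/python3/dist-packages/ceph/__init__.py

    # Python stops at that package, so lib/ceph/utils.py is never seen:
    print(importlib.util.find_spec('ceph.utils'))  # None -> ImportError

    # After the rename there is nothing left to collide with:
    print(importlib.util.find_spec('charms_ceph.utils'))  # lib/charms_ceph/utils.py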

Depends-On: https://review.opendev.org/#/c/709226
Change-Id: I13ae7c048d8f1eef2ea64b13ae14b51dbfaaf3cd
Corey Bryant
2020-02-21 18:22:58 +00:00
parent 81a11f4d7f
commit fcfa499f11
13 changed files with 29 additions and 26 deletions


@@ -27,19 +27,19 @@ import charmhelpers.core.hookenv as hookenv
 from charmhelpers.core.unitdata import kv
 
 import ceph_hooks
-import ceph.utils
+import charms_ceph.utils
 
 
 def add_device(request, device_path, bucket=None):
-    ceph.utils.osdize(device_path, hookenv.config('osd-format'),
-                      ceph_hooks.get_journal_devices(),
-                      hookenv.config('ignore-device-errors'),
-                      hookenv.config('osd-encrypt'),
-                      hookenv.config('bluestore'),
-                      hookenv.config('osd-encrypt-keymanager'))
+    charms_ceph.utils.osdize(device_path, hookenv.config('osd-format'),
+                             ceph_hooks.get_journal_devices(),
+                             hookenv.config('ignore-device-errors'),
+                             hookenv.config('osd-encrypt'),
+                             hookenv.config('bluestore'),
+                             hookenv.config('osd-encrypt-keymanager'))
     # Make it fast!
     if hookenv.config('autotune'):
-        ceph.utils.tune_dev(device_path)
+        charms_ceph.utils.tune_dev(device_path)
     mounts = filter(lambda disk: device_path
                     in disk.device, psutil.disk_partitions())
     for osd in mounts:


@@ -36,7 +36,7 @@ sys.path.append('lib/')
 
 import charmhelpers.core.hookenv as hookenv
 
-import ceph.utils
+import charms_ceph.utils
 import utils
 
@@ -46,13 +46,15 @@ def list_disk():
     for journal in utils.get_journal_devices():
         osd_journal.append(os.path.realpath(journal))
 
-    for dev in list(set(ceph.utils.unmounted_disks()) - set(osd_journal)):
-        if (not ceph.utils.is_active_bluestore_device(dev) and
-                not ceph.utils.is_pristine_disk(dev)):
+    for dev in list(set(charms_ceph.utils.unmounted_disks()) -
+                    set(osd_journal)):
+        if (not charms_ceph.utils.is_active_bluestore_device(dev) and
+                not charms_ceph.utils.is_pristine_disk(dev)):
             non_pristine.append(dev)
 
     hookenv.action_set({
-        'disks': list(set(ceph.utils.unmounted_disks()) - set(osd_journal)),
+        'disks': list(set(charms_ceph.utils.unmounted_disks()) -
+                      set(osd_journal)),
         'blacklist': utils.get_blacklist(),
         'non-pristine': non_pristine,
     })


@@ -27,7 +27,7 @@ from charmhelpers.core.hookenv import (
     action_fail,
 )
 
-from ceph.utils import get_local_osd_ids
+from charms_ceph.utils import get_local_osd_ids
 from ceph_hooks import assess_status
 


@@ -27,8 +27,8 @@ from charmhelpers.contrib.storage.linux.utils import (
     zap_disk,
 )
 from charmhelpers.core.unitdata import kv
-from ceph.utils import is_active_bluestore_device
-from ceph.utils import is_mapped_luks_device
+from charms_ceph.utils import is_active_bluestore_device
+from charms_ceph.utils import is_mapped_luks_device
 
 
 def get_devices():


@@ -25,7 +25,7 @@ import subprocess
 import sys
 
 sys.path.append('lib')
-import ceph.utils as ceph
+import charms_ceph.utils as ceph
 from charmhelpers.core import hookenv
 from charmhelpers.core.hookenv import (
     log,


@@ -19,7 +19,7 @@ import subprocess
 import sys
 
 sys.path.append('lib')
-import ceph.utils as ceph
+import charms_ceph.utils as ceph
 
 from charmhelpers.core.hookenv import (
     unit_get,


@@ -18,11 +18,11 @@ import os
 from tempfile import NamedTemporaryFile
 
-from ceph.utils import (
+from charms_ceph.utils import (
     get_cephfs,
     get_osd_weight
 )
-from ceph.crush_utils import Crushmap
+from charms_ceph.crush_utils import Crushmap
 
 from charmhelpers.core.hookenv import (
     log,


@@ -26,7 +26,7 @@ class AddDiskActionTests(CharmTestCase):
         self.kv.return_value = self.kv
 
     @mock.patch.object(add_disk.ceph_hooks, 'get_journal_devices')
-    @mock.patch.object(add_disk.ceph.utils, 'osdize')
+    @mock.patch.object(add_disk.charms_ceph.utils, 'osdize')
     def test_add_device(self, mock_osdize, mock_get_journal_devices):
         def fake_config(key):


@@ -7,16 +7,17 @@ class ListDisksActionTests(CharmTestCase):
     def setUp(self):
         super(ListDisksActionTests, self).setUp(
             list_disks, ['hookenv',
-                         'ceph',
+                         'charms_ceph',
                          'utils',
                          'os'])
-        self.ceph.utils.unmounted_disks.return_value = ['/dev/sda', '/dev/sdm']
+        self.charms_ceph.utils.unmounted_disks.return_value = ['/dev/sda',
+                                                               '/dev/sdm']
 
     def test_list_disks_journal_symbol_link(self):
         self.utils.get_journal_devices.return_value = {'/dev/disk/ceph/sdm'}
         self.os.path.realpath.return_value = '/dev/sdm'
-        self.ceph.utils.is_active_bluestore_device.return_value = False
-        self.ceph.utils.is_pristine_disk.return_value = False
+        self.charms_ceph.utils.is_active_bluestore_device.return_value = False
+        self.charms_ceph.utils.is_pristine_disk.return_value = False
         self.utils.get_blacklist.return_value = []
         list_disks.list_disk()
         self.hookenv.action_set.assert_called_with({


@@ -1,7 +1,7 @@
 __author__ = 'Chris Holcombe <chris.holcombe@canonical.com>'
 from mock import patch, call
 import test_utils
-import ceph.utils as ceph
+import charms_ceph.utils as ceph
 
 TO_PATCH = [
     'hookenv',