Convert PrettyTable usage to tabulate

PrettyTable is no longer maintained and the last release was in 2013.
Newer Python releases have started emitting deprecation warnings for it.

Various attempts to revive a fork haven't gained much traction. A common
recommendation is to move away from PrettyTable to tabulate. This
switches our usage to a close equivalent using that library instead.

Depends-on: https://review.opendev.org/694093

Change-Id: I9c164f02a78fd6b228e1e3adc3a09a182fb82de9
Signed-off-by: Sean McGinnis <sean.mcginnis@gmail.com>
This commit is contained in:
Sean McGinnis 2019-11-13 09:47:37 -06:00
parent 821cc703c0
commit 66581bacb2
No known key found for this signature in database
GPG Key ID: CE7EE4BFAF8D70C8
5 changed files with 47 additions and 34 deletions

View File

@ -58,7 +58,6 @@ except ImportError:
import collections
import logging as python_logging
import prettytable
import sys
import time
@ -67,6 +66,7 @@ from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import migration
from oslo_log import log as logging
from oslo_utils import timeutils
import tabulate
# Need to register global_opts
from cinder.backup import rpcapi as backup_rpcapi
@ -384,11 +384,11 @@ class DbCommands(object):
headers = ["{}".format(_('Migration')),
"{}".format(_('Total Needed')),
"{}".format(_('Completed')), ]
t = prettytable.PrettyTable(headers)
rows = []
for name in sorted(migration_info.keys()):
info = migration_info[name]
t.add_row([name, info[0], info[1]])
print(t)
rows.append([name, info[0], info[1]])
print(tabulate.tabulate(rows, headers=headers, tablefmt='psql'))
# NOTE(imacdonn): In the "unlimited" case, the loop above will only
# terminate when all possible migrations have been effected. If we're

View File

@ -12,6 +12,7 @@
import datetime
import iso8601
import re
import sys
import time
@ -421,22 +422,28 @@ class TestCinderManageCmd(test.TestCase):
exit = self.assertRaises(SystemExit,
command.online_data_migrations, 10)
self.assertEqual(1, exit.code)
expected = """\
5 rows matched query mock_mig_1, 4 migrated
6 rows matched query mock_mig_2, 6 migrated
+------------+--------------+-----------+
| Migration | Total Needed | Completed |
+------------+--------------+-----------+
| mock_mig_1 | 5 | 4 |
| mock_mig_2 | 6 | 6 |
+------------+--------------+-----------+
"""
command.online_migrations[0].assert_has_calls([mock.call(ctxt,
10)])
command.online_migrations[1].assert_has_calls([mock.call(ctxt,
6)])
self.assertEqual(expected, sys.stdout.getvalue())
output = sys.stdout.getvalue()
matches = re.findall(
'5 rows matched query mock_mig_1, 4 migrated',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
matches = re.findall(
'6 rows matched query mock_mig_2, 6 migrated',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
matches = re.findall(
'mock_mig_1 .* 5 .* 4',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
matches = re.findall(
'mock_mig_2 .* 6 .* 6',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
@mock.patch('cinder.context.get_admin_context')
def test_online_migrations_no_max_count(self, mock_get_context):
@ -456,19 +463,24 @@ class TestCinderManageCmd(test.TestCase):
exit = self.assertRaises(SystemExit,
command.online_data_migrations, None)
self.assertEqual(0, exit.code)
expected = """\
Running batches of 50 until complete.
120 rows matched query fake_migration, 50 migrated
120 rows matched query fake_migration, 50 migrated
120 rows matched query fake_migration, 20 migrated
120 rows matched query fake_migration, 0 migrated
+----------------+--------------+-----------+
| Migration | Total Needed | Completed |
+----------------+--------------+-----------+
| fake_migration | 120 | 120 |
+----------------+--------------+-----------+
"""
self.assertEqual(expected, sys.stdout.getvalue())
output = sys.stdout.getvalue()
self.assertIn('Running batches of 50 until complete.', output)
matches = re.findall(
'120 rows matched query fake_migration, 50 migrated',
output, re.MULTILINE)
self.assertEqual(len(matches), 2)
matches = re.findall(
'120 rows matched query fake_migration, 20 migrated',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
matches = re.findall(
'120 rows matched query fake_migration, 0 migrated',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
matches = re.findall(
'fake_migration .* 120 .* 120',
output, re.MULTILINE)
self.assertEqual(len(matches), 1)
@mock.patch('cinder.context.get_admin_context')
def test_online_migrations_error(self, mock_get_context):

View File

@ -14,13 +14,13 @@
# under the License.
import datetime
import platform
import prettytable
import six
import time
import traceback
import types
from oslo_log import log as logging
import tabulate
from cinder.objects import volume
from cinder import version
@ -237,12 +237,13 @@ class PowerMaxVolumeMetadata(object):
:param datadict: the data dictionary
"""
t = prettytable.PrettyTable(['Key', 'Value'])
rows = []
for k, v in datadict.items():
if v is not None:
t.add_row([k, v])
rows.append([k, v])
t = tabulate.tabulate(rows, headers=['Key', 'Value'], tablefmt='psql')
LOG.debug('\n%(output)s\n', {'output': t})
LOG.debug('\n%s\n', t)
def _consolidate_volume_trace_list(
self, volume_id, volume_trace_dict, volume_key_value):

View File

@ -95,7 +95,6 @@ pbr==2.0.0
pep8==1.5.7
pika-pool==0.1.3
pika==0.10.0
prettytable==0.7.1
psutil==3.2.2
psycopg2==2.7
pyasn1-modules==0.2.1
@ -145,6 +144,7 @@ statsd==3.2.2
stestr==2.2.0
stevedore==1.20.0
suds-jurko==0.6
tabulate==0.8.5
taskflow==3.2.0
tempest==17.1.0
Tempita==0.5.2

View File

@ -36,7 +36,6 @@ osprofiler>=1.4.0 # Apache-2.0
paramiko>=2.0.0 # LGPLv2.1+
Paste>=2.0.2 # MIT
PasteDeploy>=1.5.0 # MIT
PrettyTable<0.8,>=0.7.1 # BSD
psutil>=3.2.2 # BSD
pyparsing>=2.1.0 # MIT
python-barbicanclient>=4.5.2 # Apache-2.0
@ -55,6 +54,7 @@ SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
sqlalchemy-migrate>=0.11.0 # Apache-2.0
stevedore>=1.20.0 # Apache-2.0
suds-jurko>=0.6 # LGPLv3+
tabulate>=0.8.5 # MIT
WebOb>=1.7.1 # MIT
oslo.i18n>=3.15.3 # Apache-2.0
oslo.vmware>=2.17.0 # Apache-2.0