Remove Python 2 support

Python 2 has reached end of life and is no longer supported. This patch
removes all traces of six, u'' string literals, and other Python 2
compatibility tweaks.

Change-Id: Ia51ce13bcf1fa92f9c4d0617eb0ff434f2e15226
anguoming 2022-08-13 14:05:05 +08:00
parent 4999d4c3f2
commit d485cdb8f2
7 changed files with 21 additions and 179 deletions
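
For reference, every six/Python 2 idiom dropped by this patch maps directly onto
a Python 3 built-in. The snippet below is an illustrative sketch using stand-in
values, not code taken from the patched modules:

# Illustrative sketch only (stand-in data, not the plugin's real dicts/values).
confs = {'HDFS': ['core-site.xml'], 'Spark': ['spark-defaults.conf']}
port = 7077
lines = ['NameNode: example', '']

for service, files in confs.items():   # was: six.iteritems(confs)
    pass
port_str = str(port)                   # was: six.text_type(port)
for i in range(len(lines)):            # was: six.moves.xrange(0, len(lines))
    pass
title = '2015, Sahara team'            # was: u'2015, Sahara team' (u prefix is a no-op on Python 3)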

View File

@@ -36,7 +36,7 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-copyright = u'2015, Sahara team'
+copyright = '2015, Sahara team'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -154,8 +154,8 @@ htmlhelp_basename = 'saharasparkplugin-testsdoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
-('index', 'doc-sahara-plugin-spark.tex', u'Sahara Spark Plugin Documentation',
-u'Sahara team', 'manual'),
+('index', 'doc-sahara-plugin-spark.tex', 'Sahara Spark Plugin Documentation',
+'Sahara team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -185,8 +185,8 @@ smartquotes_excludes = {'builders': ['latex']}
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
-('index', 'sahara-plugin-spark', u'sahara-plugin-spark Documentation',
-[u'Sahara team'], 1)
+('index', 'sahara-plugin-spark', 'sahara-plugin-spark Documentation',
+['Sahara team'], 1)
]
# If true, show URL addresses after external links.
@@ -199,8 +199,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
-('index', 'sahara-plugin-spark', u'sahara-plugin-spark Documentation',
-u'Sahara team', 'sahara-plugin-spark', 'One line description of project.',
+('index', 'sahara-plugin-spark', 'sahara-plugin-spark Documentation',
+'Sahara team', 'sahara-plugin-spark', 'One line description of project.',
'Miscellaneous'),
]
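
The conf.py changes above only strip the u prefix; on Python 3 every str literal
is already unicode, so the values Sphinx reads are unchanged. A quick check
(illustrative, not part of the patch):

# Illustrative check: u'' and '' produce the same str type and compare equal
# on Python 3, so dropping the prefix does not alter the Sphinx metadata.
assert u'Sahara team' == 'Sahara team'
assert type(u'Sahara team') is str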

View File

@@ -1,152 +0,0 @@
alabaster==0.7.12
alembic==1.0.7
amqp==2.4.1
appdirs==1.4.3
asn1crypto==0.24.0
astroid==1.3.8
attrs==18.2.0
Babel==2.3.4
bandit==1.5.1
bashate==0.5.1
bcrypt==3.1.6
botocore==1.12.87
cachetools==3.1.0
castellan==1.1.0
certifi==2018.11.29
cffi==1.14.0
chardet==3.0.4
Click==7.0
cliff==2.14.0
cmd2==0.9.7
colorama==0.4.1
coverage==4.0
cryptography==2.5
debtcollector==1.20.0
decorator==4.3.2
doc8==0.6.0
docutils==0.14
dogpile.cache==0.6.8
dulwich==0.19.10
eventlet==0.26.0
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
Flask==1.0.2
future==0.16.0
futurist==1.8.0
gitdb2==2.0.5
GitPython==2.1.11
greenlet==0.4.15
grpcio==1.24.3
imagesize==1.1.0
iso8601==0.1.12
itsdangerous==1.1.0
Jinja2==2.10
jmespath==0.9.3
jsonpatch==1.23
jsonpointer==2.0
jsonschema==2.6.0
keystoneauth1==3.11.2
keystonemiddleware==5.3.0
kombu==5.0.2
linecache2==1.0.0
logilab-common==1.4.2
Mako==1.0.7
MarkupSafe==1.1.0
microversion-parse==0.2.1
monotonic==1.5
mox3==0.26.0
msgpack==0.6.1
msgpack-python==0.5.6
munch==2.3.2
netaddr==0.7.19
netifaces==0.10.9
openstacksdk==0.24.0
os-api-ref==1.6.0
os-client-config==1.31.2
os-service-types==1.5.0
osc-lib==1.12.0
oslo.cache==1.32.0
oslo.concurrency==3.29.0
oslo.config==6.8.0
oslo.context==2.22.0
oslo.db==6.0.0
oslo.i18n==3.15.3
oslo.log==3.36.0
oslo.messaging==9.4.0
oslo.middleware==3.37.0
oslo.policy==2.1.0
oslo.rootwrap==5.15.1
oslo.serialization==2.18.0
oslo.service==1.36.0
oslo.upgradecheck==0.1.1
oslo.utils==3.33.0
oslotest==3.2.0
packaging==19.0
paramiko==2.7.1
Paste==3.0.6
PasteDeploy==2.0.1
pbr==2.0.0
prettytable==0.7.2
pyasn1==0.4.5
pycadf==2.9.0
pycodestyle==2.0.0
pycparser==2.19
Pygments==2.3.1
pyinotify==0.9.6
pylint==1.4.5
PyNaCl==1.3.0
pyOpenSSL==19.0.0
pyparsing==2.3.1
pyperclip==1.7.0
python-barbicanclient==4.8.1
python-cinderclient==4.1.0
python-dateutil==2.8.0
python-editor==1.0.4
python-glanceclient==2.15.0
python-heatclient==1.17.0
python-keystoneclient==3.18.0
python-manilaclient==1.26.0
python-mimeparse==1.6.0
python-neutronclient==6.11.0
python-novaclient==11.1.0
python-pcre==0.7
python-subunit==1.4.0
python-swiftclient==3.6.0
pytz==2018.9
PyYAML==3.13
repoze.lru==0.7
requests==2.14.2
requestsexceptions==1.4.0
restructuredtext-lint==1.2.2
rfc3986==1.2.0
Routes==2.4.1
sahara==10.0.0.0b1
simplejson==3.16.0
six==1.10.0
smmap2==2.0.5
snowballstemmer==1.2.1
SQLAlchemy==1.2.17
sqlalchemy-migrate==0.13.0
sqlparse==0.2.4
statsd==3.3.0
stestr==1.0.0
stevedore==1.30.0
Tempita==0.5.2
tenacity==6.1.0
testrepository==0.0.20
testresources==2.0.1
testscenarios==0.4
testtools==2.4.0
tooz==1.65.0
traceback2==1.4.0
unittest2==1.1.0
urllib3==1.24.1
vine==1.2.0
voluptuous==0.11.5
warlock==1.3.0
wcwidth==0.1.7
WebOb==1.8.5
Werkzeug==0.14.1
whereto==0.4.0
wrapt==1.11.1

View File

@@ -33,7 +33,7 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-copyright = u'2015, Sahara Developers'
+copyright = '2015, Sahara Developers'
# Release do not need a version number in the title, they
# cover multiple versions.
@@ -142,8 +142,8 @@ htmlhelp_basename = 'SaharaSparkReleaseNotesdoc'
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'SaharaSparkReleaseNotes.tex',
-u'Sahara Spark Plugin Release Notes Documentation',
-u'Sahara Developers', 'manual'),
+'Sahara Spark Plugin Release Notes Documentation',
+'Sahara Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -173,8 +173,8 @@ latex_documents = [
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'saharasparkreleasenotes',
-u'Sahara Spark Plugin Release Notes Documentation',
-[u'Sahara Developers'], 1)
+'Sahara Spark Plugin Release Notes Documentation',
+['Sahara Developers'], 1)
]
# If true, show URL addresses after external links.
@@ -188,8 +188,8 @@ man_pages = [
# dir menu entry, description, category)
texinfo_documents = [
('index', 'SaharaSparkReleaseNotes',
-u'Sahara Spark Plugin Release Notes Documentation',
-u'Sahara Developers', 'SaharaSparkReleaseNotes',
+'Sahara Spark Plugin Release Notes Documentation',
+'Sahara Developers', 'SaharaSparkReleaseNotes',
'One line description of project.',
'Miscellaneous'),
]

View File

@@ -12,4 +12,3 @@ oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
oslo.utils>=3.33.0 # Apache-2.0
requests>=2.14.2 # Apache-2.0
sahara>=10.0.0.0b1
-six>=1.10.0 # MIT

View File

@@ -15,7 +15,6 @@
from oslo_config import cfg
from oslo_log import log as logging
-import six
from sahara.plugins import provisioning as p
from sahara.plugins import swift_helper as swift
@@ -184,7 +183,7 @@ PRIORITY_1_CONFS += CLUSTER_WIDE_CONFS
def _initialise_configs():
configs = []
-for service, config_lists in six.iteritems(XML_CONFS):
+for service, config_lists in XML_CONFS.items():
for config_list in config_lists:
for config in config_list:
if config['name'] not in HIDDEN_CONFS:
@@ -204,13 +203,13 @@ def _initialise_configs():
cfg.priority = 1
configs.append(cfg)
-for service, config_items in six.iteritems(ENV_CONFS):
-for name, param_format_str in six.iteritems(config_items):
+for service, config_items in ENV_CONFS.items():
+for name, param_format_str in config_items.items():
configs.append(p.Config(name, service, "node",
default_value=1024, priority=1,
config_type="int"))
-for service, config_items in six.iteritems(SPARK_CONFS):
+for service, config_items in SPARK_CONFS.items():
for item in config_items['OPTIONS']:
cfg = p.Config(name=item["name"],
description=item["description"],
@@ -279,7 +278,7 @@ def generate_xml_configs(configs, storage_path, nn_hostname, hadoop_port):
if is_swift_enabled(configs):
swft_def = SWIFT_DEFAULTS
swift_configs = extract_name_values(swift.get_swift_configs())
-for key, value in six.iteritems(swift_configs):
+for key, value in swift_configs.items():
if key not in cfg:
cfg[key] = value
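
six.iteritems(d) existed to avoid building an intermediate list on Python 2; on
Python 3, d.items() already returns a lazy view, so the rewritten loops above
iterate over exactly the same pairs. A minimal sketch with a stand-in dict (not
the real XML_CONFS/ENV_CONFS/SPARK_CONFS):

# Minimal sketch with a stand-in dict. .items() is a live view: fine for the
# read-only loops in _initialise_configs(), but the dict must not be resized
# while it is being iterated.
env_confs = {'HDFS': {'Heap Size': 'fmt-a'},
             'Spark': {'Executor Memory': 'fmt-b'}}
for service, config_items in env_confs.items():
    for name, param_format_str in config_items.items():
        print(service, name, param_format_str)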

View File

@@ -16,8 +16,6 @@
import os
-import six
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara.plugins import utils as plugin_utils
@@ -37,7 +35,7 @@ class EdpEngine(edp.PluginsSparkJobEngine):
get_config_value_or_default("Spark", "Spark home", self.cluster),
"bin/spark-submit")
self.plugin_params["deploy-mode"] = "client"
-port_str = six.text_type(
+port_str = str(
plugin_utils.get_config_value_or_default(
"Spark", "Master port", self.cluster))
self.plugin_params["master"] = ('spark://%(host)s:' + port_str)
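
On Python 3 six.text_type is simply an alias for str, so converting the integer
"Master port" config to text behaves exactly as before. Roughly what the master
URL construction does, sketched with a hypothetical host and port rather than
the cluster's real config values:

# Sketch only: hypothetical host/port standing in for the cluster config.
port = 7077                                   # stand-in for "Master port"
master_tpl = 'spark://%(host)s:' + str(port)  # str() == six.text_type() on Python 3
print(master_tpl % {'host': 'spark-master'})  # -> spark://spark-master:7077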

View File

@@ -15,8 +15,6 @@
import os
-import six
from sahara.plugins import context
from sahara.plugins import utils
from sahara_plugin_spark.i18n import _
@@ -94,7 +92,7 @@ def parse_dfs_report(cmd_output):
res = []
datanode_info = {}
-for i in six.moves.xrange(0, len(array)):
+for i in range(0, len(array)):
if array[i]:
idx = str.find(array[i], ':')
name = array[i][0:idx]
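
Python 3's range() is lazy, just like the six.moves.xrange it replaces, so the
loop's memory behaviour is unchanged. A self-contained sketch of the same
name/value splitting on made-up report lines (not real dfsadmin output):

# Sketch with made-up report lines mirroring the loop above.
array = ['Name: 10.0.0.1:50010', 'Decommission Status : Normal', '']
for i in range(0, len(array)):         # was: six.moves.xrange(0, len(array))
    if array[i]:
        idx = array[i].find(':')       # equivalent to str.find(array[i], ':')
        name = array[i][0:idx]
        value = array[i][idx + 1:].strip()
        print(name, '=', value)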