Fix pylint warnings happening in the build process to make Zuul builds clean
Warnings Fixed:
W1514: Using open without explicitly specifying an encoding (unspecified-encoding)
E0702: Raising str while only classes or instances are allowed (raising-bad-type)

Story: 2005892
Task: 43821
Change-Id: I2083fa4f8d994480c79ff557c56cf2f29c56c19f
Signed-off-by: Felipe Desiglo Ferrare <Felipe.DesigloFerrare@windriver.com>
commit 1516b57d62
parent 005b2d5dc2
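For reference, a minimal sketch of the two patterns being corrected (illustrative only; the helper names and messages are placeholders, not code from this change):

    # W1514 (unspecified-encoding): open() without an explicit encoding falls back
    # to the platform default, so each affected call now passes encoding='utf8'.
    def append_line(path, message):
        with open(path, mode='a', encoding='utf8') as f:
            f.write(message + '\n')

    # E0702 (raising-bad-type): Python 3 only allows raising exception classes or
    # instances, so a bare string is wrapped in an Exception instance instead.
    def fail(reason):
        raise Exception(reason)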
@@ -52,7 +52,6 @@ class MakeReport:
msg = "***Failure at test {}: {}".format(call.when, call.excinfo)
print(msg)
LOG.debug(msg + "\n***Details: {}".format(report.longrepr))
global tracebacks
tracebacks.append(str(report.longrepr))
self.test_results[call.when] = ['Failed', call.excinfo]
elif report.skipped:
@@ -83,7 +82,7 @@ class TestRes:

def _write_results(res_in_tests, test_name):
global tc_start_time
-with open(ProjVar.get_var("TCLIST_PATH"), mode='a') as f:
+with open(ProjVar.get_var("TCLIST_PATH"), mode='a', encoding='utf8') as f:
f.write('\n{}\t{}\t{}'.format(res_in_tests, tc_start_time, test_name))
global test_count
test_count += 1
@@ -510,7 +509,7 @@ def pytest_unconfigure(config):
round(TestRes.PASSNUM * 100 / total_exec, 2))
fail_rate = "{}%".format(
round(TestRes.FAILNUM * 100 / total_exec, 2))
-with open(tc_res_path, mode='a') as f:
+with open(tc_res_path, mode='a', encoding='utf8') as f:
# Append general info to result log
f.write('\n\nLab: {}\n'
'Build ID: {}\n'
@@ -534,7 +533,7 @@ def pytest_unconfigure(config):
f.write('------------\nSkipped: {}'.format(TestRes.SKIPNUM))

LOG.info("Test Results saved to: {}".format(tc_res_path))
-with open(tc_res_path, 'r') as fin:
+with open(tc_res_path, 'r', encoding='utf8') as fin:
print(fin.read())
except Exception as e:
LOG.exception(
@@ -330,7 +330,7 @@ def _scp_on_local(cmd, remote_password, logdir=None, timeout=900):
logdir = logdir or ProjVar.get_var('LOG_DIR')
logfile = os.path.join(logdir, 'scp_files.log')

-with open(logfile, mode='a') as f:
+with open(logfile, mode='a', encoding='utf8') as f:
local_child = pexpect.spawn(command=cmd, encoding='utf-8', logfile=f)
index = local_child.expect([pexpect.EOF, 'assword:', 'yes/no'],
timeout=timeout)
@@ -571,7 +571,7 @@ def write_to_file(file_path, content, mode='a'):

"""
time_stamp = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
-with open(file_path, mode=mode) as f:
+with open(file_path, mode=mode, encoding='utf8') as f:
f.write(
'\n-----------------[{}]-----------------\n{}\n'.format(time_stamp,
content))
@@ -812,7 +812,7 @@ def get_yaml_data(filepath):
Return(json):
returns the json data
"""
-with open(filepath, 'r') as f:
+with open(filepath, 'r', encoding='utf8') as f:
data = yaml.safe_load(f)
return data

@@ -830,7 +830,7 @@ def write_yaml_data_to_file(data, filename, directory=None):
if directory is None:
directory = ProjVar.get_var('LOG_DIR')
src_path = "{}/{}".format(directory, filename)
-with open(src_path, 'w') as f:
+with open(src_path, 'w', encoding='utf8') as f:
yaml.dump(data, f)
return src_path

@@ -4180,7 +4180,7 @@ def _create_cloud_init_if_conf(guest_os, nics_num):
# else:
# shell = '/bin/bash'

-with open(tmp_file, mode='a') as f:
+with open(tmp_file, mode='a', encoding='utf8') as f:
f.write("#cloud-config\n")

if new_user is not None:
@@ -5279,7 +5279,7 @@ def collect_guest_logs(vm_id):
else:
output = vm_ssh.exec_cmd('tail -n 200 {}'.format(log_path),
fail_ok=False)[1]
-with open(local_log_path, mode='w') as f:
+with open(local_log_path, mode='w', encoding='utf8') as f:
f.write(output)
return

@@ -487,7 +487,7 @@ def get_system_mode_from_lab_info(lab, multi_region_lab=False,
def add_ping_failure(test_name):
file_path = '{}{}'.format(ProjVar.get_var('PING_FAILURE_DIR'),
'ping_failures.txt')
-with open(file_path, mode='a') as f:
+with open(file_path, mode='a', encoding='utf8') as f:
f.write(test_name + '\n')


@@ -125,7 +125,7 @@ def test_cpu_pol_vm_actions(flv_vcpus, cpu_pol, pol_source, boot_source):
**image_meta)[1]
if boot_source == 'volume':
LOG.tc_step("Create a volume from image")
-source_id = cinder_helper.create_volume(name='cpu_pol'.format(cpu_pol),
+source_id = cinder_helper.create_volume(name='cpu_pol_{}'.format(cpu_pol),
source_id=image_id)[1]
ResourceCleanup.add('volume', source_id)
else:
@@ -215,8 +215,6 @@ def test_ima_no_event(operation, file_path):

"""


global files_to_delete
start_time = common.get_date_in_format()
source_file = file_path
con_ssh = ControllerClient.get_active_controller()
@@ -296,7 +294,6 @@ def test_ima_event_generation(operation, file_path):
- Check IMA violation event is logged

"""
global files_to_delete

con_ssh = ControllerClient.get_active_controller()
start_time = common.get_date_in_format()
@@ -315,8 +312,7 @@ def test_ima_event_generation(operation, file_path):
execute_cmd = "{} -p".format(dest_file)
else:
LOG.tc_step("Append to copy of monitored file")
-cmd = 'echo "output" | sudo -S tee -a /usr/sbin/TEMP'.format(
-HostLinuxUser.get_password())
+cmd = 'echo "output" | sudo -S tee -a /usr/sbin/TEMP'
con_ssh.exec_cmd(cmd, fail_ok=False)
LOG.tc_step("Execute modified file")
con_ssh.exec_sudo_cmd(dest_file)
@@ -32,11 +32,11 @@ def modify_yaml(file_dir, file_name, str_to_add, hugepage_value):
Return(str):
returns the file_dir and filename with modified values
"""
-with open("{}/{}".format(file_dir, file_name), 'r') as f:
+with open("{}/{}".format(file_dir, file_name), 'r', encoding='utf8') as f:
data = yaml.safe_load(f)
data['spec']['containers'][0]['resources']['limits'][str_to_add] = hugepage_value
newfile = "hugepages_pod_{}.yaml".format(hugepage_value)
-with open("{}/{}".format(ProjVar.get_var('LOG_DIR'), newfile), 'w') as f:
+with open("{}/{}".format(ProjVar.get_var('LOG_DIR'), newfile), 'w', encoding='utf8') as f:
yaml.dump(data, f)
return ProjVar.get_var('LOG_DIR'), newfile

@@ -87,7 +87,6 @@ def snapshot_from_instance(request, create_flavour_and_image, volume_for_instanc

@fixture(scope="module")
def launch_instances(create_flavour_and_image, create_network_sanity, snapshot_from_instance):
global VM_IDS
net_id_list = list()
net_id_list.append({"net-id": create_network_sanity})
host = system_helper.get_active_controller_name()
@@ -77,7 +77,6 @@ def volume_from_instance(request, create_flavour_and_image):

@fixture(scope="module")
def launch_instances(create_flavour_and_image, create_network_sanity, volume_from_instance):
global VM_IDS
net_id_list = list()
net_id_list.append({"net-id": create_network_sanity})
host = system_helper.get_active_controller_name()
@@ -92,7 +92,7 @@ class LocalHostClient(SSHClient):

self.logpath = self._get_logpath()
if self.logpath:
-self.session.logfile = open(self.logpath, 'w+')
+self.session.logfile = open(self.logpath, 'w+', encoding='utf8')

# Set prompt for matching
self.set_prompt(prompt)
@@ -164,7 +164,7 @@ class SSHClient:
self.logpath = self._get_logpath()

if self.logpath:
-self.session.logfile = open(self.logpath, 'a+')
+self.session.logfile = open(self.logpath, 'a+', encoding='utf8')

# Login
self.session.login(self.host, self.user, self.password,
@@ -542,7 +542,7 @@ class TelnetClient(Telnet):
def get_log_file(self, log_dir):

if log_dir:
-logfile = open(log_dir, 'a')
+logfile = open(log_dir, 'a', encoding='utf8')
else:
logfile = None

@@ -129,10 +129,10 @@ class BaseWebObject:
elt = element() if hasattr(element, '__call__') else element
except:
return False
-for text in texts: #
+for text in texts:
if self._is_text_visible(elt, 'Error'): #
-s = 'error' #
-raise s #
+s = 'error'
+raise Exception(s)
if self._is_text_visible(elt, text):
return text
return False
@@ -66,7 +66,7 @@ class MetadatadefinitionsPage(basepage.BasePage):
:return = json data container
"""
try:
-with open(namespace_template_name, 'r') as template:
+with open(namespace_template_name, 'r', encoding='utf8') as template:
json_template = json.load(template)
except Exception:
raise EOFError("Can not read template file: [{0}]".format(
@@ -55,7 +55,7 @@ class ApiAccessPage(basepage.BasePage):
self._wait_until(
lambda _: len(self.list_of_files(directory, template)) > 0)
file_name = self.list_of_files(directory, template)[0]
-with open(join(directory, file_name)) as cred_file:
+with open(join(directory, file_name), encoding='utf8') as cred_file:
content = cred_file.read()
username_re = r'export OS_USERNAME="([^"]+)"'
if version == 2:
@@ -35,7 +35,7 @@ class VideoRecorder(object):
LOG.warning('Video recording is running already')
return

-fnull = open(os.devnull, 'w')
+fnull = open(os.devnull, 'w', encoding='utf8')
LOG.info('Record video via %s', ' '.join(self._cmd))
self._popen = subprocess.Popen(self._cmd, stdout=fnull, stderr=fnull)
self.is_launched = True
@@ -30,7 +30,7 @@ def _get_failed_test_names(log_dir):
if not path.exists(test_res_path):
return []

-with open(test_res_path, 'r') as file:
+with open(test_res_path, 'r', encoding='utf8') as file:
failed_tests = []

for line in file:
@@ -68,7 +68,7 @@ def get_tracebacks_from_pytestlog(log_dir, traceback_lines=10,
current_failure = None
next_failure = failed_tests.pop(0)
traceback_for_test = []
-with open(path.join(log_dir, 'pytestlog.log'), 'r') as file:
+with open(path.join(log_dir, 'pytestlog.log'), 'r', encoding='utf8') as file:
for line in file:
if current_failure is not None:
if re.match(new_test_pattern, line):
@@ -151,8 +151,8 @@ def parse_test_steps(log_dir, failures_only=True):
if failures_only and not failed_tests:
return

-with open("{}/TIS_AUTOMATION.log".format(log_dir), 'r') as file, \
-open("{}/test_steps.log".format(log_dir), 'w') as log:
+with open("{}/TIS_AUTOMATION.log".format(log_dir), 'r', encoding='utf8') as file, \
+open("{}/test_steps.log".format(log_dir), 'w', encoding='utf8') as log:
for line in file:

if test_steps_length >= 500:
@@ -132,7 +132,7 @@ def update_config_ini(**kwargs):
# -----------------------------------------------------------------------

if count != 0:
-with open(config_ini, 'w') as configfile:
+with open(config_ini, 'w', encoding='utf8') as configfile:
configurations.write(configfile)
status = True
message = '{}: was updated successfully'.format(os.path.basename(
@@ -410,7 +410,7 @@ def get_controllers_ip(env, config_file, config_type, lab_file):
"""

# Read Configurtion File
-conf = yaml.safe_load(open(config_file))
+conf = yaml.safe_load(open(config_file, encoding='utf8'))

cont_data = {}
# Get Controllers IP's
@@ -423,7 +423,7 @@ def get_controllers_ip(env, config_file, config_type, lab_file):

if env == 'baremetal':
# Get phyisical interfaces
-conf_lab = yaml.safe_load(open(lab_file))
+conf_lab = yaml.safe_load(open(lab_file, encoding='utf8'))

cont_data['OAM_IF'] = conf_lab['nodes']['controller-0']['oam_if']
cont_data['MGMT_IF'] = conf_lab['nodes']['controller-0']['mgmt_if']
@@ -31,7 +31,7 @@ PROMPT = '$'
LOG_FILENAME = 'iso_setup_baremetal.log'
LOG_PATH = config.get('general', 'LOG_PATH')
LOG = logger.setup_logging('iso_setup_baremetal',
-log_file='%s/%s'.format(LOG_PATH, LOG_FILENAME),
+log_file='{}/{}'.format(LOG_PATH, LOG_FILENAME),
console_log=False)


@@ -133,9 +133,9 @@ class PxeServer(object):
@staticmethod
def check_pxe_services():
"""This function is intended to restart DHCP service

DHCP service needs to be restarted in order to grab the changes on the
dhcp config file"""

LOG.info('Checking PXE needed services')
services = ['dhcpd', 'tftp', 'httpd']
@@ -221,7 +221,7 @@ class PxeServer(object):
linuxefi=grub_lines['linuxefi'],
initrdefi=grub_lines['initrdefi']))
grub_timeout = 'timeout=5\n'
-with open(os.path.join(self.tftp_dir, 'grub.cfg'), 'w') as grub_file:
+with open(os.path.join(self.tftp_dir, 'grub.cfg'), 'w', encoding='utf8') as grub_file:
grub_file.writelines(grub_timeout)
grub_file.write(grub_entry)

@@ -352,7 +352,7 @@ def analyze_grub(grub_cfg_file):
Get linuxefi command and initrdefi command from grub_dict according to
selected option in config file
"""
-with open(grub_cfg_file, 'r') as grub:
+with open(grub_cfg_file, 'r', encoding='utf8') as grub:
lines = grub.readlines()
cmd_lines = list()

@@ -397,7 +397,7 @@ def install_iso_master_controller():

nodes_file = os.path.join(os.environ['PYTHONPATH'], 'baremetal',
'baremetal_setup.yaml')
-nodes = yaml.safe_load(open(nodes_file))
+nodes = yaml.safe_load(open(nodes_file, encoding='utf8'))

# Update config.ini with OAM and MGMT interfaces
network_interfaces = []
@@ -423,7 +423,7 @@ def get_controller0_ip():

nodes_file = os.path.join(THIS_PATH, '..', 'BareMetal',
'installation_setup.yaml')
-nodes = yaml.load(open(nodes_file))
+nodes = yaml.load(open(nodes_file, encoding='utf8'))
controller_0 = nodes['controller-0']
master_controller = Node(controller_0)

@@ -437,7 +437,7 @@ def config_controller(config_file):
'iso_installer', 'CONFIG_CONTROLLER_TIMEOUT'))
nodes_file = os.path.join(os.environ['PYTHONPATH'], 'baremetal',
'baremetal_setup.yaml')
-nodes = yaml.safe_load(open(nodes_file))
+nodes = yaml.safe_load(open(nodes_file, encoding='utf8'))
controller_0 = nodes['nodes']['controller-0']
master_controller = Node(controller_0)
serial_cmd = ('ipmitool -I lanplus -H {node_bmc_ip} -U {node_bmc_user} '
@@ -475,7 +475,7 @@ def install_secondary_nodes():

nodes_file = os.path.join(THIS_PATH, '..', 'BareMetal',
'installation_setup.yml')
-nodes = yaml.load(open(nodes_file))
+nodes = yaml.load(open(nodes_file, encoding='utf8'))

# Removing controller-0 from Nodes
controller_0 = nodes.pop('controller-0')
@@ -577,7 +577,7 @@ def setup_qemu(iso_file, configuration_file):
check_preconditions()

# loading all the configurations from yaml file
-configurations = yaml.safe_load(open(configuration_file))
+configurations = yaml.safe_load(open(configuration_file, encoding='utf8'))

# fixme(Humberto): check_system_resources is commented out due
# check is giving problems when configuring an instance on qemu virtual
@@ -17,7 +17,7 @@ def setup_logging_using_config(name, config_file):
logger
:return: returns the instance of the logger already configured
"""
-with open(config_file, 'r') as file_manager:
+with open(config_file, 'r', encoding='utf8') as file_manager:
config = yaml.safe_load(file_manager.read())
logging.config.dictConfig(config)
