Merge "Delete logs on master after node or env deletion"

This commit is contained in:
Jenkins 2015-05-05 09:45:34 +00:00 committed by Gerrit Code Review
commit a56a3759ce
9 changed files with 253 additions and 82 deletions

View File

@ -39,6 +39,7 @@ from nailgun.logger import logger
from nailgun.network import connectivity_check
from nailgun.network import utils as net_utils
from nailgun.task.helpers import TaskHelper
from nailgun.utils import logs as logs_utils
class NailgunReceiver(object):
@ -93,7 +94,7 @@ class NailgunReceiver(object):
def get_node_id(n):
return n.get('id', int(n.get('uid')))
Node.delete_by_ids([get_node_id(n) for n in nodes])
nodes_to_delete_ids = [get_node_id(n) for n in nodes]
if(len(inaccessible_nodes) > 0):
inaccessible_node_ids = [
@ -102,7 +103,13 @@ class NailgunReceiver(object):
logger.warn(u'Nodes %s not answered by RPC, removing from db',
inaccessible_nodes)
Node.delete_by_ids(inaccessible_node_ids)
nodes_to_delete_ids.extend(inaccessible_node_ids)
for node in objects.NodeCollection.filter_by_id_list(
None, nodes_to_delete_ids):
logs_utils.delete_node_logs(node)
Node.delete_by_ids(nodes_to_delete_ids)
for node in error_nodes:
node_db = objects.Node.get_by_uid(node['uid'])

View File

@ -15,8 +15,6 @@
# under the License.
import datetime
import os
import shutil
import six
import web
@ -25,12 +23,10 @@ from sqlalchemy.orm import exc
from nailgun import consts
from nailgun.db import db
from nailgun.db.sqlalchemy.models import IPAddr
from nailgun.db.sqlalchemy.models import Node
from nailgun.db.sqlalchemy.models import Task
from nailgun.errors import errors
from nailgun.logger import logger
from nailgun.settings import settings
from nailgun.statistics.fuel_statistics.tasks_params_white_lists \
import task_output_white_list
@ -58,71 +54,6 @@ tasks_names_actions_groups_mapping = {
class TaskHelper(object):
# TODO(aroma): move it to utils module
    @classmethod
    def prepare_syslog_dir(cls, node, admin_net_id, prefix=None):
        """Create/refresh the remote-syslog directory layout for a node.

        Renames the bootstrap-era directory (named after the node IP) to
        a directory named after the node FQDN, backing up any
        pre-existing FQDN directory first, then points every
        admin-network IP of the node at the FQDN directory via symlinks.
        Finally rsyslog is sent SIGHUP so it reopens its log files.

        :param node: Node DB object (fqdn, ip and id must be populated)
        :param admin_net_id: id of the admin network group whose IP
            addresses get symlinked
        :param prefix: base syslog directory; defaults to
            settings.SYSLOG_DIR
        """
        logger.debug("Preparing syslog directories for node: %s", node.fqdn)
        if not prefix:
            prefix = settings.SYSLOG_DIR
        logger.debug("prepare_syslog_dir prefix=%s", prefix)

        # old: bootstrap dir named after the node IP
        # bak: backup location for a pre-existing FQDN dir
        # new: target dir named after the node FQDN
        old = os.path.join(prefix, str(node.ip))
        bak = os.path.join(prefix, "%s.bak" % str(node.fqdn))
        new = os.path.join(prefix, str(node.fqdn))

        # one symlink path per admin-network IP address of this node
        links = map(
            lambda i: os.path.join(prefix, i.ip_addr),
            db().query(IPAddr.ip_addr).
            filter_by(node=node.id).
            filter_by(network=admin_net_id).all()
        )

        logger.debug("prepare_syslog_dir old=%s", old)
        logger.debug("prepare_syslog_dir new=%s", new)
        logger.debug("prepare_syslog_dir bak=%s", bak)
        logger.debug("prepare_syslog_dir links=%s", str(links))

        # backup directory if it exists
        if os.path.isdir(new):
            logger.debug("New %s already exists. Trying to backup", new)
            if os.path.islink(bak):
                logger.debug("Bak %s already exists and it is link. "
                             "Trying to unlink", bak)
                os.unlink(bak)
            elif os.path.isdir(bak):
                logger.debug("Bak %s already exists and it is directory. "
                             "Trying to remove", bak)
                shutil.rmtree(bak)
            os.rename(new, bak)

        # rename bootstrap directory into fqdn
        if os.path.islink(old):
            logger.debug("Old %s exists and it is link. "
                         "Trying to unlink", old)
            os.unlink(old)
        if os.path.isdir(old):
            logger.debug("Old %s exists and it is directory. "
                         "Trying to rename into %s", old, new)
            os.rename(old, new)
        else:
            logger.debug("Creating %s", new)
            os.makedirs(new)

        # creating symlinks
        for l in links:
            if os.path.islink(l) or os.path.isfile(l):
                logger.debug("%s already exists. "
                             "Trying to unlink", l)
                os.unlink(l)
            if os.path.isdir(l):
                logger.debug("%s already exists and it directory. "
                             "Trying to remove", l)
                shutil.rmtree(l)
            logger.debug("Creating symlink %s -> %s", l, new)
            os.symlink(str(node.fqdn), l)

        # make rsyslog reopen its files so it starts writing to the new
        # layout immediately
        os.system("/usr/bin/pkill -HUP rsyslog")
# TODO(aroma): move this function to utils module
@classmethod
def calculate_parent_task_progress(cls, subtasks_list):

View File

@ -45,6 +45,7 @@ from nailgun.orchestrator import stages
from nailgun.settings import settings
from nailgun.task.fake import FAKE_THREADS
from nailgun.task.helpers import TaskHelper
from nailgun.utils import logs as logs_utils
from nailgun.utils.restrictions import VmwareAttributesRestriction
from nailgun.utils.zabbix import ZabbixManager
@ -256,12 +257,7 @@ class ProvisionTask(object):
for node in nodes_to_provisioning:
if settings.FAKE_TASKS or settings.FAKE_TASKS_AMQP:
continue
admin_net_id = objects.Node.get_network_manager(
node
).get_admin_network_group_id(node.id)
TaskHelper.prepare_syslog_dir(node, admin_net_id)
logs_utils.prepare_syslog_dir(node)
rpc_message = make_astute_message(
task,

View File

@ -991,7 +991,7 @@ class BaseIntegrationTest(BaseTestCase):
@classmethod
def setUpClass(cls):
super(BaseIntegrationTest, cls).setUpClass()
nailgun.task.task.DeploymentTask._prepare_syslog_dir = mock.Mock()
nailgun.task.task.logs_utils.prepare_syslog_dir = mock.Mock()
def _wait_for_threads(self):
# wait for fake task thread termination
@ -1017,7 +1017,7 @@ class BaseAuthenticationIntegrationTest(BaseIntegrationTest):
cls.app = app.TestApp(build_app(db_driver=test_db_driver).wsgifunc(
ConnectionMonitorMiddleware, NailgunFakeKeystoneAuthMiddleware))
syncdb()
nailgun.task.task.DeploymentTask._prepare_syslog_dir = mock.Mock()
nailgun.task.task.logs_utils.prepare_syslog_dir = mock.Mock()
class BaseUnitTest(TestCase):

View File

@ -15,6 +15,7 @@
# under the License.
import datetime
import mock
import random
import uuid
@ -1235,8 +1236,16 @@ class TestConsumer(BaseIntegrationTest):
'status': 'ready',
'nodes': [{'uid': node1.id},
{'uid': str(node2.id)}]}
with mock.patch(
'nailgun.rpc.receiver.logs_utils.delete_node_logs') \
as mdelete_node_logs:
self.receiver.remove_nodes_resp(**kwargs)
self.assertEqual(len(self.env.nodes), mdelete_node_logs.call_count)
test_nodes = [arg[0][0] for arg in mdelete_node_logs.call_args_list]
self.assertItemsEqual(self.env.nodes, test_nodes)
self.receiver.remove_nodes_resp(**kwargs)
self.db.refresh(task)
self.assertEqual(task.status, "ready")
nodes_db = self.db.query(Node).all()

View File

@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os
import shutil
import tempfile
from nailgun.db.sqlalchemy.models import Cluster
from nailgun import objects
from nailgun.test.base import BaseTestCase
from nailgun.utils import logs as logs_utils
class TestNodeLogsUtils(BaseTestCase):
    """Unit tests for the nailgun.utils.logs helpers."""

    def create_env(self, nodes):
        # Build a cluster from the given node descriptions and make the
        # nodes deployment-ready so that fqdn/ip are populated.
        cluster = self.env.create(nodes_kwargs=nodes)
        cluster_db = self.db.query(Cluster).get(cluster['id'])
        objects.NodeCollection.prepare_for_deployment(cluster_db.nodes)
        self.db.flush()
        return cluster_db

    def test_generate_log_paths_for_node(self):
        """generate_log_paths_for_node returns all four path groups."""
        cluster = self.create_env([{'roles': ['controller']}])
        node = cluster.nodes[0]

        prefix = "/var/log/remote"
        log_paths = logs_utils.generate_log_paths_for_node(node, prefix)

        # exactly the four expected keys, no more
        self.assertItemsEqual(
            ['links', 'old', 'bak', 'new'],
            log_paths.keys())

        # one admin-network IP -> one symlink path, named after the IP
        self.assertEqual(len(log_paths['links']), 1)
        self.assertEqual(
            "{prefix}/{node_ip}".format(prefix=prefix, node_ip=node.ip),
            log_paths['links'][0])

        self.assertEqual(
            "{prefix}/{node_ip}".format(prefix=prefix, node_ip=node.ip),
            log_paths['old'])
        self.assertEqual(
            "{prefix}/{node_fqdn}".format(prefix=prefix, node_fqdn=node.fqdn),
            log_paths['new'])
        self.assertEqual(
            "{prefix}/{node_fqdn}.bak".format(prefix=prefix,
                                              node_fqdn=node.fqdn),
            log_paths['bak'])

    def test_delete_node_logs(self):
        """delete_node_logs removes symlink, directory and file logs."""
        prefix = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, prefix)

        cluster = self.create_env([{'roles': ['controller']}])
        node = cluster.nodes[0]

        log_paths = logs_utils.generate_log_paths_for_node(node, prefix)

        # create one artefact of each kind: symlink, directory, file
        link = log_paths['links'][0]
        os.symlink(log_paths['old'], link)

        folder = log_paths['new']
        os.mkdir(folder)

        file_ = log_paths['bak']
        with open(file_, 'w') as f:
            f.write("RANDOMCONTENT")

        logs_utils.delete_node_logs(node, prefix)

        self.assertTrue(
            all(not os.path.exists(path) for path in [link, folder, file_]))

    @mock.patch('os.path.islink', side_effect=OSError)
    def test_delete_node_no_existing_logs(self, _):
        """Only checks whether errors are passing silently.

        That's why there's no assertions, just expecting no errors.
        """
        prefix = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, prefix)

        cluster = self.create_env([{'roles': ['controller']}])
        node = cluster.nodes[0]

        logs_utils.delete_node_logs(node, prefix)

View File

@ -16,6 +16,7 @@ import collections
import glob
import os
import re
import shutil
import string
import six
import yaml
@ -30,6 +31,24 @@ from nailgun.logger import logger
from nailgun.settings import settings
def remove_silently(path):
    """Delete *path* from the file system, whatever kind of entry it is.

    Symlinks are unlinked, regular files removed and directories deleted
    recursively. Any OSError raised along the way is logged and
    suppressed, so missing or inaccessible paths never propagate.

    :param path: path to the element to remove
    """
    # (predicate, remover) pairs; order matters -- islink must be
    # checked first, since a symlink can also look like a file or dir.
    dispatch = (
        (os.path.islink, os.unlink),
        (os.path.isfile, os.remove),
        (os.path.isdir, shutil.rmtree),
    )
    try:
        for predicate, remover in dispatch:
            if predicate(path):
                remover(path)
                break
    except OSError as e:
        logger.exception(e)
def dict_merge(a, b):
'''recursively merges dict's. not just simple a['key'] = b['key'], if
both a and b have a key whose value is a dict then dict_merge is called

View File

@ -0,0 +1,112 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shutil
from nailgun import consts
from nailgun.db import db
from nailgun.db.sqlalchemy.models import IPAddr
from nailgun.db.sqlalchemy.models import NetworkGroup
from nailgun.db.sqlalchemy.models import Node
from nailgun.logger import logger
from nailgun.settings import settings
from nailgun.utils import remove_silently
def prepare_syslog_dir(node, prefix=settings.SYSLOG_DIR):
    """Create/refresh the remote-syslog directory layout for *node*.

    Renames the bootstrap-era directory (named after the node IP) to a
    directory named after the node FQDN, backing up any pre-existing
    FQDN directory first, then points every admin-network IP of the node
    at the FQDN directory via symlinks. Finally rsyslog is sent SIGHUP
    so it reopens its log files.

    :param node: Node DB object (fqdn, ip and id must be populated)
    :param prefix: base syslog directory
        (NOTE: the default is bound once at import time)
    """
    logger.debug("Preparing syslog directories for node: %s", node.fqdn)
    logger.debug("prepare_syslog_dir prefix=%s", prefix)

    log_paths = generate_log_paths_for_node(node, prefix)

    # links: one symlink path per admin-network IP of the node
    # old: bootstrap dir named after the node IP
    # bak: backup location for a pre-existing FQDN dir
    # new: target dir named after the node FQDN
    links = log_paths['links']
    old = log_paths['old']
    bak = log_paths['bak']
    new = log_paths['new']

    logger.debug("prepare_syslog_dir old=%s", old)
    logger.debug("prepare_syslog_dir new=%s", new)
    logger.debug("prepare_syslog_dir bak=%s", bak)
    logger.debug("prepare_syslog_dir links=%s", str(links))

    # backup directory if it exists
    if os.path.isdir(new):
        logger.debug("New %s already exists. Trying to backup", new)
        if os.path.islink(bak):
            logger.debug("Bak %s already exists and it is link. "
                         "Trying to unlink", bak)
            os.unlink(bak)
        elif os.path.isdir(bak):
            logger.debug("Bak %s already exists and it is directory. "
                         "Trying to remove", bak)
            shutil.rmtree(bak)
        os.rename(new, bak)

    # rename bootstrap directory into fqdn
    if os.path.islink(old):
        logger.debug("Old %s exists and it is link. "
                     "Trying to unlink", old)
        os.unlink(old)
    if os.path.isdir(old):
        logger.debug("Old %s exists and it is directory. "
                     "Trying to rename into %s", old, new)
        os.rename(old, new)
    else:
        logger.debug("Creating %s", new)
        os.makedirs(new)

    # creating symlinks
    for l in links:
        if os.path.islink(l) or os.path.isfile(l):
            logger.debug("%s already exists. "
                         "Trying to unlink", l)
            os.unlink(l)
        if os.path.isdir(l):
            logger.debug("%s already exists and it directory. "
                         "Trying to remove", l)
            shutil.rmtree(l)
        logger.debug("Creating symlink %s -> %s", l, new)
        os.symlink(str(node.fqdn), l)

    # make rsyslog reopen its files so it starts writing to the new
    # layout immediately
    os.system("/usr/bin/pkill -HUP rsyslog")
def generate_log_paths_for_node(node, prefix):
    """Build the syslog paths associated with *node* under *prefix*.

    :param node: Node DB object (fqdn, ip and id must be populated)
    :param prefix: base syslog directory
    :returns: dict with keys 'links' (one path per admin-network IP of
        the node), 'old' (IP-named dir), 'bak' (FQDN backup dir) and
        'new' (FQDN-named dir)
    """
    # All IP addresses assigned to this node on the admin network.
    admin_ips = db().query(IPAddr.ip_addr) \
        .join(Node) \
        .join(NetworkGroup) \
        .filter(Node.id == node.id) \
        .filter(NetworkGroup.name == consts.NETWORKS.fuelweb_admin)

    fqdn = str(node.fqdn)
    return {
        'links': [os.path.join(prefix, ip.ip_addr) for ip in admin_ips],
        'old': os.path.join(prefix, str(node.ip)),
        'bak': os.path.join(prefix, "%s.bak" % fqdn),
        'new': os.path.join(prefix, fqdn),
    }
def delete_node_logs(node, prefix=settings.SYSLOG_DIR):
    """Remove all on-master log artefacts that belong to *node*.

    Deletes the admin-network IP symlinks plus the 'old', 'bak' and
    'new' entries produced by prepare_syslog_dir(). Removal is
    best-effort: missing paths are skipped and OSErrors are swallowed
    inside remove_silently().

    :param node: Node DB object whose logs should be removed
    :param prefix: base syslog directory
        (NOTE: the default is bound once at import time)
    """
    node_logs = generate_log_paths_for_node(node, prefix)
    # symlinks first, then old/bak/new; list() keeps this py2/py3-safe
    log_paths = node_logs.pop('links') + list(node_logs.values())

    logger.debug("Deleting logs for removed environment's nodes")

    for log_path in log_paths:
        # lexists, not exists: a symlink whose target directory was
        # already removed (e.g. on an earlier iteration of this loop)
        # is dangling, and os.path.exists() would report it as absent,
        # leaking the link on the master node.
        if os.path.lexists(log_path):
            logger.debug('delete_node_logs log_path="%s"', log_path)
            remove_silently(log_path)

View File

@ -407,7 +407,7 @@
"title": "Actions",
"rename_environment": "Rename Environment",
"delete_environment": "Delete Environment",
"alert_delete": "Clean up each node and return it to the pool of unallocated nodes.",
"alert_delete": "Clean up each node and return it to the pool of unallocated nodes. All logs from deleted nodes will also be removed.",
"reset_environment": "Reset Environment",
"reset_environment_description": "Enables you to modify the settings for the environment and redeploy.",
"reset_environment_warning": "This action will reset all existing nodes to their pre-deployment state, deleting your existing environment.",
@ -717,7 +717,7 @@
"remove_cluster": {
"title": "Delete Environment",
"incomplete_actions_text": "There are incompleted actions. Removing of the environment may fail and lead to inconsistent state.",
"node_returned_text": "Each node will be cleaned up and returned to the pool of unallocated nodes.",
"node_returned_text": "Each node will be cleaned up and returned to the pool of unallocated nodes. All logs from deleted nodes will also be removed.",
"default_text": "Are you sure you want to delete this environment?",
"enter_environment_name": "Please type in the name of the environment \"__name__\" to confirm."
},