# Copyright (c) 2016 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import contextlib
import datetime
import functools
import os
import ssl
import time
from urllib import parse

from oslo_log import log as logging
from oslo_serialization import jsonutils
import pytz
import requests
from requests import adapters
import urllib3

from kuryr.lib._i18n import _
from kuryr_kubernetes import config
from kuryr_kubernetes import constants
from kuryr_kubernetes import exceptions as exc
from kuryr_kubernetes import utils

CONF = config.CONF
LOG = logging.getLogger(__name__)


class K8sClient(object):
    # REVISIT(ivc): replace with python-k8sclient if it could be extended
    # with 'WATCH' support

    def __init__(self, base_url):
        self._base_url = base_url
        cert_file = config.CONF.kubernetes.ssl_client_crt_file
        key_file = config.CONF.kubernetes.ssl_client_key_file
        ca_crt_file = config.CONF.kubernetes.ssl_ca_crt_file
        self.verify_server = config.CONF.kubernetes.ssl_verify_server_crt
        token_file = config.CONF.kubernetes.token_file
        self.token = None
        self.cert = (None, None)
        self.are_events_enabled = config.CONF.kubernetes.use_events

        # Use bigger connection pools, as we're running with up to 1000
        # green threads.
        self.session = requests.Session()
        prefix = '%s://' % parse.urlparse(base_url).scheme
        self.session.mount(prefix, adapters.HTTPAdapter(pool_maxsize=1000))
        if token_file:
            if os.path.exists(token_file):
                with open(token_file, 'r') as f:
                    self.token = f.readline().rstrip('\n')
            else:
                raise RuntimeError(
                    _("Unable to find token_file: %s") % token_file)
        else:
            if cert_file and not os.path.exists(cert_file):
                raise RuntimeError(
                    _("Unable to find ssl cert_file: %s") % cert_file)
            if key_file and not os.path.exists(key_file):
                raise RuntimeError(
                    _("Unable to find ssl key_file: %s") % key_file)
            self.cert = (cert_file, key_file)
        if self.verify_server:
            if not ca_crt_file:
                raise RuntimeError(
                    _("ssl_ca_crt_file cannot be None"))
            elif not os.path.exists(ca_crt_file):
                raise RuntimeError(
                    _("Unable to find ca cert_file: %s") % ca_crt_file)
            else:
                self.verify_server = ca_crt_file

        # Let's set up defaults for our Session.
        self.session.cert = self.cert
        self.session.verify = self.verify_server
        if self.token:
            self.session.headers['Authorization'] = f'Bearer {self.token}'
        # NOTE(dulek): Seems like this is the only way to set it globally.
        self.session.request = functools.partial(
            self.session.request, timeout=(
                CONF.kubernetes.watch_connection_timeout,
                CONF.kubernetes.watch_read_timeout))

    def _raise_from_response(self, response):
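        """Raise a Kuryr exception matching an unsuccessful response.

        Well-known K8s API status codes (404, 409, 403 and 422) are
        translated into the corresponding kuryr_kubernetes exceptions;
        any other non-2xx response raises K8sClientException.
        """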
        if response.status_code == requests.codes.not_found:
            raise exc.K8sResourceNotFound(response.text)
        if response.status_code == requests.codes.conflict:
            raise exc.K8sConflict(response.text)
        if response.status_code == requests.codes.forbidden:
            if 'because it is being terminated' in response.json()['message']:
                raise exc.K8sNamespaceTerminating(response.text)
            raise exc.K8sForbidden(response.text)
        if response.status_code == requests.codes.unprocessable_entity:
            # NOTE(gryf): on the k8s API, code 422 can also mean Forbidden,
            # narrowed down to FieldValueForbidden. Perhaps there are other
            # cases where unprocessable entity errors are thrown as well.
            if ('FieldValueForbidden' in response.text and
                    'Forbidden' in response.json()['message']):
                raise exc.K8sFieldValueForbidden(response.text)
            raise exc.K8sUnprocessableEntity(response.text)
        if not response.ok:
            raise exc.K8sClientException(response.text)

    def get(self, path, json=True, headers=None):
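        """GET a resource or a list of resources from the K8s API.

        With json=True (the default) the decoded response is returned as
        a dict and, for list results, `kind` and `apiVersion` are
        backfilled onto each item, as the API doesn't set them there.
        Otherwise the raw response text is returned.
        """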
        LOG.debug("Get %(path)s", {'path': path})
        url = self._base_url + path
        response = self.session.get(url, headers=headers)
        self._raise_from_response(response)

        if json:
            result = response.json()
            kind = result['kind']

            api_version = result.get('apiVersion')
            if not api_version:
                api_version = utils.get_api_ver(path)

            # Strip List from e.g. PodList. For some reason `.items` of a
            # list returned from the API doesn't have `kind` set.
            # NOTE(gryf): Also, to be able to compute a selfLink
            # equivalent, we need both kind and apiVersion; the latter is
            # not present on list items for core resources, whereas custom
            # resources carry both.
            if kind.endswith('List'):
                kind = kind[:-4]

                # NOTE(gryf): In case we get null/None for items from the
                # API, we need to convert it to an empty list, otherwise
                # it might be propagated to the consumers of this method
                # and sent back to Kubernetes as is, and fail as a result.
                if result['items'] is None:
                    result['items'] = []

                for item in result['items']:
                    if not item.get('kind'):
                        item['kind'] = kind
                    if not item.get('apiVersion'):
                        item['apiVersion'] = api_version

            if not result.get('apiVersion'):
                result['apiVersion'] = api_version
        else:
            result = response.text

        return result

    def _get_url_and_header(self, path, content_type):
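        """Build the full URL and default headers for a request."""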
        url = self._base_url + path
        header = {'Content-Type': content_type,
                  'Accept': 'application/json'}

        return url, header

    def patch(self, field, path, data):
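        """Update a single field of a resource using a JSON merge patch."""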
        LOG.debug("Patch %(path)s: %(data)s", {'path': path, 'data': data})
        content_type = 'application/merge-patch+json'
        url, header = self._get_url_and_header(path, content_type)
        response = self.session.patch(url, json={field: data}, headers=header)
        self._raise_from_response(response)
        return response.json().get('status')

    def patch_crd(self, field, path, data, action='replace'):
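        """Apply a JSON patch to a field of a custom resource.

        With action='remove', `data` is the key to drop from `field`.
        Otherwise one patch operation is built per key/value pair of
        `data`, or a single operation on `field` itself when `data` is
        empty.
        """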
        content_type = 'application/json-patch+json'
        url, header = self._get_url_and_header(path, content_type)

        if action == 'remove':
            data = [{'op': action,
                     'path': f'/{field}/{data}'}]
        else:
            if data:
                data = [{'op': action,
                         'path': f'/{field}/{crd_field}',
                         'value': value}
                        for crd_field, value in data.items()]
            else:
                data = [{'op': action,
                         'path': f'/{field}',
                         'value': data}]

        LOG.debug("Patch %(path)s: %(data)s", {
            'path': path, 'data': data})

        response = self.session.patch(url, data=jsonutils.dumps(data),
                                      headers=header)
        self._raise_from_response(response)
        return response.json().get('status')

    def patch_node_annotations(self, node, annotation_name, value):
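        """Set a single annotation on a node using a JSON patch."""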
        content_type = 'application/json-patch+json'
        path = '{}/nodes/{}/'.format(constants.K8S_API_BASE, node)
        value = jsonutils.dumps(value)
        url, header = self._get_url_and_header(path, content_type)

        data = [{'op': 'add',
                 'path': '/metadata/annotations/{}'.format(annotation_name),
                 'value': value}]

        response = self.session.patch(url, data=jsonutils.dumps(data),
                                      headers=header)
        self._raise_from_response(response)
        return response.json().get('status')

    def remove_node_annotations(self, node, annotation_name):
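        """Remove a single annotation from a node using a JSON patch."""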
        content_type = 'application/json-patch+json'
        path = '{}/nodes/{}/'.format(constants.K8S_API_BASE, node)
        url, header = self._get_url_and_header(path, content_type)

        data = [{'op': 'remove',
                 'path': '/metadata/annotations/{}'.format(annotation_name)}]

        response = self.session.patch(url, data=jsonutils.dumps(data),
                                      headers=header)
        self._raise_from_response(response)
        return response.json().get('status')

    def _jsonpatch_escape(self, value):
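        """Escape `~` and `/` as required for JSON pointers (RFC 6901)."""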
        value = value.replace('~', '~0')
        value = value.replace('/', '~1')
        return value

    def remove_annotations(self, path, annotation_name):
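        """Remove a single annotation from the resource at `path`."""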
        LOG.debug("Remove annotations %(path)s: %(name)s",
                  {'path': path, 'name': annotation_name})
        content_type = 'application/json-patch+json'
        url, header = self._get_url_and_header(path, content_type)
        annotation_name = self._jsonpatch_escape(annotation_name)

        data = [{'op': 'remove',
                 'path': f'/metadata/annotations/{annotation_name}'}]
        response = self.session.patch(url, data=jsonutils.dumps(data),
                                      headers=header)
        if response.ok:
            return response.json().get('status')
        raise exc.K8sClientException(response.text)

    def post(self, path, body):
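        """POST a new resource to the K8s API and return the result."""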
        LOG.debug("Post %(path)s: %(body)s", {'path': path, 'body': body})
        url = self._base_url + path
        header = {'Content-Type': 'application/json'}

        response = self.session.post(url, json=body, headers=header)
        self._raise_from_response(response)
        return response.json()

    def delete(self, path):
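        """DELETE a resource from the K8s API and return the result."""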
        LOG.debug("Delete %(path)s", {'path': path})
        url = self._base_url + path
        header = {'Content-Type': 'application/json'}

        response = self.session.delete(url, headers=header)
        self._raise_from_response(response)
        return response.json()

    # TODO(dulek): add_finalizer() and remove_finalizer() have some code
    # duplication, but I don't see a nice way to avoid it.
    def add_finalizer(self, obj, finalizer):
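        """Add a finalizer to an object, retrying on conflicts.

        Returns True if the finalizer ends up on the object and False
        if the object is gone or already being deleted.
        """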
        if finalizer in obj['metadata'].get('finalizers', []):
            return True

        path = utils.get_res_link(obj)
        LOG.debug(f"Add finalizer {finalizer} to {path}")
        url, headers = self._get_url_and_header(
            path, 'application/merge-patch+json')

        for i in range(3):  # Let's make sure it's not an infinite loop
            finalizers = obj['metadata'].get('finalizers', []).copy()
            finalizers.append(finalizer)

            data = {
                'metadata': {
                    'finalizers': finalizers,
                    'resourceVersion': obj['metadata']['resourceVersion'],
                },
            }

            response = self.session.patch(url, json=data, headers=headers)

            if response.ok:
                return True

            try:
                self._raise_from_response(response)
            except (exc.K8sFieldValueForbidden, exc.K8sResourceNotFound):
                # Object is being deleted or is already gone. Return.
                return False
            except exc.K8sConflict:
                try:
                    obj = self.get(path)
                except exc.K8sResourceNotFound:
                    # Object got removed before finalizer was set
                    return False
                if finalizer in obj['metadata'].get('finalizers', []):
                    # Finalizer is there, return.
                    return True

        # If after 3 iterations there's still a conflict, just raise.
        self._raise_from_response(response)

    def remove_finalizer(self, obj, finalizer):
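        """Remove a finalizer from an object, retrying on conflicts.

        Returns True once the finalizer is gone from the object and
        False if the object itself is gone or already being deleted.
        """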
        path = utils.get_res_link(obj)
        LOG.debug(f"Remove finalizer {finalizer} from {path}")
        url, headers = self._get_url_and_header(
            path, 'application/merge-patch+json')

        for i in range(3):  # Let's make sure it's not an infinite loop
            finalizers = obj['metadata'].get('finalizers', []).copy()
            try:
                finalizers.remove(finalizer)
            except ValueError:
                # Finalizer is not there, return.
                return True

            data = {
                'metadata': {
                    'finalizers': finalizers,
                    'resourceVersion': obj['metadata']['resourceVersion'],
                },
            }

            response = self.session.patch(url, json=data, headers=headers)

            if response.ok:
                return True

            try:
                try:
                    self._raise_from_response(response)
                except exc.K8sConflict:
                    obj = self.get(path)
            except (exc.K8sFieldValueForbidden, exc.K8sResourceNotFound):
                # Object is being deleted or gone already, stop.
                return False

        # If after 3 iterations there's still a conflict, just raise.
        self._raise_from_response(response)

    def get_loadbalancer_crd(self, obj):
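        """Get the KuryrLoadBalancer CRD matching the given resource.

        Returns None if there is no such CRD.
        """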
        name = obj['metadata']['name']
        namespace = obj['metadata']['namespace']

        try:
            crd = self.get('{}/{}/kuryrloadbalancers/{}'.format(
                constants.K8S_API_CRD_NAMESPACES, namespace,
                name))
        except exc.K8sResourceNotFound:
            return None
        except exc.K8sClientException:
            LOG.exception("Kubernetes Client Exception.")
            raise
        return crd

    def annotate(self, path, annotations, resource_version=None):
        """Push annotations to a K8s API resource.

        The annotate operation is made with a PATCH HTTP request of kind:
        application/merge-patch+json as described in:

        https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#patch-operations # noqa
        """
        LOG.debug("Annotate %(path)s: %(names)s", {
            'path': path, 'names': list(annotations)})

        content_type = 'application/merge-patch+json'
        url, header = self._get_url_and_header(path, content_type)

        while True:
            metadata = {"annotations": annotations}
            if resource_version:
                metadata['resourceVersion'] = resource_version
            data = jsonutils.dumps({"metadata": metadata}, sort_keys=True)
            response = self.session.patch(url, data=data, headers=header)
            if response.ok:
                return response.json()['metadata'].get('annotations', {})
            if response.status_code == requests.codes.conflict:
                resource = self.get(path)
                new_version = resource['metadata']['resourceVersion']
                retrieved_annotations = resource['metadata'].get(
                    'annotations', {})

                for k, v in annotations.items():
                    if v != retrieved_annotations.get(k):
                        break
                else:
                    LOG.debug("Annotations for %(path)s already present: "
                              "%(names)s", {'path': path,
                                            'names': retrieved_annotations})
                    return retrieved_annotations
                # Retry patching with updated resourceVersion
                resource_version = new_version
                continue

            LOG.error("Exception response, headers: %(headers)s, "
                      "content: %(content)s, text: %(text)s",
                      {'headers': response.headers,
                       'content': response.content, 'text': response.text})

            self._raise_from_response(response)

    def watch(self, path):
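        """Watch a K8s API path and yield the decoded WATCH events.

        On read timeouts, connection errors and SSL errors the watch is
        re-established with exponential backoff, resuming from the last
        resourceVersion that was yielded.
        """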
        url = self._base_url + path
        resource_version = None

        attempt = 0
        while True:
            try:
                params = {'watch': 'true'}
                if resource_version:
                    params['resourceVersion'] = resource_version
                with contextlib.closing(
                        self.session.get(
                            url, params=params, stream=True)) as response:
                    if not response.ok:
                        raise exc.K8sClientException(response.text)
                    attempt = 0
                    for line in response.iter_lines():
                        line = line.decode('utf-8').strip()
                        if line:
                            line_dict = jsonutils.loads(line)
                            yield line_dict
                            # Saving the resourceVersion in case of a
                            # restart. At this point it's safely passed to
                            # the handler.
                            m = line_dict.get('object', {}).get('metadata', {})
                            resource_version = m.get('resourceVersion', None)
            except (requests.ReadTimeout, requests.ConnectionError,
                    ssl.SSLError, requests.exceptions.ChunkedEncodingError,
                    urllib3.exceptions.SSLError):
                t = utils.exponential_backoff(attempt)
                log = LOG.debug
                if attempt > 0:
                    # Only make it a warning if it's happening again, no need
                    # to inform about all the read timeouts.
                    log = LOG.warning
                log('Connection error when watching %s. Retrying in %ds with '
                    'resourceVersion=%s', path, t,
                    params.get('resourceVersion'))
                time.sleep(t)
                attempt += 1

    def add_event(self, resource, reason, message, type_='Normal',
                  component='kuryr-controller'):
        """Create an Event object for the provided resource."""
        if not self.are_events_enabled:
            return {}

        if not resource:
            return {}

        involved_object = {'apiVersion': resource['apiVersion'],
                           'kind': resource['kind'],
                           'name': resource['metadata']['name'],
                           'namespace': resource['metadata']['namespace'],
                           'uid': resource['metadata']['uid']}

        # This is needed for the Event date, otherwise LAST SEEN/Age will be
        # empty and misleading.
        now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
        date_time = now.strftime("%Y-%m-%dT%H:%M:%SZ")

        name = ".".join((resource['metadata']['name'],
                         self._get_hex_timestamp(now)))

        event = {'kind': 'Event',
                 'apiVersion': 'v1',
                 'firstTimestamp': date_time,
                 'metadata': {'name': name},
                 'reason': reason,
                 'message': message,
                 'type': type_,
                 'involvedObject': involved_object,
                 'source': {'component': component,
                            'host': utils.get_nodename()}}

        try:
            return self.post(f'{constants.K8S_API_BASE}/namespaces/'
                             f'{resource["metadata"]["namespace"]}/events',
                             event)
        except exc.K8sNamespaceTerminating:
            # We can't create events in a Namespace that is being terminated,
            # there's no workaround, no need to log it, just ignore it.
            return {}
        except exc.K8sClientException:
            LOG.warning(f'There was a non-critical error while creating an '
                        f'Event for resource: "{resource}", with reason: '
                        f'"{reason}", message: "{message}" and type: '
                        f'"{type_}"')
            return {}

    def _get_hex_timestamp(self, datetimeobj):
        """Get the hex representation of a timestamp.

        In Kubernetes, an Event name is constructed from the name of the
        involved object and a timestamp in hexadecimal representation.
        Note that a Python timestamp is a floating-point number, e.g.:
            1631622163.8534190654754638671875
        while those originating from K8s are integers like:
            1631622163915909162
        so, to get a similar integer, we multiply the float by 100000000
        to keep its precision, cast it to int to drop the fractional
        part, and finally convert it to its hex representation.
        """
        timestamp = datetime.datetime.timestamp(datetimeobj)
        return format(int(timestamp * 100000000), 'x')