Add region_name option to s3 store

Add an option to let the operator choose a proper region_name.

Some operators are not using S3 from Amazon but from another provider.
In such cases, get_s3_location cannot determine a correct region_name.

Adding an option to the config seems a better approach than relying on
values hardcoded in the code.

Signed-off-by: Arnaud Morin <arnaud.morin@ovhcloud.com>
Change-Id: I71352e75f6415494709d8fb62c8b13f3ea7c6016
This commit is contained in:
Arnaud Morin 2022-08-12 20:45:27 +02:00
parent 052c5cc4b4
commit 2ad4a73e7f
4 changed files with 75 additions and 4 deletions

View File

@ -64,6 +64,22 @@ Related Options:
* s3_store_access_key
* s3_store_secret_key
"""),
cfg.StrOpt('s3_store_region_name',
default='',
help="""
The S3 region name.
This parameter will set the region_name used by boto.
If this parameter is not set, we will try to compute it from the
s3_store_host.
Possible values:
* A valid region name
Related Options:
* s3_store_host
"""),
cfg.StrOpt('s3_store_access_key',
secret=True,
@ -375,6 +391,7 @@ class Store(glance_store.driver.Store):
itself, it should raise `exceptions.BadStoreConfiguration`
"""
self.s3_host = self._option_get('s3_store_host')
self.region_name = self._option_get('s3_store_region_name')
self.access_key = self._option_get('s3_store_access_key')
self.secret_key = self._option_get('s3_store_secret_key')
self.bucket = self._option_get('s3_store_bucket')
@ -432,6 +449,8 @@ class Store(glance_store.driver.Store):
if not result:
if param == 's3_store_create_bucket_on_put':
return result
if param == 's3_store_region_name':
return result
reason = _("Could not find %s in configuration options.") % param
LOG.error(reason)
raise exceptions.BadStoreConfiguration(store_name="s3",
@ -460,7 +479,10 @@ class Store(glance_store.driver.Store):
raise boto_exceptions.InvalidDNSNameError(bucket_name=bucket_name)
region_name, endpoint_url = None, None
if location:
if self.region_name:
region_name = self.region_name
endpoint_url = s3_host
elif location:
region_name = location
else:
endpoint_url = s3_host
@ -578,7 +600,8 @@ class Store(glance_store.driver.Store):
if self._option_get('s3_store_create_bucket_on_put'):
self._create_bucket(s3_client,
self._option_get('s3_store_host'),
bucket)
bucket,
self._option_get('s3_store_region_name'))
else:
msg = (_("The bucket %s does not exist in "
"S3. Please set the "
@ -850,15 +873,20 @@ class Store(glance_store.driver.Store):
return True
@staticmethod
def _create_bucket(s3_client, s3_host, bucket):
def _create_bucket(s3_client, s3_host, bucket, region_name=None):
"""Create bucket into the S3.
:param s3_client: An object with credentials to connect to S3
:param s3_host: S3 endpoint url
:param bucket: S3 bucket name
:param region_name: An optional region_name. If not provided, will try
to compute it from s3_host
:raises: BadStoreConfiguration if cannot connect to S3 successfully
"""
region = get_s3_location(s3_host)
if region_name:
region = region_name
else:
region = get_s3_location(s3_host)
try:
s3_client.create_bucket(
Bucket=bucket,
@ -901,6 +929,7 @@ def get_s3_location(s3_host):
Amazon S3, and if user wants to use S3 compatible storage,
returns ''
"""
# NOTE(arnaud): maybe get rid of hardcoded amazon stuff here?
locations = {
's3.amazonaws.com': '',
's3-us-east-1.amazonaws.com': 'us-east-1',

View File

@ -90,6 +90,7 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
s3_store_access_key='user',
s3_store_secret_key='key',
s3_store_host='https://s3-region1.com',
s3_store_region_name='custom_region_name',
s3_store_bucket='glance',
s3_store_large_object_size=S3_CONF[
's3_store_large_object_size'
@ -132,6 +133,22 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
self.assertRaises(boto_exceptions.InvalidDNSNameError,
self.store.get, loc)
@mock.patch('glance_store.location.Location')
@mock.patch.object(boto3.session.Session, "client")
def test_client_custom_region_name(self, mock_client, mock_loc):
"""Verify that an explicitly configured s3_store_region_name is
passed through to the boto3 client, together with the configured
endpoint, instead of a region computed from s3_store_host.
"""
# Minimal location stub; only the fields read by _create_s3_client.
mock_loc.accesskey = 'abcd'
mock_loc.secretkey = 'efgh'
mock_loc.bucket = 'bucket1'
self.store._create_s3_client(mock_loc)
# The store fixture sets s3_store_region_name='custom_region_name',
# so boto3 must receive that region with the s3_store_host endpoint.
mock_client.assert_called_with(
config=mock.ANY,
endpoint_url='https://s3-region1.com',
region_name='custom_region_name',
service_name='s3',
use_ssl=False,
)
@mock.patch.object(boto3.session.Session, "client")
def test_get(self, mock_client):
"""Test a "normal" retrieval of an image in chunks."""

View File

@ -106,6 +106,7 @@ class OptsTestCase(base.StoreBaseTest):
's3_store_bucket_url_format',
's3_store_create_bucket_on_put',
's3_store_host',
's3_store_region_name',
's3_store_secret_key',
's3_store_large_object_size',
's3_store_large_object_chunk_size',

View File

@ -41,6 +41,7 @@ FIVE_KB = 5 * units.Ki
S3_CONF = {
's3_store_access_key': 'user',
's3_store_secret_key': 'key',
's3_store_region_name': '',
's3_store_host': 'localhost',
's3_store_bucket': 'glance',
's3_store_large_object_size': 9, # over 9MB is large
@ -84,6 +85,29 @@ class TestStore(base.StoreBaseTest,
self.assertRaises(boto_exceptions.InvalidDNSNameError,
self.store.get, loc)
@mock.patch('glance_store.location.Location')
@mock.patch.object(boto3.session.Session, "client")
def test_client_custom_region_name(self, mock_client, mock_loc):
"""Verify that setting s3_store_region_name makes the boto3 client
use that region with the configured s3_store_host endpoint, rather
than a region derived from the host name.
"""
# Override the base S3_CONF (which leaves the region empty) so the
# explicit-region code path in _create_s3_client is exercised.
self.config(s3_store_host='http://example.com')
self.config(s3_store_region_name='regionOne')
self.config(s3_store_bucket_url_format='path')
self.store.configure()
# Minimal location stub; only the fields read by _create_s3_client.
mock_loc.accesskey = 'abcd'
mock_loc.secretkey = 'efgh'
mock_loc.bucket = 'bucket1'
self.store._create_s3_client(mock_loc)
# Both region_name and endpoint_url must be forwarded to boto3.
mock_client.assert_called_with(
config=mock.ANY,
endpoint_url='http://example.com',
region_name='regionOne',
service_name='s3',
use_ssl=False,
)
@mock.patch.object(boto3.session.Session, "client")
def test_get(self, mock_client):
"""Test a "normal" retrieval of an image in chunks."""