1# Copyright 2011 OpenStack Foundation
2# All Rights Reserved.
3#
4#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5#    not use this file except in compliance with the License. You may obtain
6#    a copy of the License at
7#
8#         http://www.apache.org/licenses/LICENSE-2.0
9#
10#    Unless required by applicable law or agreed to in writing, software
11#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13#    License for the specific language governing permissions and limitations
14#    under the License.
15
16"""Tests the Multiple S3 backend store"""
17
18import hashlib
19from unittest import mock
20import uuid
21
22import boto3
23import botocore
24from botocore import exceptions as boto_exceptions
25from botocore import stub
26from oslo_config import cfg
27from oslo_utils.secretutils import md5
28from oslo_utils import units
29import six
30
31import glance_store as store
32from glance_store._drivers import s3
33from glance_store import exceptions
34from glance_store import location
35from glance_store.tests import base
36from glance_store.tests.unit import test_store_capabilities
37
38
# Image identifier shared by tests that address a pre-existing object.
FAKE_UUID = str(uuid.uuid4())

# Payload size used by the "small" (single-part) upload/download tests.
FIVE_KB = 5 * units.Ki
# Reference driver settings; mirrors the options configured on the
# 's3_region1' backend in TestMultiS3Store.setUp().
S3_CONF = {
    's3_store_access_key': 'user',
    's3_store_secret_key': 'key',
    's3_store_host': 'https://s3-region1.com',
    's3_store_bucket': 'glance',
    's3_store_large_object_size': 9,        # over 9MB is large
    's3_store_large_object_chunk_size': 6,  # part size is 6MB
}
50
51
def format_s3_location(user, key, authurl, bucket, obj):
    """Build an S3 store URI from its component pieces.

    An ``https://`` endpoint maps to the ``s3+https`` scheme; an
    ``http://`` or scheme-less endpoint maps to plain ``s3``.
    """
    if authurl.startswith('https://'):
        scheme, host = 's3+https', authurl[len('https://'):]
    elif authurl.startswith('http://'):
        scheme, host = 's3', authurl[len('http://'):]
    else:
        scheme, host = 's3', authurl
    # Drop any surrounding slashes so the host slots cleanly into the URI.
    host = host.strip('/')
    return "%s://%s:%s@%s/%s/%s" % (scheme, user, key, host, bucket, obj)
63
64
class TestMultiS3Store(base.MultiStoreBaseTest,
                       test_store_capabilities.TestStoreCapabilitiesChecking):
    """Tests for the s3 driver configured through glance multi-store backends.

    All S3 traffic is faked with botocore's Stubber: queued responses are
    consumed strictly in the order the driver issues API calls, so each
    test also implicitly pins the driver's request sequence.
    """

    # NOTE(flaper87): temporary until we
    # can move to a fully-local lib.
    # (Swift store's fault)
    _CONF = cfg.ConfigOpts()

    def setUp(self):
        """Establish a clean test environment."""
        super(TestMultiS3Store, self).setUp()
        # Two s3 backends so tests can verify images land in, and report,
        # the expected backend.
        enabled_backends = {
            "s3_region1": "s3",
            "s3_region2": "s3"
        }
        self.hash_algo = 'sha256'
        self.conf = self._CONF
        self.conf(args=[])
        self.conf.register_opt(cfg.DictOpt('enabled_backends'))
        self.config(enabled_backends=enabled_backends)
        store.register_store_opts(self.conf)
        self.config(default_backend='s3_region1', group='glance_store')

        # set s3 related config options
        self.config(group='s3_region1',
                    s3_store_access_key='user',
                    s3_store_secret_key='key',
                    s3_store_host='https://s3-region1.com',
                    s3_store_bucket='glance',
                    s3_store_large_object_size=S3_CONF[
                        's3_store_large_object_size'
                    ],
                    s3_store_large_object_chunk_size=6)

        # 's3_region2' differs from 's3_region1' only in host and scheme
        # (http vs https), which test_add_different_backend relies on.
        self.config(group='s3_region2',
                    s3_store_access_key='user',
                    s3_store_secret_key='key',
                    s3_store_host='http://s3-region2.com',
                    s3_store_bucket='glance',
                    s3_store_large_object_size=S3_CONF[
                        's3_store_large_object_size'
                    ],
                    s3_store_large_object_chunk_size=6)
        # Ensure stores + locations cleared
        location.SCHEME_TO_CLS_BACKEND_MAP = {}
        store.create_multi_stores(self.conf)

        self.addCleanup(setattr, location, 'SCHEME_TO_CLS_BACKEND_MAP',
                        dict())
        self.addCleanup(self.conf.reset)

        # Most tests exercise the default 's3_region1' backend.
        self.store = s3.Store(self.conf, backend="s3_region1")
        self.store.configure()
        self.register_store_backend_schemes(self.store, 's3', 's3_region1')

    def test_location_url_prefix_is_set(self):
        """The store's URL prefix reflects the region1 config options."""
        expected_url_prefix = "s3+https://user:key@s3-region1.com/glance"
        self.assertEqual(expected_url_prefix, self.store.url_prefix)

    def test_get_invalid_bucket_name(self):
        """Non-DNS-compliant bucket names are rejected in 'virtual' format."""
        self.config(s3_store_bucket_url_format='virtual', group='s3_region1')

        # dots, too-short names, and trailing hyphens are all DNS-invalid
        invalid_buckets = ['not.dns.compliant', 'aa', 'bucket-']
        for bucket in invalid_buckets:
            loc = location.get_location_from_uri_and_backend(
                "s3+https://user:key@auth_address/%s/key" % bucket,
                's3_region1', conf=self.conf)
            self.assertRaises(boto_exceptions.InvalidDNSNameError,
                              self.store.get, loc)

    @mock.patch.object(boto3.session.Session, "client")
    def test_get(self, mock_client):
        """Test a "normal" retrieval of an image in chunks."""
        bucket, key = 'glance', FAKE_UUID
        fixture_object = {
            'Body': six.BytesIO(b"*" * FIVE_KB),
            'ContentLength': FIVE_KB
        }
        fake_s3_client = botocore.session.get_session().create_client('s3')

        # Expected call order: head_object (existence check), then
        # get_object (actual download).
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_object',
                                 service_response={},
                                 expected_params={
                                     'Bucket': bucket,
                                     'Key': key
                                 })
            stubber.add_response(method='get_object',
                                 service_response=fixture_object,
                                 expected_params={
                                     'Bucket': bucket,
                                     'Key': key
                                 })
            mock_client.return_value = fake_s3_client

            loc = location.get_location_from_uri_and_backend(
                "s3+https://user:key@auth_address/%s/%s" % (bucket, key),
                's3_region1', conf=self.conf)
            (image_s3, image_size) = self.store.get(loc)

            self.assertEqual(FIVE_KB, image_size)

            # The returned object is an iterator of chunks; reassemble and
            # compare against the full fixture payload.
            expected_data = b"*" * FIVE_KB
            data = b""

            for chunk in image_s3:
                data += chunk
            self.assertEqual(expected_data, data)

    def test_partial_get(self):
        """Requesting a specific chunk_size (random access) must raise."""
        loc = location.get_location_from_uri_and_backend(
            "s3+https://user:key@auth_address/glance/%s" % FAKE_UUID,
            's3_region1', conf=self.conf)
        self.assertRaises(exceptions.StoreRandomGetNotSupported,
                          self.store.get, loc, chunk_size=1)

    @mock.patch.object(boto3.session.Session, "client")
    def test_get_non_existing(self, mock_client):
        """Test that trying to retrieve a s3 that doesn't exist raises an
        error
        """
        bucket, key = 'glance', 'no_exist'
        fake_s3_client = botocore.session.get_session().create_client('s3')

        # An S3 404 on head_object must surface as glance's NotFound.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='''
                                     The specified key does not exist.
                                     ''',
                                     expected_params={
                                         'Bucket': bucket,
                                         'Key': key
                                     })
            mock_client.return_value = fake_s3_client

            uri = "s3+https://user:key@auth_address/%s/%s" % (bucket, key)
            loc = location.get_location_from_uri_and_backend(uri,
                                                             's3_region1',
                                                             conf=self.conf)
            self.assertRaises(exceptions.NotFound, self.store.get, loc)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_singlepart(self, mock_client):
        """Test that we can add an image via the s3 backend."""
        expected_image_id = str(uuid.uuid4())
        # 5KiB is smaller than WRITE_CHUNKSIZE
        expected_s3_size = FIVE_KB
        expected_s3_contents = b"*" * expected_s3_size
        expected_checksum = md5(expected_s3_contents,
                                usedforsecurity=False).hexdigest()
        expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
        expected_location = format_s3_location(
            S3_CONF['s3_store_access_key'],
            S3_CONF['s3_store_secret_key'],
            S3_CONF['s3_store_host'],
            S3_CONF['s3_store_bucket'],
            expected_image_id)
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')

        # Expected call order: bucket exists, object does not (404),
        # then a single put_object upload.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket']
                                 })
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='',
                                     expected_params={
                                         'Bucket': S3_CONF['s3_store_bucket'],
                                         'Key': expected_image_id
                                     })
            stubber.add_response(method='put_object',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket'],
                                     'Key': expected_image_id,
                                     'Body': botocore.stub.ANY
                                 })

            mock_client.return_value = fake_s3_client
            loc, size, checksum, multihash, metadata = \
                self.store.add(expected_image_id, image_s3, expected_s3_size,
                               self.hash_algo)
            self.assertEqual("s3_region1", metadata["store"])

            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(expected_multihash, multihash)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_singlepart_bigger_than_write_chunk(self, mock_client):
        """Test that we can add an image via the s3 backend."""
        expected_image_id = str(uuid.uuid4())
        # 8 MiB is bigger than WRITE_CHUNKSIZE(=5MiB),
        # but smaller than s3_store_large_object_size
        expected_s3_size = 8 * units.Mi
        expected_s3_contents = b"*" * expected_s3_size
        expected_checksum = md5(expected_s3_contents,
                                usedforsecurity=False).hexdigest()
        expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
        expected_location = format_s3_location(
            S3_CONF['s3_store_access_key'],
            S3_CONF['s3_store_secret_key'],
            S3_CONF['s3_store_host'],
            S3_CONF['s3_store_bucket'],
            expected_image_id)
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')

        # Still a single put_object: size is below the large-object
        # threshold, so no multipart upload is expected.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket']
                                 })
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='',
                                     expected_params={
                                         'Bucket': S3_CONF['s3_store_bucket'],
                                         'Key': expected_image_id
                                     })
            stubber.add_response(method='put_object',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket'],
                                     'Key': expected_image_id,
                                     'Body': botocore.stub.ANY
                                 })

            mock_client.return_value = fake_s3_client
            loc, size, checksum, multihash, metadata = \
                self.store.add(expected_image_id, image_s3, expected_s3_size,
                               self.hash_algo)
            self.assertEqual("s3_region1", metadata["store"])

            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(expected_multihash, multihash)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_different_backend(self, mock_client):
        """Adding via 's3_region2' reports that backend and its http URL."""
        self.store = s3.Store(self.conf, backend="s3_region2")
        self.store.configure()
        self.register_store_backend_schemes(self.store, 's3', 's3_region2')

        expected_image_id = str(uuid.uuid4())
        expected_s3_size = FIVE_KB
        expected_s3_contents = b"*" * expected_s3_size
        expected_checksum = md5(expected_s3_contents,
                                usedforsecurity=False).hexdigest()
        expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
        # region2 is configured with the plain-http host, hence the
        # 's3' (not 's3+https') scheme in the expected location.
        expected_location = format_s3_location(
            S3_CONF['s3_store_access_key'],
            S3_CONF['s3_store_secret_key'],
            'http://s3-region2.com',
            S3_CONF['s3_store_bucket'],
            expected_image_id)
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')

        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket']
                                 })
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='',
                                     expected_params={
                                         'Bucket': S3_CONF['s3_store_bucket'],
                                         'Key': expected_image_id
                                     })
            stubber.add_response(method='put_object',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket'],
                                     'Key': expected_image_id,
                                     'Body': botocore.stub.ANY
                                 })

            mock_client.return_value = fake_s3_client
            loc, size, checksum, multihash, metadata = \
                self.store.add(expected_image_id, image_s3, expected_s3_size,
                               self.hash_algo)
            self.assertEqual("s3_region2", metadata["store"])

            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(expected_multihash, multihash)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_with_verifier(self, mock_client):
        """Assert 'verifier.update' is called when verifier is provided"""
        expected_image_id = str(uuid.uuid4())
        expected_s3_size = FIVE_KB
        expected_s3_contents = b"*" * expected_s3_size
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')
        verifier = mock.MagicMock(name='mock_verifier')

        # expected_params omitted here: only the verifier call matters.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket', service_response={})
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='')
            stubber.add_response(method='put_object', service_response={})

            mock_client.return_value = fake_s3_client
            self.store.add(expected_image_id, image_s3, expected_s3_size,
                           self.hash_algo, verifier=verifier)
        verifier.update.assert_called_with(expected_s3_contents)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_multipart(self, mock_client):
        """Test that we can add an image via the s3 backend."""
        expected_image_id = str(uuid.uuid4())
        expected_s3_size = 16 * units.Mi
        expected_s3_contents = b"*" * expected_s3_size
        expected_checksum = md5(expected_s3_contents,
                                usedforsecurity=False).hexdigest()
        expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
        expected_location = format_s3_location(
            S3_CONF['s3_store_access_key'],
            S3_CONF['s3_store_secret_key'],
            S3_CONF['s3_store_host'],
            S3_CONF['s3_store_bucket'],
            expected_image_id)
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')

        num_parts = 3  # image size = 16MB and chunk size is 6MB
        # 16 MiB over the large-object threshold (9 MiB) triggers the
        # multipart path: create -> N x upload_part -> complete.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket']
                                 })
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='',
                                     expected_params={
                                         'Bucket': S3_CONF['s3_store_bucket'],
                                         'Key': expected_image_id
                                     })
            stubber.add_response(method='create_multipart_upload',
                                 service_response={
                                     "Bucket": S3_CONF['s3_store_bucket'],
                                     "Key": expected_image_id,
                                     "UploadId": 'UploadId'
                                 },
                                 expected_params={
                                     "Bucket": S3_CONF['s3_store_bucket'],
                                     "Key": expected_image_id,
                                 })
            parts = []
            remaining_image_size = expected_s3_size
            chunk_size = S3_CONF['s3_store_large_object_chunk_size'] * units.Mi
            # 16 MiB with 6 MiB parts: ContentLength runs 6, 6, then 4 MiB
            # (chunk_size shrinks once less than a full part remains).
            for i in range(num_parts):
                part_number = i + 1
                stubber.add_response(method='upload_part',
                                     service_response={
                                         'ETag': 'ETag'
                                     },
                                     expected_params={
                                         "Bucket": S3_CONF['s3_store_bucket'],
                                         "Key": expected_image_id,
                                         "Body": botocore.stub.ANY,
                                         'ContentLength': chunk_size,
                                         "PartNumber": part_number,
                                         "UploadId": 'UploadId'
                                     })
                parts.append({'ETag': 'ETag', 'PartNumber': part_number})

                remaining_image_size -= chunk_size
                if remaining_image_size < chunk_size:
                    chunk_size = remaining_image_size

            stubber.add_response(method='complete_multipart_upload',
                                 service_response={
                                     "Bucket": S3_CONF['s3_store_bucket'],
                                     "Key": expected_image_id,
                                     'ETag': 'ETag'
                                 },
                                 expected_params={
                                     "Bucket": S3_CONF['s3_store_bucket'],
                                     "Key": expected_image_id,
                                     "MultipartUpload": {
                                         "Parts": parts
                                     },
                                     "UploadId": 'UploadId'
                                 })

            mock_client.return_value = fake_s3_client
            loc, size, checksum, multihash, metadata = \
                self.store.add(expected_image_id, image_s3, expected_s3_size,
                               self.hash_algo)
            self.assertEqual("s3_region1", metadata["store"])

            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(expected_multihash, multihash)

    @mock.patch.object(boto3.session.Session, "client")
    def test_add_already_existing(self, mock_client):
        """Tests that adding an image with an existing identifier raises an
        appropriate exception
        """
        image_s3 = six.BytesIO(b"never_gonna_make_it")

        fake_s3_client = botocore.session.get_session().create_client('s3')

        # head_object succeeding (no 404) means the key already exists.
        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(method='head_bucket', service_response={})
            stubber.add_response(method='head_object', service_response={})
            mock_client.return_value = fake_s3_client
            self.assertRaises(exceptions.Duplicate, self.store.add,
                              FAKE_UUID, image_s3, 0, self.hash_algo)

    @mock.patch.object(boto3.session.Session, "client")
    def test_delete_non_existing(self, mock_client):
        """Test that trying to delete a s3 that doesn't exist raises an error
        """
        bucket, key = 'glance', 'no_exist'
        fake_s3_client = botocore.session.get_session().create_client('s3')

        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='''
                                     The specified key does not exist.
                                     ''',
                                     expected_params={
                                         'Bucket': bucket,
                                         'Key': key
                                     })
            # head_bucket is mocked outside the stubber so any bucket check
            # passes without consuming a stubbed response.
            fake_s3_client.head_bucket = mock.MagicMock()
            mock_client.return_value = fake_s3_client

            uri = "s3+https://user:key@auth_address/%s/%s" % (bucket, key)
            loc = location.get_location_from_uri_and_backend(uri,
                                                             's3_region1',
                                                             conf=self.conf)
            self.assertRaises(exceptions.NotFound, self.store.delete, loc)
522