Merge pull request #55694 from garethgreenaway/55662_s3_pillar_fixes
[master] S3 Pillar Pagination Fixes
dwoz authored Jan 8, 2020
2 parents 5a6ed7c + 52cf5f4 commit 3523004
Showing 2 changed files with 125 additions and 3 deletions.
35 changes: 32 additions & 3 deletions salt/pillar/s3.py
@@ -273,7 +273,13 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
'''

# helper s3 query function
def __get_s3_meta():
def __get_s3_meta(continuation_token=None):
# We want to use ListObjectsV2 so we get the NextContinuationToken
params = {'prefix': prefix, 'list-type': 2}

if continuation_token:
params['continuation-token'] = continuation_token

return __utils__['s3.query'](
key=creds.key,
keyid=creds.keyid,
@@ -283,7 +289,7 @@ def __get_s3_meta():
verify_ssl=creds.verify_ssl,
location=creds.location,
return_bin=False,
params={'prefix': prefix},
params=params,
path_style=creds.path_style,
https_enable=creds.https_enable)
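
For context: the new 'list-type': 2 and 'continuation-token' parameters are the query-string form of S3's ListObjectsV2 API, which caps each listing (1,000 keys by default) and returns a NextContinuationToken whenever a response is truncated. The pillar drives this through __utils__['s3.query'], but the same request/response cycle looks roughly like the following boto3 sketch; the bucket and prefix names are placeholders, not part of the module.

# Illustration only: the ListObjectsV2 pagination cycle the pillar performs
# via __utils__['s3.query']. Bucket and prefix names are made up.
import boto3

s3 = boto3.client('s3')

keys = []
continuation_token = None
while True:
    kwargs = {'Bucket': 'pillar-bucket', 'Prefix': 'base/'}
    if continuation_token:
        kwargs['ContinuationToken'] = continuation_token
    resp = s3.list_objects_v2(**kwargs)                # the list-type=2 request
    keys.extend(obj['Key'] for obj in resp.get('Contents', []))
    if not resp.get('IsTruncated'):                    # last page reached
        break
    continuation_token = resp['NextContinuationToken']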

@@ -296,6 +302,12 @@ def __get_pillar_environments(files):
environments = [(os.path.dirname(k['Key']).split('/', 1))[0] for k in files]
return set(environments)

def __get_continuation_token(s3_meta):
return next((item.get('NextContinuationToken')
for item in s3_meta
if item.get('NextContinuationToken')),
None)

log.debug('Refreshing S3 buckets pillar cache file')

metadata = {}
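
The __get_continuation_token helper relies on the shape of s3.query's parsed response: a list of single-key dicts, the same shape the new unit test below mocks. next() over a generator with a None default returns the first NextContinuationToken found, or None when the listing carries no further page. A standalone illustration with made-up data:

# Standalone illustration of the next()-with-default lookup; the data is made up.
s3_meta = [{'Name': 'pillar-bucket'},
           {'KeyCount': '1000'},
           {'NextContinuationToken': 'abc123'},
           {'Key': 'base/top.sls'}]

token = next((item.get('NextContinuationToken')
              for item in s3_meta
              if item.get('NextContinuationToken')),
             None)
print(token)  # 'abc123'; would be None if no token dict were present
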
@@ -312,6 +324,14 @@ def __get_pillar_environments(files):
if s3_meta:
bucket_files[bucket] = __get_pillar_files_from_s3_meta(s3_meta)

# Check if we have a NextContinuationToken and loop until we don't
while True:
continuation_token = __get_continuation_token(s3_meta)
if not continuation_token:
break
s3_meta = __get_s3_meta(continuation_token)
bucket_files[bucket] += __get_pillar_files_from_s3_meta(s3_meta)

metadata[environment] = bucket_files

else:
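
The loop added here follows a prime-then-page shape: one unconditional query first, then further queries only while the previous page carried a token, with each page's file metadata appended to the running per-bucket list. The same control flow, reduced to a generic sketch (the helper name and signature are illustrative, not part of the module):

# Generic sketch of the prime-then-page loop above; not part of salt.pillar.s3.
def iter_pages(fetch_page, get_token):
    '''Yield pages from a paginated API.

    fetch_page(token_or_None) returns one page of results; get_token(page)
    returns the next continuation token, or None when the listing is complete.
    '''
    page = fetch_page(None)
    while page:
        yield page
        token = get_token(page)
        if not token:
            break
        page = fetch_page(token)
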
@@ -322,6 +342,15 @@ def __get_pillar_environments(files):
# s3 query returned data
if s3_meta:
files = __get_pillar_files_from_s3_meta(s3_meta)

# Check if we have a NextContinuationToken and loop until we don't
while True:
continuation_token = __get_continuation_token(s3_meta)
if not continuation_token:
break
s3_meta = __get_s3_meta(continuation_token)
files += __get_pillar_files_from_s3_meta(s3_meta)

environments = __get_pillar_environments(files)

# pull out the files for the environment
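
In this branch the pillar environments themselves are derived from the object keys, so fetching every page before calling __get_pillar_environments also matters: an environment whose files all sit on a later page could otherwise be missed. The derivation is simply the first path component of each key, for example:

# The first path component of each object key names the pillar environment.
import os

keys = ['base/top.sls', 'base/app/init.sls', 'dev/app/init.sls']
environments = {os.path.dirname(k).split('/', 1)[0] for k in keys}
print(environments)  # {'base', 'dev'}
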
@@ -343,7 +372,7 @@ def __get_pillar_environments(files):

log.debug('Writing S3 buckets pillar cache file')

with salt.utils.files.fopen(cache_file, 'w') as fp_:
with salt.utils.files.fopen(cache_file, 'wb') as fp_:
pickle.dump(metadata, fp_)

return metadata
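
The 'w' to 'wb' change on the cache write matters under Python 3: pickle.dump produces bytes, and writing bytes to a text-mode handle raises a TypeError. A minimal reproduction outside Salt:

# Minimal demonstration of why the pickle cache file must be opened in binary mode.
import os
import pickle
import tempfile

path = os.path.join(tempfile.gettempdir(), 'pickle_mode_demo.p')

with open(path, 'wb') as fp_:        # binary mode: works
    pickle.dump({'base': {}}, fp_)

try:
    with open(path, 'w') as fp_:     # text mode: fails on Python 3
        pickle.dump({'base': {}}, fp_)
except TypeError as exc:
    print(exc)                       # write() argument must be str, not bytes
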
93 changes: 93 additions & 0 deletions tests/unit/pillar/test_s3.py
@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals

import logging

# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import patch, mock_open, MagicMock

# Import Salt Libs
import salt.pillar.s3 as s3_pillar
from salt.ext.six.moves import range

log = logging.getLogger(__name__)


class S3PillarTestCase(TestCase, LoaderModuleMockMixin):
'''
TestCase for salt.pillar.s3
'''
def setup_loader_modules(self):
s3_pillar_globals = {
'__utils__': {}
}
return {s3_pillar: s3_pillar_globals}

def test_refresh_buckets_cache_file(self):
'''
Test pagination with refresh_buckets_cache_file
'''
key = 'XXXXXXXXXXXXXXXXXXXXX'
keyid = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
bucket = 'dummy_bucket'
service_url = 's3.amazonaws.com'
cache_file = 'dummy_file'

s3_creds = s3_pillar.S3Credentials(key, keyid, bucket, service_url)

mock_return_first = [{'Name': 'pillar-bucket'},
{'Prefix': 'test'},
{'KeyCount': '10'},
{'MaxKeys': '10'},
{'NextContinuationToken': 'XXXXX'},
{'IsTruncated': 'true'}]

mock_return_second = [{'Name': 'pillar-bucket'},
{'Prefix': 'test'},
{'KeyCount': '10'},
{'MaxKeys': '10'},
{'IsTruncated': 'true'}]

first_range_end = 999
second_range_end = 1200
for i in range(0, first_range_end):
key_name = '{0}/init.sls'.format(i)
tmp = {'Key': key_name,
'LastModified': '2019-12-18T15:54:39.000Z',
'ETag': '"fba0a053704e8b357c94be90b44bb640"',
'Size': '5 ',
'StorageClass': 'STANDARD'}
mock_return_first.append(tmp)

for i in range(first_range_end, second_range_end):
key_name = '{0}/init.sls'.format(i)
tmp = {'Key': key_name,
'LastModified': '2019-12-18T15:54:39.000Z',
'ETag': '"fba0a053704e8b357c94be90b44bb640"',
'Size': '5 ',
'StorageClass': 'STANDARD'}
mock_return_second.append(tmp)

_expected = {'base': {'dummy_bucket': []}}
for i in range(0, second_range_end):
key_name = '{0}/init.sls'.format(i)
tmp = {'Key': key_name,
'LastModified': '2019-12-18T15:54:39.000Z',
'ETag': '"fba0a053704e8b357c94be90b44bb640"',
'Size': '5 ',
'StorageClass': 'STANDARD'}
_expected['base']['dummy_bucket'].append(tmp)

mock_s3_query = MagicMock(side_effect=[mock_return_first, mock_return_second])
with patch.dict(s3_pillar.__utils__, {'s3.query': mock_s3_query}):
with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
ret = s3_pillar._refresh_buckets_cache_file(s3_creds,
cache_file,
False,
'base',
'')
self.assertEqual(ret, _expected)
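
The mocked s3.query uses side_effect with two return values: the first call hands back the page carrying NextContinuationToken, the second call the page without one, so the pagination loop must stop after exactly two queries and the merged result is compared against all 1,200 keys. A standalone illustration of that side_effect behaviour, unrelated to Salt:

# How MagicMock(side_effect=[...]) steps through successive return values.
from unittest.mock import MagicMock

query = MagicMock(side_effect=[['page-one'], ['page-two']])
print(query())  # ['page-one']  - first call returns the first element
print(query())  # ['page-two']  - second call returns the next element
# A third call would raise StopIteration, so the loop must stop after two queries.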
