Commit: Fix filter bug
S3 prefixes were not being taken into account when determining the root
directory.
kyleknap committed Nov 10, 2014 (commit 716e636, 1 parent b854792)
Showing 3 changed files with 70 additions and 6 deletions.
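
In short: for a source like `s3://bucket/temp/test`, the old code kept only the bucket as the filter root, so exclude/include patterns were anchored at `bucket` rather than `bucket/temp` and never matched objects under the prefix. A minimal sketch of the old behavior, assuming `split_s3_bucket_key` (the helper from `awscli.customizations.s3.utils`) splits the path into bucket and key at the first `/` (the stand-in below is simplified, not the real helper):

```python
def split_s3_bucket_key(s3_path):
    # Simplified stand-in for awscli.customizations.s3.utils.split_s3_bucket_key.
    if s3_path.startswith('s3://'):
        s3_path = s3_path[5:]
    bucket, _, key = s3_path.partition('/')
    return bucket, key

# Old root computation: everything after the bucket is thrown away,
# including the 'temp/' prefix that the filter patterns need.
old_root = split_s3_bucket_key('s3://bucket/temp/test')[0]
assert old_root == 'bucket'
```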
awscli/customizations/s3/filters.py (15 additions, 4 deletions)
```diff
@@ -34,20 +34,31 @@ def create_filter(parameters):
         if source_location.startswith('s3://'):
             # This gives us (bucket, keyname) and we want
             # the bucket to be the root dir.
-            src_rootdir = _get_s3_root(source_location)
+            src_rootdir = _get_s3_root(source_location,
+                                       parameters['dir_op'])
             dst_rootdir = _get_local_root(parameters['dest'],
                                           parameters['dir_op'])
         else:
             src_rootdir = _get_local_root(parameters['src'], parameters['dir_op'])
-            dst_rootdir = _get_s3_root(parameters['dest'])
+            dst_rootdir = _get_s3_root(parameters['dest'],
+                                       parameters['dir_op'])
 
         return Filter(real_filters, src_rootdir, dst_rootdir)
     else:
         return Filter({}, None, None)
 
 
-def _get_s3_root(source_location):
-    return split_s3_bucket_key(source_location)[0]
+def _get_s3_root(source_location, dir_op):
+    # Obtain the bucket and the key.
+    bucket, key = split_s3_bucket_key(source_location)
+    if not dir_op and not key.endswith('/'):
+        # If we are not performing an operation on a directory and the key
+        # is of the form ``prefix/key``, we only want ``prefix`` included
+        # in the s3 root, not ``key``.
+        key = '/'.join(key.split('/')[:-1])
+    # Rejoin the bucket and key back together.
+    s3_path = '/'.join([bucket, key])
+    return s3_path
 
 
 def _get_local_root(source_location, dir_op):
```
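
As a sanity check on the new logic, here is what `_get_s3_root` should now return for a few representative inputs. This is a standalone restatement of the function above, paired with the same simplified `split_s3_bucket_key` stand-in; it is illustrative, not part of the commit:

```python
def split_s3_bucket_key(s3_path):
    # Simplified stand-in for the real helper in awscli.customizations.s3.utils.
    if s3_path.startswith('s3://'):
        s3_path = s3_path[5:]
    bucket, _, key = s3_path.partition('/')
    return bucket, key

def _get_s3_root(source_location, dir_op):
    # Same logic as the new function in the diff above.
    bucket, key = split_s3_bucket_key(source_location)
    if not dir_op and not key.endswith('/'):
        key = '/'.join(key.split('/')[:-1])
    return '/'.join([bucket, key])

# Directory operation on a prefix: the whole prefix stays in the root.
assert _get_s3_root('s3://bucket/temp/', dir_op=True) == 'bucket/temp/'
# Single-object operation: the trailing key component is dropped,
# leaving just the prefix.
assert _get_s3_root('s3://bucket/temp/test.txt', dir_op=False) == 'bucket/temp'
# Single object with no prefix: only the bucket remains (the emptied
# key is joined back on, hence the trailing slash).
assert _get_s3_root('s3://bucket/test.txt', dir_op=False) == 'bucket/'
```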
tests/integration/customizations/s3/test_plugin.py (24 additions, 0 deletions)
```diff
@@ -1366,6 +1366,30 @@ def test_exclude_filter_with_relative_path(self):
                         "end, the 'bar.py' file was deleted even though"
                         " it was excluded."))
 
+    def test_filter_s3_with_prefix(self):
+        bucket_name = self.create_bucket()
+        self.put_object(bucket_name, key_name='temp/test')
+        p = aws('s3 cp s3://%s/temp/ %s --recursive --exclude test --dryrun'
+                % (bucket_name, self.files.rootdir))
+        self.assert_no_files_would_be_uploaded(p)
+
+    def test_filter_no_resync(self):
+        # This specifically tests for the issue described here:
+        # https://github.com/aws/aws-cli/issues/794
+        bucket_name = self.create_bucket()
+        dir_name = os.path.join(self.files.rootdir, 'temp')
+        filename = self.files.create_file(os.path.join(dir_name, 'test.txt'),
+                                          contents='foo')
+        # Sync a local directory to an s3 prefix.
+        p = aws('s3 sync %s s3://%s/temp' % (dir_name, bucket_name))
+        self.assert_no_errors(p)
+        self.assertTrue(self.key_exists(bucket_name, key_name='temp/test.txt'))
+
+        # Nothing should be synced down if filters are used.
+        p = aws("s3 sync s3://%s/temp %s --exclude '*' --include test.txt"
+                % (bucket_name, dir_name))
+        self.assert_no_files_would_be_uploaded(p)
+
 
 class TestFileWithSpaces(BaseS3CLICommand):
     def test_upload_download_file_with_spaces(self):
```
tests/unit/customizations/s3/test_filters.py (31 additions, 2 deletions)
```diff
@@ -15,7 +15,7 @@
 import platform
 
 from awscli.customizations.s3.filegenerator import FileStat
-from awscli.customizations.s3.filters import Filter
+from awscli.customizations.s3.filters import Filter, create_filter
 
 
 def platform_path(filepath):
@@ -50,13 +50,16 @@ def file_stat(self, filename, src_type='local'):
                         last_update=0, src_type=src_type,
                         dest_type=dest_type, operation_name='')
 
-    def create_filter(self, filters=None, root=None, dst_root=None):
+    def create_filter(self, filters=None, root=None, dst_root=None,
+                      parameters=None):
         if root is None:
             root = os.getcwd()
         if filters is None:
             filters = {}
         if dst_root is None:
             dst_root = 'bucket'
+        if parameters is not None:
+            return create_filter(parameters)
         return Filter(filters, root, dst_root)
 
     def test_no_filter(self):
@@ -173,6 +176,32 @@ def test_root_dir(self):
         self.assertEqual(len(filtered), 1)
         self.assertEqual(filtered[0].src, p('/foo/bar/baz.txt'))
 
+    def test_create_root_s3_with_prefix(self):
+        parameters = {'filters': [['--exclude', 'test.txt']],
+                      'dir_op': True,
+                      'src': 's3://bucket/prefix/',
+                      'dest': 'prefix'}
+        s3_filter = self.create_filter(parameters=parameters)
+        s3_files = [
+            self.file_stat('bucket/prefix/test.txt', src_type='s3'),
+            self.file_stat('bucket/prefix/test2.txt', src_type='s3'),
+        ]
+        filtered = list(s3_filter.call(s3_files))
+        self.assertEqual(len(filtered), 1)
+        self.assertEqual(filtered[0].src, 'bucket/prefix/test2.txt')
+
+    def test_create_root_s3_no_dir_op(self):
+        parameters = {'filters': [['--exclude', 'test.txt']],
+                      'dir_op': False,
+                      'src': 's3://bucket/test.txt',
+                      'dest': 'temp'}
+        s3_filter = self.create_filter(parameters=parameters)
+        s3_files = [
+            self.file_stat('bucket/test.txt', src_type='s3'),
+        ]
+        filtered = list(s3_filter.call(s3_files))
+        self.assertEqual(len(filtered), 0)
+
 
 if __name__ == "__main__":
     unittest.main()
```
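The expected counts in these tests follow from how `Filter` uses the root: each pattern is anchored at the root dir before matching. A sketch of that step (paraphrasing the filter's matching, which joins the pattern onto the root and compares with `fnmatch`; the `excluded` helper is hypothetical and assumes POSIX path separators):

```python
import fnmatch
import os

def excluded(src, pattern, rootdir):
    # Anchor the pattern at the root dir, then fnmatch against the source.
    return fnmatch.fnmatch(src, os.path.join(rootdir, pattern))

# test_create_root_s3_with_prefix: root 'bucket/prefix/' anchors
# 'test.txt' to 'bucket/prefix/test.txt', excluding exactly one file.
assert excluded('bucket/prefix/test.txt', 'test.txt', 'bucket/prefix/')
assert not excluded('bucket/prefix/test2.txt', 'test.txt', 'bucket/prefix/')

# test_create_root_s3_no_dir_op: root 'bucket/' anchors 'test.txt' to
# 'bucket/test.txt', so the single file is excluded and nothing remains.
assert excluded('bucket/test.txt', 'test.txt', 'bucket/')
```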