Merge branch 'release-1.3.22'
* release-1.3.22:
  Bumping version to 1.3.22
  Update CHANGELOG with the latest features
  Update completer test with new services
  Update changelog with #825
  Add changelog entry for #834
  Fix changelog entry for merge of #831
  Added test_cancel_after_upload_id to test_tasks
  Update changelog with fix for #549
  Disable fix_s3_host when --endpoint-url is given
  Fixes issue #834
  Update changelog with bugfix
  Add validation to ensure we don't mv a file onto itself
  Let aws.cmd find python.exe on paths with spaces.
jamesls committed Jul 10, 2014
2 parents 3f48075 + ecfda6c commit 0acbafd
Showing 14 changed files with 240 additions and 17 deletions.
28 changes: 28 additions & 0 deletions CHANGELOG.rst
@@ -2,6 +2,34 @@
CHANGELOG
=========


1.3.22
======

* feature:``aws logs``: Add support for Amazon CloudWatch Logs
* feature:``aws cognito-sync``: Add support for
  Amazon Cognito Sync service
* feature:``aws cognito-identity``: Add support for
Amazon Cognito Identity Service
* feature:``aws route53``: Update ``aws route53`` command to the
latest version
* feature:``aws ec2``: Update ``aws ec2`` command to the
latest version
* bugfix:``aws s3/s3api``: Fix issue where ``--endpoint-url``
wasn't being used for ``aws s3/s3api`` commands
(`issue 549 <https://github.com/aws/aws-cli/issues/549>`__)
* bugfix:``aws s3 mv``: Fix bug where using the ``aws s3 mv``
command to move a large file onto itself results in the
file being deleted
(`issue 831 <https://github.com/aws/aws-cli/issues/831>`__)
* bugfix:``aws s3``: Fix issue where parts in a multipart
  upload were still being uploaded after a part had failed
  (`issue 834 <https://github.com/aws/aws-cli/issues/834>`__)
* bugfix:Windows: Fix issue where ``aws.cmd`` could not find
  ``python.exe`` when it is installed on a path that contains spaces
  (`issue 825 <https://github.com/aws/aws-cli/pull/825>`__)


1.3.21
======

2 changes: 1 addition & 1 deletion awscli/__init__.py
@@ -17,7 +17,7 @@
"""
import os

-__version__ = '1.3.21'
+__version__ = '1.3.22'

#
# Get our data path to be added to botocore's search path
17 changes: 17 additions & 0 deletions awscli/customizations/s3/s3.py
@@ -662,6 +662,23 @@ def add_paths(self, paths):
self.parameters['dest'] = paths[1]
elif len(paths) == 1:
self.parameters['dest'] = paths[0]
self._validate_path_args()

def _validate_path_args(self):
# If we're using a mv command, you can't copy the object onto itself.
params = self.parameters
if self.cmd == 'mv' and self._same_path(params['src'], params['dest']):
raise ValueError("Cannot mv a file onto itself: '%s' - '%s'" % (
params['src'], params['dest']))

def _same_path(self, src, dest):
if not self.parameters['paths_type'] == 's3s3':
return False
elif src == dest:
return True
elif dest.endswith('/'):
src_base = os.path.basename(src)
return src == os.path.join(dest, src_base)

def _normalize_s3_trailing_slash(self, paths):
for i, path in enumerate(paths):
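
For reference, a minimal standalone sketch (not part of this commit) of the same-path check that the new _validate_path_args/_same_path code above performs. The helper name and the example keys are illustrative, and the real method additionally requires both paths to be S3 paths (paths_type == 's3s3'):

    import os

    def same_s3_path(src, dest):
        # An explicit key match, or a "directory" destination that would
        # resolve back to the source key once the source basename is appended.
        if src == dest:
            return True
        if dest.endswith('/'):
            return src == os.path.join(dest, os.path.basename(src))
        return False

    # 'aws s3 mv s3://bucket/key.txt s3://bucket/' is rejected because the
    # implied destination is the source key itself.
    assert same_s3_path('s3://bucket/key.txt', 's3://bucket/key.txt')
    assert same_s3_path('s3://bucket/key.txt', 's3://bucket/')
    assert not same_s3_path('s3://bucket/key.txt', 's3://bucket/other/')
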
8 changes: 4 additions & 4 deletions awscli/customizations/s3/tasks.py
@@ -560,10 +560,10 @@ def wait_for_parts_to_finish(self):

     def wait_for_upload_id(self):
         with self._upload_id_condition:
-            while self._upload_id is None:
-                if self._state == self._CANCELLED:
-                    raise UploadCancelledError("Upload has been cancelled.")
-                self._upload_id_condition.wait(timeout=1)
+            while self._upload_id is None and self._state != self._CANCELLED:
+                self._upload_id_condition.wait(timeout=1)
+            if self._state == self._CANCELLED:
+                raise UploadCancelledError("Upload has been cancelled.")
             return self._upload_id

def wait_for_completion(self):
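
To make the intent of the change above concrete, here is a minimal standalone sketch (not part of this commit) of the wait-or-cancel pattern; the class and method names are illustrative, not the actual context class in tasks.py:

    import threading

    class UploadCancelledError(Exception):
        pass

    class UploadContext(object):
        def __init__(self):
            self._cond = threading.Condition()
            self._upload_id = None
            self._cancelled = False

        def announce_upload_id(self, upload_id):
            with self._cond:
                self._upload_id = upload_id
                self._cond.notify_all()

        def cancel(self):
            with self._cond:
                self._cancelled = True
                self._cond.notify_all()

        def wait_for_upload_id(self):
            with self._cond:
                while self._upload_id is None and not self._cancelled:
                    self._cond.wait(timeout=1)
                # Check cancellation even when an upload id was announced
                # earlier: a cancelled upload should never hand its upload
                # id to waiting part-upload threads.
                if self._cancelled:
                    raise UploadCancelledError("Upload has been cancelled.")
                return self._upload_id

The key difference from the old code is that the cancellation check now happens outside the loop, so waiters that already saw an upload id still get an UploadCancelledError once the upload is cancelled; that is the behaviour test_cancel_after_upload_id exercises further down.
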
45 changes: 45 additions & 0 deletions awscli/customizations/s3endpoint.py
@@ -0,0 +1,45 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Disable endpoint url customizations for s3.
There's a customization in botocore such that for S3 operations
we try to fix the S3 endpoint url based on whether a bucket is
dns compatible. We also try to map the endpoint url to the
standard S3 region (s3.amazonaws.com). This normally happens
even if a user provides an --endpoint-url (if the bucket is
DNS compatible).
This customization ensures that if a user specifies
an --endpoint-url, then we turn off the botocore customization
that messes with endpoint url.
"""
from functools import partial

from botocore.handlers import fix_s3_host


def register_s3_endpoint(cli):
handler = partial(on_top_level_args_parsed, event_handler=cli)
cli.register('top-level-args-parsed', handler)


def on_top_level_args_parsed(parsed_args, event_handler, **kwargs):
# The fix_s3_host has logic to set the endpoint to the
# standard region endpoint for s3 (s3.amazonaws.com) under
# certain conditions. We're making sure that if
# the user provides an --endpoint-url, that entire handler
# is disabled.
if parsed_args.command in ['s3', 's3api'] and \
parsed_args.endpoint_url is not None:
event_handler.unregister('before-auth.s3', fix_s3_host)
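
As a rough illustration (not part of this commit) of how this customization takes effect, the sketch below wires register_s3_endpoint to a fake event interface and simulates the parsed top-level args; FakeCLIDriver, the Namespace fields, and the endpoint URL are all made up for the example:

    from argparse import Namespace

    from awscli.customizations.s3endpoint import register_s3_endpoint
    from botocore.handlers import fix_s3_host

    class FakeCLIDriver(object):
        # Minimal stand-in for the register/unregister/emit event interface
        # that the real CLI driver exposes.
        def __init__(self):
            self._handlers = {}
            self.unregistered = []

        def register(self, event, handler):
            self._handlers.setdefault(event, []).append(handler)

        def unregister(self, event, handler):
            self.unregistered.append((event, handler))

        def emit(self, event, **kwargs):
            for handler in self._handlers.get(event, []):
                handler(**kwargs)

    cli = FakeCLIDriver()
    register_s3_endpoint(cli)

    # Simulate 'aws s3 ... --endpoint-url http://localhost:9000': the handler
    # bound via functools.partial unregisters fix_s3_host, so the
    # user-supplied endpoint url is left alone.
    cli.emit('top-level-args-parsed',
             parsed_args=Namespace(command='s3',
                                   endpoint_url='http://localhost:9000'))
    assert cli.unregistered == [('before-auth.s3', fix_s3_host)]
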
2 changes: 2 additions & 0 deletions awscli/handlers.py
@@ -46,6 +46,7 @@
from awscli.customizations.cloudsearch import initialize as cloudsearch_init
from awscli.customizations.emr.emr import emr_initialize
from awscli.customizations.cloudsearchdomain import register_cloudsearchdomain
from awscli.customizations.s3endpoint import register_s3_endpoint


def awscli_initialize(event_handlers):
@@ -94,3 +95,4 @@ def awscli_initialize(event_handlers):
cloudsearch_init(event_handlers)
emr_initialize(event_handlers)
register_cloudsearchdomain(event_handlers)
register_s3_endpoint(event_handlers)
10 changes: 5 additions & 5 deletions bin/aws.cmd
@@ -1,7 +1,7 @@
@echo OFF
REM="""
setlocal
-set PythonExe=
+set PythonExe=""
set PythonExeFlags=

for %%i in (cmd bat exe) do (
@@ -16,13 +16,13 @@ for /f "tokens=2 delims==" %%i in ('assoc .py') do (
)
)
)
"%PythonExe%" -x %PythonExeFlags% "%~f0" %*
%PythonExe% -x %PythonExeFlags% "%~f0" %*
goto :EOF

:SetPythonExe
-if not [%1]==[""] (
-    if ["%PythonExe%"]==[""] (
-        set PythonExe=%~1
+if not ["%~1"]==[""] (
+    if [%PythonExe%]==[""] (
+        set PythonExe="%~1"
)
)
goto :EOF
2 changes: 1 addition & 1 deletion doc/source/conf.py
@@ -52,7 +52,7 @@
# The short X.Y version.
version = '1.3.'
# The full version, including alpha/beta/rc tags.
-release = '1.3.21'
+release = '1.3.22'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
2 changes: 1 addition & 1 deletion setup.py
@@ -6,7 +6,7 @@
import awscli


-requires = ['botocore>=0.55.0,<0.56.0',
+requires = ['botocore>=0.56.0,<0.57.0',
'bcdoc>=0.12.0,<0.13.0',
'six>=1.1.0',
'colorama==0.2.5',
21 changes: 21 additions & 0 deletions tests/integration/customizations/s3/test_plugin.py
@@ -247,6 +247,27 @@ def test_mv_to_nonexistent_bucket(self):
p = aws('s3 mv %s s3://bad-noexist-13143242/foo.txt' % (full_path,))
self.assertEqual(p.rc, 1)

def test_cant_move_file_onto_itself_small_file(self):
# We don't even need a remote file in this case. We can
# immediately validate that we can't move a file onto itself.
bucket_name = self.create_bucket()
self.put_object(bucket_name, key_name='key.txt', contents='foo')
p = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket_name, bucket_name))
self.assertEqual(p.rc, 255)
self.assertIn('Cannot mv a file onto itself', p.stderr)

def test_cant_move_large_file_onto_itself(self):
# At the API level, you can multipart copy an object onto itself,
# but a mv command doesn't make sense because a mv is just a
# cp + an rm of the src file. We should be consistent and
# not allow large files to be mv'd onto themselves.
file_contents = six.BytesIO(b'a' * (1024 * 1024 * 10))
bucket_name = self.create_bucket()
self.put_object(bucket_name, key_name='key.txt', contents=file_contents)
p = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket_name, bucket_name))
self.assertEqual(p.rc, 255)
self.assertIn('Cannot mv a file onto itself', p.stderr)


class TestRm(BaseS3CLICommand):
@unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
46 changes: 46 additions & 0 deletions tests/unit/customizations/s3/test_mv_command.py
@@ -0,0 +1,46 @@
#!/usr/bin/env python
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.testutils import BaseAWSCommandParamsTest, FileCreator, unittest
import re

import mock
import six


class TestMvCommand(BaseAWSCommandParamsTest):

prefix = 's3 mv '

def setUp(self):
super(TestMvCommand, self).setUp()
self.files = FileCreator()

def tearDown(self):
super(TestMvCommand, self).tearDown()
self.files.remove_all()

def test_cant_mv_object_onto_itself(self):
cmdline = '%s s3://bucket/key s3://bucket/key' % self.prefix
stderr = self.run_cmd(cmdline, expected_rc=255)[1]
self.assertIn('Cannot mv a file onto itself', stderr)

def test_cant_mv_object_with_implied_name(self):
# The "key" key name is implied in the dst argument.
cmdline = '%s s3://bucket/key s3://bucket/' % self.prefix
stderr = self.run_cmd(cmdline, expected_rc=255)[1]
self.assertIn('Cannot mv a file onto itself', stderr)


if __name__ == "__main__":
unittest.main()
20 changes: 20 additions & 0 deletions tests/unit/customizations/s3/test_tasks.py
@@ -223,6 +223,26 @@ def test_can_cancel_tasks(self):
with self.assertRaises(UploadCancelledError):
self.context.wait_for_parts_to_finish()

def test_cancel_after_upload_id(self):
# We want to have a thread waiting for the upload id.
upload_part_thread = threading.Thread(target=self.upload_part,
args=(1,))
self.start_thread(upload_part_thread)

# We announce the upload id.
self.create_upload('my_upload_id')
# The upload_part thread can now proceed.
# Now, let's cancel this upload.
self.context.cancel_upload()

# The upload_part_thread should be finished.
self.join_threads()

# In a cancelled multipart upload task, any subsequent
# call to wait_for_upload_id must raise an UploadCancelledError.
with self.assertRaises(UploadCancelledError):
self.context.wait_for_upload_id()

def test_cancel_threads_waiting_for_completion(self):
# So we have a thread waiting for the entire upload to complete.
arbitrary_waiting_thread = threading.Thread(target=self.wait_for_upload_complete)
43 changes: 43 additions & 0 deletions tests/unit/customizations/test_s3endpoint.py
@@ -0,0 +1,43 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.testutils import unittest
from awscli.customizations.s3endpoint import on_top_level_args_parsed

from botocore.handlers import fix_s3_host

import mock


class TestS3EndpointURL(unittest.TestCase):
def test_endpoint_url_unregisters_fix_s3_host(self):
args = mock.Mock()
args.endpoint_url = 'http://custom/'
args.command = 's3'
event_handler = mock.Mock()
on_top_level_args_parsed(args, event_handler)
event_handler.unregister.assert_called_with('before-auth.s3', fix_s3_host)

def test_unregister_not_called_for_no_endpoint(self):
args = mock.Mock()
args.endpoint_url = None
event_handler = mock.Mock()
on_top_level_args_parsed(args, event_handler)
self.assertFalse(event_handler.unregister.called)

def test_endpoint_url_set_but_not_for_s3(self):
args = mock.Mock()
args.endpoint_url = 'http://custom/'
args.command = 'NOTS3'
event_handler = mock.Mock()
on_top_level_args_parsed(args, event_handler)
self.assertFalse(event_handler.unregister.called)
11 changes: 6 additions & 5 deletions tests/unit/test_completer.py
@@ -29,11 +29,12 @@
COMPLETIONS = [
('aws ', -1, set(['autoscaling', 'cloudformation', 'cloudsearch',
'cloudsearchdomain', 'cloudtrail', 'cloudwatch',
-                      'configure', 'datapipeline', 'directconnect', 'dynamodb',
-                      'ec2', 'elasticache', 'elasticbeanstalk',
-                      'elastictranscoder', 'elb', 'iam', 'importexport',
-                      'kinesis', 'opsworks', 'rds', 'redshift', 'route53',
-                      's3', 's3api', 'ses', 'sns', 'sqs', 'storagegateway',
+                      'cognito-identity', 'cognito-sync', 'configure',
+                      'datapipeline', 'directconnect', 'dynamodb', 'ec2',
+                      'elasticache', 'elasticbeanstalk', 'elastictranscoder',
+                      'elb', 'iam', 'importexport', 'kinesis', 'logs',
+                      'opsworks', 'rds', 'redshift', 'route53', 's3', 's3api',
+                      'ses', 'sns', 'sqs', 'storagegateway',
'sts', 'support', 'swf'])),
('aws cloud', -1, set(['cloudformation', 'cloudsearch',
'cloudsearchdomain', 'cloudtrail', 'cloudwatch'])),
