Skip to content

Commit

Permalink
Merge branch 'release-1.6.10'
Browse files Browse the repository at this point in the history
* release-1.6.10:
  Bumping version to 1.6.10
  [EMR] Bug fix: Fix script runner jar to the current region location when --enable-debugging is specified in create-cluster command.
  [EMR] Add examples of adding multiple files in a streaming step in create-cluster and add-steps.
  Update changelog with the latest changes
  Use .move instead of .rename when creating bundled installer
  Add issue to changelog
  Create cache file with 0600 permissions
  Replace ':' char for windows-safe filenames
  • Loading branch information
jamesls committed Dec 18, 2014
2 parents 37196d4 + b53c8cb commit 24e6947
Show file tree
Hide file tree
Showing 11 changed files with 160 additions and 17 deletions.
18 changes: 18 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,24 @@
CHANGELOG
=========

1.6.10
======

* bugfix:AssumeRole: Fix issue with cache filenames when assuming a role
on Windows
(`issue 1063 <https://github.com/aws/aws-cli/issues/1063>`__)
* bugfix:``aws s3 ls``: Fix issue when listing Amazon S3 objects containing
non-ascii characters in eu-central-1
(`issue 1046 <https://github.com/aws/aws-cli/issues/1046>`__)
* feature:``aws storagegateway``: Update the ``aws storagegateway`` command
to the latest version
* feature:``aws emr``: Update the ``aws emr`` command to the latest
version
* bugfix:``aws emr create-cluster``: Fix script runner jar to the current
region location when ``--enable-debugging`` is specified in the
``aws emr create-cluster`` command


1.6.9
=====

Expand Down
2 changes: 1 addition & 1 deletion awscli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"""
import os

__version__ = '1.6.9'
__version__ = '1.6.10'

#
# Get our data path to be added to botocore's search path
Expand Down
8 changes: 6 additions & 2 deletions awscli/customizations/assumerole.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ def __setitem__(self, cache_key, value):
"JSON serializable: %s" % value)
if not os.path.isdir(self._working_dir):
os.makedirs(self._working_dir)
with open(full_key, 'w') as f:
with os.fdopen(os.open(full_key,
os.O_WRONLY | os.O_CREAT, 0o600), 'w') as f:
f.write(file_content)

def _convert_cache_key(self, cache_key):
Expand Down Expand Up @@ -231,7 +232,10 @@ def _is_expired(self, credentials):

def _create_cache_key(self):
role_config = self._get_role_config_values()
cache_key = '%s--%s' % (self._profile_name, role_config['role_arn'])
# On windows, ':' is not allowed in filenames, so we'll
# replace them with '_' instead.
role_arn = role_config['role_arn'].replace(':', '_')
cache_key = '%s--%s' % (self._profile_name, role_arn)
return cache_key.replace('/', '-')

def _write_cached_credentials(self, creds, cache_key):
Expand Down
2 changes: 1 addition & 1 deletion awscli/customizations/emr/createcluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ def _build_enable_debugging(self, parsed_globals):
return emrutils.build_step(
name=constants.DEBUGGING_NAME,
action_on_failure=constants.TERMINATE_CLUSTER,
jar=emrutils.get_script_runner(),
jar=emrutils.get_script_runner(parsed_globals.region),
args=[emrutils.build_s3_link(
relative_path=constants.DEBUGGING_PATH,
region=parsed_globals.region)])
Expand Down
50 changes: 47 additions & 3 deletions awscli/examples/emr/add-steps.rst
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,51 @@ NOTE: JSON arguments must include options and values as their own items in the l
]
}

**3. To add Hive steps to a cluster**
**3. To add a Streaming step with multiple files to a cluster (JSON only)**

- JSON (multiplefiles.json)::

[
{
"Name": "JSON Streaming Step",
"Type": "STREAMING",
"ActionOnFailure": "CONTINUE",
"Args": [
"-files",
"s3://mybucket/mapper.py,s3://mybucket/reducer.py",
"-mapper",
"mapper.py",
"-reducer",
"reducer.py",
"-input",
"s3://mybucket/input",
"-output",
"s3://mybucket/output"]
}
]

- Command::

aws emr add-steps --cluster-id j-XXXXXXXX --steps file://./multiplefiles.json

- Required parameters::

Type, Args

- Optional parameters::

Name, ActionOnFailure

- Output::

{
"StepIds":[
"s-XXXXXXXX",
]
}


**4. To add Hive steps to a cluster**

- Command::

Expand All @@ -86,7 +130,7 @@ NOTE: JSON arguments must include options and values as their own items in the l
}


**4. To add Pig steps to a cluster**
**5. To add Pig steps to a cluster**

- Command::

Expand All @@ -111,7 +155,7 @@ NOTE: JSON arguments must include options and values as their own items in the l
}


**5. To add Impala steps to a cluster**
**6. To add Impala steps to a cluster**

- Command::

Expand Down
35 changes: 31 additions & 4 deletions awscli/examples/emr/create-cluster-examples.rst
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,34 @@ NOTE: JSON arguments must include options and values as their own items in the l

aws emr create-cluster --steps file://./step.json --ami-version 3.1.0 --instance-groups InstanceGroupType=MASTER,InstanceCount=1,InstanceType=m3.xlarge InstanceGroupType=CORE,InstanceCount=2,InstanceType=m3.xlarge --auto-terminate

**14. To add Hive steps when creating an Amazon EMR cluster**
**14. To use multiple files in a Streaming step (JSON only)**

- JSON (multiplefiles.json)::

[
{
"Name": "JSON Streaming Step",
"Type": "STREAMING",
"ActionOnFailure": "CONTINUE",
"Args": [
"-files",
"s3://mybucket/mapper.py,s3://mybucket/reducer.py",
"-mapper",
"mapper.py",
"-reducer",
"reducer.py",
"-input",
"s3://mybucket/input",
"-output",
"s3://mybucket/output"]
}
]

- Command::

aws emr create-cluster --steps file://./multiplefiles.json --ami-version 3.3.1 --instance-groups InstanceGroupType=MASTER,InstanceCount=1,InstanceType=m3.xlarge InstanceGroupType=CORE,InstanceCount=2,InstanceType=m3.xlarge --auto-terminate

**15. To add Hive steps when creating an Amazon EMR cluster**

- Command::

Expand All @@ -151,7 +178,7 @@ NOTE: JSON arguments must include options and values as their own items in the l

Name, ActionOnFailure

**15. To add Pig steps when creating an Amazon EMR cluster**
**16. To add Pig steps when creating an Amazon EMR cluster**

- Command::

Expand All @@ -165,7 +192,7 @@ NOTE: JSON arguments must include options and values as their own items in the l

Name, ActionOnFailure

**16. To add Impala steps when creating an Amazon EMR cluster**
**17. To add Impala steps when creating an Amazon EMR cluster**

- Command::

Expand All @@ -179,7 +206,7 @@ NOTE: JSON arguments must include options and values as their own items in the l

Name, ActionOnFailure

**17. To enable consistent view and server-side encryption in EMRFS when creating an Amazon EMR cluster and changing RetryCount, RetryPeriod, and encryption algorithm from default values**
**18. To enable consistent view and server-side encryption in EMRFS when creating an Amazon EMR cluster and changing RetryCount, RetryPeriod, and encryption algorithm from default values**

- Command::

Expand Down
4 changes: 2 additions & 2 deletions doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,9 @@
# built documents.
#
# The short X.Y version.
version = '1.6'
version = '1.6.'
# The full version, including alpha/beta/rc tags.
release = '1.6.9'
release = '1.6.10'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
6 changes: 3 additions & 3 deletions scripts/make-bundle
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,8 @@ def add_cli_sdist(scratch_dir):
with cd(awscli_dir):
run('python setup.py sdist')
filename = os.listdir('dist')[0]
os.rename(os.path.join('dist', filename),
os.path.join(scratch_dir, filename))
shutil.move(os.path.join('dist', filename),
os.path.join(scratch_dir, filename))


def create_bootstrap_script(scratch_dir):
Expand All @@ -133,7 +133,7 @@ def zip_dir(scratch_dir):
final_dir_name = os.path.join(dirname, 'awscli-bundle')
if os.path.isdir(final_dir_name):
shutil.rmtree(final_dir_name)
os.rename(scratch_dir, final_dir_name)
shutil.move(scratch_dir, final_dir_name)
with cd(dirname):
with zipfile.ZipFile(basename, 'w', zipfile.ZIP_DEFLATED) as zipped:
for root, dirnames, filenames in os.walk('awscli-bundle'):
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import awscli


requires = ['botocore>=0.79.0,<0.80.0',
requires = ['botocore>=0.80.0,<0.81.0',
'bcdoc>=0.12.0,<0.13.0',
'colorama==0.2.5',
'docutils>=0.10',
Expand Down
17 changes: 17 additions & 0 deletions tests/unit/customizations/emr/test_create_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -504,6 +504,23 @@ def test_enable_debugging(self):
result['Steps'] = debugging_config
self.assert_params_for_cmd2(cmd, result)

cmd = DEFAULT_CMD + ('--log-uri s3://test/logs --enable-debugging '
'--region us-west-2')
debugging_config = \
[{'Name': 'Setup Hadoop Debugging',
'ActionOnFailure': 'TERMINATE_CLUSTER',
'HadoopJarStep':
{'Args':
[('s3://us-west-2.elasticmapreduce/libs/'
'state-pusher/0.1/fetch')],
'Jar':
's3://us-west-2.elasticmapreduce/libs/' +
'script-runner/script-runner.jar'
}
}]
result['Steps'] = debugging_config
self.assert_params_for_cmd2(cmd, result)

def test_enable_debugging_no_log_uri(self):
cmd = DEFAULT_CMD + '--enable-debugging'
expected_error_msg = (
Expand Down
33 changes: 33 additions & 0 deletions tests/unit/customizations/test_assumerole.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import shutil
import tempfile
import os
import platform
from datetime import datetime, timedelta

import mock
Expand Down Expand Up @@ -132,6 +133,31 @@ def test_assume_role_retrieves_from_cache(self):
self.assertEqual(credentials.secret_key, 'bar-cached')
self.assertEqual(credentials.token, 'baz-cached')

def test_cache_key_is_windows_safe(self):
    """The cache key derived from a role ARN must be Windows-safe.

    ':' is not a legal character in Windows filenames, so the provider
    replaces each ':' in the role ARN with '_' when it builds the key.
    """
    response = {
        'Credentials': {
            'AccessKeyId': 'foo',
            'SecretAccessKey': 'bar',
            'SessionToken': 'baz',
            'Expiration': datetime.now(tzlocal()).isoformat()
        },
    }
    self.fake_config['profiles']['development']['role_arn'] = (
        'arn:aws:iam::foo-role')
    cache = {}
    provider = assumerole.AssumeRoleProvider(
        self.create_config_loader(),
        self.create_client_creator(with_response=response),
        cache=cache, profile_name='development')

    provider.load()
    # Every ':' in the ARN should have been mapped to '_' in the key.
    self.assertEqual(cache['development--arn_aws_iam__foo-role'],
                     response)

def test_assume_role_in_cache_but_expired(self):
expired_creds = datetime.utcnow()
utc_timestamp = expired_creds.isoformat() + 'Z'
Expand Down Expand Up @@ -328,3 +354,10 @@ def test_working_dir_does_not_exist(self):
def test_key_error_raised_when_cache_key_does_not_exist(self):
    # Looking up a key that was never stored must raise KeyError.
    self.assertRaises(KeyError, self.cache.__getitem__, 'foo')

@unittest.skipIf(platform.system() not in ('Darwin', 'Linux'),
                 'File permissions tests not supported on Windows.')
def test_permissions_for_file_restricted(self):
    # Cache files hold credentials, so they must be created with mode
    # 0600 (owner read/write only).
    self.cache['mykey'] = {'foo': 'bar'}
    cache_path = os.path.join(self.tempdir, 'mykey.json')
    mode = os.stat(cache_path).st_mode
    self.assertEqual(mode & 0o7777, 0o600)

0 comments on commit 24e6947

Please sign in to comment.