From faee2be4532652842c24a4fe4d8b497899c8cc34 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Mon, 8 Sep 2014 15:59:24 -0700
Subject: [PATCH 1/2] Enable region parameter to specify new bucket location

---
 cloud/s3 | 27 ++++++++++++++++++---------
 1 file changed, 18 insertions(+), 9 deletions(-)

diff --git a/cloud/s3 b/cloud/s3
index 80549fdd011..029a18135c8 100644
--- a/cloud/s3
+++ b/cloud/s3
@@ -128,6 +128,7 @@ import hashlib
 
 try:
     import boto
+    from boto.s3.connection import Location
 except ImportError:
     print "failed=True msg='boto required for this module'"
     sys.exit(1)
@@ -164,9 +165,9 @@ def bucket_check(module, s3, bucket):
     else:
         return False
 
-def create_bucket(module, s3, bucket):
+def create_bucket(module, s3, bucket, location=Location.DEFAULT):
     try:
-        bucket = s3.create_bucket(bucket)
+        bucket = s3.create_bucket(bucket, location=location)
     except s3.provider.storage_response_error, e:
         module.fail_json(msg= str(e))
     if bucket:
@@ -300,7 +301,15 @@ def main():
     metadata = module.params.get('metadata')
 
     ec2_url, aws_access_key, aws_secret_key, region = get_ec2_creds(module)
-    
+
+    if region in ('us-east-1', '', None):
+        # S3ism for the US Standard region
+        location = Location.DEFAULT
+    else:
+        # Boto uses symbolic names for locations but region strings will
+        # actually work fine for everything except us-east-1 (US Standard)
+        location = region
+
     if module.params.get('object'):
         obj = os.path.expanduser(module.params['object'])
 
@@ -413,16 +422,16 @@ def main():
                     upload_s3file(module, s3, bucket, obj, src, expiry, metadata)
                 else:
                     module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force upload.", failed=True)
-        
+
         # If neither exist (based on bucket existence), we can create both.
-        if bucketrtn is False and pathrtn is True: 
-            create_bucket(module, s3, bucket)
+        if bucketrtn is False and pathrtn is True:
+            create_bucket(module, s3, bucket, location)
             upload_s3file(module, s3, bucket, obj, src, expiry, metadata)
 
         # If bucket exists but key doesn't, just upload.
         if bucketrtn is True and pathrtn is True and keyrtn is False:
             upload_s3file(module, s3, bucket, obj, src, expiry, metadata)
-    
+
     # Support for deleting an object if we have both params.
     if mode == 'delete':
         if bucket:
@@ -444,7 +453,7 @@ def main():
             if bucketrtn is True:
                 module.exit_json(msg="Bucket already exists.", changed=False)
             else:
-                module.exit_json(msg="Bucket created successfully", changed=create_bucket(module, s3, bucket))
+                module.exit_json(msg="Bucket created successfully", changed=create_bucket(module, s3, bucket, location))
         if bucket and obj:
             bucketrtn = bucket_check(module, s3, bucket)
             if obj.endswith('/'):
@@ -458,7 +467,7 @@ def main():
                 else:
                     create_dirkey(module, s3, bucket, dirobj)
             if bucketrtn is False:
-                created = create_bucket(module, s3, bucket)
+                created = create_bucket(module, s3, bucket, location)
                 create_dirkey(module, s3, bucket, dirobj)
 
     # Support for grabbing the time-expired URL for an object in S3/Walrus.

From f99ed17b42be23e2de51d0a88416a59cd766e793 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Mon, 8 Sep 2014 16:02:08 -0700
Subject: [PATCH 2/2] Document how to use region in s3 module

---
 cloud/s3 | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/cloud/s3 b/cloud/s3
index 029a18135c8..5e756999ae8 100644
--- a/cloud/s3
+++ b/cloud/s3
@@ -17,7 +17,7 @@
 DOCUMENTATION = '''
 ---
 module: s3
-short_description: idempotent S3 module putting a file into S3.
+short_description: S3 module putting a file into S3.
 description:
     - This module allows the user to dictate the presence of a given file in an S3 bucket. If or once the key (file) exists in the bucket, it returns a time-expired download URL. This module has a dependency on python-boto.
 version_added: "1.1"
@@ -89,6 +89,12 @@ options:
     required: false
     default: null
     version_added: "1.6"
+  region:
+    description:
+      - AWS region to create the bucket in. If not set then the value of the EC2_REGION and AWS_REGION environment variables are checked, followed by the aws_region and ec2_region settings in the Boto config file. If none of those are set the region defaults to the S3 Location: US Standard. Prior to ansible 1.8 this parameter could be specified but had no effect.
+    required: false
+    default: null
+    version_added: "1.8"
 requirements: [ "boto" ]
 author: Lester Wade, Ralph Tice
 
@@ -117,6 +123,8 @@ EXAMPLES = '''
 - s3: bucket=mybucket mode=create
 # Create a bucket with key as directory
 - s3: bucket=mybucket object=/my/directory/path mode=create
+# Create an empty bucket in the EU region
+- s3: bucket=mybucket mode=create region=eu-west-1
 # Delete a bucket and all contents
 - s3: bucket=mybucket mode=delete
 '''
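
Note: the snippet below is an illustrative sketch and is not part of either patch above. It shows the same region-to-location mapping that PATCH 1/2 adds to the module, written against plain boto 2.x outside Ansible; the bucket name and region used here are placeholders.

# Standalone sketch of the region handling introduced in PATCH 1/2, using
# boto 2.x directly. Bucket name and region below are examples only.
import boto
from boto.s3.connection import Location


def region_to_location(region):
    # S3 treats us-east-1 (US Standard) specially: it must be requested as
    # Location.DEFAULT (the empty string) rather than by its region name.
    if region in ('us-east-1', '', None):
        return Location.DEFAULT
    # Any other region string is accepted as a location constraint as-is.
    return region


if __name__ == '__main__':
    # Credentials come from the usual boto sources (environment variables,
    # ~/.boto, or an IAM role).
    conn = boto.connect_s3()
    conn.create_bucket('my-example-bucket',
                       location=region_to_location('eu-west-1'))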