# s3_upload.py
from __future__ import print_function
import os
import sys
import argparse
import boto3
from botocore.exceptions import ClientError
def upload_to_s3(bucket, artefact, is_folder, bucket_key):
    """Upload a single file or an entire folder tree to an S3 bucket.

    Args:
        bucket: Name of the existing S3 bucket.
        artefact: Path of the file (or folder root) to upload.
        is_folder: The string 'true' (case-insensitive) when ``artefact``
            is a folder whose contents should be uploaded recursively.
        bucket_key: Destination key for a single-file upload. Ignored for
            folder uploads, where each file's local relative path is reused
            as its S3 key, mirroring the folder layout in the bucket.

    Returns:
        True on success, False on any failure (the error is printed).
    """
    try:
        client = boto3.client('s3')
    except ClientError as err:
        print("Failed to create boto3 client.\n" + str(err))
        return False

    # Callers pass the flag as a raw CLI string; accept any casing of 'true'.
    if is_folder.lower() == 'true':
        for root, dirs, files in os.walk(artefact, topdown=False):
            print('Walking it')
            for file in files:
                try:
                    print(file)
                    client.upload_file(os.path.join(root, file), bucket, os.path.join(root, file))
                except ClientError as err:
                    print("Failed to upload artefact to S3.\n" + str(err))
                    return False
                except IOError as err:
                    print("Failed to access artefact in this directory.\n" + str(err))
                    return False
    else:
        print('Uploading file ' + artefact)
        # Bug fix: the single-file path previously ran unprotected, so an
        # upload failure crashed the script instead of returning False like
        # the folder branch does.
        try:
            client.upload_file(artefact, bucket, bucket_key)
        except (ClientError, IOError) as err:
            print("Failed to upload artefact to S3.\n" + str(err))
            return False
    return True
def main():
    """Parse CLI arguments and upload the artefact, exiting 1 on failure."""
    arg_parser = argparse.ArgumentParser()
    for name, description in (
        ("bucket", "Name of the existing S3 bucket"),
        ("artefact", "Name of the artefact to be uploaded to S3"),
        ("is_folder", "True if its the name of a folder"),
        ("bucket_key", "Name of file in bucket"),
    ):
        arg_parser.add_argument(name, help=description)
    parsed = arg_parser.parse_args()
    succeeded = upload_to_s3(
        parsed.bucket, parsed.artefact, parsed.is_folder, parsed.bucket_key
    )
    if not succeeded:
        sys.exit(1)


if __name__ == "__main__":
    main()
# bitbucket-pipelines.yml
# Bitbucket Pipelines: on each branch, empty the environment's S3 bucket and
# re-upload the lambda code, node_modules, and environment config.
image: python:3.5.1

pipelines:
  branches:
    dev:
      - step:
          script:
            # NOTE(review): dev pins boto3 1.4.1 and installs requests, while
            # stage/master pin 1.3.0 without requests — confirm this drift is
            # intentional.
            - pip install boto3==1.4.1
            - pip install requests
            - python s3_emptyBucket.py dev-slz-processor-repo
            - python s3_upload.py dev-slz-processor-repo lambda true lambda
            - python s3_upload.py dev-slz-processor-repo node_modules true node_modules
            # Single-file upload: config.dev.json is stored under the key config.json.
            - python s3_upload.py dev-slz-processor-repo config.dev.json false config.json
    stage:
      - step:
          script:
            - pip install boto3==1.3.0
            - python s3_emptyBucket.py staging-slz-processor-repo
            - python s3_upload.py staging-slz-processor-repo lambda true lambda
            - python s3_upload.py staging-slz-processor-repo node_modules true node_modules
            - python s3_upload.py staging-slz-processor-repo config.staging.json false config.json
    master:
      - step:
          script:
            - pip install boto3==1.3.0
            - python s3_emptyBucket.py prod-slz-processor-repo
            - python s3_upload.py prod-slz-processor-repo lambda true lambda
            - python s3_upload.py prod-slz-processor-repo node_modules true node_modules
            - python s3_upload.py prod-slz-processor-repo config.prod.json false config.json
# s3_emptyBucket.py
from __future__ import print_function
import os
import sys
import argparse
import boto3
from botocore.exceptions import ClientError
def empty_bucket(bucket):
    """Delete every object in the given S3 bucket.

    Args:
        bucket: Name of the existing S3 bucket to empty.

    Returns:
        True on success, False on any failure (the error is printed).
    """
    try:
        resource = boto3.resource('s3')
    except ClientError as err:
        print("Failed to create boto3 resource.\n" + str(err))
        return False
    print("Removing all objects from bucket: " + bucket)
    # Bug fix: the delete call is the operation that actually talks to S3 and
    # can raise ClientError; it previously ran outside the try block, so a
    # failure crashed the script instead of returning False.
    try:
        resource.Bucket(bucket).objects.delete()
    except ClientError as err:
        print("Failed to empty bucket.\n" + str(err))
        return False
    return True
def main():
    """Parse the bucket name from the CLI and empty it, exiting 1 on failure."""
    cli = argparse.ArgumentParser()
    cli.add_argument("bucket", help="Name of the existing S3 bucket to empty")
    options = cli.parse_args()
    if not empty_bucket(options.bucket):
        sys.exit(1)


if __name__ == "__main__":
    main()