########################################################################
# Name:
# doPreparePackage.py
# Description:
# An AWS Lambda function that publishes the package produced as the
# output of an AWS CodePipeline build.
# Author:
# wuwesley
# Python:
# 2.7
# Version:
# 1.0
########################################################################
import logging
import boto3
import os
import datetime
from botocore.client import Config
from botocore.exceptions import ClientError
LOGGER = logging.getLogger()
LOGGER.setLevel(logging.INFO)
LOGGER.info("============Loading function============")
s3r = boto3.resource('s3', region_name="us-east-1", config=Config(signature_version='s3v4'))
s3 = boto3.client('s3', region_name="us-east-1", config=Config(signature_version='s3v4'))
def log_event(event):
"""
Logs event information for debugging
"""
LOGGER.info("=========================================")
LOGGER.info(event)
LOGGER.info("=========================================")
def update_package_time(bucket, key):
"""
update the creation time of the final package
"""
# Get time stamp
utc_datetime = datetime.datetime.utcnow()
timestamp = utc_datetime.strftime("%Y%m%d%H%M%S")
# Get the file MD5
md5 = s3.head_object(Bucket=bucket,Key=key)['ETag'][1:-1]
msgBody = timestamp + "|" + md5
    infoKey = key + ".info"
try:
s3r.Bucket(bucket).put_object(Key=infoKey, Body=msgBody)
s3.put_object_acl(ACL='public-read', Bucket=bucket, Key=infoKey)
LOGGER.info("The lastest update info:%s", msgBody)
return True
except ClientError as err:
LOGGER.error("Failed to update the creation time of the final package!\n%s", err)
return False
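
# The ".info" companion object written above stores "timestamp|md5". Below is
# a minimal sketch of how a downstream consumer could read that back, reusing
# the module-level `s3` client; this helper is illustrative only and is not
# called anywhere in the pipeline itself.
def read_package_info(bucket, key):
    """
    Fetch <key>.info written by update_package_time and return its
    (timestamp, md5) parts.
    """
    body = s3.get_object(Bucket=bucket, Key=key + ".info")['Body'].read()
    timestamp, md5 = body.split("|", 1)
    return timestamp, md5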
def codepipeline_success(job_id):
"""
Puts CodePipeline Success Result
"""
try:
codepipeline = boto3.client('codepipeline')
codepipeline.put_job_success_result(jobId=job_id)
LOGGER.info("============SUCCESS============")
return True
except ClientError as err:
LOGGER.error("Failed to PutJobSuccessResult for CodePipeline!\n%s", err)
return False
def codepipeline_failure(job_id, message):
"""
Puts CodePipeline Failure Result
"""
try:
codepipeline = boto3.client('codepipeline')
codepipeline.put_job_failure_result(
jobId=job_id,
failureDetails={'type': 'JobFailed', 'message': message}
)
LOGGER.info("============FAILURE============")
return True
except ClientError as err:
LOGGER.error("Failed to PutJobFailureResult for CodePipeline!\n%s", err)
return False
def checkObjecExist(bucket, key):
    """
    Check whether an object exists under the given key. Note that
    list_objects matches by prefix, so any key starting with `key`
    counts as a hit.
    """
    results = s3.list_objects(Bucket=bucket, Prefix=key)
    return 'Contents' in results
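
# Because list_objects matches by prefix, the check above can also return
# True for sibling keys (e.g. "pkg.zip.info" when asked about "pkg.zip").
# A hedged alternative sketch using head_object for an exact-key test;
# this variant is an assumption about intent and is not used below.
def checkObjectExistExact(bucket, key):
    """
    Illustrative exact-key existence check; returns False only on a 404.
    """
    try:
        s3.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError as err:
        if err.response['Error']['Code'] == '404':
            return False
        raise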
def doRenameAndMakepublic(event, context):
"""
Rename the artifact and make it public
"""
log_event(event)
try:
job_id = event['CodePipeline.job']['id']
except KeyError as err:
LOGGER.error("Could not retrieve CodePipeline Job ID!\n%s", err)
return False
# Get the object from the event and show its content type
bucket = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
key = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
userParams = event["CodePipeline.job"]['data']['actionConfiguration']['configuration']['UserParameters']
sourcefile = {'Bucket': bucket, 'Key': key}
LOGGER.info("Input artifact:%s", sourcefile)
newbucket = "wuwesley"
newkey = 'flashsales/{0}'.format(userParams)
LOGGER.info("New Bucket:%s, new key:%s", newbucket, newkey)
try:
        if checkObjecExist(newbucket, newkey):
            s3.delete_object(Bucket=newbucket, Key=newkey)
            LOGGER.info("Deleted existing:%s", newkey)
s3r.Object(newbucket, newkey).copy_from(CopySource=sourcefile)
LOGGER.info("Copied from %s to %s", bucket + '/' + key, newbucket + '/' + newkey)
s3.put_object_acl(ACL='public-read', Bucket=newbucket, Key=newkey)
LOGGER.info("Changed permission of %s", newkey)
update_package_time(newbucket, newkey)
        return codepipeline_success(job_id)
    except (TypeError, KeyError, ClientError) as err:
        LOGGER.error(err)
        codepipeline_failure(job_id, 'The rename and make-public task failed!')
return False
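
# Minimal local smoke test: the event shape mirrors the fields the handler
# reads from a real CodePipeline invocation, but every value below is a
# placeholder assumption, not real pipeline data.
if __name__ == '__main__':
    SAMPLE_EVENT = {
        'CodePipeline.job': {
            'id': '11111111-2222-3333-4444-555555555555',  # hypothetical job id
            'data': {
                'inputArtifacts': [{
                    'location': {
                        's3Location': {
                            'bucketName': 'example-artifact-bucket',  # hypothetical
                            'objectKey': 'example/artifact.zip'       # hypothetical
                        }
                    }
                }],
                'actionConfiguration': {
                    'configuration': {'UserParameters': 'package.zip'}
                }
            }
        }
    }
    doRenameAndMakepublic(SAMPLE_EVENT, None)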