|
4 | 4 | # copied here are used as part of the deployment process for this project as |
5 | 5 | # well as some runtime dependencies such as product images and seed data for loading
6 | 6 | # products and categories into DDB and CSVs for training Personalize models. |
| 7 | +# |
| 8 | +# Example usage:
| 9 | +# ./stage.sh S3_BUCKET [OPTIONAL_S3_PATH/] [--private-s3] [--only-cfn-template] |
| 10 | +# |
| 11 | +# The S3_BUCKET/OPTIONAL_S3_PATH is where all resources and templates will be uploaded. |
| 12 | +# If you don't specify the OPTIONAL_S3_PATH, it will be uploaded to the root of the bucket. |
| 13 | +# |
| 14 | +# The optional flags are: |
| 15 | +# 1. "--private-s3" to upload files without setting the object ACL to public |
| 16 | +# 2. "--only-cfn-template" to upload only CloudFormation templates (to speed up development time if you aren't changing any code) |
| 17 | + |
7 | 18 |
|
8 | 19 | set -e |
9 | 20 |
|
10 | | -BUCKET=$1 |
| 21 | +######################################################################################################################################## |
| 22 | +# Parse arguments and flag |
| 23 | +######################################################################################################################################## |
| 24 | +# The script parses the command line argument and extract these variables: |
| 25 | +# 1. "args" contains an array of arguments (e.g. args[0], args[1], etc.) In this case, we take 2 arguments for BUCKET and S3PATH |
| 26 | +# 2. "private_s3" contains a boolean value indicating whether "--private-s3" is present (e.g. "./stage.sh --private-s3" will set this to true).
| 27 | +# 3. "only_cfn_template" contains a boolean value indicating whether "--only-cfn-template" is present (e.g. "./stage.sh --only-cfn-template" will set this to true).
| 28 | +######################################################################################################################################## |
| 29 | +args=() |
| 30 | +private_s3=false |
| 31 | +only_cfn_template=false |
| 32 | + |
| 33 | +while [ "$1" ]; |
| 34 | +do |
| 35 | + arg=$1 |
| 36 | + if [ "${1:0:2}" == "--" ] |
| 37 | + then |
| 38 | + shift |
| 39 | + rev=$(echo "$arg" | rev) |
| 40 | + if [ -z "$1" ] || [ "${1:0:2}" == "--" ] || [ "${rev:0:1}" == ":" ] |
| 41 | + then |
| 42 | + bool=$(echo ${arg:2} | sed s/://g) |
| 43 | + if [ "$bool" == "private-s3" ] |
| 44 | + then |
| 45 | + private_s3=true |
| 46 | + echo Received a \"--private-s3\" flag. Will upload object without public access
| 47 | + elif [ "$bool" == "only-cfn-template" ] |
| 48 | + then |
| 49 | + only_cfn_template=true |
| 50 | + echo Received a \"--only-cfn-template\" flag. Will upload only the CloudFormation templates
| 51 | + else |
| 52 | + echo Received an unknown flag \"$bool\" |
| 53 | + exit 1 |
| 54 | + fi |
| 55 | + else |
| 56 | + value=$1 |
| 57 | + shift |
| 58 | + # echo \"$arg\" is flag with value \"$value\" |
| 59 | + fi |
| 60 | + else |
| 61 | + args+=("$arg") |
| 62 | + shift |
| 63 | + echo Received argument \"$arg\" |
| 64 | + fi |
| 65 | +done |
| 66 | + |
| 67 | +BUCKET=${args[0]} |
11 | 68 | #Path with trailing / |
12 | | -S3PATH=$2 |
| 69 | +S3PATH=${args[1]} |
| 70 | + |
| 71 | +echo "==============================================" |
| 72 | +echo "Executing the script with following arguments:" |
| 73 | +echo "==============================================" |
| 74 | +echo "BUCKET = ${BUCKET}" |
| 75 | +echo "S3PATH = ${S3PATH}" |
| 76 | +echo "private_s3 = ${private_s3}" |
| 77 | +echo "only_cfn_template = ${only_cfn_template}" |
| 78 | +echo "==============================================" |
| 79 | +######################################################################################################################################## |
| 80 | + |
| 81 | + |
| 82 | +# Add suffix to "s3 cp" commands to upload public objects |
| 83 | +if [ "$private_s3" = false ]; then |
| 84 | + export S3PUBLIC=" --acl public-read" |
| 85 | +fi |
13 | 86 |
|
14 | | -# remove this line if you want to keep the objects private in your S3 bucket |
15 | | -# export S3PUBLIC=" --acl public-read" |
16 | 87 |
|
17 | 88 | if [ ! -d "local" ]; then |
18 | 89 | mkdir local |
@@ -41,52 +112,53 @@ echo " + Uploading CloudFormation Templates" |
41 | 112 | aws s3 cp aws/cloudformation-templates/ s3://${BUCKET}/${S3PATH}cloudformation-templates --recursive $S3PUBLIC |
42 | 113 | echo " For CloudFormation : https://${BUCKET_DOMAIN}/${BUCKET}/${S3PATH}cloudformation-templates/template.yaml" |
43 | 114 |
|
44 | | -echo " + Packaging Source" |
45 | | -[ -e "retaildemostore-source.zip" ] && rm retaildemostore-source.zip |
46 | | -zip -qr retaildemostore-source.zip ./src/ -x "*.DS_Store" "*__pycache__*" "*/aws-lambda/*" "*/node_modules/*" "*.zip" |
47 | | - |
48 | | -echo " + Uploading Source" |
49 | | -aws s3 cp retaildemostore-source.zip s3://${BUCKET}/${S3PATH}source/retaildemostore-source.zip $S3PUBLIC |
50 | | - |
51 | | -echo " + Upload seed data" |
52 | | -aws s3 cp src/products/src/products-service/data/ s3://${BUCKET}/${S3PATH}data --recursive $S3PUBLIC |
53 | | -aws s3 cp src/users/src/users-service/data/ s3://${BUCKET}/${S3PATH}data --recursive $S3PUBLIC |
54 | | - |
55 | | -echo " + Upload IVS videos" |
56 | | -aws s3 cp videos/ s3://${BUCKET}/${S3PATH}videos --recursive $S3PUBLIC |
57 | | - |
58 | | -echo " + Creating CSVs for Personalize model pre-create training" |
59 | | -python3 -m venv .venv |
60 | | -. .venv/bin/activate |
61 | | -pip install -r generators/requirements.txt |
62 | | -PYTHONPATH=. python3 generators/generate_interactions_personalize.py |
63 | | -PYTHONPATH=. python3 generators/generate_interactions_personalize_offers.py |
64 | | - |
65 | | -# Sync product images |
66 | | -echo " + Copying product images" |
67 | | -aws s3 sync s3://retail-demo-store-code/datasets/1.3/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of remote dataset 1.3" |
68 | | -aws s3 sync s3://retail-demo-store-code/datasets/1.4/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of remote dataset 1.4" |
69 | | -aws s3 sync datasets/1.4/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of local dataset 1.4" |
70 | | - |
71 | | -# Sync location data files |
72 | | -echo " + Copying location location data" |
73 | | -aws s3 sync ./location_services s3://${BUCKET}/${S3PATH}location_services --only-show-errors $S3PUBLIC |
74 | | - |
75 | | -# Sync CSVs used for Personalize pre-create resources Lambda function |
76 | | -echo " + Copying CSVs for Personalize model pre-create resources" |
77 | | -aws s3 sync src/aws-lambda/personalize-pre-create-resources/data/ s3://${BUCKET}/${S3PATH}csvs/ $S3PUBLIC |
78 | | - |
79 | | -# Stage AWS Lambda functions |
80 | | -echo " + Staging AWS Lambda functions" |
81 | | - |
82 | | -for function in ./src/aws-lambda/*/ |
83 | | -do |
84 | | - echo " + Staging $function" |
85 | | - cd $function |
86 | | - chmod +x ./stage.sh |
87 | | - ./stage.sh ${BUCKET} ${S3PATH} > ../../../local/stage.log |
88 | | - cd - |
89 | | -done |
90 | | - |
| 115 | +if [ "$only_cfn_template" = false ]; then |
| 116 | + echo " + Packaging Source" |
| 117 | + [ -e "retaildemostore-source.zip" ] && rm retaildemostore-source.zip |
| 118 | + zip -qr retaildemostore-source.zip ./src/ -x "*.DS_Store" "*__pycache__*" "*/aws-lambda/*" "*/node_modules/*" "*.zip" "*/venv*" |
| 119 | + |
| 120 | + echo " + Uploading Source" |
| 121 | + aws s3 cp retaildemostore-source.zip s3://${BUCKET}/${S3PATH}source/retaildemostore-source.zip $S3PUBLIC |
| 122 | + |
| 123 | + echo " + Upload seed data" |
| 124 | + aws s3 cp src/products/src/products-service/data/ s3://${BUCKET}/${S3PATH}data --recursive $S3PUBLIC |
| 125 | + aws s3 cp src/users/src/users-service/data/ s3://${BUCKET}/${S3PATH}data --recursive $S3PUBLIC |
| 126 | + |
| 127 | + echo " + Upload IVS videos" |
| 128 | + aws s3 cp videos/ s3://${BUCKET}/${S3PATH}videos --recursive $S3PUBLIC |
| 129 | + |
| 130 | + echo " + Creating CSVs for Personalize model pre-create training" |
| 131 | + python3 -m venv .venv |
| 132 | + . .venv/bin/activate |
| 133 | + pip install -r generators/requirements.txt |
| 134 | + PYTHONPATH=. python3 generators/generate_interactions_personalize.py |
| 135 | + PYTHONPATH=. python3 generators/generate_interactions_personalize_offers.py |
| 136 | + |
| 137 | + # Sync product images |
| 138 | + echo " + Copying product images" |
| 139 | + aws s3 sync s3://retail-demo-store-code/datasets/1.3/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of remote dataset 1.3" |
| 140 | + aws s3 sync s3://retail-demo-store-code/datasets/1.4/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of remote dataset 1.4" |
| 141 | + aws s3 sync datasets/1.4/images/ s3://${BUCKET}/${S3PATH}images/ $S3PUBLIC || echo "Skipping load of local dataset 1.4" |
| 142 | + |
| 143 | + # Sync location data files |
| 144 | + echo " + Copying location data"
| 145 | + aws s3 sync ./location_services s3://${BUCKET}/${S3PATH}location_services --only-show-errors $S3PUBLIC |
| 146 | + |
| 147 | + # Sync CSVs used for Personalize pre-create resources Lambda function |
| 148 | + echo " + Copying CSVs for Personalize model pre-create resources" |
| 149 | + aws s3 sync src/aws-lambda/personalize-pre-create-resources/data/ s3://${BUCKET}/${S3PATH}csvs/ $S3PUBLIC |
| 150 | + |
| 151 | + # Stage AWS Lambda functions |
| 152 | + echo " + Staging AWS Lambda functions" |
| 153 | + |
| 154 | + for function in ./src/aws-lambda/*/ |
| 155 | + do |
| 156 | + echo " + Staging $function" |
| 157 | + cd $function |
| 158 | + chmod +x ./stage.sh |
| 159 | + ./stage.sh ${BUCKET} ${S3PATH} > ../../../local/stage.log |
| 160 | + cd - |
| 161 | + done |
| 162 | +fi |
91 | 163 | echo " + Done s3://${BUCKET}/${S3PATH} " |
92 | 164 | echo " For CloudFormation : https://${BUCKET_DOMAIN}/${BUCKET}/${S3PATH}cloudformation-templates/template.yaml" |
0 commit comments