
Merge pull request #354 from devpalsPlus/feat/#347 #131

Workflow file for this run

name: Front Deployment

on:
  push:
    branches:
      - develop

jobs:
  build:
    name: react build & deploy
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Github Action
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'

      - name: Install npm dependencies
        run: npm install

      - name: Set environment variables and Build
        run: |
          echo "VITE_APP_API_BASE_URL=${{ secrets.VITE_APP_API_BASE_URL }}" >> .env.local
          echo "VITE_APP_IMAGE_CDN_URL=${{ secrets.VITE_APP_IMAGE_CDN_URL }}" >> .env.local
          npm run build
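
      # Note (assumption: this is a Vite app, implied by the VITE_ prefixes and
      # the dist/ output): only variables prefixed with VITE_ are embedded into
      # the client bundle, read at build time via import.meta.env. Illustrative
      # usage in app code, not taken from this repo:
      #   const api = import.meta.env.VITE_APP_API_BASE_URL;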

      - name: Verify Build Directory
        run: ls -la dist/

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY_ID }}
          aws-region: ap-northeast-2

      - name: Process and Upload Files to S3
        env:
          BUCKET: devpals
          SRC_DIR: dist
          FINAL_DIR: dist_final
          COMPRESSED_LIST: compressed_files.txt
        run: |
          set -euo pipefail
          rm -rf "$FINAL_DIR" "$COMPRESSED_LIST"
          mkdir -p "$FINAL_DIR"
          # Create the list up front so the cat/grep calls below don't fail
          # under `set -e` when the build produced no .gz files.
          touch "$COMPRESSED_LIST"
          # First pass: stage everything into FINAL_DIR. A precompressed
          # "foo.js.gz" is copied to "foo.js" (original name, gzipped bytes)
          # and recorded in COMPRESSED_LIST; a plain file is skipped when a
          # .gz sibling exists.
          while IFS= read -r file; do
            relpath="${file#$SRC_DIR/}"
            if [[ "$file" == *.gz ]]; then
              target_rel="${relpath%.gz}"
              target="$FINAL_DIR/$target_rel"
              mkdir -p "$(dirname "$target")"
              cp "$file" "$target"
              echo "$target_rel" >> "$COMPRESSED_LIST"
              echo "Compressed file processed: '$relpath' -> '$target_rel'"
            else
              compressed_candidate="${file}.gz"
              if [ -f "$compressed_candidate" ]; then
                echo "Skipping non-compressed file: '$relpath' because compressed version exists"
              else
                target="$FINAL_DIR/$relpath"
                mkdir -p "$(dirname "$target")"
                cp "$file" "$target"
                echo "Non-compressed file processed: '$relpath' remains unchanged"
              fi
            fi
          done < <(find "$SRC_DIR" -type f)
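          # Example of the resulting layout (illustrative file names, assuming a
          # compression plugin emitted .gz siblings during the build):
          #   dist/assets/index-abc123.js    -> skipped (.gz sibling exists)
          #   dist/assets/index-abc123.js.gz -> dist_final/assets/index-abc123.js
          #   dist/favicon.ico               -> dist_final/favicon.ico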
echo "Final upload directory structure:"
find "$FINAL_DIR" -type f
echo "Contents of compressed files list:"
cat "$COMPRESSED_LIST"
echo "Deleting all existing objects from S3 bucket '$BUCKET'..."
aws s3 rm "s3://${BUCKET}" --recursive
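          # Design note: a single `aws s3 sync "$FINAL_DIR" "s3://${BUCKET}" --delete`
          # would avoid the window where the bucket is empty, but sync applies one
          # --content-encoding to every file, so the per-file `cp` loop below is
          # used to set the header only on the gzipped objects.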
echo "Uploading files to S3 bucket '$BUCKET'..."
while IFS= read -r file; do
relpath="${file#$FINAL_DIR/}"
relpath_trimmed=$(echo "$relpath" | xargs)
if grep -Fqx "$relpath_trimmed" "$COMPRESSED_LIST"; then
echo "Uploading compressed file: '$relpath_trimmed' with Content-Encoding: gzip"
aws s3 cp "$file" "s3://${BUCKET}/$relpath_trimmed" \
--content-encoding gzip \
--metadata-directive REPLACE \
--cache-control "max-age=31536000"
else
echo "Uploading non-compressed file: '$relpath_trimmed'"
aws s3 cp "$file" "s3://${BUCKET}/$relpath_trimmed" \
--metadata-directive REPLACE \
--cache-control "max-age=31536000"
fi
done < <(find "$FINAL_DIR" -type f)
echo "Upload completed."
rm -rf "$FINAL_DIR" "$COMPRESSED_LIST"
echo "Cleanup completed."

      - name: CloudFront Invalidation
        env:
          CLOUD_FRONT_ID: ${{ secrets.AWS_CLOUDFRONT_ID }}
        run: |
          # "/*" invalidates every cached path and counts as one invalidation
          # path for CloudFront billing purposes.
          aws cloudfront create-invalidation --distribution-id "$CLOUD_FRONT_ID" --paths "/*"
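          # Optional sketch (not part of this workflow): block until the
          # invalidation completes before the job reports success:
          #   INV_ID=$(aws cloudfront create-invalidation --distribution-id "$CLOUD_FRONT_ID" \
          #     --paths "/*" --query 'Invalidation.Id' --output text)
          #   aws cloudfront wait invalidation-completed \
          #     --distribution-id "$CLOUD_FRONT_ID" --id "$INV_ID"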