Commit 1d167fa
Merge pull request #209 from D3vPals/feat/#173
Update CI/CD script (issue #173)
2 parents: 2b0d6c2 + 9386269

File tree: 2 files changed (+75, -52 lines)
.github/workflows/front_ci-cd.yml

Lines changed: 73 additions & 52 deletions
@@ -14,75 +14,96 @@ jobs:
       - name: Checkout Github Action
         uses: actions/checkout@v4

-      - name: Get npm cache directory
-        id: npm-cache-dir
-        run: |
-          echo "::set-output name=dir::$(npm config get cache)"
-      - uses: actions/cache@v4
-        id: npm-cache
+      - name: Setup Node.js
+        uses: actions/setup-node@v3
         with:
-          path: ${{ steps.npm-cache-dir.outputs.dir }}
-          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
-          restore-keys: |
-            ${{ runner.os }}-node-
+          node-version: '16'

       - name: Install npm dependencies
         run: npm install

-      - name: Set environment variables
+      - name: Set environment variables and Build
         run: |
           echo "VITE_APP_API_BASE_URL=${{ secrets.VITE_APP_API_BASE_URL }}" >> .env.local
           echo "VITE_APP_IMAGE_CDN_URL=${{ secrets.VITE_APP_IMAGE_CDN_URL }}" >> .env.local
-      - name: React Build
-        env:
-          VITE_APP_API_BASE_URL: ${{ secrets.VITE_APP_API_BASE_URL }}
-          VITE_APP_IMAGE_CDN_URL: ${{ secrets.VITE_APP_IMAGE_CDN_URL }}
-        run: npm run build
+          npm run build

       - name: Verify Build Directory
         run: ls -la dist/

-      - name: Compress text-based files (gzip)
-        run: |
-          echo "Compressing text-based files..."
-          for file in $(find dist/ -type f \( -iname "*.html" -o -iname "*.css" -o -iname "*.js" \)); do
-            echo "Compressing $file"
-            gzip -c "$file" > "$file.gz"
-          done
-
-      - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY_ID }}
-          aws-region: ap-northeast-2
-
-      - name: Upload compressed files to S3 with header
-        env:
-          BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
-        run: |
-          echo "Uploading compressed files with gzip header..."
-          for file in $(find dist/ -type f -name "*.gz"); do
-            original=$(echo "$file" | sed 's/\.gz$//')
-            filename=$(basename "$original")
-            echo "Uploading $file as $filename with Content-Encoding: gzip"
-            aws s3 cp "$file" "s3://${{ secrets.AWS_S3_BUCKET_NAME }}/${filename}" \
-              --content-encoding gzip \
-              --metadata-directive REPLACE \
-              --cache-control "max-age=31536000"
-          done
-
-      - name: Upload non-compressed files to S3
+      - name: Process and Upload Files to S3
         env:
-          BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
+          AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
+          AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
+          AWS_REGION: ap-northeast-2
+          BUCKET: devpals
+          SRC_DIR: dist
+          FINAL_DIR: dist_final
+          COMPRESSED_LIST: compressed_files.txt
         run: |
-          echo "Uploading non-compressed files..."
-          aws s3 sync dist/ s3://${{ secrets.AWS_S3_BUCKET_NAME }} --exclude "*.gz" --delete
+          echo "Cleaning up temporary folder and compressed files list..."
+          rm -rf "$FINAL_DIR" "$COMPRESSED_LIST"
+          mkdir -p "$FINAL_DIR"
+
+          echo "Processing files from '$SRC_DIR'..."
+          while IFS= read -r file; do
+            relpath="${file#$SRC_DIR/}"
+            if [[ "$file" == *.gz ]]; then
+              target_rel="${relpath%.gz}"
+              target="$FINAL_DIR/$target_rel"
+              mkdir -p "$(dirname "$target")"
+              cp "$file" "$target"
+              echo "$target_rel" >> "$COMPRESSED_LIST"
+              echo "Compressed file processed: '$relpath' -> '$target_rel'"
+            else
+              compressed_candidate="${file}.gz"
+              if [ -f "$compressed_candidate" ]; then
+                echo "Skipping non-compressed file: '$relpath' because compressed version exists"
+              else
+                target="$FINAL_DIR/$relpath"
+                mkdir -p "$(dirname "$target")"
+                cp "$file" "$target"
+                echo "Non-compressed file processed: '$relpath' remains unchanged"
+              fi
+            fi
+          done < <(find "$SRC_DIR" -type f)
+
+          echo "Final upload directory structure:"
+          find "$FINAL_DIR" -type f
+
+          echo "Contents of compressed files list:"
+          cat "$COMPRESSED_LIST"
+
+          echo "Deleting all existing objects from S3 bucket '$BUCKET'..."
+          aws s3 rm "s3://${BUCKET}" --recursive
+
+          echo "Uploading files to S3 bucket '$BUCKET'..."
+          while IFS= read -r file; do
+            relpath="${file#$FINAL_DIR/}"
+            relpath_trimmed=$(echo "$relpath" | xargs)
+            if grep -Fqx "$relpath_trimmed" "$COMPRESSED_LIST"; then
+              echo "Uploading compressed file: '$relpath_trimmed' with Content-Encoding: gzip"
+              aws s3 cp "$file" "s3://${BUCKET}/$relpath_trimmed" \
+                --content-encoding gzip \
+                --metadata-directive REPLACE \
+                --cache-control "max-age=31536000"
+            else
+              echo "Uploading non-compressed file: '$relpath_trimmed'"
+              aws s3 cp "$file" "s3://${BUCKET}/$relpath_trimmed" \
+                --metadata-directive REPLACE \
+                --cache-control "max-age=31536000"
+            fi
+          done < <(find "$FINAL_DIR" -type f)
+
+          echo "Upload completed."
+
+          echo "Cleaning up temporary folder and compressed files list..."
+          rm -rf "$FINAL_DIR" "$COMPRESSED_LIST"
+          echo "Cleanup completed."

       - name: CloudFront Invalidation
         env:
           CLOUD_FRONT_ID: ${{ secrets.AWS_CLOUDFRONT_ID }}
         run: |
           echo "Creating CloudFront invalidation..."
-          aws cloudfront create-invalidation \
-            --distribution-id $CLOUD_FRONT_ID --paths /*
+          aws cloudfront create-invalidation --distribution-id $CLOUD_FRONT_ID --paths "/*"
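
Note on the pre-compressed assets: the new "Process and Upload Files to S3" step pairs each file in dist/ with an optional .gz sibling and skips the uncompressed copy whenever that sibling exists, yet the explicit "Compress text-based files (gzip)" step is removed in this same commit, so the .gz files are presumably produced during the build itself. If that assumption does not hold, a minimal pre-compression pass equivalent to the removed step could be re-inserted between the build and the upload; a sketch only:

    # Sketch, assuming the build has not already emitted .gz siblings:
    # re-create them for text assets in dist/, mirroring the removed step.
    while IFS= read -r file; do
      gzip -c "$file" > "$file.gz"
    done < <(find dist/ -type f \( -iname "*.html" -o -iname "*.css" -o -iname "*.js" \))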

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,8 @@ lerna-debug.log*
 node_modules
 dist
 dist-ssr
+dist_final
+compressed_files.txt
 *.local

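With both files updated as above, one quick post-deploy check is to request an asset through the CloudFront distribution and inspect the response headers. The domain below is a placeholder, not taken from this repository:

    # Placeholder distribution domain; substitute the real one.
    DOMAIN="https://dxxxxxxxxxxxxx.cloudfront.net"

    # Objects uploaded with --content-encoding gzip should report that header,
    # along with the long-lived cache-control set by the workflow.
    curl -sI "$DOMAIN/index.html" | grep -iE 'content-encoding|cache-control'

    # The body should still decompress transparently for a gzip-capable client.
    curl -s --compressed "$DOMAIN/index.html" | head -n 5
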
0 commit comments