Clean up and simplify filesystem build script
hhvrc committed Jan 15, 2025
1 parent c1fe72b commit 7b1df43
Showing 1 changed file with 21 additions and 72 deletions.
93 changes: 21 additions & 72 deletions scripts/build_frontend.py
@@ -1,6 +1,4 @@
import os
import re
import sys
import gzip
import shutil
import hashlib
@@ -14,6 +12,11 @@ def dir_ensure(dir):
        os.makedirs(dir)


def dir_delete(dir):
    if os.path.exists(dir):
        shutil.rmtree(dir)


def file_delete(file):
    if os.path.exists(file):
        os.remove(file)
@@ -26,7 +29,7 @@ def file_gzip(file_path, gzip_path):
    with open(file_path, 'rb') as f_in, gzip.open(gzip_path, 'wb') as f_out:
        f_out.write(f_in.read())
    size_after = os.path.getsize(gzip_path)
    print('Gzipped ' + file_path + ': ' + str(size_before) + ' => ' + str(size_after) + ' bytes')
    print(f'Gzipped {file_path}: {size_before} => {size_after} bytes')


def file_write_bin(file, data):
@@ -80,80 +83,34 @@ def build_frontend(source, target, env):
    os.chdir('..')

    # Shorten all the filenames in the data/www/_app/immutable directory.
    fileIndex = 0
    copy_actions = []
    rename_actions = []
    renamed_filenames = []
    for root, dirs, files in os.walk('frontend/build'):
    for root, _, files in os.walk('frontend/build'):
        root = root.replace('\\', '/')
        newroot = root.replace('frontend/build', 'data/www')
        isImmutable = '_app/immutable' in root

        for filename in files:
            filepath = os.path.join(root, filename)

            if isImmutable:
                newfilename = str(fileIndex) + '.' + filename.split('.')[-1]
                renamed_filenames.append((filename, newfilename))
                newfilepath = os.path.join(newroot, newfilename)
                fileIndex += 1
            else:
                newfilepath = os.path.join(newroot, filename)

            # Skip formatting binary files.
            ext = filename.split('.')[-1]
            if (
                ext == 'png'
                or ext == 'jpg'
                or ext == 'jpeg'
                or ext == 'gif'
                or ext == 'ico'
                or ext == 'svg'
                or ext == 'ttf'
                or ext == 'woff'
                or ext == 'woff2'
                or ext == 'gz'
            ):
                copy_actions.append((filepath, newfilepath))
            else:
                rename_actions.append((filepath, newfilepath))
            newfilepath = os.path.join(newroot, filename)

            copy_actions.append((filepath, newfilepath))

    # Delete the data/www directory if it exists.
    if os.path.exists('data/www'):
        shutil.rmtree('data/www')
    dir_delete('data/www')

    for src_path, dst_path in copy_actions:
        dir_ensure(os.path.dirname(dst_path))
        if src_path.endswith('.gz'):
            # Don't gzip files that are already gzipped.
            shutil.copyfile(src_path, dst_path)
        else:
            file_gzip(src_path, dst_path + '.gz')

    for src_path, dst_path in rename_actions:
        dir_ensure(os.path.dirname(dst_path))
        file_delete(dst_path)
        (s, enc) = file_read_text(src_path)
        if s == None:
            print('Error reading ' + src_path)
            continue

        for old, new in renamed_filenames:
            s = s.replace(old, new)

        try:
            if enc == 'utf-8':
                s = s.encode('utf-8')
            else:
                s = s.encode('ascii')
        except Exception as e:
            print('Error encoding ' + src_path + ': ' + str(e))
            continue

        with gzip.open(dst_path + '.gz', 'wb') as f_out:
            f_out.write(s)

def hash_file(filepath):
    md5 = hashlib.md5()
    sha1 = hashlib.sha1()
    sha256 = hashlib.sha256()

def hash_file_update(md5, sha1, sha256, filepath):
    with open(filepath, 'rb') as f:
        while True:
            chunk = f.read(65536)
@@ -163,33 +120,25 @@ def hash_file_update(md5, sha1, sha256, filepath):
            sha1.update(chunk)
            sha256.update(chunk)


def hash_file(filepath):
    hashMd5 = hashlib.md5()
    hashSha1 = hashlib.sha1()
    hashSha256 = hashlib.sha256()

    hash_file_update(hashMd5, hashSha1, hashSha256, filepath)

    return {
        'MD5': hashMd5.hexdigest(),
        'SHA1': hashSha1.hexdigest(),
        'SHA256': hashSha256.hexdigest(),
        'MD5': md5.hexdigest(),
        'SHA1': sha1.hexdigest(),
        'SHA256': sha256.hexdigest(),
    }


def process_littlefs_binary(source, target, env):
    nTargets = len(target)
    if nTargets != 1:
        raise Exception('Expected 1 target, got ' + str(nTargets))
        raise Exception(f'Expected 1 target, got {nTargets}')

    # Get the path to the binary and its directory.
    littlefs_path = target[0].get_abspath()

    bin_size = os.path.getsize(littlefs_path)
    bin_hashes = hash_file(littlefs_path)

    print('FileSystem Size: ' + str(bin_size) + ' bytes')
    print(f'FileSystem Size: {bin_size} bytes')
    print('FileSystem Hashes:')
    print('MD5: ' + bin_hashes['MD5'])
    print('SHA1: ' + bin_hashes['SHA1'])
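
For reference, the simplified copy-and-gzip flow in the diff can be read in isolation as roughly the standalone sketch below. The 'frontend/build' and 'data/www' paths and the already-gzipped check come from the diff; the function name and the exist_ok-based directory handling are illustrative and not part of the actual script.

import os
import gzip
import shutil


def copy_and_gzip_tree(src_root='frontend/build', dst_root='data/www'):
    # Start from a clean output directory, mirroring dir_delete('data/www') above.
    if os.path.exists(dst_root):
        shutil.rmtree(dst_root)

    for root, _, files in os.walk(src_root):
        root = root.replace('\\', '/')
        newroot = root.replace(src_root, dst_root)
        os.makedirs(newroot, exist_ok=True)

        for filename in files:
            src_path = os.path.join(root, filename)
            dst_path = os.path.join(newroot, filename)
            if src_path.endswith('.gz'):
                # Already compressed: copy as-is, as in the script's copy loop.
                shutil.copyfile(src_path, dst_path)
            else:
                # Everything else is written out gzip-compressed with a .gz suffix.
                with open(src_path, 'rb') as f_in, gzip.open(dst_path + '.gz', 'wb') as f_out:
                    f_out.write(f_in.read())


if __name__ == '__main__':
    copy_and_gzip_tree()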

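The consolidated hash_file helper follows the usual chunked multi-digest pattern and can likewise be exercised on its own. A minimal sketch, assuming a placeholder image path ('littlefs.bin' is hypothetical; in the real script the path comes from target[0].get_abspath()):

import os
import hashlib


def hash_file(filepath):
    # One pass over the file, feeding all three digests 64 KiB at a time.
    md5 = hashlib.md5()
    sha1 = hashlib.sha1()
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        while True:
            chunk = f.read(65536)
            if not chunk:
                break
            md5.update(chunk)
            sha1.update(chunk)
            sha256.update(chunk)
    return {'MD5': md5.hexdigest(), 'SHA1': sha1.hexdigest(), 'SHA256': sha256.hexdigest()}


if __name__ == '__main__':
    path = 'littlefs.bin'  # hypothetical placeholder path
    hashes = hash_file(path)
    print(f'FileSystem Size: {os.path.getsize(path)} bytes')
    print('FileSystem Hashes:')
    for name, digest in hashes.items():
        print(f'{name}: {digest}')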