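"""PlatformIO extra script for this project's build.

It does two things, in order:

1. Patches the Arduino-ESP32 framework's prebuilt libnet80211.a with objcopy,
   weakening symbols (notably ieee80211_raw_frame_sanity_check) so they can be
   overridden at link time. The untouched library is kept as libnet80211.a.old
   and a .patched flag file prevents repeated patching.
2. Gzips the web interface files (html/css/js) from embedded_resources/web_interface
   and embeds them as PROGMEM byte arrays in include/webFiles.h, skipping the
   work when a stored SHA-256 checksum shows the sources are unchanged.

PlatformIO runs scripts like this via the ``extra_scripts`` option in
platformio.ini; the exact entry used by this project is not shown here.
"""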
import glob
import gzip
import hashlib
from os import makedirs, remove, rename
from os.path import basename, dirname, exists, isfile, join
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Dummy declarations so type checkers accept the names that PlatformIO's
    # SCons runtime injects into this script at build time.
    Import: Any = None
    env: Any = {}

Import("env")  # type: ignore

FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoespressif32")
patchflag_path = join(FRAMEWORK_DIR, ".patched")

board_config = env.BoardConfig()
mcu = board_config.get("build.mcu", "")

# Patch the framework library only if we haven't done it before.
if not isfile(patchflag_path):
    original_file = join(FRAMEWORK_DIR, "tools", "sdk", mcu, "lib", "libnet80211.a")
    patched_file = join(
        FRAMEWORK_DIR, "tools", "sdk", mcu, "lib", "libnet80211.a.patched"
    )

    # First objcopy pass: write a weakened copy of the library next to the original.
    env.Execute(
        "pio pkg exec -p toolchain-xtensa-%s -- xtensa-%s-elf-objcopy --weaken-symbol=s %s %s"
        % (mcu, mcu, original_file, patched_file)
    )

    # Keep a backup of the untouched library as libnet80211.a.old.
    if isfile("%s.old" % original_file):
        remove("%s.old" % original_file)
    rename(original_file, "%s.old" % original_file)

    # Second pass: weaken ieee80211_raw_frame_sanity_check so it can be
    # overridden at link time, writing the result back to the original path.
    env.Execute(
        "pio pkg exec -p toolchain-xtensa-%s -- xtensa-%s-elf-objcopy --weaken-symbol=ieee80211_raw_frame_sanity_check %s %s"
        % (mcu, mcu, patched_file, original_file)
    )

    def _touch(path):
        """Create an empty flag file at the given path."""
        with open(path, "w") as fp:
            fp.write("")

    # Drop the .patched flag so later builds skip this step.
    env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
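
# To revert the patch manually: delete the .patched flag file and rename
# libnet80211.a.old back to libnet80211.a inside the framework package
# (a suggested recovery step; this script does not automate it).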


def hash_file(file_path):
    """Generate SHA-256 hash for a single file."""
    hasher = hashlib.sha256()
    with open(file_path, "rb") as f:
        # Read the file in chunks to avoid memory issues
        for chunk in iter(lambda: f.read(4096), b""):
            hasher.update(chunk)
    return hasher.hexdigest()


def hash_files(file_paths):
    """Generate a combined hash for multiple files."""
    combined_hash = hashlib.sha256()
    for file_path in file_paths:
        file_hash = hash_file(file_path)
        combined_hash.update(file_hash.encode("utf-8"))  # Update with the file's hash
    return combined_hash.hexdigest()


def save_checksum_file(hash_value, output_file):
    """Save the hash value to a specified output file."""
    with open(output_file, "w") as f:
        f.write(hash_value)


def load_checksum_file(input_file):
    """Load the hash value from a specified input file."""
    with open(input_file, "r") as f:
        return f.readline().strip()


# Gzip the web interface files and embed them into a generated C header.
def prepare_www_files():
    HEADER_FILE = join(env.get("PROJECT_DIR"), "include", "webFiles.h")
    filetypes_to_gzip = ["html", "css", "js"]
    data_src_dir = join(env.get("PROJECT_DIR"), "embedded_resources", "web_interface")
    checksum_file = join(data_src_dir, "checksum.sha256")
    checksum = ""

    if not exists(data_src_dir):
        print(f'Error: Source directory "{data_src_dir}" does not exist!')
        return

    if exists(checksum_file):
        checksum = load_checksum_file(checksum_file)

    files_to_gzip = []
    for extension in filetypes_to_gzip:
        files_to_gzip.extend(glob.glob(join(data_src_dir, "*." + extension)))

    # Skip regeneration when the sources match the stored checksum.
    files_checksum = hash_files(files_to_gzip)
    if files_checksum == checksum:
        print("[GZIP & EMBED INTO HEADER] - Nothing to process.")
        return

    print(f"[GZIP & EMBED INTO HEADER] - Processing {len(files_to_gzip)} files.")

    makedirs(dirname(HEADER_FILE), exist_ok=True)
    with open(HEADER_FILE, "w") as header:
        header.write(
            "#ifndef WEB_FILES_H\n#define WEB_FILES_H\n\n#include <Arduino.h>\n\n"
        )
        header.write(
            "// THIS FILE IS AUTOGENERATED, DO NOT MODIFY IT. MODIFY FILES IN /embedded_resources/web_interface\n\n"
        )

        for file in files_to_gzip:
            # Compress the source file into a temporary .gz next to it.
            gz_file = file + ".gz"
            with open(file, "rb") as src, gzip.open(gz_file, "wb") as dst:
                dst.writelines(src)

            with open(gz_file, "rb") as gz:
                compressed_data = gz.read()

            # e.g. "index.html" becomes the C identifier "index_html".
            var_name = basename(file).replace(".", "_")
            header.write(f"const char {var_name}[] PROGMEM = {{\n")

            # Write hex values, inserting a newline every 15 bytes
            for i in range(0, len(compressed_data), 15):
                hex_chunk = ", ".join(
                    f"0x{byte:02X}" for byte in compressed_data[i : i + 15]
                )
                header.write(f"    {hex_chunk},\n")

            header.write("};\n\n")
            header.write(
                f"const uint32_t {var_name}_size = {len(compressed_data)};\n\n"
            )

            remove(gz_file)  # Clean up temporary gzip file

        header.write("#endif // WEB_FILES_H\n")

    save_checksum_file(files_checksum, checksum_file)
    print(f"[DONE] Gzipped files embedded into {HEADER_FILE}")


prepare_www_files()
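
# For reference, the generated include/webFiles.h contains one PROGMEM byte
# array plus a size constant per source file, roughly like this for a
# hypothetical index.html (the bytes shown are just the gzip magic prefix,
# and the size depends on the input):
#
#   const char index_html[] PROGMEM = {
#       0x1F, 0x8B, 0x08, ...
#   };
#   const uint32_t index_html_size = 1234;
#
# Firmware code that serves these buffers must send a
# "Content-Encoding: gzip" header, since the data stays compressed
# (usage note, not enforced by this script).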