
Commit cee6c88

Merge pull request #2415 from bunkerity/dev
2 parents: f249869 + acfeba4

File tree: 12 files changed (+151, -174 lines)


.github/workflows/container-build.yml

Lines changed: 1 addition & 1 deletion
@@ -139,7 +139,7 @@ jobs:
           command: cves,recommendations
           image: local/${{ inputs.IMAGE }}
           only-fixed: true
-          only-package-types: apk,pypi
+          only-package-types: apk
           exit-code: true
       # Push image
       - name: Push image

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -4,6 +4,7 @@

 - [BUGFIX] Ensure template defaults settings are correctly retrieved by jobs and templates.
 - [BUGFIX] No longer completely delete all PRO plugins data upon PRO deactivation, allowing for easier reactivation without losing data.
+- [BUGFIX] Enhance cache robustness by using dict.get() for lookups to avoid KeyError exceptions during cache operations.
 - [SECURITY] Make sure the files/dirs in /usr/share/bunkerweb have the appropriate permissions to prevent unauthorized access to sensitive files on Linux integration

 ## v1.6.2-rc5 - 2025/06/17
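The pattern behind this changelog entry is visible in the job diffs below: cache entries returned by the job cache helpers may be missing keys such as "last_update" or "data", so lookups now go through dict.get() before any comparison or decoding. A minimal, self-contained sketch of the idea (hypothetical values, not the actual BunkerWeb job code):

    from datetime import datetime, timedelta

    # Hypothetical cache entry; note the missing "last_update" key.
    cached = {"data": b"http://example.com/list\n1.2.3.4\n"}

    # Fragile: cached["last_update"] raises KeyError when the metadata is incomplete.
    # Robust: .get() returns None for the missing key, so the check falls through to a re-download.
    last_update = cached.get("last_update")
    fresh = bool(last_update) and last_update > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()

    if fresh:
        # A default for "data" keeps the slicing safe even if the payload is absent.
        lines = cached.get("data", b"").split(b"\n")[1:]
    else:
        lines = []  # fall back to downloading the source again

    print(fresh, len(lines))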

docs/advanced.md

Lines changed: 108 additions & 152 deletions
Large diffs are not rendered by default.

src/common/core/blacklist/jobs/blacklist-download.py

Lines changed: 6 additions & 2 deletions
@@ -215,12 +215,16 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
         if url in failed_urls:
             if url_file not in urls:
                 aggregated_recap[kind]["failed_count"] += 1
-        elif isinstance(cached_url, dict) and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp():
+        elif (
+            isinstance(cached_url, dict)
+            and cached_url.get("last_update")
+            and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()
+        ):
             LOGGER.debug(f"URL {url} has already been downloaded less than 1 hour ago, skipping download...")
             if url_file not in urls:
                 aggregated_recap[kind]["skipped_urls"] += 1
             # Remove first line (URL) and add to content
-            content += b"\n".join(cached_url["data"].split(b"\n")[1:]) + b"\n"
+            content += b"\n".join(cached_url.get("data", b"").split(b"\n")[1:]) + b"\n"
         else:
             failed = False
             LOGGER.info(f"Downloading blacklist data from {url} ...")

src/common/core/greylist/jobs/greylist-download.py

Lines changed: 6 additions & 2 deletions
@@ -157,12 +157,16 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
         if url in failed_urls:
             if url_file not in urls:
                 aggregated_recap[kind]["failed_count"] += 1
-        elif isinstance(cached_url, dict) and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp():
+        elif (
+            isinstance(cached_url, dict)
+            and cached_url.get("last_update")
+            and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()
+        ):
             LOGGER.debug(f"URL {url} has already been downloaded less than 1 hour ago, skipping download...")
             if url_file not in urls:
                 aggregated_recap[kind]["skipped_urls"] += 1
             # Remove first line (URL) and add to content
-            content += b"\n".join(cached_url["data"].split(b"\n")[1:]) + b"\n"
+            content += b"\n".join(cached_url.get("data", b"").split(b"\n")[1:]) + b"\n"
         else:
             failed = False
             LOGGER.info(f"Downloading greylist data from {url} ...")

src/common/core/jobs/jobs/mmdb-asn.py

Lines changed: 3 additions & 3 deletions
@@ -75,8 +75,8 @@ def request_mmdb() -> Optional[Response]:
     response = request_mmdb()

     if response and response.status_code == 200:
-        skip_dl = response.content.find(bytes_hash(job_cache["data"], algorithm="sha1").encode()) != -1
-    elif job_cache["last_update"] < (datetime.now().astimezone() - timedelta(weeks=1)).timestamp():
+        skip_dl = response.content.find(bytes_hash(job_cache.get("data", b"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"), algorithm="sha1").encode()) != -1
+    elif job_cache.get("last_update") and job_cache["last_update"] < (datetime.now().astimezone() - timedelta(weeks=1)).timestamp():
         LOGGER.warning("Unable to check if the cache file is the latest version from db-ip.com and file is older than 1 week, checking anyway...")
         skip_dl = False

@@ -118,7 +118,7 @@ def request_mmdb() -> Optional[Response]:
         if job_cache:
             # Check if file has changed
             new_hash = file_hash(tmp_path)
-            if new_hash == job_cache["checksum"]:
+            if new_hash == job_cache.get("checksum"):
                 LOGGER.info("New file is identical to cache file, reload is not needed")
                 sys_exit(0)
     except BaseException as e:

src/common/core/jobs/jobs/mmdb-country.py

Lines changed: 3 additions & 3 deletions
@@ -75,8 +75,8 @@ def request_mmdb() -> Optional[Response]:
     response = request_mmdb()

     if response and response.status_code == 200:
-        skip_dl = response.content.find(bytes_hash(job_cache["data"], algorithm="sha1").encode()) != -1
-    elif job_cache["last_update"] < (datetime.now().astimezone() - timedelta(weeks=1)).timestamp():
+        skip_dl = response.content.find(bytes_hash(job_cache.get("data", b"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"), algorithm="sha1").encode()) != -1
+    elif job_cache.get("last_update") and job_cache["last_update"] < (datetime.now().astimezone() - timedelta(weeks=1)).timestamp():
         LOGGER.warning("Unable to check if the cache file is the latest version from db-ip.com and file is older than 1 week, checking anyway...")
         skip_dl = False

@@ -118,7 +118,7 @@ def request_mmdb() -> Optional[Response]:
         if job_cache:
             # Check if file has changed
             new_hash = file_hash(tmp_path)
-            if new_hash == job_cache["checksum"]:
+            if new_hash == job_cache.get("checksum"):
                 LOGGER.info("New file is identical to cache file, reload is not needed")
                 sys_exit(0)
     except BaseException as e:
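In both mmdb jobs, the interesting part is the default passed to .get("data", ...): bytes_hash() still receives bytes when the cache entry has no payload, and because the placeholder's SHA-1 will not appear in the db-ip.com listing, skip_dl stays False and the download proceeds. A rough sketch of that behaviour, using a stand-in for the project's bytes_hash helper (assumed to return a hex digest string):

    import hashlib

    def sha1_hex(data: bytes) -> str:
        # Stand-in for bytes_hash(data, algorithm="sha1"); assumed to return a hex digest.
        return hashlib.sha1(data).hexdigest()

    response_content = b"...hashes published by the mirror..."
    job_cache = {}  # cache entry with no "data" key

    # Before the fix, job_cache["data"] would raise KeyError here.
    placeholder = b"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
    skip_dl = response_content.find(sha1_hex(job_cache.get("data", placeholder)).encode()) != -1

    print(skip_dl)  # False: the placeholder hash never matches, so the file is downloaded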

src/common/core/letsencrypt/jobs/certbot-new.py

Lines changed: 5 additions & 1 deletion
@@ -70,7 +70,11 @@

 def load_public_suffix_list(job):
     job_cache = job.get_cache(PSL_STATIC_FILE, with_info=True, with_data=True)
-    if isinstance(job_cache, dict) and job_cache["last_update"] < (datetime.now().astimezone() - timedelta(days=1)).timestamp():
+    if (
+        isinstance(job_cache, dict)
+        and job_cache.get("last_update")
+        and job_cache["last_update"] < (datetime.now().astimezone() - timedelta(days=1)).timestamp()
+    ):
         return job_cache["data"].decode("utf-8").splitlines()

     try:

src/common/core/modsecurity/jobs/download-crs-plugins.py

Lines changed: 2 additions & 2 deletions
@@ -178,11 +178,11 @@ def get_download_url(repo_url, version=None) -> Tuple[bool, str]:
     plugin_registry = JOB.get_cache("plugin_registry.json", with_info=True, with_data=True)

     if isinstance(plugin_registry, dict):
-        up_to_date = plugin_registry["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()
+        up_to_date = plugin_registry.get("last_update") and plugin_registry["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()

         if up_to_date:
             try:
-                plugin_registry = loads(plugin_registry["data"])
+                plugin_registry = loads(plugin_registry.get("data"))
             except BaseException as e:
                 LOGGER.debug(format_exc())
                 LOGGER.error(f"Failed to load the plugin registry data from cache: \n{e}")
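Here the guard turns a potential KeyError into a falsy up_to_date when "last_update" is missing, so the job simply treats the registry cache as stale. A small sketch under that assumption (hypothetical cache contents, not the real registry):

    from datetime import datetime, timedelta
    from json import loads

    plugin_registry = {"data": b'{"plugins": []}'}  # hypothetical entry with no "last_update" key

    # .get() makes the freshness check evaluate to None instead of raising,
    # so the stale branch runs and a fresh registry is fetched.
    up_to_date = plugin_registry.get("last_update") and plugin_registry["last_update"] > (
        datetime.now().astimezone() - timedelta(hours=1)
    ).timestamp()

    if up_to_date:
        plugin_registry = loads(plugin_registry.get("data"))
    else:
        print("plugin registry cache missing or stale, refreshing...")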

src/common/core/realip/jobs/realip-download.py

Lines changed: 6 additions & 2 deletions
@@ -122,12 +122,16 @@ def check_line(line):
         if url in failed_urls:
             if url_file not in urls:
                 aggregated_recap["failed_count"] += 1
-        elif isinstance(cached_url, dict) and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp():
+        elif (
+            isinstance(cached_url, dict)
+            and cached_url.get("last_update")
+            and cached_url["last_update"] > (datetime.now().astimezone() - timedelta(hours=1)).timestamp()
+        ):
             LOGGER.debug(f"URL {url} has already been downloaded less than 1 hour ago, skipping download...")
             if url_file not in urls:
                 aggregated_recap["skipped_urls"] += 1
             # Remove first line (URL) and add to content
-            content += b"\n".join(cached_url["data"].split(b"\n")[1:]) + b"\n"
+            content += b"\n".join(cached_url.get("data", b"").split(b"\n")[1:]) + b"\n"
         else:
             LOGGER.info(f"Downloading Real IP data from {url} ...")
             failed = False
