Skip to content

Commit 89027ef

Browse files
authored
quickfix: delete seedfile after the workflow has been deleted (#2763)
Since seedfile deletion checks that the seedfile is not used in any workflow, the seedfile should be deleted after the workflow is removed. Noticed while checking #2744.
1 parent 309977f commit 89027ef

File tree

2 files changed

+13
-9
lines changed

2 files changed

+13
-9
lines changed

backend/btrixcloud/crawlconfigs.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -976,21 +976,21 @@ async def make_inactive_or_delete(
976976

977977
# if no crawls have been run, actually delete
978978
if not crawlconfig.crawlAttemptCount:
979-
if crawlconfig.config and crawlconfig.config.seedFileId:
980-
try:
981-
await self.file_ops.delete_seed_file(
982-
crawlconfig.config.seedFileId, org
983-
)
984-
except HTTPException:
985-
pass
986-
987979
result = await self.crawl_configs.delete_one(
988980
{"_id": crawlconfig.id, "oid": crawlconfig.oid}
989981
)
990982

991983
if result.deleted_count != 1:
992984
raise HTTPException(status_code=404, detail="failed_to_delete")
993985

986+
if crawlconfig and crawlconfig.config.seedFileId:
987+
try:
988+
await self.file_ops.delete_seed_file(
989+
crawlconfig.config.seedFileId, org
990+
)
991+
except HTTPException:
992+
pass
993+
994994
status = "deleted"
995995

996996
else:

backend/test/test_stop_cancel_crawl.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,11 @@ def test_stop_crawl_partial(
181181
def test_crawl_with_hostname(default_org_id, crawler_auth_headers):
182182
r = requests.get(
183183
f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawl_id}/replay.json",
184-
headers={"X-Forwarded-Proto": "https", "host": "custom-domain.example.com", **crawler_auth_headers},
184+
headers={
185+
"X-Forwarded-Proto": "https",
186+
"host": "custom-domain.example.com",
187+
**crawler_auth_headers,
188+
},
185189
)
186190
assert r.status_code == 200
187191
assert r.json()["pagesQueryUrl"].startswith("https://custom-domain.example.com/")

0 commit comments

Comments (0)