Skip to content

Commit 00f4944

Browse files
authored
Refactor async_delete mapping and filter logic for improved clarity and accuracy (#13908)
1 parent b18d062 commit 00f4944

1 file changed

Lines changed: 19 additions & 19 deletions

File tree

dojo/utils.py

Lines changed: 19 additions & 19 deletions
Original file line number · Diff line number · Diff line change
@@ -1987,20 +1987,20 @@ class async_delete:
19871987
def __init__(self, *args, **kwargs):
19881988
self.mapping = {
19891989
"Product_Type": [
1990-
(Endpoint, "product__prod_type"),
1991-
(Finding, "test__engagement__product__prod_type"),
1992-
(Test, "engagement__product__prod_type"),
1993-
(Engagement, "product__prod_type"),
1994-
(Product, "prod_type")],
1990+
(Endpoint, "product__prod_type__id"),
1991+
(Finding, "test__engagement__product__prod_type__id"),
1992+
(Test, "engagement__product__prod_type__id"),
1993+
(Engagement, "product__prod_type__id"),
1994+
(Product, "prod_type__id")],
19951995
"Product": [
1996-
(Endpoint, "product"),
1997-
(Finding, "test__engagement__product"),
1998-
(Test, "engagement__product"),
1999-
(Engagement, "product")],
1996+
(Endpoint, "product__id"),
1997+
(Finding, "test__engagement__product__id"),
1998+
(Test, "engagement__product__id"),
1999+
(Engagement, "product__id")],
20002000
"Engagement": [
2001-
(Finding, "test__engagement"),
2002-
(Test, "engagement")],
2003-
"Test": [(Finding, "test")],
2001+
(Finding, "test__engagement__id"),
2002+
(Test, "engagement__id")],
2003+
"Test": [(Finding, "test__id")],
20042004
}
20052005

20062006
@dojo_async_task
@@ -2069,11 +2069,11 @@ def delete(self, obj, **kwargs):
20692069
@app.task
20702070
def crawl(self, obj, model_list, **kwargs):
20712071
logger.debug("ASYNC_DELETE: Crawling " + self.get_object_name(obj) + ": " + str(obj))
2072-
task_results = []
20732072
for model_info in model_list:
2073+
task_results = []
20742074
model = model_info[0]
20752075
model_query = model_info[1]
2076-
filter_dict = {model_query: obj}
2076+
filter_dict = {model_query: obj.id}
20772077
# Only fetch the IDs since we will make a list of IDs in the following function call
20782078
objects_to_delete = model.objects.only("id").filter(**filter_dict).distinct().order_by("id")
20792079
logger.debug("ASYNC_DELETE: Deleting " + str(len(objects_to_delete)) + " " + self.get_object_name(model) + "s in chunks")
@@ -2087,11 +2087,11 @@ def crawl(self, obj, model_list, **kwargs):
20872087
# Wait for all chunk deletions to complete (they run in parallel)
20882088
for task_result in task_results:
20892089
task_result.get(timeout=300) # 5 minute timeout per chunk
2090-
# Now delete the main object after all chunks are done
2091-
result = self.delete_chunk([obj])
2092-
# Wait for final deletion to complete
2093-
if hasattr(result, "get"):
2094-
result.get(timeout=300) # 5 minute timeout
2090+
# Now delete the main object after all chunks are done
2091+
result = self.delete_chunk([obj])
2092+
# Wait for final deletion to complete
2093+
if hasattr(result, "get"):
2094+
result.get(timeout=300) # 5 minute timeout
20952095
logger.debug("ASYNC_DELETE: Successfully deleted " + self.get_object_name(obj) + ": " + str(obj))
20962096

20972097
def chunk_list(self, model, full_list):

0 commit comments

Comments (0)