@@ -31,18 +31,18 @@ def test_clear_dump_urls(self):
     def test_create_or_update_delta_url_add(self):
         collection = CollectionFactory()
         dump_url = DumpUrlFactory(collection=collection)
-        collection.create_or_update_delta_url(dump_url, delete=False)
+        collection.create_or_update_delta_url(dump_url, to_delete=False)
         delta = DeltaUrl.objects.get(url=dump_url.url)
-        assert delta.delete is False
+        assert delta.to_delete is False
         for field in DELTA_COMPARISON_FIELDS:
             assert getattr(delta, field) == getattr(dump_url, field)

     def test_create_or_update_delta_url_delete(self):
         collection = CollectionFactory()
         curated_url = CuratedUrlFactory(collection=collection)
-        collection.create_or_update_delta_url(curated_url, delete=True)
+        collection.create_or_update_delta_url(curated_url, to_delete=True)
         delta = DeltaUrl.objects.get(url=curated_url.url)
-        assert delta.delete is True
+        assert delta.to_delete is True
         assert delta.scraped_title == ""

@@ -53,7 +53,7 @@ def test_new_url_in_dump_only(self):
         dump_url = DumpUrlFactory(collection=collection)
         collection.migrate_dump_to_delta()
         delta = DeltaUrl.objects.get(url=dump_url.url)
-        assert delta.delete is False
+        assert delta.to_delete is False
         for field in DELTA_COMPARISON_FIELDS:
             assert getattr(delta, field) == getattr(dump_url, field)

@@ -63,15 +63,15 @@ def test_url_in_both_with_different_field(self):
         curated_url = CuratedUrlFactory(collection=collection, url=dump_url.url, scraped_title="Old Title")  # noqa
         collection.migrate_dump_to_delta()
         delta = DeltaUrl.objects.get(url=dump_url.url)
-        assert delta.delete is False
+        assert delta.to_delete is False
         assert delta.scraped_title == "New Title"

     def test_url_in_curated_only(self):
         collection = CollectionFactory()
         curated_url = CuratedUrlFactory(collection=collection)
         collection.migrate_dump_to_delta()
         delta = DeltaUrl.objects.get(url=curated_url.url)
-        assert delta.delete is True
+        assert delta.to_delete is True
         assert delta.scraped_title == ""

     def test_identical_url_in_both(self):
@@ -91,16 +91,16 @@ def test_full_migration_flow(self):
         collection.migrate_dump_to_delta()

         # New URL moved to DeltaUrls
-        assert DeltaUrl.objects.filter(url=dump_url_new.url, delete=False).exists()
+        assert DeltaUrl.objects.filter(url=dump_url_new.url, to_delete=False).exists()

         # Updated URL moved to DeltaUrls
         delta_update = DeltaUrl.objects.get(url=dump_url_update.url)
         assert delta_update.scraped_title == "Updated Title"
-        assert delta_update.delete is False
+        assert delta_update.to_delete is False

         # Deleted URL in CuratedUrls marked as delete in DeltaUrls
         delta_delete = DeltaUrl.objects.get(url=curated_url_delete.url)
-        assert delta_delete.delete is True
+        assert delta_delete.to_delete is True

     def test_empty_collections(self):
         collection = CollectionFactory()
@@ -113,7 +113,7 @@ def test_partial_data_in_dump_urls(self):
         collection.migrate_dump_to_delta()
         delta = DeltaUrl.objects.get(url=dump_url.url)
         assert delta.scraped_title == ""
-        assert delta.delete is False
+        assert delta.to_delete is False


 @pytest.mark.django_db
@@ -136,11 +136,11 @@ def test_create_or_update_delta_url_idempotency(self):
         dump_url = DumpUrlFactory(collection=collection)

         # First call
-        collection.create_or_update_delta_url(dump_url, delete=False)
+        collection.create_or_update_delta_url(dump_url, to_delete=False)
         assert DeltaUrl.objects.filter(url=dump_url.url).count() == 1

         # Second call with the same data
-        collection.create_or_update_delta_url(dump_url, delete=False)
+        collection.create_or_update_delta_url(dump_url, to_delete=False)
         assert DeltaUrl.objects.filter(url=dump_url.url).count() == 1  # Should still be one

@@ -157,7 +157,7 @@ def test_create_or_update_delta_url_field_copy():
         division=2,
     )

-    collection.create_or_update_delta_url(dump_url, delete=False)
+    collection.create_or_update_delta_url(dump_url, to_delete=False)
     delta = DeltaUrl.objects.get(url=dump_url.url)

     # Verify each field is copied correctly
@@ -174,7 +174,7 @@ def test_full_migration_new_url(self):
         collection.migrate_dump_to_delta()

         # New URL should be added to DeltaUrls
-        assert DeltaUrl.objects.filter(url=dump_url.url, delete=False).exists()
+        assert DeltaUrl.objects.filter(url=dump_url.url, to_delete=False).exists()

     def test_full_migration_updated_url(self):
         collection = CollectionFactory()
@@ -184,7 +184,7 @@ def test_full_migration_updated_url(self):
         # URL with differing fields should be updated in DeltaUrls
         delta_update = DeltaUrl.objects.get(url=dump_url.url)
         assert delta_update.scraped_title == "Updated Title"
-        assert delta_update.delete is False
+        assert delta_update.to_delete is False

     def test_full_migration_deleted_url(self):
         collection = CollectionFactory()
@@ -193,7 +193,7 @@ def test_full_migration_deleted_url(self):

         # Missing URL in DumpUrls should be marked as delete in DeltaUrls
         delta_delete = DeltaUrl.objects.get(url=curated_url.url)
-        assert delta_delete.delete is True
+        assert delta_delete.to_delete is True


 @pytest.mark.django_db
@@ -225,4 +225,4 @@ def test_partial_data_in_curated_urls():
     # Since `scraped_title` differs (None vs "Title Exists"), it should create a DeltaUrl
     delta = DeltaUrl.objects.get(url=dump_url.url)
     assert delta.scraped_title == "Title Exists"
-    assert delta.delete is False
+    assert delta.to_delete is False
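For reference, a minimal sketch of the method these tests drive after the rename is shown below. It is an illustrative assumption, not the implementation changed in this diff: Collection.create_or_update_delta_url, DeltaUrl, DELTA_COMPARISON_FIELDS, and the to_delete keyword/field come from the tests above, while the collection foreign-key name, the blank-string field defaults, and the update_or_create strategy are guesses.

    # Hypothetical sketch only -- not the code in this diff.
    def create_or_update_delta_url(self, url_obj, to_delete=False):
        # Record the deletion flag; on the delete path no scraped fields are
        # copied, so a freshly created DeltaUrl keeps blank defaults (the tests
        # assert scraped_title == "").
        defaults = {"to_delete": to_delete}
        if not to_delete:
            # Copy every comparison field from the dump/curated row
            # (assumes DELTA_COMPARISON_FIELDS is an iterable of field names).
            for field in DELTA_COMPARISON_FIELDS:
                defaults[field] = getattr(url_obj, field)
        # update_or_create keeps one DeltaUrl per URL, which is what the
        # idempotency test expects; the "collection" FK name is assumed.
        DeltaUrl.objects.update_or_create(
            collection=self, url=url_obj.url, defaults=defaults
        )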