
import os

+import celery
import elasticsearch
import packaging.version
import pretend
import pytest
+import redis

from first import first

    reindex_project,
    unindex_project,
    _project_docs,
+    SearchLock,
)


@@ -125,6 +128,34 @@ def __init__(self):
        self.indices = FakeESIndices()


+class NotLock:
+    def __init__(*a, **kw):
+        pass
+
+    def acquire(self):
+        return True
+
+    def release(self):
+        return True
+
+
+class TestSearchLock:
+    def test_success(self):
+        lock_stub = pretend.stub(acquire=pretend.call_recorder(lambda: True))
+        r = pretend.stub(lock=lambda *a, **kw: lock_stub)
+        test_lock = SearchLock(r)
+        test_lock.__enter__()
+        assert lock_stub.acquire.calls == [pretend.call()]
+
+    def test_failure(self):
+        lock_stub = pretend.stub(acquire=pretend.call_recorder(lambda: False))
+        r = pretend.stub(lock=lambda *a, **kw: lock_stub)
+        test_lock = SearchLock(r)
+        with pytest.raises(redis.exceptions.LockError):
+            test_lock.__enter__()
+        assert lock_stub.acquire.calls == [pretend.call()]
+
+
class TestReindex:
    def test_fails_when_raising(self, db_request, monkeypatch):
        docs = pretend.stub()
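
These tests only pin down SearchLock's interface: it is built from a Redis client, `__enter__` acquires the underlying lock, and a failed acquire raises `redis.exceptions.LockError`. A minimal sketch consistent with that behaviour (the lock key name and the `redis.StrictRedis.lock` wrapper are assumptions for illustration, not taken from this diff):

```python
import redis


class SearchLock:
    # Sketch of the context manager behaviour asserted in TestSearchLock;
    # the real implementation lives in warehouse.search.tasks.
    def __init__(self, redis_client, timeout=None, blocking_timeout=None):
        # "search-index" is a hypothetical key name chosen for this sketch.
        self.lock = redis_client.lock(
            "search-index", timeout=timeout, blocking_timeout=blocking_timeout
        )

    def __enter__(self):
        if self.lock.acquire():
            return self
        # test_failure expects a LockError when the lock cannot be acquired.
        raise redis.exceptions.LockError("Could not acquire lock for search index")

    def __exit__(self, exc_type, exc_value, traceback):
        self.lock.release()
```
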
@@ -134,10 +165,14 @@ def project_docs(db):

        monkeypatch.setattr(warehouse.search.tasks, "_project_docs", project_docs)

+        task = pretend.stub()
        es_client = FakeESClient()

        db_request.registry.update({"elasticsearch.index": "warehouse"})
-        db_request.registry.settings = {"elasticsearch.url": "http://some.url"}
+        db_request.registry.settings = {
+            "elasticsearch.url": "http://some.url",
+            "celery.scheduler_url": "redis://redis:6379/0",
+        }
        monkeypatch.setattr(
            warehouse.search.tasks.elasticsearch,
            "Elasticsearch",
@@ -153,18 +188,41 @@ def parallel_bulk(client, iterable, index=None):
            assert index == "warehouse-cbcbcbcbcb"
            raise TestException

+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )
+
        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)

        monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)

        with pytest.raises(TestException):
-            reindex(db_request)
+            reindex(task, db_request)

        assert es_client.indices.delete.calls == [
            pretend.call(index="warehouse-cbcbcbcbcb")
        ]
        assert es_client.indices.put_settings.calls == []

+    def test_retry_on_lock(self, db_request, monkeypatch):
+        task = pretend.stub(
+            retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry))
+        )
+
+        db_request.registry.settings = {"celery.scheduler_url": "redis://redis:6379/0"}
+
+        le = redis.exceptions.LockError()
+        monkeypatch.setattr(
+            redis.StrictRedis,
+            "from_url",
+            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)),
+        )
+
+        with pytest.raises(celery.exceptions.Retry):
+            reindex(task, db_request)
+
+        assert task.retry.calls == [pretend.call(countdown=60, exc=le)]
+
    def test_successfully_indexes_and_adds_new(self, db_request, monkeypatch):

        docs = pretend.stub()
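
Taken together, test_retry_on_lock and the NotLock patching above imply the control flow the task is expected to follow: build a Redis client from the `celery.scheduler_url` setting, hold SearchLock around the reindex body, and convert a `LockError` into `task.retry(countdown=60, exc=...)`. A rough sketch of that flow, assuming the SearchLock sketched earlier and with the lock timeouts invented for illustration:

```python
import redis


def reindex(task, request):
    # Sketch inferred from the assertions in these tests, not copied from warehouse.
    r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"])
    try:
        with SearchLock(r, timeout=30 * 60, blocking_timeout=30):
            ...  # create the new index, bulk-index the docs, swap the alias
    except redis.exceptions.LockError as exc:
        # test_retry_on_lock expects exactly this: retry in 60s, carrying the error.
        raise task.retry(countdown=60, exc=exc)
```
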
@@ -174,24 +232,31 @@ def project_docs(db):

        monkeypatch.setattr(warehouse.search.tasks, "_project_docs", project_docs)

+        task = pretend.stub()
        es_client = FakeESClient()

        db_request.registry.update(
            {"elasticsearch.index": "warehouse", "elasticsearch.shards": 42}
        )
-        db_request.registry.settings = {"elasticsearch.url": "http://some.url"}
+        db_request.registry.settings = {
+            "elasticsearch.url": "http://some.url",
+            "celery.scheduler_url": "redis://redis:6379/0",
+        }
        monkeypatch.setattr(
            warehouse.search.tasks.elasticsearch,
            "Elasticsearch",
            lambda *a, **kw: es_client,
        )
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

        parallel_bulk = pretend.call_recorder(lambda client, iterable, index: [None])
        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)

        monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)

-        reindex(db_request)
+        reindex(task, db_request)

        assert parallel_bulk.calls == [
            pretend.call(es_client, docs, index="warehouse-cbcbcbcbcb")
@@ -220,6 +285,7 @@ def project_docs(db):

    def test_successfully_indexes_and_replaces(self, db_request, monkeypatch):
        docs = pretend.stub()
+        task = pretend.stub()

        def project_docs(db):
            return docs
@@ -238,19 +304,25 @@ def project_docs(db):
                "sqlalchemy.engine": db_engine,
            }
        )
-        db_request.registry.settings = {"elasticsearch.url": "http://some.url"}
+        db_request.registry.settings = {
+            "elasticsearch.url": "http://some.url",
+            "celery.scheduler_url": "redis://redis:6379/0",
+        }
        monkeypatch.setattr(
            warehouse.search.tasks.elasticsearch,
            "Elasticsearch",
            lambda *a, **kw: es_client,
        )
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

        parallel_bulk = pretend.call_recorder(lambda client, iterable, index: [None])
        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)

        monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)

-        reindex(db_request)
+        reindex(task, db_request)

        assert parallel_bulk.calls == [
            pretend.call(es_client, docs, index="warehouse-cbcbcbcbcb")
@@ -281,6 +353,7 @@ def project_docs(db):
class TestPartialReindex:
    def test_reindex_fails_when_raising(self, db_request, monkeypatch):
        docs = pretend.stub()
+        task = pretend.stub()

        def project_docs(db, project_name=None):
            return docs
@@ -302,44 +375,96 @@ def parallel_bulk(client, iterable, index=None):
            raise TestException

        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

        with pytest.raises(TestException):
-            reindex_project(db_request, "foo")
+            reindex_project(task, db_request, "foo")

        assert es_client.indices.put_settings.calls == []

-    def test_unindex_fails_when_raising(self, db_request):
+    def test_unindex_fails_when_raising(self, db_request, monkeypatch):
+        task = pretend.stub()
+
        class TestException(Exception):
            pass

        es_client = FakeESClient()
        es_client.delete = pretend.raiser(TestException)
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

        db_request.registry.update(
            {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"}
        )

        with pytest.raises(TestException):
-            unindex_project(db_request, "foo")
+            unindex_project(task, db_request, "foo")
+
+    def test_unindex_accepts_defeat(self, db_request, monkeypatch):
+        task = pretend.stub()

-    def test_unindex_accepts_defeat(self, db_request):
        es_client = FakeESClient()
        es_client.delete = pretend.call_recorder(
            pretend.raiser(elasticsearch.exceptions.NotFoundError)
        )
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

        db_request.registry.update(
            {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"}
        )

-        unindex_project(db_request, "foo")
+        unindex_project(task, db_request, "foo")

        assert es_client.delete.calls == [
            pretend.call(index="warehouse", doc_type="doc", id="foo")
        ]

+    def test_unindex_retry_on_lock(self, db_request, monkeypatch):
+        task = pretend.stub(
+            retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry))
+        )
+
+        db_request.registry.settings = {"celery.scheduler_url": "redis://redis:6379/0"}
+
+        le = redis.exceptions.LockError()
+        monkeypatch.setattr(
+            redis.StrictRedis,
+            "from_url",
+            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)),
+        )
+
+        with pytest.raises(celery.exceptions.Retry):
+            unindex_project(task, db_request, "foo")
+
+        assert task.retry.calls == [pretend.call(countdown=60, exc=le)]
+
+    def test_reindex_retry_on_lock(self, db_request, monkeypatch):
+        task = pretend.stub(
+            retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry))
+        )
+
+        db_request.registry.settings = {"celery.scheduler_url": "redis://redis:6379/0"}
+
+        le = redis.exceptions.LockError()
+        monkeypatch.setattr(
+            redis.StrictRedis,
+            "from_url",
+            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)),
+        )
+
+        with pytest.raises(celery.exceptions.Retry):
+            reindex_project(task, db_request, "foo")
+
+        assert task.retry.calls == [pretend.call(countdown=60, exc=le)]
+
    def test_successfully_indexes(self, db_request, monkeypatch):
        docs = pretend.stub()
+        task = pretend.stub()

        def project_docs(db, project_name=None):
            return docs
@@ -364,8 +489,11 @@ def project_docs(db, project_name=None):
            lambda client, iterable, index=None: [None]
        )
        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
+        )

-        reindex_project(db_request, "foo")
+        reindex_project(task, db_request, "foo")

        assert parallel_bulk.calls == [pretend.call(es_client, docs, index="warehouse")]
        assert es_client.indices.create.calls == []