@@ -12,14 +12,10 @@
 
 import os
 
-from contextlib import contextmanager
-
-import celery
 import elasticsearch
 import packaging.version
 import pretend
 import pytest
-import redis
 
 from first import first
 
@@ -134,11 +130,6 @@ def __init__(self):
         self.indices = FakeESIndices()
 
 
-@contextmanager
-def _not_lock(*a, **kw):
-    yield True
-
-
 class TestReindex:
 
     def test_fails_when_raising(self, db_request, monkeypatch):
@@ -153,7 +144,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -163,10 +153,6 @@ def project_docs(db):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
         class TestException(Exception):
             pass
 
@@ -175,47 +161,20 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
         with pytest.raises(TestException):
-            reindex(task, db_request)
+            reindex(db_request)
 
         assert es_client.indices.delete.calls == [
             pretend.call(index='warehouse-cbcbcbcbcb'),
         ]
         assert es_client.indices.put_settings.calls == []
         assert es_client.indices.forcemerge.calls == []
 
-    def test_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
-
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            reindex(task, db_request)
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
     def test_successfully_indexes_and_adds_new(self, db_request, monkeypatch):
 
         docs = pretend.stub()
@@ -229,7 +188,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -240,21 +198,13 @@ def project_docs(db):
             }
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
-        reindex(task, db_request)
+        reindex(db_request)
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == [
@@ -301,7 +251,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.indices.indices["warehouse-aaaaaaaaaa"] = None
         es_client.indices.aliases["warehouse"] = ["warehouse-aaaaaaaaaa"]
@@ -316,21 +265,13 @@ def project_docs(db):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
-        reindex(task, db_request)
+        reindex(db_request)
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == [
@@ -382,7 +323,6 @@ def project_docs(db, project_name=None):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -392,10 +332,6 @@ def project_docs(db, project_name=None):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
         class TestException(Exception):
             pass
 
@@ -404,24 +340,19 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         with pytest.raises(TestException):
-            reindex_project(task, db_request, 'foo')
+            reindex_project(db_request, 'foo')
 
         assert es_client.indices.put_settings.calls == []
         assert es_client.indices.forcemerge.calls == []
 
-    def test_unindex_fails_when_raising(self, db_request, monkeypatch):
+    def test_unindex_fails_when_raising(self, db_request):
         class TestException(Exception):
             pass
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.delete = pretend.raiser(TestException)
 
@@ -432,65 +363,10 @@ class TestException(Exception):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         with pytest.raises(TestException):
-            unindex_project(task, db_request, 'foo')
-
-    def test_unindex_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
-
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            unindex_project(task, db_request, "foo")
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
-    def test_reindex_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
+            unindex_project(db_request, 'foo')
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            reindex_project(task, db_request, "foo")
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
-    def test_unindex_accepts_defeat(self, db_request, monkeypatch):
-        task = pretend.stub()
+    def test_unindex_accepts_defeat(self, db_request):
         es_client = FakeESClient()
         es_client.delete = pretend.call_recorder(
             pretend.raiser(elasticsearch.exceptions.NotFoundError))
@@ -502,15 +378,7 @@ def test_unindex_accepts_defeat(self, db_request, monkeypatch):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
-        unindex_project(task, db_request, 'foo')
+        unindex_project(db_request, 'foo')
 
         assert es_client.delete.calls == [
             pretend.call(index="warehouse", doc_type="project", id="foo")
@@ -528,7 +396,6 @@ def project_docs(db, project_name=None):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.indices.indices["warehouse-aaaaaaaaaa"] = None
         es_client.indices.aliases["warehouse"] = ["warehouse-aaaaaaaaaa"]
@@ -543,19 +410,11 @@ def project_docs(db, project_name=None):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
-        reindex_project(task, db_request, 'foo')
+        reindex_project(db_request, 'foo')
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == []
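
Taken together, the hunks converge on one shape: the search tasks are now plain functions of the request, so the tests drop the Celery task stub, the Redis lock fake, and the retry-on-lock cases. As a minimal sketch of the resulting pattern (not part of the diff; FakeESClient and db_request come from this test file, and the registry key shown is an assumption since the diff elides the dict contents):

    # Sketch, assuming the fixtures above; illustrates the test pattern only.
    def test_reindex_sketch(db_request, monkeypatch):
        es_client = FakeESClient()
        db_request.registry.update({"elasticsearch.client": es_client})  # assumed key

        # Record bulk-indexing calls instead of touching a real cluster.
        parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
        monkeypatch.setattr(warehouse.search.tasks, "parallel_bulk", parallel_bulk)

        # Pin os.urandom so the random index suffix is deterministic:
        # b"\xcb" * 5 hex-encodes to "cbcbcbcbcb", hence "warehouse-cbcbcbcbcb".
        monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)

        reindex(db_request)  # no `task` argument, no Redis lock to stub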