
Commit d7f50a5

Revert "wrap search index mutation in a lock (#3700)"
This reverts commit 7a8a249.
1 parent aadc20e commit d7f50a5
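
Context for the revert: the reverted commit (#3700) had wrapped the search-index tasks in a Redis lock, with a retry on contention. A minimal sketch of that pattern, reconstructed from the tests removed below — the lock name "search-index" and the _rebuild_search_index helper are illustrative assumptions, not Warehouse's actual code:

    import redis
    from redis.exceptions import LockError


    def _rebuild_search_index(request):
        """Hypothetical stand-in for the actual reindexing work."""


    def reindex(task, request):
        # Sketch of the reverted lock-wrapped task. The removed tests point
        # the lock at the Celery scheduler's Redis instance.
        client = redis.StrictRedis.from_url(
            request.registry.settings["celery.scheduler_url"])
        try:
            # "search-index" is an assumed lock name, for illustration only.
            with client.lock("search-index", blocking_timeout=0):
                _rebuild_search_index(request)
        except LockError as exc:
            # On contention, the removed tests expected a 60-second retry.
            raise task.retry(countdown=60, exc=exc)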

File tree

2 files changed: +103 -271 lines changed


tests/unit/search/test_tasks.py

Lines changed: 9 additions & 150 deletions
@@ -12,14 +12,10 @@
 
 import os
 
-from contextlib import contextmanager
-
-import celery
 import elasticsearch
 import packaging.version
 import pretend
 import pytest
-import redis
 
 from first import first
 
@@ -134,11 +130,6 @@ def __init__(self):
         self.indices = FakeESIndices()
 
 
-@contextmanager
-def _not_lock(*a, **kw):
-    yield True
-
-
 class TestReindex:
 
     def test_fails_when_raising(self, db_request, monkeypatch):
@@ -153,7 +144,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -163,10 +153,6 @@ def project_docs(db):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
         class TestException(Exception):
             pass
 
@@ -175,47 +161,20 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
         with pytest.raises(TestException):
-            reindex(task, db_request)
+            reindex(db_request)
 
         assert es_client.indices.delete.calls == [
             pretend.call(index='warehouse-cbcbcbcbcb'),
         ]
         assert es_client.indices.put_settings.calls == []
         assert es_client.indices.forcemerge.calls == []
 
-    def test_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
-
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            reindex(task, db_request)
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
     def test_successfully_indexes_and_adds_new(self, db_request, monkeypatch):
 
         docs = pretend.stub()
@@ -229,7 +188,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -240,21 +198,13 @@ def project_docs(db):
             }
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
-        reindex(task, db_request)
+        reindex(db_request)
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == [
@@ -301,7 +251,6 @@ def project_docs(db):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.indices.indices["warehouse-aaaaaaaaaa"] = None
         es_client.indices.aliases["warehouse"] = ["warehouse-aaaaaaaaaa"]
@@ -316,21 +265,13 @@ def project_docs(db):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
 
-        reindex(task, db_request)
+        reindex(db_request)
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == [
@@ -382,7 +323,6 @@ def project_docs(db, project_name=None):
             project_docs,
         )
 
-        task = pretend.stub()
         es_client = FakeESClient()
 
         db_request.registry.update(
@@ -392,10 +332,6 @@ def project_docs(db, project_name=None):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
         class TestException(Exception):
             pass
 
@@ -404,24 +340,19 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
         with pytest.raises(TestException):
-            reindex_project(task, db_request, 'foo')
+            reindex_project(db_request, 'foo')
 
         assert es_client.indices.put_settings.calls == []
         assert es_client.indices.forcemerge.calls == []
 
-    def test_unindex_fails_when_raising(self, db_request, monkeypatch):
+    def test_unindex_fails_when_raising(self, db_request):
         class TestException(Exception):
             pass
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.delete = pretend.raiser(TestException)
 
@@ -432,65 +363,10 @@ class TestException(Exception):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         with pytest.raises(TestException):
-            unindex_project(task, db_request, 'foo')
-
-    def test_unindex_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
-
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            unindex_project(task, db_request, "foo")
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
-    def test_reindex_retry_on_lock(self, db_request, monkeypatch):
-        task = pretend.stub(
-            retry=pretend.call_recorder(
-                pretend.raiser(celery.exceptions.Retry)
-            )
-        )
+            unindex_project(db_request, 'foo')
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        le = redis.exceptions.LockError()
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)))
-
-        with pytest.raises(celery.exceptions.Retry):
-            reindex_project(task, db_request, "foo")
-
-        assert task.retry.calls == [
-            pretend.call(countdown=60, exc=le)
-        ]
-
-    def test_unindex_accepts_defeat(self, db_request, monkeypatch):
-        task = pretend.stub()
+    def test_unindex_accepts_defeat(self, db_request):
         es_client = FakeESClient()
         es_client.delete = pretend.call_recorder(
             pretend.raiser(elasticsearch.exceptions.NotFoundError))
@@ -502,15 +378,7 @@ def test_unindex_accepts_defeat(self, db_request, monkeypatch):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
-        unindex_project(task, db_request, 'foo')
+        unindex_project(db_request, 'foo')
 
         assert es_client.delete.calls == [
             pretend.call(index="warehouse", doc_type="project", id="foo")
@@ -528,7 +396,6 @@ def project_docs(db, project_name=None):
             project_docs,
        )
 
-        task = pretend.stub()
         es_client = FakeESClient()
         es_client.indices.indices["warehouse-aaaaaaaaaa"] = None
         es_client.indices.aliases["warehouse"] = ["warehouse-aaaaaaaaaa"]
@@ -543,19 +410,11 @@ def project_docs(db, project_name=None):
             },
         )
 
-        db_request.registry.settings = {
-            "celery.scheduler_url": "redis://redis:6379/0",
-        }
-
-        monkeypatch.setattr(
-            redis.StrictRedis, "from_url",
-            lambda *a, **kw: pretend.stub(lock=_not_lock))
-
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
-        reindex_project(task, db_request, 'foo')
+        reindex_project(db_request, 'foo')
 
         assert parallel_bulk.calls == [pretend.call(es_client, docs)]
         assert es_client.indices.create.calls == []
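
Net effect on the test suite: the tasks go back to being plain callables of the request, so the updated tests invoke them without a bound-task stub and without any redis monkeypatching:

    # Post-revert call signatures, as asserted by the updated tests above.
    reindex(db_request)
    reindex_project(db_request, 'foo')
    unindex_project(db_request, 'foo')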
