
Commit 20d0b7b

ain't no redis in travis

1 parent: 7202ac5

1 file changed: +37 -2 lines changed


tests/unit/search/test_tasks.py

Lines changed: 37 additions & 2 deletions
@@ -12,6 +12,8 @@
 
 import os
 
+from contextlib import contextmanager
+
 import celery
 import elasticsearch
 import packaging.version
@@ -132,6 +134,11 @@ def __init__(self):
         self.indices = FakeESIndices()
 
 
+@contextmanager
+def _not_lock(*a, **kw):
+    yield True
+
+
 class TestReindex:
 
     def test_fails_when_raising(self, db_request, monkeypatch):
@@ -168,6 +175,10 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
@@ -233,6 +244,10 @@ def project_docs(db):
             "celery.scheduler_url": "redis://redis:6379/0",
         }
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
@@ -305,6 +320,10 @@ def project_docs(db):
             "celery.scheduler_url": "redis://redis:6379/0",
         }
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
@@ -385,6 +404,10 @@ def parallel_bulk(client, iterable):
             assert iterable is docs
             raise TestException
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
 
@@ -394,7 +417,7 @@ def parallel_bulk(client, iterable):
         assert es_client.indices.put_settings.calls == []
         assert es_client.indices.forcemerge.calls == []
 
-    def test_unindex_fails_when_raising(self, db_request):
+    def test_unindex_fails_when_raising(self, db_request, monkeypatch):
         class TestException(Exception):
             pass
 
@@ -413,6 +436,10 @@ class TestException(Exception):
             "celery.scheduler_url": "redis://redis:6379/0",
         }
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         with pytest.raises(TestException):
             unindex_project(task, db_request, 'foo')
 
@@ -462,7 +489,7 @@ def test_reindex_retry_on_lock(self, db_request, monkeypatch):
             pretend.call(countdown=60, exc=le)
         ]
 
-    def test_unindex_accepts_defeat(self, db_request):
+    def test_unindex_accepts_defeat(self, db_request, monkeypatch):
         task = pretend.stub()
         es_client = FakeESClient()
         es_client.delete = pretend.call_recorder(
@@ -479,6 +506,10 @@ def test_unindex_accepts_defeat(self, db_request):
             "celery.scheduler_url": "redis://redis:6379/0",
        }
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         unindex_project(task, db_request, 'foo')
 
         assert es_client.delete.calls == [
@@ -516,6 +547,10 @@ def project_docs(db, project_name=None):
             "celery.scheduler_url": "redis://redis:6379/0",
         }
 
+        monkeypatch.setattr(
+            redis.StrictRedis, "from_url",
+            lambda *a, **kw: pretend.stub(lock=_not_lock))
+
         parallel_bulk = pretend.call_recorder(lambda client, iterable: [None])
         monkeypatch.setattr(
             warehouse.search.tasks, "parallel_bulk", parallel_bulk)
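
The change is the same pattern applied to every affected test: warehouse.search.tasks guards indexing work with a Redis lock obtained via redis.StrictRedis.from_url(...).lock(...), and the Travis CI environment has no Redis server, so each test monkeypatches from_url to return a stub whose lock attribute is the no-op context manager _not_lock. A minimal, self-contained sketch of that technique under pytest (the test name and lock name here are illustrative, not part of this diff):

import redis
import pretend
from contextlib import contextmanager


@contextmanager
def _not_lock(*a, **kw):
    # Stands in for redis.lock.Lock: "acquires" instantly and releases
    # as a no-op, so no Redis server is ever contacted.
    yield True


def test_reindex_without_redis(monkeypatch):
    # Patch the constructor so any StrictRedis.from_url(url) call made
    # by the code under test returns a stub exposing only .lock().
    monkeypatch.setattr(
        redis.StrictRedis, "from_url",
        lambda *a, **kw: pretend.stub(lock=_not_lock))

    client = redis.StrictRedis.from_url("redis://redis:6379/0")
    with client.lock("search-reindex"):
        pass  # code under test would run here with the "lock" held

Because _not_lock is a @contextmanager generator, the with-statement protocol the production code relies on still works; only the network dependency is gone.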
