From 42a809ee3edbd68ea343049ce6d4dd4156e2dc83 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Thu, 6 Jun 2024 21:22:38 +0300 Subject: [PATCH 01/16] Make sure the CI actually runs RESP3 tests The CI tests were not running with RESP3 protocol, it was just an illusion that they do. Fix this, and also preserve coverage and test artifacts from those runs too. Take the opportunity to fix the indentation of the YAML file. --- .github/workflows/integration.yaml | 199 ++++++++++++++++------------- 1 file changed, 111 insertions(+), 88 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 8f60efe6c7..a6c0f0ccb5 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -25,87 +25,87 @@ permissions: jobs: - dependency-audit: - name: Dependency audit - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: pypa/gh-action-pip-audit@v1.0.8 - with: - inputs: requirements.txt dev_requirements.txt - ignore-vulns: | - GHSA-w596-4wvx-j9j6 # subversion related git pull, dependency for pytest. There is no impact here. - - lint: - name: Code linters - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: 3.9 - cache: 'pip' - - name: run code linters - run: | - pip install -r dev_requirements.txt - invoke linters - - run-tests: - runs-on: ubuntu-latest - timeout-minutes: 60 - strategy: - max-parallel: 15 - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.8', 'pypy-3.9'] - test-type: ['standalone', 'cluster'] - connection-type: ['hiredis', 'plain'] - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true - name: Python ${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}} tests - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - - name: run tests - run: | - pip install -U setuptools wheel - pip install -r requirements.txt - pip install -r dev_requirements.txt - if [ "${{matrix.connection-type}}" == "hiredis" ]; then - pip install hiredis - fi - invoke devenv - sleep 10 # time to settle - invoke ${{matrix.test-type}}-tests - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: pytest-results-${{matrix.test-type}}-${{matrix.connection-type}}-${{matrix.python-version}} - path: '${{matrix.test-type}}*results.xml' - - - name: Upload codecov coverage - uses: codecov/codecov-action@v4 - with: - fail_ci_if_error: false - - - name: View Test Results - uses: dorny/test-reporter@v1 - if: success() || failure() - continue-on-error: true - with: - name: Test Results ${{matrix.python-version}} ${{matrix.test-type}}-${{matrix.connection-type}} - path: '*.xml' - reporter: java-junit - list-suites: all - list-tests: all - max-annotations: 10 - fail-on-error: 'false' - - resp3_tests: + dependency-audit: + name: Dependency audit + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: pypa/gh-action-pip-audit@v1.0.8 + with: + inputs: requirements.txt dev_requirements.txt + ignore-vulns: | + GHSA-w596-4wvx-j9j6 # subversion related git pull, dependency for pytest. There is no impact here. 
+ + lint: + name: Code linters + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: 3.9 + cache: 'pip' + - name: run code linters + run: | + pip install -r dev_requirements.txt + invoke linters + + run-tests: + runs-on: ubuntu-latest + timeout-minutes: 60 + strategy: + max-parallel: 15 + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.8', 'pypy-3.9'] + test-type: ['standalone', 'cluster'] + connection-type: ['hiredis', 'plain'] + env: + ACTIONS_ALLOW_UNSECURE_COMMANDS: true + name: Python ${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}} tests + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + - name: run tests + run: | + pip install -U setuptools wheel + pip install -r requirements.txt + pip install -r dev_requirements.txt + if [ "${{matrix.connection-type}}" == "hiredis" ]; then + pip install hiredis + fi + invoke devenv + sleep 10 # time to settle + invoke ${{matrix.test-type}}-tests + + - uses: actions/upload-artifact@v4 + if: success() || failure() + with: + name: pytest-results-${{matrix.test-type}}-${{matrix.connection-type}}-${{matrix.python-version}} + path: '${{matrix.test-type}}*results.xml' + + - name: Upload codecov coverage + uses: codecov/codecov-action@v4 + with: + fail_ci_if_error: false + + - name: View Test Results + uses: dorny/test-reporter@v1 + if: success() || failure() + continue-on-error: true + with: + name: Test Results ${{matrix.python-version}} ${{matrix.test-type}}-${{matrix.connection-type}} + path: '*.xml' + reporter: java-junit + list-suites: all + list-tests: all + max-annotations: 10 + fail-on-error: 'false' + + resp3_tests: runs-on: ubuntu-latest strategy: fail-fast: false @@ -113,9 +113,8 @@ jobs: python-version: ['3.8', '3.11'] test-type: ['standalone', 'cluster'] connection-type: ['hiredis', 'plain'] - protocol: ['3'] env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true + ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] steps: - uses: actions/checkout@v4 @@ -132,11 +131,35 @@ jobs: pip install hiredis fi invoke devenv - sleep 5 # time to settle - invoke ${{matrix.test-type}}-tests - invoke ${{matrix.test-type}}-tests --uvloop + sleep 10 # time to settle + invoke ${{matrix.test-type}}-tests --protocol=3 + invoke ${{matrix.test-type}}-tests --uvloop --protocol=3 + + - uses: actions/upload-artifact@v4 + if: success() || failure() + with: + name: pytest-results-${{matrix.test-type}}-${{matrix.connection-type}}-${{matrix.python-version}}-resp3 + path: '${{matrix.test-type}}*results.xml' + + - name: Upload codecov coverage + uses: codecov/codecov-action@v4 + with: + fail_ci_if_error: false + + - name: View Test Results + uses: dorny/test-reporter@v1 + if: success() || failure() + continue-on-error: true + with: + name: Test Results ${{matrix.python-version}} ${{matrix.test-type}}-${{matrix.connection-type}}-resp3 + path: '*.xml' + reporter: java-junit + list-suites: all + list-tests: all + max-annotations: 10 + fail-on-error: 'false' - build_and_test_package: + build_and_test_package: name: Validate building and installing the package runs-on: ubuntu-latest needs: [run-tests] @@ -153,7 +176,7 @@ jobs: run: | bash .github/workflows/install_and_test.sh ${{ matrix.extension }} - install_package_from_commit: + install_package_from_commit: name: Install package 
from commit hash runs-on: ubuntu-latest strategy: From 6367be673647f80ef69a31fd9d70a6dfbdc33c1c Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Fri, 7 Jun 2024 10:56:50 +0300 Subject: [PATCH 02/16] Adapt failing RESP3 tests --- tests/test_asyncio/test_json.py | 98 +++++++++++++------------------ tests/test_commands.py | 9 ++- tests/test_json.py | 101 ++++++++++++++------------------ tests/test_search.py | 18 +++++- 4 files changed, 108 insertions(+), 118 deletions(-) diff --git a/tests/test_asyncio/test_json.py b/tests/test_asyncio/test_json.py index c713f1aca2..55f9447a2f 100644 --- a/tests/test_asyncio/test_json.py +++ b/tests/test_asyncio/test_json.py @@ -22,7 +22,7 @@ async def test_json_setbinarykey(decoded_r: redis.Redis): @pytest.mark.redismod async def test_json_setgetdeleteforget(decoded_r: redis.Redis): assert await decoded_r.json().set("foo", Path.root_path(), "bar") - assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" assert await decoded_r.json().get("baz") is None assert await decoded_r.json().delete("foo") == 1 assert await decoded_r.json().forget("foo") == 0 # second delete @@ -32,13 +32,13 @@ async def test_json_setgetdeleteforget(decoded_r: redis.Redis): @pytest.mark.redismod async def test_jsonget(decoded_r: redis.Redis): await decoded_r.json().set("foo", Path.root_path(), "bar") - assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" @pytest.mark.redismod async def test_json_get_jset(decoded_r: redis.Redis): assert await decoded_r.json().set("foo", Path.root_path(), "bar") - assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" assert await decoded_r.json().get("baz") is None assert 1 == await decoded_r.json().delete("foo") assert await decoded_r.exists("foo") == 0 @@ -47,10 +47,7 @@ async def test_json_get_jset(decoded_r: redis.Redis): @pytest.mark.redismod async def test_nonascii_setgetdelete(decoded_r: redis.Redis): assert await decoded_r.json().set("notascii", Path.root_path(), "hyvää-élève") - res = "hyvää-élève" - assert_resp_response( - decoded_r, await decoded_r.json().get("notascii", no_escape=True), res, [[res]] - ) + assert await decoded_r.json().get("notascii", no_escape=True) == "hyvää-élève" assert 1 == await decoded_r.json().delete("notascii") assert await decoded_r.exists("notascii") == 0 @@ -192,8 +189,7 @@ async def test_toggle(decoded_r: redis.Redis): async def test_strappend(decoded_r: redis.Redis): await decoded_r.json().set("jsonkey", Path.root_path(), "foo") assert 6 == await decoded_r.json().strappend("jsonkey", "bar") - res = await decoded_r.json().get("jsonkey", Path.root_path()) - assert_resp_response(decoded_r, res, "foobar", [["foobar"]]) + assert "foobar" == await decoded_r.json().get("jsonkey", Path.root_path()) @pytest.mark.redismod @@ -230,14 +226,12 @@ async def test_arrindex(decoded_r: redis.Redis): async def test_arrinsert(decoded_r: redis.Redis): await decoded_r.json().set("arr", Path.root_path(), [0, 4]) assert 5 == await decoded_r.json().arrinsert("arr", Path.root_path(), 1, *[1, 2, 3]) - res = [0, 1, 2, 3, 4] - assert_resp_response(decoded_r, await decoded_r.json().get("arr"), res, [[res]]) + assert await decoded_r.json().get("arr") == [0, 1, 2, 3, 4] # test prepends await decoded_r.json().set("val2", Path.root_path(), [5, 6, 7, 8, 9]) await decoded_r.json().arrinsert("val2", 
Path.root_path(), 0, ["some", "thing"]) - res = [["some", "thing"], 5, 6, 7, 8, 9] - assert_resp_response(decoded_r, await decoded_r.json().get("val2"), res, [[res]]) + assert await decoded_r.json().get("val2") == [["some", "thing"], 5, 6, 7, 8, 9] @pytest.mark.redismod @@ -255,7 +249,7 @@ async def test_arrpop(decoded_r: redis.Redis): assert 3 == await decoded_r.json().arrpop("arr", Path.root_path(), -1) assert 2 == await decoded_r.json().arrpop("arr", Path.root_path()) assert 0 == await decoded_r.json().arrpop("arr", Path.root_path(), 0) - assert_resp_response(decoded_r, await decoded_r.json().get("arr"), [1], [[[1]]]) + assert [1] == await decoded_r.json().get("arr") # test out of bounds await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -270,8 +264,7 @@ async def test_arrpop(decoded_r: redis.Redis): async def test_arrtrim(decoded_r: redis.Redis): await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) assert 3 == await decoded_r.json().arrtrim("arr", Path.root_path(), 1, 3) - res = await decoded_r.json().get("arr") - assert_resp_response(decoded_r, res, [1, 2, 3], [[[1, 2, 3]]]) + assert [1, 2, 3] == await decoded_r.json().get("arr") # <0 test, should be 0 equivalent await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -356,15 +349,14 @@ async def test_json_delete_with_dollar(decoded_r: redis.Redis): doc1 = {"a": 1, "nested": {"a": 2, "b": 3}} assert await decoded_r.json().set("doc1", "$", doc1) assert await decoded_r.json().delete("doc1", "$..a") == 2 - res = [{"nested": {"b": 3}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == [{"nested": {"b": 3}}] doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert await decoded_r.json().set("doc2", "$", doc2) assert await decoded_r.json().delete("doc2", "$..a") == 1 - res = await decoded_r.json().get("doc2", "$") - res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc2", "$"), res, [res]) + assert await decoded_r.json().get("doc2", "$") == [ + {"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]} + ] doc3 = [ { @@ -395,8 +387,7 @@ async def test_json_delete_with_dollar(decoded_r: redis.Redis): } ] ] - res = await decoded_r.json().get("doc3", "$") - assert_resp_response(decoded_r, res, doc3val, [doc3val]) + assert await decoded_r.json().get("doc3", "$") == doc3val # Test async default path assert await decoded_r.json().delete("doc3") == 1 @@ -410,14 +401,14 @@ async def test_json_forget_with_dollar(decoded_r: redis.Redis): doc1 = {"a": 1, "nested": {"a": 2, "b": 3}} assert await decoded_r.json().set("doc1", "$", doc1) assert await decoded_r.json().forget("doc1", "$..a") == 2 - res = [{"nested": {"b": 3}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == [{"nested": {"b": 3}}] doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert await decoded_r.json().set("doc2", "$", doc2) assert await decoded_r.json().forget("doc2", "$..a") == 1 - res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc2", "$"), res, [res]) + assert await decoded_r.json().get("doc2", "$") == [ + {"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]} + ] doc3 = [ { @@ -448,8 +439,7 @@ async def test_json_forget_with_dollar(decoded_r: 
redis.Redis): } ] ] - res = await decoded_r.json().get("doc3", "$") - assert_resp_response(decoded_r, res, doc3val, [doc3val]) + assert await decoded_r.json().get("doc3", "$") == doc3val # Test async default path assert await decoded_r.json().forget("doc3") == 1 @@ -473,14 +463,8 @@ async def test_json_mget_dollar(decoded_r: redis.Redis): {"a": 4, "b": 5, "nested": {"a": 6}, "c": None, "nested2": {"a": [None]}}, ) # Compare also to single JSON.GET - res = [1, 3, None] - assert_resp_response( - decoded_r, await decoded_r.json().get("doc1", "$..a"), res, [res] - ) - res = [4, 6, [None]] - assert_resp_response( - decoded_r, await decoded_r.json().get("doc2", "$..a"), res, [res] - ) + assert await decoded_r.json().get("doc1", "$..a") == [1, 3, None] + assert await decoded_r.json().get("doc2", "$..a") == [4, 6, [None]] # Test mget with single path await decoded_r.json().mget("doc1", "$..a") == [1, 3, None] @@ -559,13 +543,13 @@ async def test_strappend_dollar(decoded_r: redis.Redis): await decoded_r.json().strappend("doc1", "bar", "$..a") == [6, 8, None] res = [{"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single await decoded_r.json().strappend("doc1", "baz", "$.nested1.a") == [11] res = [{"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -574,7 +558,7 @@ async def test_strappend_dollar(decoded_r: redis.Redis): # Test multi await decoded_r.json().strappend("doc1", "bar", ".*.a") == 8 res = [{"a": "foobar", "nested1": {"a": "hellobarbazbar"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing path with pytest.raises(exceptions.ResponseError): @@ -622,7 +606,7 @@ async def test_arrappend_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrappend("doc1", "$.nested1.a", "baz") == [6] @@ -633,7 +617,7 @@ async def test_arrappend_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -659,7 +643,7 @@ async def test_arrappend_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrappend("doc1", ".nested1.a", "baz") == 6 res = [ @@ -669,7 +653,7 @@ async def test_arrappend_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -698,7 +682,7 @@ async def test_arrinsert_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await 
decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrinsert("doc1", "$.nested1.a", -2, "baz") == [6] res = [ @@ -708,7 +692,7 @@ async def test_arrinsert_dollar(decoded_r: redis.Redis): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -777,7 +761,7 @@ async def test_arrpop_dollar(decoded_r: redis.Redis): assert await decoded_r.json().arrpop("doc1", "$..a", 1) == ['"foo"', None, None] res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -796,7 +780,7 @@ async def test_arrpop_dollar(decoded_r: redis.Redis): # Test multi (all paths are updated, but return result of last path) await decoded_r.json().arrpop("doc1", "..a", "1") is None res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # # Test missing key with pytest.raises(exceptions.ResponseError): @@ -817,15 +801,15 @@ async def test_arrtrim_dollar(decoded_r: redis.Redis): # Test multi assert await decoded_r.json().arrtrim("doc1", "$..a", "1", -1) == [0, 2, None] res = [{"a": [], "nested1": {"a": [None, "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res assert await decoded_r.json().arrtrim("doc1", "$..a", "1", "1") == [0, 1, None] res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrtrim("doc1", "$.nested1.a", 1, 0) == [0] res = [{"a": [], "nested1": {"a": []}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -848,7 +832,7 @@ async def test_arrtrim_dollar(decoded_r: redis.Redis): # Test single assert await decoded_r.json().arrtrim("doc1", ".nested1.a", "1", "1") == 1 res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -978,7 +962,7 @@ async def test_clear_dollar(decoded_r: redis.Redis): res = [ {"nested1": {"a": {}}, "a": [], "nested2": {"a": "claro"}, "nested3": {"a": {}}} ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single await decoded_r.json().set( @@ -1000,13 +984,11 @@ async def test_clear_dollar(decoded_r: redis.Redis): "nested3": {"a": {"baz": 50}}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing path (async 
defaults to root) assert await decoded_r.json().clear("doc1") == 1 - assert_resp_response( - decoded_r, await decoded_r.json().get("doc1", "$"), [{}], [[{}]] - ) + assert await decoded_r.json().get("doc1", "$") == [{}] # Test missing key with pytest.raises(exceptions.ResponseError): @@ -1035,7 +1017,7 @@ async def test_toggle_dollar(decoded_r: redis.Redis): "nested3": {"a": False}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): diff --git a/tests/test_commands.py b/tests/test_commands.py index d0c235daf9..07466b61a1 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -1841,7 +1841,14 @@ def test_tfunction_list(self, stack_r): assert len(functions) == 3 expected_names = [b"lib1", b"lib2", b"lib3"] - actual_names = [functions[0][13], functions[1][13], functions[2][13]] + if is_resp2_connection(stack_r): + actual_names = [functions[0][13], functions[1][13], functions[2][13]] + else: + actual_names = [ + functions[0][b"name"], + functions[1][b"name"], + functions[2][b"name"], + ] assert sorted(expected_names) == sorted(actual_names) assert stack_r.tfunction_delete("lib1") diff --git a/tests/test_json.py b/tests/test_json.py index 5cf9b11e17..558547ae95 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -25,7 +25,7 @@ def test_json_setbinarykey(client): @pytest.mark.redismod def test_json_setgetdeleteforget(client): assert client.json().set("foo", Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" assert client.json().get("baz") is None assert client.json().delete("foo") == 1 assert client.json().forget("foo") == 0 # second delete @@ -35,13 +35,13 @@ def test_json_setgetdeleteforget(client): @pytest.mark.redismod def test_jsonget(client): client.json().set("foo", Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" @pytest.mark.redismod def test_json_get_jset(client): assert client.json().set("foo", Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" assert client.json().get("baz") is None assert 1 == client.json().delete("foo") assert client.exists("foo") == 0 @@ -83,10 +83,7 @@ def test_json_merge(client): @pytest.mark.redismod def test_nonascii_setgetdelete(client): assert client.json().set("notascii", Path.root_path(), "hyvää-élève") - res = "hyvää-élève" - assert_resp_response( - client, client.json().get("notascii", no_escape=True), res, [[res]] - ) + assert client.json().get("notascii", no_escape=True) == "hyvää-élève" assert 1 == client.json().delete("notascii") assert client.exists("notascii") == 0 @@ -191,9 +188,7 @@ def test_toggle(client): def test_strappend(client): client.json().set("jsonkey", Path.root_path(), "foo") assert 6 == client.json().strappend("jsonkey", "bar") - assert_resp_response( - client, client.json().get("jsonkey", Path.root_path()), "foobar", [["foobar"]] - ) + assert "foobar" == client.json().get("jsonkey", Path.root_path()) @pytest.mark.redismod @@ -229,14 +224,12 @@ def test_arrindex(client): def test_arrinsert(client): client.json().set("arr", Path.root_path(), [0, 4]) assert 5 - -client.json().arrinsert("arr", Path.root_path(), 1, *[1, 2, 3]) - res = [0, 1, 2, 3, 4] - assert_resp_response(client, 
client.json().get("arr"), res, [[res]]) + assert client.json().get("arr") == [0, 1, 2, 3, 4] # test prepends client.json().set("val2", Path.root_path(), [5, 6, 7, 8, 9]) client.json().arrinsert("val2", Path.root_path(), 0, ["some", "thing"]) - res = [["some", "thing"], 5, 6, 7, 8, 9] - assert_resp_response(client, client.json().get("val2"), res, [[res]]) + assert client.json().get("val2") == [["some", "thing"], 5, 6, 7, 8, 9] @pytest.mark.redismod @@ -254,7 +247,7 @@ def test_arrpop(client): assert 3 == client.json().arrpop("arr", Path.root_path(), -1) assert 2 == client.json().arrpop("arr", Path.root_path()) assert 0 == client.json().arrpop("arr", Path.root_path(), 0) - assert_resp_response(client, client.json().get("arr"), [1], [[[1]]]) + assert [1] == client.json().get("arr") # test out of bounds client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -269,7 +262,7 @@ def test_arrpop(client): def test_arrtrim(client): client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) assert 3 == client.json().arrtrim("arr", Path.root_path(), 1, 3) - assert_resp_response(client, client.json().get("arr"), [1, 2, 3], [[[1, 2, 3]]]) + assert [1, 2, 3] == client.json().get("arr") # <0 test, should be 0 equivalent client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -331,7 +324,7 @@ def test_json_commands_in_pipeline(client): p.set("foo", Path.root_path(), "bar") p.get("foo") p.delete("foo") - assert_resp_response(client, p.execute(), [True, "bar", 1], [True, [["bar"]], 1]) + assert p.execute() == [True, "bar", 1] assert client.keys() == [] assert client.get("foo") is None @@ -344,7 +337,7 @@ def test_json_commands_in_pipeline(client): p.jsonget("foo") p.exists("notarealkey") p.delete("foo") - assert_resp_response(client, p.execute(), [True, d, 0, 1], [True, [[d]], 0, 1]) + assert p.execute() == [True, d, 0, 1] assert client.keys() == [] assert client.get("foo") is None @@ -355,13 +348,13 @@ def test_json_delete_with_dollar(client): assert client.json().set("doc1", "$", doc1) assert client.json().delete("doc1", "$..a") == 2 res = [{"nested": {"b": 3}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert client.json().set("doc2", "$", doc2) assert client.json().delete("doc2", "$..a") == 1 res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(client, client.json().get("doc2", "$"), res, [res]) + assert client.json().get("doc2", "$") == res doc3 = [ { @@ -392,7 +385,7 @@ def test_json_delete_with_dollar(client): } ] ] - assert_resp_response(client, client.json().get("doc3", "$"), doc3val, [doc3val]) + assert client.json().get("doc3", "$") == doc3val # Test default path assert client.json().delete("doc3") == 1 @@ -407,13 +400,13 @@ def test_json_forget_with_dollar(client): assert client.json().set("doc1", "$", doc1) assert client.json().forget("doc1", "$..a") == 2 res = [{"nested": {"b": 3}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert client.json().set("doc2", "$", doc2) assert client.json().forget("doc2", "$..a") == 1 res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(client, client.json().get("doc2", "$"), res, [res]) + assert client.json().get("doc2", "$") == res doc3 = [ { @@ -444,7 +437,7 @@ def 
test_json_forget_with_dollar(client): } ] ] - assert_resp_response(client, client.json().get("doc3", "$"), doc3val, [doc3val]) + assert client.json().get("doc3", "$") == doc3val # Test default path assert client.json().forget("doc3") == 1 @@ -468,9 +461,9 @@ def test_json_mget_dollar(client): ) # Compare also to single JSON.GET res = [1, 3, None] - assert_resp_response(client, client.json().get("doc1", "$..a"), res, [res]) + assert client.json().get("doc1", "$..a") == res res = [4, 6, [None]] - assert_resp_response(client, client.json().get("doc2", "$..a"), res, [res]) + assert client.json().get("doc2", "$..a") == res # Test mget with single path client.json().mget("doc1", "$..a") == [1, 3, None] @@ -599,7 +592,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrappend("doc1", "$.nested1.a", "baz") == [6] @@ -610,7 +603,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -636,7 +629,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrappend("doc1", ".nested1.a", "baz") == 6 @@ -647,7 +640,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -675,7 +668,7 @@ def test_arrinsert_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrinsert("doc1", "$.nested1.a", -2, "baz") == [6] @@ -686,7 +679,7 @@ def test_arrinsert_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -758,7 +751,7 @@ def test_arrpop_dollar(client): assert client.json().arrpop("doc1", "$..a", 1) == ['"foo"', None, None] res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -777,7 +770,7 @@ def test_arrpop_dollar(client): # Test multi (all paths are updated, but return result of last path) client.json().arrpop("doc1", "..a", "1") is None res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # # Test missing key with pytest.raises(exceptions.ResponseError): @@ -798,16 +791,16 @@ def test_arrtrim_dollar(client): # Test multi assert client.json().arrtrim("doc1", "$..a", "1", -1) == [0, 2, None] res = [{"a": [], "nested1": {"a": [None, "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res assert 
client.json().arrtrim("doc1", "$..a", "1", "1") == [0, 1, None] res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrtrim("doc1", "$.nested1.a", 1, 0) == [0] res = [{"a": [], "nested1": {"a": []}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -830,7 +823,7 @@ def test_arrtrim_dollar(client): # Test single assert client.json().arrtrim("doc1", ".nested1.a", "1", "1") == 1 res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -958,7 +951,7 @@ def test_clear_dollar(client): res = [ {"nested1": {"a": {}}, "a": [], "nested2": {"a": "claro"}, "nested3": {"a": {}}} ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single client.json().set( @@ -980,11 +973,11 @@ def test_clear_dollar(client): "nested3": {"a": {"baz": 50}}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing path (defaults to root) assert client.json().clear("doc1") == 1 - assert_resp_response(client, client.json().get("doc1", "$"), [{}], [[{}]]) + assert client.json().get("doc1", "$") == [{}] # Test missing key with pytest.raises(exceptions.ResponseError): @@ -1013,7 +1006,7 @@ def test_toggle_dollar(client): "nested3": {"a": False}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -1293,12 +1286,10 @@ def test_arrindex_dollar(client): }, ) - assert_resp_response( - client, - client.json().get("store", "$.store.book[?(@.price<10)].size"), - [[10, 20, 30, 40], [5, 10, 20, 30]], - [[[10, 20, 30, 40], [5, 10, 20, 30]]], - ) + assert client.json().get("store", "$.store.book[?(@.price<10)].size") == [ + [10, 20, 30, 40], + [5, 10, 20, 30], + ] assert client.json().arrindex( "store", "$.store.book[?(@.price<10)].size", "20" @@ -1327,7 +1318,7 @@ def test_arrindex_dollar(client): "3", [], ] - assert_resp_response(client, client.json().get("test_num", "$..arr"), res, [res]) + assert client.json().get("test_num", "$..arr") == res assert client.json().arrindex("test_num", "$..arr", 3) == [3, 2, -1, None, -1] @@ -1360,7 +1351,7 @@ def test_arrindex_dollar(client): "3", [], ] - assert_resp_response(client, client.json().get("test_string", "$..arr"), res, [res]) + assert client.json().get("test_string", "$..arr") == res assert client.json().arrindex("test_string", "$..arr", "baz") == [ 3, @@ -1453,7 +1444,7 @@ def test_arrindex_dollar(client): None, [], ] - assert_resp_response(client, client.json().get("test_None", "$..arr"), res, [res]) + assert client.json().get("test_None", "$..arr") == res # Test with none-scalar value assert client.json().arrindex( @@ -1494,7 +1485,7 @@ def test_custom_decoder(client): cj = client.json(encoder=ujson, decoder=ujson) assert cj.set("foo", Path.root_path(), "bar") - assert_resp_response(client, cj.get("foo"), "bar", [["bar"]]) + assert cj.get("foo") == "bar" 
assert cj.get("baz") is None assert 1 == cj.delete("foo") assert client.exists("foo") == 0 @@ -1516,7 +1507,7 @@ def test_set_file(client): nojsonfile.write(b"Hello World") assert client.json().set_file("test", Path.root_path(), jsonfile.name) - assert_resp_response(client, client.json().get("test"), obj, [[obj]]) + assert client.json().get("test") == obj with pytest.raises(json.JSONDecodeError): client.json().set_file("test2", Path.root_path(), nojsonfile.name) @@ -1539,6 +1530,4 @@ def test_set_path(client): result = {jsonfile: True, nojsonfile: False} assert client.json().set_path(Path.root_path(), root) == result res = {"hello": "world"} - assert_resp_response( - client, client.json().get(jsonfile.rsplit(".")[0]), res, [[res]] - ) + assert client.json().get(jsonfile.rsplit(".")[0]) == res diff --git a/tests/test_search.py b/tests/test_search.py index 6b40c5129d..2be7d6356f 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -2278,7 +2278,19 @@ def test_geoshape(client: redis.Redis): q2 = Query("@geom:[CONTAINS $poly]").dialect(3) qp2 = {"poly": "POLYGON((2 2, 2 50, 50 50, 50 2, 2 2))"} result = client.ft().search(q1, query_params=qp1) - assert len(result.docs) == 1 - assert result.docs[0]["id"] == "small" + _assert_geosearch_result(client, result, ["small"]) result = client.ft().search(q2, query_params=qp2) - assert len(result.docs) == 2 + _assert_geosearch_result(client, result, ["small", "large"]) + + +def _assert_geosearch_result(client, result, expected_doc_ids): + """ + Make sure the result of a geo search is as expected, taking into account the RESP + version being used. + """ + if is_resp2_connection(client): + assert set([doc.id for doc in result.docs]) == set(expected_doc_ids) + assert result.total == len(expected_doc_ids) + else: + assert set([doc["id"] for doc in result["results"]]) == set(expected_doc_ids) + assert result["total_results"] == len(expected_doc_ids) From 3b7f6bf48c26d2565039198502064a2f76e051d5 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 11 Jun 2024 12:39:51 +0300 Subject: [PATCH 03/16] Don't run command parser tests with HIREDIS --- tests/test_command_parser.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_command_parser.py b/tests/test_command_parser.py index e3b44a147f..5c23a7096f 100644 --- a/tests/test_command_parser.py +++ b/tests/test_command_parser.py @@ -1,5 +1,6 @@ import pytest from redis._parsers import CommandsParser +from redis.utils import HIREDIS_AVAILABLE from .conftest import ( assert_resp_response, @@ -8,6 +9,9 @@ ) +# The response to COMMAND contains maps inside sets, which are not handled +# by the hiredis-py parser (see https://github.com/redis/hiredis-py/issues/188) +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") class TestCommandsParser: def test_init_commands(self, r): commands_parser = CommandsParser(r) From a86b72e6a426a0ef2debefec294f760cb3b8090b Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 11 Jun 2024 13:04:59 +0300 Subject: [PATCH 04/16] More hiredis-py tests to ignore --- tests/test_commands.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_commands.py b/tests/test_commands.py index 07466b61a1..62e2d2e832 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -18,6 +18,7 @@ parse_info, ) from redis.client import EMPTY_RESPONSE, NEVER_DECODE +from redis.utils import HIREDIS_AVAILABLE from .conftest import ( _get_client, @@ -5002,6 +5003,9 @@ def test_command_getkeys(self, r): r, res, ["key1", "key2", "key3"], [b"key1", b"key2", 
b"key3"] ) + # The response to COMMAND contains maps inside sets, which are not handled + # by the hiredis-py parser (see https://github.com/redis/hiredis-py/issues/188) + @pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @skip_if_server_version_lt("2.8.13") def test_command(self, r): res = r.command() From 3d54cb14a08652d1eb7587ddc4c6db5f3be1d4aa Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 11 Jun 2024 15:12:05 +0300 Subject: [PATCH 05/16] Don't run hiredis + RESP3 + cluster tests --- .github/workflows/integration.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index a6c0f0ccb5..67a02fed7b 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -113,6 +113,9 @@ jobs: python-version: ['3.8', '3.11'] test-type: ['standalone', 'cluster'] connection-type: ['hiredis', 'plain'] + exclude: + - test-type: 'cluster' + connection-type: 'hiredis' env: ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] From 33366d9b4ca0e40d6eadcd5981d9a660d015d3c9 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 11 Jun 2024 15:22:06 +0300 Subject: [PATCH 06/16] Fix CI syntax --- .github/workflows/integration.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 67a02fed7b..8a54576a26 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -113,9 +113,9 @@ jobs: python-version: ['3.8', '3.11'] test-type: ['standalone', 'cluster'] connection-type: ['hiredis', 'plain'] - exclude: - - test-type: 'cluster' - connection-type: 'hiredis' + exclude: + - test-type: 'cluster' + connection-type: 'hiredis' env: ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] From 030869e97a70ef05f30dab123d6e85ea83202d52 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 11 Jun 2024 18:45:16 +0300 Subject: [PATCH 07/16] Skip more tests when hiredis --- tests/test_asyncio/test_bloom.py | 10 ++++++++++ tests/test_bloom.py | 10 ++++++++++ 2 files changed, 20 insertions(+) diff --git a/tests/test_asyncio/test_bloom.py b/tests/test_asyncio/test_bloom.py index c63559a31c..f2cae13a70 100644 --- a/tests/test_asyncio/test_bloom.py +++ b/tests/test_asyncio/test_bloom.py @@ -42,6 +42,8 @@ async def test_tdigest_create(decoded_r: redis.Redis): assert await decoded_r.tdigest().create("tDigest", 100) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_add(decoded_r: redis.Redis): assert await decoded_r.bf().create("bloom", 0.01, 1000) @@ -55,6 +57,8 @@ async def test_bf_add(decoded_r: redis.Redis): assert [1, 0] == intlist(await decoded_r.bf().mexists("bloom", "foo", "noexist")) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_insert(decoded_r: redis.Redis): assert await decoded_r.bf().create("bloom", 0.01, 1000) @@ -86,6 +90,8 @@ async def test_bf_insert(decoded_r: redis.Redis): ) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_scandump_and_loadchunk(decoded_r: redis.Redis): # Store a 
filter @@ -185,6 +191,8 @@ async def test_bf_card(decoded_r: redis.Redis): await decoded_r.bf().card("setKey") +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_cf_add_and_insert(decoded_r: redis.Redis): assert await decoded_r.cf().create("cuckoo", 1000) @@ -211,6 +219,8 @@ async def test_cf_add_and_insert(decoded_r: redis.Redis): ) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_cf_exists_and_del(decoded_r: redis.Redis): assert await decoded_r.cf().create("cuckoo", 1000) diff --git a/tests/test_bloom.py b/tests/test_bloom.py index d1a0484225..af8efd5097 100644 --- a/tests/test_bloom.py +++ b/tests/test_bloom.py @@ -73,6 +73,8 @@ def test_tdigest_create(client): assert client.tdigest().create("tDigest", 100) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_add(client): assert client.bf().create("bloom", 0.01, 1000) @@ -86,6 +88,8 @@ def test_bf_add(client): assert [1, 0] == intlist(client.bf().mexists("bloom", "foo", "noexist")) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_insert(client): assert client.bf().create("bloom", 0.01, 1000) @@ -117,6 +121,8 @@ def test_bf_insert(client): ) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_scandump_and_loadchunk(client): # Store a filter @@ -216,6 +222,8 @@ def test_bf_card(client): client.bf().card("setKey") +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_cf_add_and_insert(client): assert client.cf().create("cuckoo", 1000) @@ -242,6 +250,8 @@ def test_cf_add_and_insert(client): ) +# hiredis-py can't process well boolean responses +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_cf_exists_and_del(client): assert client.cf().create("cuckoo", 1000) From a9dabf9138267b262232db61375e01a6e8a4564b Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Wed, 12 Jun 2024 08:42:24 +0300 Subject: [PATCH 08/16] Fix handling of no data in hiredis-py Use a sentinel object instance to signal lack of data in hiredis-py, instead of piggybacking of `False`, which can also be returned by parsing valid RESP payloads. --- redis/_parsers/hiredis.py | 25 ++++++++++++++++--------- tests/test_asyncio/test_bloom.py | 10 ---------- tests/test_bloom.py | 10 ---------- 3 files changed, 16 insertions(+), 29 deletions(-) diff --git a/redis/_parsers/hiredis.py b/redis/_parsers/hiredis.py index a52dbbd013..c807bd903a 100644 --- a/redis/_parsers/hiredis.py +++ b/redis/_parsers/hiredis.py @@ -19,6 +19,11 @@ SERVER_CLOSED_CONNECTION_ERROR, ) +# Used to signal that hiredis-py does not have enough data to parse. +# Using `False` or `None` is not reliable, given that the parser can +# return `False` or `None` for legitimate reasons from RESP payloads. 
+NOT_ENOUGH_DATA = object() + class _HiredisReaderArgs(TypedDict, total=False): protocolError: Callable[[str], Exception] @@ -51,25 +56,26 @@ def on_connect(self, connection, **kwargs): "protocolError": InvalidResponse, "replyError": self.parse_error, "errors": connection.encoder.encoding_errors, + "notEnoughData": NOT_ENOUGH_DATA, } if connection.encoder.decode_responses: kwargs["encoding"] = connection.encoder.encoding self._reader = hiredis.Reader(**kwargs) - self._next_response = False + self._next_response = NOT_ENOUGH_DATA def on_disconnect(self): self._sock = None self._reader = None - self._next_response = False + self._next_response = NOT_ENOUGH_DATA def can_read(self, timeout): if not self._reader: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) - if self._next_response is False: + if self._next_response is NOT_ENOUGH_DATA: self._next_response = self._reader.gets() - if self._next_response is False: + if self._next_response is NOT_ENOUGH_DATA: return self.read_from_socket(timeout=timeout, raise_on_timeout=False) return True @@ -108,9 +114,9 @@ def read_response(self, disable_decoding=False): raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) # _next_response might be cached from a can_read() call - if self._next_response is not False: + if self._next_response is not NOT_ENOUGH_DATA: response = self._next_response - self._next_response = False + self._next_response = NOT_ENOUGH_DATA return response if disable_decoding: @@ -118,7 +124,7 @@ def read_response(self, disable_decoding=False): else: response = self._reader.gets() - while response is False: + while response is NOT_ENOUGH_DATA: self.read_from_socket() if disable_decoding: response = self._reader.gets(False) @@ -156,6 +162,7 @@ def on_connect(self, connection): kwargs: _HiredisReaderArgs = { "protocolError": InvalidResponse, "replyError": self.parse_error, + "notEnoughData": NOT_ENOUGH_DATA, } if connection.encoder.decode_responses: kwargs["encoding"] = connection.encoder.encoding @@ -170,7 +177,7 @@ def on_disconnect(self): async def can_read_destructive(self): if not self._connected: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) - if self._reader.gets(): + if self._reader.gets() is not NOT_ENOUGH_DATA: return True try: async with async_timeout(0): @@ -200,7 +207,7 @@ async def read_response( response = self._reader.gets(False) else: response = self._reader.gets() - while response is False: + while response is NOT_ENOUGH_DATA: await self.read_from_socket() if disable_decoding: response = self._reader.gets(False) diff --git a/tests/test_asyncio/test_bloom.py b/tests/test_asyncio/test_bloom.py index f2cae13a70..c63559a31c 100644 --- a/tests/test_asyncio/test_bloom.py +++ b/tests/test_asyncio/test_bloom.py @@ -42,8 +42,6 @@ async def test_tdigest_create(decoded_r: redis.Redis): assert await decoded_r.tdigest().create("tDigest", 100) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_add(decoded_r: redis.Redis): assert await decoded_r.bf().create("bloom", 0.01, 1000) @@ -57,8 +55,6 @@ async def test_bf_add(decoded_r: redis.Redis): assert [1, 0] == intlist(await decoded_r.bf().mexists("bloom", "foo", "noexist")) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_insert(decoded_r: redis.Redis): assert await decoded_r.bf().create("bloom", 0.01, 1000) @@ -90,8 +86,6 @@ async def 
test_bf_insert(decoded_r: redis.Redis): ) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_bf_scandump_and_loadchunk(decoded_r: redis.Redis): # Store a filter @@ -191,8 +185,6 @@ async def test_bf_card(decoded_r: redis.Redis): await decoded_r.bf().card("setKey") -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_cf_add_and_insert(decoded_r: redis.Redis): assert await decoded_r.cf().create("cuckoo", 1000) @@ -219,8 +211,6 @@ async def test_cf_add_and_insert(decoded_r: redis.Redis): ) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod async def test_cf_exists_and_del(decoded_r: redis.Redis): assert await decoded_r.cf().create("cuckoo", 1000) diff --git a/tests/test_bloom.py b/tests/test_bloom.py index af8efd5097..d1a0484225 100644 --- a/tests/test_bloom.py +++ b/tests/test_bloom.py @@ -73,8 +73,6 @@ def test_tdigest_create(client): assert client.tdigest().create("tDigest", 100) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_add(client): assert client.bf().create("bloom", 0.01, 1000) @@ -88,8 +86,6 @@ def test_bf_add(client): assert [1, 0] == intlist(client.bf().mexists("bloom", "foo", "noexist")) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_insert(client): assert client.bf().create("bloom", 0.01, 1000) @@ -121,8 +117,6 @@ def test_bf_insert(client): ) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_bf_scandump_and_loadchunk(client): # Store a filter @@ -222,8 +216,6 @@ def test_bf_card(client): client.bf().card("setKey") -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_cf_add_and_insert(client): assert client.cf().create("cuckoo", 1000) @@ -250,8 +242,6 @@ def test_cf_add_and_insert(client): ) -# hiredis-py can't process well boolean responses -@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.redismod def test_cf_exists_and_del(client): assert client.cf().create("cuckoo", 1000) From 6f9f78682ab5bcd8e1e74fbc60e367b3d96bbdb7 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Wed, 12 Jun 2024 10:05:43 +0300 Subject: [PATCH 09/16] Fix logic around reading more push messages --- redis/_parsers/resp3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/redis/_parsers/resp3.py b/redis/_parsers/resp3.py index 7afa43a0c2..d221a2588a 100644 --- a/redis/_parsers/resp3.py +++ b/redis/_parsers/resp3.py @@ -132,7 +132,7 @@ def handle_push_response(self, response, disable_decoding, push_request): res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if not push_request: + if push_request: return self._read_response( disable_decoding=disable_decoding, push_request=push_request ) @@ -276,7 +276,7 @@ async def handle_push_response(self, response, disable_decoding, push_request): res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if not push_request: 
+ if push_request: return await self._read_response( disable_decoding=disable_decoding, push_request=push_request ) From e8a81a22440378489e40186445ec7b5da8a798bf Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Fri, 14 Jun 2024 09:57:22 +0300 Subject: [PATCH 10/16] Adapt some tests --- tests/test_asyncio/test_json.py | 4 ++-- tests/test_json.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_asyncio/test_json.py b/tests/test_asyncio/test_json.py index 34f8d401b8..150c967b54 100644 --- a/tests/test_asyncio/test_json.py +++ b/tests/test_asyncio/test_json.py @@ -523,7 +523,7 @@ async def test_numby_commands_dollar(decoded_r: redis.Redis): await decoded_r.json().set( "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]} ) - assert await decoded_r.json().numincrby("doc1", ".b[0].a", 3) == 5 + assert await decoded_r.json().numincrby("doc1", ".b[0].a", 3) == [5] # Test legacy NUMMULTBY await decoded_r.json().set( @@ -531,7 +531,7 @@ async def test_numby_commands_dollar(decoded_r: redis.Redis): ) with pytest.deprecated_call(): - assert await decoded_r.json().nummultby("doc1", ".b[0].a", 3) == 6 + assert await decoded_r.json().nummultby("doc1", ".b[0].a", 3) == [6] @pytest.mark.redismod diff --git a/tests/test_json.py b/tests/test_json.py index 769d473691..ea2d82de50 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -512,13 +512,13 @@ def test_numby_commands_dollar(client): # Test legacy NUMINCRBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) - assert client.json().numincrby("doc1", ".b[0].a", 3) == 5 + assert client.json().numincrby("doc1", ".b[0].a", 3) == [5] # Test legacy NUMMULTBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) with pytest.deprecated_call(): - assert client.json().nummultby("doc1", ".b[0].a", 3) == 6 + assert client.json().nummultby("doc1", ".b[0].a", 3) == [6] @pytest.mark.redismod @@ -530,13 +530,13 @@ def test_strappend_dollar(client): assert client.json().strappend("doc1", "bar", "$..a") == [6, 8, None] res = [{"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test single assert client.json().strappend("doc1", "baz", "$.nested1.a") == [11] res = [{"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test missing key with pytest.raises(exceptions.ResponseError): @@ -545,7 +545,7 @@ def test_strappend_dollar(client): # Test multi assert client.json().strappend("doc1", "bar", ".*.a") == 14 res = [{"a": "foobar", "nested1": {"a": "hellobarbazbar"}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test missing path with pytest.raises(exceptions.ResponseError): From 00a5609f5cd5f961f43d5459ebd0871ce3d730f5 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Fri, 14 Jun 2024 10:14:56 +0300 Subject: [PATCH 11/16] Adapt some tests --- tests/test_asyncio/test_json.py | 8 ++++++-- tests/test_json.py | 6 ++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/tests/test_asyncio/test_json.py b/tests/test_asyncio/test_json.py index 150c967b54..507afc5621 100644 --- 
a/tests/test_asyncio/test_json.py +++ b/tests/test_asyncio/test_json.py @@ -523,7 +523,9 @@ async def test_numby_commands_dollar(decoded_r: redis.Redis): await decoded_r.json().set( "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]} ) - assert await decoded_r.json().numincrby("doc1", ".b[0].a", 3) == [5] + assert_resp_response( + decoded_r, await decoded_r.json().numincrby("doc1", ".b[0].a", 3), 5, [5] + ) # Test legacy NUMMULTBY await decoded_r.json().set( @@ -531,7 +533,9 @@ async def test_numby_commands_dollar(decoded_r: redis.Redis): ) with pytest.deprecated_call(): - assert await decoded_r.json().nummultby("doc1", ".b[0].a", 3) == [6] + assert_resp_response( + decoded_r, await decoded_r.json().nummultby("doc1", ".b[0].a", 3), 6, [6] + ) @pytest.mark.redismod diff --git a/tests/test_json.py b/tests/test_json.py index ea2d82de50..a688464874 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -512,13 +512,15 @@ def test_numby_commands_dollar(client): # Test legacy NUMINCRBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) - assert client.json().numincrby("doc1", ".b[0].a", 3) == [5] + assert_resp_response(client, client.json().numincrby("doc1", ".b[0].a", 3), 5, [5]) # Test legacy NUMMULTBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) with pytest.deprecated_call(): - assert client.json().nummultby("doc1", ".b[0].a", 3) == [6] + assert_resp_response( + client, client.json().nummultby("doc1", ".b[0].a", 3), 6, [6] + ) @pytest.mark.redismod From bf03eb63e735fa049c1c89d5861574e4621ded1e Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 18 Jun 2024 15:17:21 +0300 Subject: [PATCH 12/16] Rename variable for clarity --- redis/_parsers/resp3.py | 58 ++++++++++++++++++++++--------------- redis/asyncio/client.py | 2 +- redis/asyncio/connection.py | 10 ++++--- redis/asyncio/sentinel.py | 4 +-- redis/client.py | 4 ++- redis/connection.py | 7 +++-- redis/sentinel.py | 4 +-- 7 files changed, 52 insertions(+), 37 deletions(-) diff --git a/redis/_parsers/resp3.py b/redis/_parsers/resp3.py index d221a2588a..c9b099c95d 100644 --- a/redis/_parsers/resp3.py +++ b/redis/_parsers/resp3.py @@ -22,11 +22,12 @@ def handle_pubsub_push_response(self, response): logger.info("Push response: " + str(response)) return response - def read_response(self, disable_decoding=False, push_request=False): + def read_response(self, disable_decoding=False, read_single_push_response=False): pos = self._buffer.get_pos() if self._buffer else None try: result = self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) except BaseException: if self._buffer: @@ -36,7 +37,7 @@ def read_response(self, disable_decoding=False, push_request=False): self._buffer.purge() return result - def _read_response(self, disable_decoding=False, push_request=False): + def _read_response(self, disable_decoding=False, read_single_push_response=False): raw = self._buffer.readline() if not raw: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) @@ -106,19 +107,21 @@ def _read_response(self, disable_decoding=False, push_request=False): for _ in range(int(response)): key = self._read_response(disable_decoding=disable_decoding) resp_dict[key] = self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) response = resp_dict 
# push response elif byte == b">": response = [ self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) for _ in range(int(response)) ] response = self.handle_push_response( - response, disable_decoding, push_request + response, disable_decoding, read_single_push_response ) else: raise InvalidResponse(f"Protocol Error: {raw!r}") @@ -127,17 +130,19 @@ def _read_response(self, disable_decoding=False, push_request=False): response = self.encoder.decode(response) return response - def handle_push_response(self, response, disable_decoding, push_request): + def handle_push_response( + self, response, disable_decoding, read_single_push_response + ): if response[0] in _INVALIDATION_MESSAGE: res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if push_request: - return self._read_response( - disable_decoding=disable_decoding, push_request=push_request - ) - else: + if read_single_push_response: return res + return self._read_response( + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, + ) def set_pubsub_push_handler(self, pubsub_push_handler_func): self.pubsub_push_handler_func = pubsub_push_handler_func @@ -158,7 +163,7 @@ def handle_pubsub_push_response(self, response): return response async def read_response( - self, disable_decoding: bool = False, push_request: bool = False + self, disable_decoding: bool = False, read_single_push_response: bool = False ): if self._chunks: # augment parsing buffer with previously read data @@ -166,14 +171,15 @@ async def read_response( self._chunks.clear() self._pos = 0 response = await self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) # Successfully parsing a response allows us to clear our parsing buffer self._clear() return response async def _read_response( - self, disable_decoding: bool = False, push_request: bool = False + self, disable_decoding: bool = False, read_single_push_response: bool = False ) -> Union[EncodableT, ResponseError, None]: if not self._stream or not self.encoder: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) @@ -248,7 +254,8 @@ async def _read_response( for _ in range(int(response)): key = await self._read_response(disable_decoding=disable_decoding) resp_dict[key] = await self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) response = resp_dict # push response @@ -256,13 +263,14 @@ async def _read_response( response = [ ( await self._read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) ) for _ in range(int(response)) ] response = await self.handle_push_response( - response, disable_decoding, push_request + response, disable_decoding, read_single_push_response ) else: raise InvalidResponse(f"Protocol Error: {raw!r}") @@ -271,17 +279,19 @@ async def _read_response( response = self.encoder.decode(response) return response - async def handle_push_response(self, response, disable_decoding, push_request): + async def handle_push_response( + self, response, disable_decoding, read_single_push_response + ): if response[0] in _INVALIDATION_MESSAGE: res = 
self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if push_request: - return await self._read_response( - disable_decoding=disable_decoding, push_request=push_request - ) - else: + if read_single_push_response: return res + return await self._read_response( + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, + ) def set_pubsub_push_handler(self, pubsub_push_handler_func): self.pubsub_push_handler_func = pubsub_push_handler_func diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index 1845b7252f..dbf725b1f7 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -966,7 +966,7 @@ async def parse_response(self, block: bool = True, timeout: float = 0): conn.read_response, timeout=read_timeout, disconnect_on_error=False, - push_request=True, + read_single_push_response=True, ) if conn.health_check_interval and response in self.health_check_response: diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 96f18876ea..5951570442 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -551,7 +551,7 @@ async def read_response( timeout: Optional[float] = None, *, disconnect_on_error: bool = True, - push_request: Optional[bool] = False, + read_single_push_response: Optional[bool] = False, ): """Read the response from a previously sent command""" read_timeout = timeout if timeout is not None else self.socket_timeout @@ -564,7 +564,8 @@ async def read_response( ): async with async_timeout(read_timeout): response = await self._parser.read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) elif read_timeout is not None: async with async_timeout(read_timeout): @@ -573,7 +574,8 @@ async def read_response( ) elif self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = await self._parser.read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) else: response = await self._parser.read_response( @@ -713,7 +715,7 @@ async def _get_from_local_cache(self, command: str): ): return None while not self._socket_is_empty(): - await self.read_response(push_request=True) + await self.read_response(read_single_push_response=True) return self.client_cache.get(command) def _add_to_local_cache( diff --git a/redis/asyncio/sentinel.py b/redis/asyncio/sentinel.py index 6fd233adc8..a0207b0fd1 100644 --- a/redis/asyncio/sentinel.py +++ b/redis/asyncio/sentinel.py @@ -72,14 +72,14 @@ async def read_response( timeout: Optional[float] = None, *, disconnect_on_error: Optional[float] = True, - push_request: Optional[bool] = False, + read_single_push_response: Optional[bool] = False, ): try: return await super().read_response( disable_decoding=disable_decoding, timeout=timeout, disconnect_on_error=disconnect_on_error, - push_request=push_request, + read_single_push_response=read_single_push_response, ) except ReadOnlyError: if self.connection_pool.is_master: diff --git a/redis/client.py b/redis/client.py index b7a1f88d92..b8a56ed9ee 100755 --- a/redis/client.py +++ b/redis/client.py @@ -880,7 +880,9 @@ def try_read(): return None else: conn.connect() - return conn.read_response(disconnect_on_error=False, push_request=True) + return conn.read_response( + disconnect_on_error=False, read_single_push_response=True + ) response = self._execute(conn, 
try_read) diff --git a/redis/connection.py b/redis/connection.py index f745ecc1d5..6d29422d37 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -533,7 +533,7 @@ def read_response( disable_decoding=False, *, disconnect_on_error=True, - push_request=False, + read_single_push_response=False, ): """Read the response from a previously sent command""" @@ -542,7 +542,8 @@ def read_response( try: if self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = self._parser.read_response( - disable_decoding=disable_decoding, push_request=push_request + disable_decoding=disable_decoding, + read_single_push_response=read_single_push_response, ) else: response = self._parser.read_response(disable_decoding=disable_decoding) @@ -634,7 +635,7 @@ def _get_from_local_cache(self, command: Sequence[str]): ): return None while self.can_read(): - self.read_response(push_request=True) + self.read_response(read_single_push_response=True) return self.client_cache.get(command) def _add_to_local_cache( diff --git a/redis/sentinel.py b/redis/sentinel.py index 72b5bef548..dfa276d054 100644 --- a/redis/sentinel.py +++ b/redis/sentinel.py @@ -62,13 +62,13 @@ def read_response( disable_decoding=False, *, disconnect_on_error: Optional[bool] = False, - push_request: Optional[bool] = False, + read_single_push_response: Optional[bool] = False, ): try: return super().read_response( disable_decoding=disable_decoding, disconnect_on_error=disconnect_on_error, - push_request=push_request, + read_single_push_response=read_single_push_response, ) except ReadOnlyError: if self.connection_pool.is_master: From e4cc54d023efa145bcaf2e7c3d2e9b7f67a390ea Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 18 Jun 2024 17:39:20 +0300 Subject: [PATCH 13/16] Adapt asserts for timeseries tests --- tests/test_asyncio/test_timeseries.py | 31 ++++++++++-------- tests/test_timeseries.py | 47 +++++++++++++++++---------- 2 files changed, 46 insertions(+), 32 deletions(-) diff --git a/tests/test_asyncio/test_timeseries.py b/tests/test_asyncio/test_timeseries.py index c93af1ea5b..0475c318ec 100644 --- a/tests/test_asyncio/test_timeseries.py +++ b/tests/test_asyncio/test_timeseries.py @@ -780,8 +780,12 @@ async def test_create_with_insertion_filters(decoded_r: redis.Redis): assert 1021 == await decoded_r.ts().add("time-series-1", 1021, 22.0) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)] - assert expected_points == data_points + assert_resp_response( + decoded_r, + data_points, + [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)], + [[1000, 1.0], [1010, 11.0], [1020, 11.5], [1021, 22.0]], + ) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -800,8 +804,12 @@ async def test_alter_with_insertion_filters(decoded_r: redis.Redis): assert 1013 == await decoded_r.ts().add("time-series-1", 1015, 11.5) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0), (1010, 11.0), (1013, 10.0)] - assert expected_points == data_points + assert_resp_response( + decoded_r, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10.0)], + [[1000, 1.0], [1010, 11.0], [1013, 10.0]], + ) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -818,8 +826,7 @@ async def test_add_with_insertion_filters(decoded_r: redis.Redis): assert 1000 == await decoded_r.ts().add("time-series-1", 1004, 3.0) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0)] - assert expected_points == 
data_points + assert_resp_response(decoded_r, data_points, [(1000, 1.0)], [[1000, 1.0]]) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -836,14 +843,12 @@ async def test_incrby_with_insertion_filters(decoded_r: redis.Redis): assert 1000 == await decoded_r.ts().incrby("time-series-1", 3.0, timestamp=1000) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0)] - assert expected_points == data_points + assert_resp_response(decoded_r, data_points, [(1000, 1.0)], [[1000, 1.0]]) assert 1000 == await decoded_r.ts().incrby("time-series-1", 10.1, timestamp=1000) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 11.1)] - assert expected_points == data_points + assert_resp_response(decoded_r, data_points, [(1000, 11.1)], [[1000, 11.1]]) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -860,11 +865,9 @@ async def test_decrby_with_insertion_filters(decoded_r: redis.Redis): assert 1000 == await decoded_r.ts().decrby("time-series-1", 3.0, timestamp=1000) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, -1.0)] - assert expected_points == data_points + assert_resp_response(decoded_r, data_points, [(1000, -1.0)], [[1000, -1.0]]) assert 1000 == await decoded_r.ts().decrby("time-series-1", 10.1, timestamp=1000) data_points = await decoded_r.ts().range("time-series-1", "-", "+") - expected_points = [(1000, -11.1)] - assert expected_points == data_points + assert_resp_response(decoded_r, data_points, [(1000, -11.1)], [[1000, -11.1]]) diff --git a/tests/test_timeseries.py b/tests/test_timeseries.py index 5647bd45c6..95402a1cee 100644 --- a/tests/test_timeseries.py +++ b/tests/test_timeseries.py @@ -1039,8 +1039,12 @@ def test_create_with_insertion_filters(client): assert 1021 == client.ts().add("time-series-1", 1021, 22.0) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)] - assert expected_points == data_points + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)], + [[1000, 1.0], [1010, 11.0], [1020, 11.5], [1021, 22.0]], + ) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1056,8 +1060,12 @@ def test_create_with_insertion_filters_other_duplicate_policy(client): assert 1013 == client.ts().add("time-series-1", 1013, 10.0) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0), (1010, 11.0), (1013, 10)] - assert expected_points == data_points + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10)], + [[1000, 1.0], [1010, 11.0], [1013, 10]], + ) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1076,8 +1084,12 @@ def test_alter_with_insertion_filters(client): assert 1013 == client.ts().add("time-series-1", 1015, 11.5) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0), (1010, 11.0), (1013, 10.0)] - assert expected_points == data_points + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10.0)], + [[1000, 1.0], [1010, 11.0], [1013, 10.0]], + ) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1094,8 +1106,7 @@ def test_add_with_insertion_filters(client): assert 1000 == client.ts().add("time-series-1", 1004, 3.0) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0)] - assert expected_points == data_points + assert_resp_response(client, data_points, [(1000, 
1.0)], [[1000, 1.0]]) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1112,14 +1123,12 @@ def test_incrby_with_insertion_filters(client): assert 1000 == client.ts().incrby("time-series-1", 3.0, timestamp=1000) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 1.0)] - assert expected_points == data_points + assert_resp_response(client, data_points, [(1000, 1.0)], [[1000, 1.0]]) assert 1000 == client.ts().incrby("time-series-1", 10.1, timestamp=1000) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, 11.1)] - assert expected_points == data_points + assert_resp_response(client, data_points, [(1000, 11.1)], [[1000, 11.1]]) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1136,14 +1145,12 @@ def test_decrby_with_insertion_filters(client): assert 1000 == client.ts().decrby("time-series-1", 3.0, timestamp=1000) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, -1.0)] - assert expected_points == data_points + assert_resp_response(client, data_points, [(1000, -1.0)], [[1000, -1.0]]) assert 1000 == client.ts().decrby("time-series-1", 10.1, timestamp=1000) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1000, -11.1)] - assert expected_points == data_points + assert_resp_response(client, data_points, [(1000, -11.1)], [[1000, -11.1]]) @skip_ifmodversion_lt("1.12.0", "timeseries") @@ -1165,5 +1172,9 @@ def test_madd_with_insertion_filters(client): ) data_points = client.ts().range("time-series-1", "-", "+") - expected_points = [(1010, 1.0), (1020, 2.0), (1021, 22.0)] - assert expected_points == data_points + assert_resp_response( + client, + data_points, + [(1010, 1.0), (1020, 2.0), (1021, 22.0)], + [[1010, 1.0], [1020, 2.0], [1021, 22.0]], + ) From 595eb518b2a10e688c5a9899c6d52cf67e2dba4e Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Tue, 18 Jun 2024 18:16:53 +0300 Subject: [PATCH 14/16] Remove async parser from test fixture params Leave the decision about which async parser to use in tests to be made based on the availability of hiredis and on the protocol that is set for the tests.
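That selection can be sketched roughly as follows. This is an illustrative helper rather than code from redis-py: it assumes the _AsyncRESP2Parser, _AsyncRESP3Parser and _AsyncHiredisParser classes and the HIREDIS_AVAILABLE flag are importable as shown (two of them appear in the fixture diff below), and the library's own resolution logic may differ in detail.

    from redis._parsers import (
        _AsyncHiredisParser,
        _AsyncRESP2Parser,
        _AsyncRESP3Parser,
    )
    from redis.utils import HIREDIS_AVAILABLE

    def choose_async_parser(protocol):
        # Illustration only: RESP3 traffic goes to the push-aware Python parser;
        # RESP2 uses hiredis when it is installed and falls back to the
        # pure-Python RESP2 parser otherwise.
        if protocol in ("3", 3):
            return _AsyncRESP3Parser
        return _AsyncHiredisParser if HIREDIS_AVAILABLE else _AsyncRESP2Parser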
--- tests/test_asyncio/conftest.py | 35 ++++++---------------------------- 1 file changed, 6 insertions(+), 29 deletions(-) diff --git a/tests/test_asyncio/conftest.py b/tests/test_asyncio/conftest.py index cff239fa11..6e93407b4c 100644 --- a/tests/test_asyncio/conftest.py +++ b/tests/test_asyncio/conftest.py @@ -6,13 +6,11 @@ import pytest_asyncio import redis.asyncio as redis from packaging.version import Version -from redis._parsers import _AsyncHiredisParser, _AsyncRESP2Parser from redis.asyncio import Sentinel from redis.asyncio.client import Monitor from redis.asyncio.connection import Connection, parse_url from redis.asyncio.retry import Retry from redis.backoff import NoBackoff -from redis.utils import HIREDIS_AVAILABLE from tests.conftest import REDIS_INFO from .compat import mock @@ -28,41 +26,21 @@ async def _get_info(redis_url): @pytest_asyncio.fixture( params=[ pytest.param( - (True, _AsyncRESP2Parser), + (True,), marks=pytest.mark.skipif( 'config.REDIS_INFO["cluster_enabled"]', reason="cluster mode enabled" ), ), - (False, _AsyncRESP2Parser), - pytest.param( - (True, _AsyncHiredisParser), - marks=[ - pytest.mark.skipif( - 'config.REDIS_INFO["cluster_enabled"]', - reason="cluster mode enabled", - ), - pytest.mark.skipif( - not HIREDIS_AVAILABLE, reason="hiredis is not installed" - ), - ], - ), - pytest.param( - (False, _AsyncHiredisParser), - marks=pytest.mark.skipif( - not HIREDIS_AVAILABLE, reason="hiredis is not installed" - ), - ), + (False,), ], ids=[ - "single-python-parser", - "pool-python-parser", - "single-hiredis", - "pool-hiredis", + "single", + "pool", ], ) async def create_redis(request): """Wrapper around redis.create_redis.""" - single_connection, parser_cls = request.param + (single_connection,) = request.param teardown_clients = [] @@ -78,10 +56,9 @@ async def client_factory( cluster_mode = REDIS_INFO["cluster_enabled"] if not cluster_mode: single = kwargs.pop("single_connection_client", False) or single_connection - parser_class = kwargs.pop("parser_class", None) or parser_cls url_options = parse_url(url) url_options.update(kwargs) - pool = redis.ConnectionPool(parser_class=parser_class, **url_options) + pool = redis.ConnectionPool(**url_options) client = cls(connection_pool=pool) else: client = redis.RedisCluster.from_url(url, **kwargs) From 7c75bfa2c38560d7f8400b6071b9e2d0dd419bea Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Wed, 19 Jun 2024 09:55:56 +0300 Subject: [PATCH 15/16] Undo parameter rename, to not break the API --- redis/_parsers/resp3.py | 40 +++++++++++++++++-------------------- redis/asyncio/client.py | 2 +- redis/asyncio/connection.py | 8 ++++---- redis/asyncio/sentinel.py | 4 ++-- redis/client.py | 4 +--- redis/connection.py | 6 +++--- redis/sentinel.py | 4 ++-- 7 files changed, 31 insertions(+), 37 deletions(-) diff --git a/redis/_parsers/resp3.py b/redis/_parsers/resp3.py index c9b099c95d..9f73f15c2f 100644 --- a/redis/_parsers/resp3.py +++ b/redis/_parsers/resp3.py @@ -22,12 +22,12 @@ def handle_pubsub_push_response(self, response): logger.info("Push response: " + str(response)) return response - def read_response(self, disable_decoding=False, read_single_push_response=False): + def read_response(self, disable_decoding=False, push_request=False): pos = self._buffer.get_pos() if self._buffer else None try: result = self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) except BaseException: if self._buffer: @@ -37,7 +37,7 @@ def read_response(self, 
disable_decoding=False, read_single_push_response=False) self._buffer.purge() return result - def _read_response(self, disable_decoding=False, read_single_push_response=False): + def _read_response(self, disable_decoding=False, push_request=False): raw = self._buffer.readline() if not raw: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) @@ -108,7 +108,7 @@ def _read_response(self, disable_decoding=False, read_single_push_response=False key = self._read_response(disable_decoding=disable_decoding) resp_dict[key] = self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) response = resp_dict # push response @@ -116,12 +116,12 @@ def _read_response(self, disable_decoding=False, read_single_push_response=False response = [ self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) for _ in range(int(response)) ] response = self.handle_push_response( - response, disable_decoding, read_single_push_response + response, disable_decoding, push_request ) else: raise InvalidResponse(f"Protocol Error: {raw!r}") @@ -130,18 +130,16 @@ def _read_response(self, disable_decoding=False, read_single_push_response=False response = self.encoder.decode(response) return response - def handle_push_response( - self, response, disable_decoding, read_single_push_response - ): + def handle_push_response(self, response, disable_decoding, push_request): if response[0] in _INVALIDATION_MESSAGE: res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if read_single_push_response: + if push_request: return res return self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) def set_pubsub_push_handler(self, pubsub_push_handler_func): @@ -163,7 +161,7 @@ def handle_pubsub_push_response(self, response): return response async def read_response( - self, disable_decoding: bool = False, read_single_push_response: bool = False + self, disable_decoding: bool = False, push_request: bool = False ): if self._chunks: # augment parsing buffer with previously read data @@ -172,14 +170,14 @@ async def read_response( self._pos = 0 response = await self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) # Successfully parsing a response allows us to clear our parsing buffer self._clear() return response async def _read_response( - self, disable_decoding: bool = False, read_single_push_response: bool = False + self, disable_decoding: bool = False, push_request: bool = False ) -> Union[EncodableT, ResponseError, None]: if not self._stream or not self.encoder: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) @@ -255,7 +253,7 @@ async def _read_response( key = await self._read_response(disable_decoding=disable_decoding) resp_dict[key] = await self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) response = resp_dict # push response @@ -264,13 +262,13 @@ async def _read_response( ( await self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) ) for _ in range(int(response)) ] response = await self.handle_push_response( - response, disable_decoding, read_single_push_response + 
response, disable_decoding, push_request ) else: raise InvalidResponse(f"Protocol Error: {raw!r}") @@ -279,18 +277,16 @@ async def _read_response( response = self.encoder.decode(response) return response - async def handle_push_response( - self, response, disable_decoding, read_single_push_response - ): + async def handle_push_response(self, response, disable_decoding, push_request): if response[0] in _INVALIDATION_MESSAGE: res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if read_single_push_response: + if push_request: return res return await self._read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) def set_pubsub_push_handler(self, pubsub_push_handler_func): diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index dbf725b1f7..1845b7252f 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -966,7 +966,7 @@ async def parse_response(self, block: bool = True, timeout: float = 0): conn.read_response, timeout=read_timeout, disconnect_on_error=False, - read_single_push_response=True, + push_request=True, ) if conn.health_check_interval and response in self.health_check_response: diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 5951570442..b0a3b2db68 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -551,7 +551,7 @@ async def read_response( timeout: Optional[float] = None, *, disconnect_on_error: bool = True, - read_single_push_response: Optional[bool] = False, + push_request: Optional[bool] = False, ): """Read the response from a previously sent command""" read_timeout = timeout if timeout is not None else self.socket_timeout @@ -565,7 +565,7 @@ async def read_response( async with async_timeout(read_timeout): response = await self._parser.read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) elif read_timeout is not None: async with async_timeout(read_timeout): @@ -575,7 +575,7 @@ async def read_response( elif self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = await self._parser.read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) else: response = await self._parser.read_response( @@ -715,7 +715,7 @@ async def _get_from_local_cache(self, command: str): ): return None while not self._socket_is_empty(): - await self.read_response(read_single_push_response=True) + await self.read_response(push_request=True) return self.client_cache.get(command) def _add_to_local_cache( diff --git a/redis/asyncio/sentinel.py b/redis/asyncio/sentinel.py index a0207b0fd1..6fd233adc8 100644 --- a/redis/asyncio/sentinel.py +++ b/redis/asyncio/sentinel.py @@ -72,14 +72,14 @@ async def read_response( timeout: Optional[float] = None, *, disconnect_on_error: Optional[float] = True, - read_single_push_response: Optional[bool] = False, + push_request: Optional[bool] = False, ): try: return await super().read_response( disable_decoding=disable_decoding, timeout=timeout, disconnect_on_error=disconnect_on_error, - read_single_push_response=read_single_push_response, + push_request=push_request, ) except ReadOnlyError: if self.connection_pool.is_master: diff --git a/redis/client.py b/redis/client.py index b8a56ed9ee..b7a1f88d92 100755 --- a/redis/client.py +++ b/redis/client.py @@ -880,9 +880,7 @@ def try_read(): return None else: 
conn.connect() - return conn.read_response( - disconnect_on_error=False, read_single_push_response=True - ) + return conn.read_response(disconnect_on_error=False, push_request=True) response = self._execute(conn, try_read) diff --git a/redis/connection.py b/redis/connection.py index 6d29422d37..d618da710a 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -533,7 +533,7 @@ def read_response( disable_decoding=False, *, disconnect_on_error=True, - read_single_push_response=False, + push_request=False, ): """Read the response from a previously sent command""" @@ -543,7 +543,7 @@ def read_response( if self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = self._parser.read_response( disable_decoding=disable_decoding, - read_single_push_response=read_single_push_response, + push_request=push_request, ) else: response = self._parser.read_response(disable_decoding=disable_decoding) @@ -635,7 +635,7 @@ def _get_from_local_cache(self, command: Sequence[str]): ): return None while self.can_read(): - self.read_response(read_single_push_response=True) + self.read_response(push_request=True) return self.client_cache.get(command) def _add_to_local_cache( diff --git a/redis/sentinel.py b/redis/sentinel.py index dfa276d054..72b5bef548 100644 --- a/redis/sentinel.py +++ b/redis/sentinel.py @@ -62,13 +62,13 @@ def read_response( disable_decoding=False, *, disconnect_on_error: Optional[bool] = False, - read_single_push_response: Optional[bool] = False, + push_request: Optional[bool] = False, ): try: return super().read_response( disable_decoding=disable_decoding, disconnect_on_error=disconnect_on_error, - read_single_push_response=read_single_push_response, + push_request=push_request, ) except ReadOnlyError: if self.connection_pool.is_master: From 9ded3b5b86713067fb8546243685ade7b0878938 Mon Sep 17 00:00:00 2001 From: Gabriel Erzse Date: Wed, 19 Jun 2024 09:57:43 +0300 Subject: [PATCH 16/16] Undo some more changes --- redis/_parsers/resp3.py | 38 ++++++++++++++++--------------------- redis/asyncio/connection.py | 6 ++---- redis/connection.py | 3 +-- 3 files changed, 19 insertions(+), 28 deletions(-) diff --git a/redis/_parsers/resp3.py b/redis/_parsers/resp3.py index 9f73f15c2f..7afa43a0c2 100644 --- a/redis/_parsers/resp3.py +++ b/redis/_parsers/resp3.py @@ -26,8 +26,7 @@ def read_response(self, disable_decoding=False, push_request=False): pos = self._buffer.get_pos() if self._buffer else None try: result = self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) except BaseException: if self._buffer: @@ -107,16 +106,14 @@ def _read_response(self, disable_decoding=False, push_request=False): for _ in range(int(response)): key = self._read_response(disable_decoding=disable_decoding) resp_dict[key] = self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) response = resp_dict # push response elif byte == b">": response = [ self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) for _ in range(int(response)) ] @@ -135,12 +132,12 @@ def handle_push_response(self, response, disable_decoding, push_request): res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if push_request: + if not push_request: + return self._read_response( + 
disable_decoding=disable_decoding, push_request=push_request + ) + else: return res - return self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, - ) def set_pubsub_push_handler(self, pubsub_push_handler_func): self.pubsub_push_handler_func = pubsub_push_handler_func @@ -169,8 +166,7 @@ async def read_response( self._chunks.clear() self._pos = 0 response = await self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) # Successfully parsing a response allows us to clear our parsing buffer self._clear() @@ -252,8 +248,7 @@ async def _read_response( for _ in range(int(response)): key = await self._read_response(disable_decoding=disable_decoding) resp_dict[key] = await self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) response = resp_dict # push response @@ -261,8 +256,7 @@ async def _read_response( response = [ ( await self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) ) for _ in range(int(response)) @@ -282,12 +276,12 @@ async def handle_push_response(self, response, disable_decoding, push_request): res = self.invalidation_push_handler_func(response) else: res = self.pubsub_push_handler_func(response) - if push_request: + if not push_request: + return await self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + else: return res - return await self._read_response( - disable_decoding=disable_decoding, - push_request=push_request, - ) def set_pubsub_push_handler(self, pubsub_push_handler_func): self.pubsub_push_handler_func = pubsub_push_handler_func diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index b0a3b2db68..96f18876ea 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -564,8 +564,7 @@ async def read_response( ): async with async_timeout(read_timeout): response = await self._parser.read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) elif read_timeout is not None: async with async_timeout(read_timeout): @@ -574,8 +573,7 @@ async def read_response( ) elif self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = await self._parser.read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) else: response = await self._parser.read_response( diff --git a/redis/connection.py b/redis/connection.py index d618da710a..f745ecc1d5 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -542,8 +542,7 @@ def read_response( try: if self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: response = self._parser.read_response( - disable_decoding=disable_decoding, - push_request=push_request, + disable_decoding=disable_decoding, push_request=push_request ) else: response = self._parser.read_response(disable_decoding=disable_decoding)
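Net effect of the last two patches: the public read_response signature keeps the push_request flag with its original meaning: True returns the next push frame as-is, while False dispatches push frames (for example, invalidation messages) to the registered handlers and keeps reading until a regular reply arrives. A rough usage sketch of the two modes, mirroring the call sites in the diffs above; the connection handling and the key name are illustrative, not part of the patch, and a reachable Redis server is assumed.

    import redis

    r = redis.Redis(protocol=3)  # RESP3, so push frames can arrive on the connection
    conn = r.connection_pool.get_connection("_")

    # Drain any pending push frames one at a time, as the client-side cache
    # code in the diff does with read_response(push_request=True).
    while conn.can_read():
        conn.read_response(push_request=True)

    # Regular command flow: push frames go to the push handlers and the
    # command's own reply is returned (push_request defaults to False).
    conn.send_command("GET", "some-key")
    reply = conn.read_response()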