Skip to content

Commit b08cef9

Browse files
aarnphm and DarkLight1337
authored and committed
[CI/Build] Update Ruff version (vllm-project#8469)
Signed-off-by: Aaron Pham <[email protected]>
Co-authored-by: Cyrus Leung <[email protected]>
Signed-off-by: Alvant <[email protected]>
1 parent 311d80a commit b08cef9

File tree

27 files changed

+50
-77
lines changed

27 files changed

+50
-77
lines changed

.github/workflows/ruff.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,10 @@ jobs:
2525
- name: Install dependencies
2626
run: |
2727
python -m pip install --upgrade pip
28-
pip install ruff==0.1.5 codespell==2.3.0 tomli==2.0.1 isort==5.13.2
28+
pip install -r requirements-lint.txt
2929
- name: Analysing the code with ruff
3030
run: |
31-
ruff .
31+
ruff check .
3232
- name: Spelling check with codespell
3333
run: |
3434
codespell --toml pyproject.toml

benchmarks/kernels/graph_machete_bench.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,7 @@
4545
rows = int(math.ceil(len(results) / 2))
4646
fig, axs = plt.subplots(rows, 2, figsize=(12, 5 * rows))
4747
axs = axs.flatten()
48-
axs_idx = 0
49-
for shape, data in results.items():
48+
for axs_idx, (shape, data) in enumerate(results.items()):
5049
plt.sca(axs[axs_idx])
5150
df = pd.DataFrame(data)
5251
sns.lineplot(data=df,
@@ -59,6 +58,5 @@
5958
palette="Dark2")
6059
plt.title(f"Shape: {shape}")
6160
plt.ylabel("time (median, s)")
62-
axs_idx += 1
6361
plt.tight_layout()
6462
plt.savefig("graph_machete_bench.pdf")

format.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ echo 'vLLM codespell: Done'
159159

160160
# Lint specified files
161161
lint() {
162-
ruff "$@"
162+
ruff check "$@"
163163
}
164164

165165
# Lint files that differ from main branch. Ignores dirs that are not slated
@@ -175,7 +175,7 @@ lint_changed() {
175175

176176
if ! git diff --diff-filter=ACM --quiet --exit-code "$MERGEBASE" -- '*.py' '*.pyi' &>/dev/null; then
177177
git diff --name-only --diff-filter=ACM "$MERGEBASE" -- '*.py' '*.pyi' | xargs \
178-
ruff
178+
ruff check
179179
fi
180180

181181
}

pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,8 @@ ignore = [
4242
"E731",
4343
# Loop control variable not used within loop body
4444
"B007",
45+
# f-string format
46+
"UP032",
4547
]
4648

4749
[tool.mypy]

requirements-lint.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
yapf==0.32.0
33
toml==0.10.2
44
tomli==2.0.1
5-
ruff==0.1.5
5+
ruff==0.6.5
66
codespell==2.3.0
77
isort==5.13.2
88
clang-format==18.1.5

tests/conftest.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -158,10 +158,7 @@ def should_do_global_cleanup_after_test(request) -> bool:
158158
to initialize torch.
159159
"""
160160

161-
if request.node.get_closest_marker("skip_global_cleanup"):
162-
return False
163-
164-
return True
161+
return not request.node.get_closest_marker("skip_global_cleanup")
165162

166163

167164
@pytest.fixture(autouse=True)

tests/lora/conftest.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -65,10 +65,7 @@ def should_do_global_cleanup_after_test(request) -> bool:
6565
to initialize torch.
6666
"""
6767

68-
if request.node.get_closest_marker("skip_global_cleanup"):
69-
return False
70-
71-
return True
68+
return not request.node.get_closest_marker("skip_global_cleanup")
7269

7370

7471
@pytest.fixture(autouse=True)

tests/multimodal/test_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
def assert_nested_tensors_equal(expected: NestedTensors,
77
actual: NestedTensors):
8-
assert type(expected) == type(actual)
8+
assert type(expected) == type(actual) # noqa: E721
99
if isinstance(expected, torch.Tensor):
1010
assert torch.equal(expected, actual)
1111
else:

tests/test_cache_block_hashing.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,7 @@ def test_auto_prefix_caching(model: str, block_size: int, max_num_seqs: int,
6666

6767
hashes.append([])
6868
prompts = [prefix + prompt for prompt in sample_prompts]
69-
seq_id = 0
70-
for prompt in prompts:
69+
for seq_id, prompt in enumerate(prompts):
7170
hashes[-1].append([])
7271
prompt_token_ids = tokenizer.encode(prompt)
7372
seq = Sequence(seq_id,
@@ -83,8 +82,6 @@ def test_auto_prefix_caching(model: str, block_size: int, max_num_seqs: int,
8382
for idx in range(num_blocks):
8483
hashes[-1][-1].append(seq.hash_of_block(idx))
8584

86-
seq_id += 1
87-
8885
# Check that hashes made with two prefixes with different first blocks are
8986
# different everywhere.
9087
for hash0, hash1 in zip(flatten_2d(hashes[0]), flatten_2d(hashes[1])):

tests/test_logger.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def test_an_error_is_raised_when_custom_logging_config_file_does_not_exist():
111111
configuration occurs."""
112112
with pytest.raises(RuntimeError) as ex_info:
113113
_configure_vllm_root_logger()
114-
assert ex_info.type == RuntimeError
114+
assert ex_info.type == RuntimeError # noqa: E721
115115
assert "File does not exist" in str(ex_info)
116116

117117

@@ -152,7 +152,7 @@ def test_an_error_is_raised_when_custom_logging_config_is_unexpected_json(
152152
logging_config_file.name):
153153
with pytest.raises(ValueError) as ex_info:
154154
_configure_vllm_root_logger()
155-
assert ex_info.type == ValueError
155+
assert ex_info.type == ValueError # noqa: E721
156156
assert "Invalid logging config. Expected Dict, got" in str(ex_info)
157157

158158

0 commit comments

Comments (0)