
Commit b71c722

Authored Mar 5, 2025
Merge pull request #7 from justmobilize/fix-iter-issues
Fix iter issues
2 parents e4c0960 + 42e7851 commit b71c722

3 files changed (+655, -31 lines)
 

adafruit_json_stream.py

Lines changed: 69 additions & 31 deletions
@@ -26,6 +26,7 @@ def __init__(self, data_iter):
         self.data_iter = data_iter
         self.i = 0
         self.chunk = b""
+        self.last_char = None
 
     def read(self):
         """Read the next character from the stream."""
@@ -39,16 +40,37 @@ def read(self):
         self.i += 1
         return char
 
-    def fast_forward(self, closer):
-        """Read through the stream until the character is ``closer``, ``]``
+    def fast_forward(self, closer, *, return_object=False):
+        """
+        Read through the stream until the character is ``closer``, ``]``
         (ending a list) or ``}`` (ending an object.) Intermediate lists and
-        objects are skipped."""
+        objects are skipped.
+
+        :param str closer: the character to read until
+        :param bool return_object: read until the closer,
+          and then parse the data and return as an object
+        """
+
         closer = ord(closer)
         close_stack = [closer]
         count = 0
+
+        buffer = None
+        if return_object:
+            buffer = bytearray(32)
+            # ] = 93, [ = 91
+            # } = 125, { = 123
+            buffer[0] = closer - 2
+
         while close_stack:
             char = self.read()
             count += 1
+            if buffer:
+                if count == len(buffer):
+                    new_buffer = bytearray(len(buffer) + 32)
+                    new_buffer[: len(buffer)] = buffer
+                    buffer = new_buffer
+                buffer[count] = char
             if char == close_stack[-1]:
                 close_stack.pop()
             elif char == ord('"'):
@@ -63,6 +85,9 @@ def fast_forward(self, closer):
                 close_stack.append(ord("}"))
             elif char == ord("["):
                 close_stack.append(ord("]"))
+        if buffer:
+            value_string = bytes(memoryview(buffer)[: count + 1]).decode("utf-8")
+            return json.loads(value_string)
         return False
 
     def next_value(self, endswith=None):
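The two fast_forward hunks above add a capture path: with return_object set, every byte read while skipping is stored in a bytearray that is seeded with the matching opener, grown in 32-byte steps, and finally decoded and handed to json.loads. A standalone sketch of the same capture-and-parse idea, with invented names (capture_container, read_char) that are not part of the library, and without the close-stack handling for quotes and mixed brackets that the real method keeps:

import json

def capture_container(read_char, closer):
    # read_char: zero-argument callable returning the next byte as an int,
    # a hypothetical stand-in for the read() method patched above.
    opener = closer - 2  # "]" (93) pairs with "[" (91), "}" (125) with "{" (123)
    buffer = bytearray([opener])
    depth = 1
    while depth:
        char = read_char()
        buffer.append(char)
        if char == opener:
            depth += 1
        elif char == closer:
            depth -= 1
    # The library additionally tracks quote characters and the other bracket
    # type with a close stack; this sketch assumes neither appears in strings.
    return json.loads(bytes(buffer).decode("utf-8"))

# Example: the bytes that follow an already-consumed "[".
data = iter(b'1, {"a": 2}, [3, 4]]')
print(capture_container(lambda: next(data), ord("]")))  # [1, {'a': 2}, [3, 4]]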
@@ -77,10 +102,10 @@ def next_value(self, endswith=None):
             except EOFError:
                 char = endswith
             if not in_string and (char == endswith or char in (ord("]"), ord("}"))):
+                self.last_char = char
                 if len(buf) == 0:
                     return None
                 value_string = bytes(buf).decode("utf-8")
-                # print(f"{repr(value_string)}, {endswith=}")
                 return json.loads(value_string)
             if char == ord("{"):
                 return TransientObject(self)
@@ -94,40 +119,56 @@ def next_value(self, endswith=None):
             buf.append(char)
 
 
-class Transient:  # pylint: disable=too-few-public-methods
+class Transient:
     """Transient object representing a JSON object."""
 
-    # This is helpful for checking that something is a TransientList or TransientObject.
-
-
-class TransientList(Transient):
-    """Transient object that acts like a list through the stream."""
-
     def __init__(self, stream):
+        self.active_child = None
         self.data = stream
         self.done = False
-        self.active_child = None
+        self.has_read = False
+        self.finish_char = ""
 
     def finish(self):
         """Consume all of the characters for this list from the stream."""
         if not self.done:
             if self.active_child:
                 self.active_child.finish()
                 self.active_child = None
-            self.data.fast_forward("]")
+            self.data.fast_forward(self.finish_char)
+            self.done = True
+
+    def as_object(self):
+        """Consume all of the characters for this list from the stream and return as an object."""
+        if self.has_read:
+            raise BufferError("Object has already been partly read.")
+
         self.done = True
+        return self.data.fast_forward(self.finish_char, return_object=True)
+
+
+class TransientList(Transient):
+    """Transient object that acts like a list through the stream."""
+
+    def __init__(self, stream):
+        super().__init__(stream)
+        self.finish_char = "]"
 
     def __iter__(self):
         return self
 
     def __next__(self):
+        self.has_read = True
+
         if self.active_child:
             self.active_child.finish()
             self.done = self.data.fast_forward(",")
             self.active_child = None
         if self.done:
             raise StopIteration()
         next_value = self.data.next_value(",")
+        if self.data.last_char == ord("]"):
+            self.done = True
         if next_value is None:
             self.done = True
             raise StopIteration()
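The new as_object() turns a container that has not been read from yet back into a plain Python object by delegating to fast_forward(self.finish_char, return_object=True), while has_read makes a later call fail loudly instead of returning partial data. A compact usage sketch, assuming this patched adafruit_json_stream is importable; the single-chunk payload is invented for illustration:

import json
import adafruit_json_stream

payload = json.dumps({"sub_dict": {"id": 1}, "sub_list": ["a", "b"]}).encode()
stream = adafruit_json_stream.load(iter([payload]))  # one-chunk "stream"

# An untouched container can be materialized in a single call.
assert stream["sub_dict"].as_object() == {"id": 1}

sub_list = stream["sub_list"]
next(sub_list)             # partially consume the list...
try:
    sub_list.as_object()   # ...so it can no longer be returned whole
except BufferError as exc:
    print(exc)             # "Object has already been partly read."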
@@ -140,42 +181,39 @@ class TransientObject(Transient):
     """Transient object that acts like a dictionary through the stream."""
 
     def __init__(self, stream):
-        self.data = stream
-        self.done = False
-        self.buf = array.array("B")
+        super().__init__(stream)
+        self.finish_char = "}"
+        self.active_child_key = None
 
-        self.active_child = None
+    def __getitem__(self, key):
+        if self.active_child and self.active_child_key == key:
+            return self.active_child
 
-    def finish(self):
-        """Consume all of the characters for this object from the stream."""
-        if not self.done:
-            if self.active_child:
-                self.active_child.finish()
-                self.active_child = None
-            self.data.fast_forward("}")
-        self.done = True
+        self.has_read = True
 
-    def __getitem__(self, key):
         if self.active_child:
             self.active_child.finish()
             self.done = self.data.fast_forward(",")
             self.active_child = None
+            self.active_child_key = None
         if self.done:
-            raise KeyError()
+            raise KeyError(key)
 
-        while True:
+        while not self.done:
             current_key = self.data.next_value(":")
             if current_key is None:
-                # print("object done", self)
                 self.done = True
                 break
             if current_key == key:
                 next_value = self.data.next_value(",")
+                if self.data.last_char == ord("}"):
+                    self.done = True
                 if isinstance(next_value, Transient):
                     self.active_child = next_value
+                    self.active_child_key = key
                 return next_value
-        self.data.fast_forward(",")
-        raise KeyError()
+        self.done = self.data.fast_forward(",")
+        raise KeyError(key)
 
 
 def load(data_iter):
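Together with the TransientObject changes above, iteration now stops exactly at a list's closing "]" (tracked via last_char), keys that follow a consumed list stay reachable, and a missing key raises KeyError carrying the key name. A minimal end-to-end sketch, assuming the patched library is importable; the chunked() generator and the payload are illustrative stand-ins for the BytesChunkIO helper added in the tests below:

import json
import adafruit_json_stream

def chunked(data, size=10):
    # Deliver the payload a few bytes at a time, like a socket would.
    for i in range(0, len(data), size):
        yield data[i : i + size]

payload = json.dumps({"list_1": [1, 2, 3], "name": "example"}).encode()
stream = adafruit_json_stream.load(chunked(payload))

assert list(stream["list_1"]) == [1, 2, 3]  # iteration ends cleanly at "]"
assert stream["name"] == "example"          # keys after the list remain reachable
try:
    stream["missing"]
except KeyError as exc:
    print(exc)  # KeyError('missing') now names the missing key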

tests/test_json_stream.py

Lines changed: 545 additions & 0 deletions
@@ -0,0 +1,545 @@
# SPDX-FileCopyrightText: 2025 Justin Myers
#
# SPDX-License-Identifier: Unlicense

import json
import math
import pytest
import adafruit_json_stream


# pylint: disable=invalid-name,pointless-statement,redefined-outer-name


# ---------------
# Helpers
# ---------------


class BytesChunkIO:
    def __init__(self, data=b"", chunk_size=10):
        self.chunk_size = chunk_size
        self.chunks_read = 0
        self.data = data
        self.data_len = len(self.data)
        self.position = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self.position > self.data_len:
            raise StopIteration

        end = self.chunk_size
        if self.position + end > self.data_len:
            end = self.data_len
        chunk = self.data[self.position : self.position + self.chunk_size]

        self.chunks_read += 1
        self.position += self.chunk_size

        return chunk

    def get_chunks_read(self):
        return self.chunks_read


# ---------------
# Fixtures
# ---------------


@pytest.fixture
def dict_with_all_types():
    return """
    {
        "_check": "{\\\"a\\\": 1, \\\"b\\\": [2,3]}",
        "bool": true,
        "dict": {"key": "value"},
        "float": 1.1,
        "int": 1,
        "list": [1,2,3],
        "null": null,
        "string": "string"
    }
    """


@pytest.fixture
def list_with_values():
    return """
    [
        1,
        2,
        3
    ]
    """


@pytest.fixture
def dict_with_keys():
    return """
    {
        "field_1": 1,
        "field_2": 2,
        "field_3": 3
    }
    """


@pytest.fixture
def dict_with_list_with_single_entries():
    return """
    {
        "list_1": [
            {
                "dict_id": 1
            },
            {
                "dict_id": 2
            },
            {
                "dict_id": 3
            },
            {
                "dict_id": 4
            }
        ]
    }
    """


@pytest.fixture
def complex_dict():
    return """
    {
        "list_1": [
            {
                "dict_id": 1,
                "dict_name": "one",
                "sub_dict": {
                    "sub_dict_id": 1.1,
                    "sub_dict_name": "one point one"
                },
                "sub_list": [
                    "a",
                    "b",
                    "c"
                ]
            },
            {
                "dict_id": 2,
                "dict_name": "two",
                "sub_dict": {
                    "sub_dict_id": 2.1,
                    "sub_dict_name": "two point one"
                },
                "sub_list": [
                    "d",
                    "e",
                    "f"
                ]
            }
        ],
        "list_2": [
            {
                "dict_id": 3,
                "dict_name": "three",
                "sub_dict": {
                    "sub_dict_id": 3.1,
                    "sub_dict_name": "three point one"
                },
                "sub_list": [
                    "g",
                    "h",
                    "i"
                ]
            },
            {
                "dict_id": 4,
                "dict_name": "four",
                "sub_dict": {
                    "sub_dict_id": 4.1,
                    "sub_dict_name": "four point one"
                },
                "sub_list": [
                    "j",
                    "k",
                    "l"
                ]
            }
        ]
    }
    """


# ---------------
# Tests
# ---------------


def test_all_types(dict_with_all_types):
    """Test loading a simple dict with all data types."""

    assert json.loads(dict_with_all_types)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_all_types.encode()))

    assert stream["bool"] is True
    assert stream["dict"]["key"] == "value"
    assert stream["float"] == 1.1
    assert stream["int"] == 1
    assert next(stream["list"]) == 1
    assert stream["null"] is None
    assert stream["string"] == "string"


def test_simple_dict_with_keys(dict_with_keys):
    """Test loading a simple dict with keys."""

    assert json.loads(dict_with_keys)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_keys.encode()))
    for i in range(1, 4):
        assert stream[f"field_{i}"] == i
    with pytest.raises(KeyError, match="field_4"):
        stream["field_4"]


def test_simple_dict_with_grabbing_key_twice_raises(dict_with_keys):
    """Test loading a simple dict and grabbing the same key twice raises."""

    assert json.loads(dict_with_keys)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_keys.encode()))
    assert stream["field_1"] == 1
    with pytest.raises(KeyError, match="field_1"):
        stream["field_1"]


def test_simple_dict_with_keys_middle_key(dict_with_keys):
    """Test loading a simple dict and grabbing a key in the middle."""

    assert json.loads(dict_with_keys)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_keys.encode()))
    assert stream["field_2"] == 2


def test_simple_dict_with_keys_missing_key_raises(dict_with_keys):
    """Test loading a simple dict and grabbing a key that doesn't exist raises."""

    assert json.loads(dict_with_keys)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_keys.encode()))
    with pytest.raises(KeyError, match="field_4"):
        stream["field_4"]


def test_list_with_values(list_with_values):
    """Test loading a list and iterating over it."""

    assert json.loads(list_with_values)

    stream = adafruit_json_stream.load(BytesChunkIO(list_with_values.encode()))
    counter = 0
    for value in stream:
        counter += 1
        assert value == counter


def test_dict_with_list_of_single_entries(dict_with_list_with_single_entries):
    """Test loading a dict with a list of dicts with one entry each."""

    assert json.loads(dict_with_list_with_single_entries)

    stream = adafruit_json_stream.load(
        BytesChunkIO(dict_with_list_with_single_entries.encode())
    )
    counter = 0
    for obj in stream["list_1"]:
        counter += 1
        assert obj["dict_id"] == counter
    assert counter == 4


def test_complex_dict(complex_dict):
    """Test loading a complex dict."""

    assert json.loads(complex_dict)

    dict_names = [
        "one",
        "two",
        "three",
        "four",
    ]

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))
    counter = 0
    sub_counter = 0
    for obj in stream["list_1"]:
        counter += 1
        assert obj["dict_id"] == counter
        assert obj["dict_name"] == dict_names[counter - 1]
        sub_dict = obj["sub_dict"]
        assert sub_dict["sub_dict_id"] == counter + 0.1
        assert sub_dict["sub_dict_name"] == f"{dict_names[counter-1]} point one"
        for item in obj["sub_list"]:
            sub_counter += 1
            assert item == chr(96 + sub_counter)

    assert counter == 2
    assert sub_counter == 6

    for obj in stream["list_2"]:
        counter += 1
        assert obj["dict_id"] == counter
        assert obj["dict_name"] == dict_names[counter - 1]
        sub_dict = obj["sub_dict"]
        assert sub_dict["sub_dict_id"] == counter + 0.1
        assert sub_dict["sub_dict_name"] == f"{dict_names[counter-1]} point one"
        for item in obj["sub_list"]:
            sub_counter += 1
            assert item == chr(96 + sub_counter)

    assert counter == 4
    assert sub_counter == 12


def test_complex_dict_grabbing(complex_dict):
    """Test loading a complex dict and grabbing specific keys."""

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    sub_list = dict_1["sub_list"]
    assert next(sub_list) == "a"
    list_2 = stream["list_2"]
    next(list_2)
    dict_2 = next(list_2)
    sub_list = dict_2["sub_list"]
    assert next(sub_list) == "j"


def test_complex_dict_passed_key_raises(complex_dict):
    """
    Test loading a complex dict and attempting to grab a specific key that has been passed raises.
    """

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    assert dict_1["dict_name"] == "one"
    with pytest.raises(KeyError, match="obects_id"):
        stream["obects_id"]


def test_complex_dict_passed_reference_raises(complex_dict):
    """
    Test loading a complex dict and attempting to grab data from a saved reference that has
    been passed raises.
    """

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    sub_dict = dict_1["sub_dict"]
    sub_list = dict_1["sub_list"]
    list_2 = stream["list_2"]
    next(list_2)
    with pytest.raises(KeyError, match="sub_dict_id"):
        sub_dict["sub_dict_id"]
    with pytest.raises(StopIteration):
        next(sub_list)


# complex_dict is 1518 bytes
@pytest.mark.parametrize(
    ("chunk_size", "expected_chunks"), ((10, 152), (50, 31), (100, 16), (5000, 1))
)
def test_complex_dict_buffer_sizes(chunk_size, complex_dict, expected_chunks):
    """Test loading a complex dict and checking the chunking."""

    assert json.loads(complex_dict)

    bytes_io_chunk = BytesChunkIO(complex_dict.encode(), chunk_size)

    stream = adafruit_json_stream.load(bytes_io_chunk)

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    sub_list = dict_1["sub_list"]
    assert next(sub_list) == "a"
    list_2 = stream["list_2"]
    next(list_2)
    dict_2 = next(list_2)
    sub_list = dict_2["sub_list"]
    assert next(sub_list) == "j"
    for _ in sub_list:
        pass
    with pytest.raises(KeyError):
        stream["list_3"]

    assert bytes_io_chunk.get_chunks_read() == expected_chunks
    assert math.ceil(len(complex_dict) / chunk_size) == expected_chunks


# complex_dict is 1518 bytes
@pytest.mark.parametrize(
    ("chunk_size", "expected_chunks"), ((5, 61), (10, 31), (50, 7), (100, 4))
)
def test_complex_dict_not_looking_at_all_data_buffer_sizes(
    chunk_size, complex_dict, expected_chunks
):
    """Test loading a complex dict and checking the chunking."""

    assert json.loads(complex_dict)

    bytes_io_chunk = BytesChunkIO(complex_dict.encode(), chunk_size)

    stream = adafruit_json_stream.load(bytes_io_chunk)

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    sub_list = dict_1["sub_list"]
    assert next(sub_list) == "a"

    assert bytes_io_chunk.get_chunks_read() == expected_chunks
    assert math.ceil(len(complex_dict) / chunk_size) >= (expected_chunks / 4)


def test_incomplete_json_raises():
    """Test incomplete json raises."""

    data = """
    {
        "field_1": 1
    """

    with pytest.raises(json.JSONDecodeError):
        json.loads(data)

    stream = adafruit_json_stream.load(BytesChunkIO(data.encode()))

    with pytest.raises(EOFError):
        stream["field_2"]


def test_as_object(complex_dict):
    """Test loading a complex dict and grabbing parts as objects."""

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    assert dict_1["sub_dict"].as_object() == {
        "sub_dict_id": 1.1,
        "sub_dict_name": "one point one",
    }
    assert dict_1["sub_list"].as_object() == ["a", "b", "c"]
    dict_2 = next(list_1)
    assert dict_2.as_object() == {
        "dict_id": 2,
        "dict_name": "two",
        "sub_dict": {"sub_dict_id": 2.1, "sub_dict_name": "two point one"},
        "sub_list": ["d", "e", "f"],
    }
    assert stream["list_2"].as_object() == [
        {
            "dict_id": 3,
            "dict_name": "three",
            "sub_dict": {"sub_dict_id": 3.1, "sub_dict_name": "three point one"},
            "sub_list": ["g", "h", "i"],
        },
        {
            "dict_id": 4,
            "dict_name": "four",
            "sub_dict": {"sub_dict_id": 4.1, "sub_dict_name": "four point one"},
            "sub_list": ["j", "k", "l"],
        },
    ]


def test_as_object_stream(dict_with_all_types):

    assert json.loads(dict_with_all_types)

    stream = adafruit_json_stream.load(BytesChunkIO(dict_with_all_types.encode()))

    obj = stream.as_object()
    assert obj == {
        "_check": '{"a": 1, "b": [2,3]}',
        "bool": True,
        "dict": {"key": "value"},
        "float": 1.1,
        "int": 1,
        "list": [1, 2, 3],
        "null": None,
        "string": "string",
    }
    assert json.loads(obj["_check"]) == {
        "a": 1,
        "b": [
            2,
            3,
        ],
    }


def test_as_object_that_is_partially_read_raises(complex_dict):
    """Test loading a complex dict and grabbing a partially read object raises."""

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    assert dict_1["dict_id"] == 1
    with pytest.raises(BufferError):
        dict_1.as_object()


def test_as_object_grabbing_multiple_subscriptable_levels_twice(complex_dict):
    """Test loading a complex dict and grabbing multiple subscriptable levels twice."""

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    assert dict_1["sub_dict"]["sub_dict_id"] == 1.1
    assert dict_1["sub_dict"]["sub_dict_name"] == "one point one"


def test_as_object_grabbing_multiple_subscriptable_levels_again_after_passed_raises(
    complex_dict,
):
    """
    Test loading a complex dict and grabbing multiple subscriptable levels after passing it raises.
    """

    assert json.loads(complex_dict)

    stream = adafruit_json_stream.load(BytesChunkIO(complex_dict.encode()))

    list_1 = stream["list_1"]
    dict_1 = next(list_1)
    assert dict_1["sub_dict"]["sub_dict_id"] == 1.1
    assert next(dict_1["sub_list"]) == "a"
    with pytest.raises(KeyError, match="sub_dict"):
        dict_1["sub_dict"]["sub_dict_name"]

tox.ini

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
# SPDX-FileCopyrightText: 2022 Kevin Conley
# SPDX-FileCopyrightText: 2024 Justin Myers for Adafruit Industries
#
# SPDX-License-Identifier: MIT

[tox]
envlist = py311

[testenv]
description = run tests
deps =
    pytest==7.4.3
    requests
commands = pytest

[testenv:coverage]
description = run coverage
deps =
    pytest==7.4.3
    pytest-cov==4.1.0
    requests
package = editable
commands =
    coverage run --source=. --omit=tests/* --branch {posargs} -m pytest
    coverage report
    coverage html

[testenv:lint]
description = run linters
deps =
    pre-commit==3.6.0
skip_install = true
commands = pre-commit run {posargs}

[testenv:docs]
description = build docs
deps =
    -r requirements.txt
    -r docs/requirements.txt
skip_install = true
commands = sphinx-build -E -W -b html docs/. _build/html
