
Commit 68c96f1

New API: Comprehensive serialization testing
The idea of this commit is to move (de)serialization testing out of test_api.py and to make sure from_dict/to_dict are tested with all possible valid data for all classes.

In his comment here: #1391 (comment), Jussi proposed using decorators to build comprehensive metadata serialization tests. The main problems he pointed out are that:
1) a lot of code is needed to generate the data for each case
2) the test implementation scales badly: every new test case requires adding new code as well
3) the dictionary format is not visible - we load external files and assume they are unchanged and valid

In this change I use a decorator with an argument, which complicates the decorator implementation and requires three nested functions, but it resolves all three problems above:
1) no new code is needed when adding a new test case
2) only a small amount of hardcoded data is required for each new test
3) the dictionaries all live in the test module, with no need to create new directories or copy data

Signed-off-by: Martin Vrachev <[email protected]>
1 parent 2a5bfb9 commit 68c96f1
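For illustration, a condensed, self-contained sketch of the decorator pattern described above (the real implementation is in the tests/test_metadata_serialization.py diff below; the assertion here is a simplified stand-in for the actual from_dict/to_dict round-trip):

import json
import unittest

def run_sub_tests_with_dataset(dataset):
    # Outer function: receives the hardcoded test data for one test method.
    def real_decorator(function):
        # Middle function: receives the actual test method being decorated.
        def wrapper(test_cls):
            # Inner function: runs the test once per data item, as a subTest.
            for case, data in dataset.items():
                with test_cls.subTest(case=case):
                    function(test_cls, data)
        return wrapper
    return real_decorator

class TestSerialization(unittest.TestCase):
    # Adding a new case only means adding a new entry to this dictionary.
    valid_roles = {"all": '{"keyids": ["keyid"], "threshold": 3}'}

    @run_sub_tests_with_dataset(valid_roles)
    def test_role_serialization(self, test_case_data):
        case_dict = json.loads(test_case_data)
        # Stand-in check; the real tests compare from_dict/to_dict output.
        self.assertIn("keyids", case_dict)

if __name__ == "__main__":
    unittest.main()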

File tree

2 files changed: +207 −132 lines changed

tests/test_api.py

Lines changed: 1 addition & 132 deletions
@@ -266,27 +266,6 @@ def test_metafile_class(self):
         self.assertEqual(metafile_obj.to_dict(), data)


-    def test_targetfile_class(self):
-        # Test from_dict and to_dict with all attributes.
-        data = {
-            "custom": {
-                "file_permissions": "0644"
-            },
-            "hashes": {
-                "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da",
-                "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77"
-            },
-            "length": 31
-        }
-        targetfile_obj = TargetFile.from_dict(copy.copy(data))
-        self.assertEqual(targetfile_obj.to_dict(), data)
-
-        # Test from_dict and to_dict without custom.
-        del data["custom"]
-        targetfile_obj = TargetFile.from_dict(copy.copy(data))
-        self.assertEqual(targetfile_obj.to_dict(), data)
-
-
     def test_metadata_snapshot(self):
         snapshot_path = os.path.join(
             self.repo_dir, 'metadata', 'snapshot.json')
@@ -305,13 +284,6 @@ def test_metadata_snapshot(self):
             snapshot.signed.meta['role1.json'].to_dict(), fileinfo.to_dict()
         )

-        # Test from_dict and to_dict without hashes and length.
-        snapshot_dict = snapshot.to_dict()
-        del snapshot_dict['signed']['meta']['role1.json']['length']
-        del snapshot_dict['signed']['meta']['role1.json']['hashes']
-        test_dict = copy.deepcopy(snapshot_dict['signed'])
-        snapshot = Snapshot.from_dict(test_dict)
-        self.assertEqual(snapshot_dict['signed'], snapshot.to_dict())

     def test_metadata_timestamp(self):
         timestamp_path = os.path.join(
@@ -350,13 +322,6 @@ def test_metadata_timestamp(self):
             timestamp.signed.meta['snapshot.json'].to_dict(), fileinfo.to_dict()
         )

-        # Test from_dict and to_dict without hashes and length.
-        timestamp_dict = timestamp.to_dict()
-        del timestamp_dict['signed']['meta']['snapshot.json']['length']
-        del timestamp_dict['signed']['meta']['snapshot.json']['hashes']
-        test_dict = copy.deepcopy(timestamp_dict['signed'])
-        timestamp_test = Timestamp.from_dict(test_dict)
-        self.assertEqual(timestamp_dict['signed'], timestamp_test.to_dict())

     def test_key_class(self):
         keys = {
@@ -369,21 +334,12 @@ def test_key_class(self):
             },
         }
         for key_dict in keys.values():
-            # Testing that the workflow of deserializing and serializing
-            # a key dictionary doesn't change the content.
-            test_key_dict = key_dict.copy()
-            key_obj = Key.from_dict("id", test_key_dict)
-            self.assertEqual(key_dict, key_obj.to_dict())
             # Test creating an instance without a required attribute.
             for key in key_dict.keys():
                 test_key_dict = key_dict.copy()
                 del test_key_dict[key]
                 with self.assertRaises(KeyError):
                     Key.from_dict("id", test_key_dict)
-            # Test creating a Key instance with wrong keyval format.
-            key_dict["keyval"] = {}
-            with self.assertRaises(ValueError):
-                Key.from_dict("id", key_dict)


     def test_role_class(self):
@@ -402,23 +358,12 @@ def test_role_class(self):
             },
         }
         for role_dict in roles.values():
-            # Testing that the workflow of deserializing and serializing
-            # a role dictionary doesn't change the content.
-            test_role_dict = role_dict.copy()
-            role_obj = Role.from_dict(test_role_dict)
-            self.assertEqual(role_dict, role_obj.to_dict())
             # Test creating an instance without a required attribute.
             for role_attr in role_dict.keys():
                 test_role_dict = role_dict.copy()
                 del test_role_dict[role_attr]
                 with self.assertRaises(KeyError):
-                    Key.from_dict("id", test_role_dict)
-            # Test creating a Role instance with keyid dublicates.
-            # for keyid in role_dict["keyids"]:
-            role_dict["keyids"].append(role_dict["keyids"][0])
-            test_role_dict = role_dict.copy()
-            with self.assertRaises(ValueError):
-                Role.from_dict(test_role_dict)
+                    Role.from_dict(test_role_dict)


     def test_metadata_root(self):
@@ -465,84 +410,8 @@ def test_metadata_root(self):
         with self.assertRaises(KeyError):
             root.signed.remove_key('root', 'nosuchkey')

-        # Test serializing and deserializing without consistent_snapshot.
-        root_dict = root.to_dict()
-        del root_dict["signed"]["consistent_snapshot"]
-        root = Root.from_dict(copy.deepcopy(root_dict["signed"]))
-        self.assertEqual(root_dict["signed"], root.to_dict())
-
-    def test_delegated_role_class(self):
-        roles = [
-            {
-                "keyids": [
-                    "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a"
-                ],
-                "name": "role1",
-                "paths": [
-                    "file3.txt"
-                ],
-                "terminating": False,
-                "threshold": 1
-            }
-        ]
-        for role in roles:
-            # Testing that the workflow of deserializing and serializing
-            # a delegation role dictionary doesn't change the content.
-            key_obj = DelegatedRole.from_dict(role.copy())
-            self.assertEqual(role, key_obj.to_dict())
-
-            # Test creating a DelegatedRole object with both "paths" and
-            # "path_hash_prefixes" set.
-            role["path_hash_prefixes"] = "foo"
-            with self.assertRaises(ValueError):
-                DelegatedRole.from_dict(role.copy())
-
-            # Test creating DelegatedRole only with "path_hash_prefixes" (an empty one)
-            del role["paths"]
-            role["path_hash_prefixes"] = []
-            role_obj = DelegatedRole.from_dict(role.copy())
-            self.assertEqual(role_obj.to_dict(), role)
-
-            # Test creating DelegatedRole only with "paths" (now an empty one)
-            del role["path_hash_prefixes"]
-            role["paths"] = []
-            role_obj = DelegatedRole.from_dict(role.copy())
-            self.assertEqual(role_obj.to_dict(), role)
-
-            # Test creating DelegatedRole without "paths" and
-            # "path_hash_prefixes" set
-            del role["paths"]
-            role_obj = DelegatedRole.from_dict(role.copy())
-            self.assertEqual(role_obj.to_dict(), role)
-

     def test_delegation_class(self):
-        roles = [
-            {
-                "keyids": [
-                    "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a"
-                ],
-                "name": "role1",
-                "paths": [
-                    "file3.txt"
-                ],
-                "terminating": False,
-                "threshold": 1
-            }
-        ]
-        keys = {
-            "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d":{
-                "keytype": "ed25519",
-                "keyval": {
-                    "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd"
-                },
-                "scheme": "ed25519"
-            },
-        }
-        delegations_dict = {"keys": keys, "roles": roles}
-        delegations = Delegations.from_dict(copy.deepcopy(delegations_dict))
-        self.assertEqual(delegations_dict, delegations.to_dict())
-
         # empty keys and roles
         delegations_dict = {"keys":{}, "roles":[]}
         delegations = Delegations.from_dict(delegations_dict.copy())

tests/test_metadata_serialization.py

Lines changed: 206 additions & 0 deletions
@@ -0,0 +1,206 @@
+# Copyright New York University and the TUF contributors
+# SPDX-License-Identifier: MIT OR Apache-2.0
+
+""" Unit tests testing tuf/api/metadata.py classes
+serialization and deserialization.
+
+"""
+
+import json
+import sys
+import logging
+import unittest
+import copy
+
+from typing import Any, Dict, Callable, Type
+
+from tests import utils
+
+from tuf.api.metadata import (
+    Root,
+    Snapshot,
+    Timestamp,
+    Targets,
+    Key,
+    Role,
+    MetaFile,
+    TargetFile,
+    Delegations,
+    DelegatedRole,
+)
+
+logger = logging.getLogger(__name__)
+
+# DataSet is only here so type hints can be used:
+# It is a dict of name to test dict
+DataSet = Dict[str, str]
+
+# Test runner decorator: Runs the test as a set of N SubTests,
+# (where N is number of items in dataset), feeding the actual test
+# function one data item at a time
+def run_sub_tests_with_dataset(dataset: Type[DataSet]):
+    def real_decorator(function: Callable[["TestSerialization", DataSet], None]):
+        def wrapper(test_cls: "TestSerialization"):
+            for case, data in dataset.items():
+                with test_cls.subTest(case=case):
+                    function(test_cls, data)
+        return wrapper
+    return real_decorator
+
+
+class TestSerialization(unittest.TestCase):
+
+    KEY = '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \
+        "keyval": {"public": "foo"}}'
+    SIGNED_COMMON = '"spec_version": "1.0.0", "version": 1, \
+        "expires": "2030-01-01T00:00:00Z"'
+
+    valid_keys: DataSet = {
+        "all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \
+            "keyval": {"public": "foo"}}',
+    }
+
+    @run_sub_tests_with_dataset(valid_keys)
+    def test_key_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        key = Key.from_dict("id", copy.copy(case_dict))
+        self.assertDictEqual(case_dict, key.to_dict())
+
+
+    valid_roles: DataSet = {
+        "all": '{"keyids": ["keyid"], "threshold": 3}'
+    }
+
+    @run_sub_tests_with_dataset(valid_roles)
+    def test_role_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        role = Role.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, role.to_dict())
+
+
+    valid_roots: DataSet = {
+        "all": f'{{ "_type": "root", {SIGNED_COMMON}, \
+            "consistent_snapshot": false, "keys": {{"keyid" : {KEY} }}, \
+            "roles": {{ "targets": {{"keyids": ["keyid"], "threshold": 3}} }} \
+            }}',
+        "no consistent_snapshot": f'{{ "_type": "root", {SIGNED_COMMON}, \
+            "keys": {{"keyid" : {KEY}}}, \
+            "roles": {{ "targets": {{"keyids": ["keyid"], "threshold": 3}} }} \
+            }}',
+    }
+
+    @run_sub_tests_with_dataset(valid_roots)
+    def test_root_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        root = Root.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, root.to_dict())
+
+    valid_metafiles: DataSet = {
+        "all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}',
+        "no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }',
+        "no hashes": '{"length": 12, "version": 1}'
+    }
+
+    @run_sub_tests_with_dataset(valid_metafiles)
+    def test_metafile_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        metafile = MetaFile.from_dict(copy.copy(case_dict))
+        self.assertDictEqual(case_dict, metafile.to_dict())
+
+
+    valid_timestamps: DataSet = {
+        "all": f'{{ "_type": "timestamp", {SIGNED_COMMON}, \
+            "meta": {{ "snapshot.json": {{ "hashes": {{"sha256" : "abc"}}, "version": 1 }} }} \
+            }}'
+    }
+
+    @run_sub_tests_with_dataset(valid_timestamps)
+    def test_timestamp_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        timestamp = Timestamp.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, timestamp.to_dict())
+
+
+    valid_snapshots: DataSet = {
+        "all": f'{{ "_type": "snapshot", {SIGNED_COMMON}, \
+            "meta": {{ "file.txt": \
+                {{ "hashes": {{"sha256" : "abc"}}, "version": 1 }} }} }}'
+    }
+
+    @run_sub_tests_with_dataset(valid_snapshots)
+    def test_snapshot_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        snapshot = Snapshot.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, snapshot.to_dict())
+
+
+    valid_delegated_roles: DataSet = {
+        "no hash prefix attribute":
+            '{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], \
+            "terminating": false, "threshold": 1}',
+        "no path attribute":
+            '{"keyids": ["keyid"], "name": "a", "terminating": false, \
+            "path_hash_prefixes": ["h1", "h2"], "threshold": 99}',
+        "no hash or path prefix":
+            '{"keyids": ["keyid"], "name": "a", "terminating": true, "threshold": 3}',
+    }
+
+    @run_sub_tests_with_dataset(valid_delegated_roles)
+    def test_delegated_role_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        deserialized_role = DelegatedRole.from_dict(copy.copy(case_dict))
+        self.assertDictEqual(case_dict, deserialized_role.to_dict())
+
+
+    valid_delegations: DataSet = {
+        "all": f'{{"keys": {{"keyid" : {KEY}}}, "roles": [ {{"keyids": ["keyid"], \
+            "name": "a", "terminating": true, "threshold": 3}} ]}}'
+    }
+
+    @run_sub_tests_with_dataset(valid_delegations)
+    def test_delegation_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        delegation = Delegations.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, delegation.to_dict())
+
+
+    valid_targetfiles: DataSet = {
+        "all": '{"length": 12, "hashes": {"sha256" : "abc"}, \
+            "custom" : {"foo": "bar"} }',
+        "no custom": '{"length": 12, "hashes": {"sha256" : "abc"}}'
+    }
+
+    @run_sub_tests_with_dataset(valid_targetfiles)
+    def test_targetfile_serialization(self, test_case_data: str):
+        case_dict = json.loads(test_case_data)
+        target_file = TargetFile.from_dict(copy.copy(case_dict))
+        self.assertDictEqual(case_dict, target_file.to_dict())
+
+
+    valid_targets: DataSet = {
+        "all attributes": f'{{"_type": "targets", {SIGNED_COMMON}, \
+            "targets": {{ "file.txt": {{"length": 12, "hashes": {{"sha256" : "abc"}} }} }}, \
+            "delegations": {{"keys": {{"keyid" : {KEY}}}, \
+            "roles": [ {{"keyids": ["keyid"], "name": "a", "terminating": true, "threshold": 3}} ]}} \
+            }}',
+        "empty targets": f'{{"_type": "targets", {SIGNED_COMMON}, \
+            "targets": {{}}, \
+            "delegations": {{"keys": {{"keyid" : {KEY}}}, \
+            "roles": [ {{"keyids": ["keyid"], "name": "a", "terminating": true, "threshold": 3}} ] \
+            }} }}',
+        "no delegations": f'{{"_type": "targets", {SIGNED_COMMON}, \
+            "targets": {{ "file.txt": {{"length": 12, "hashes": {{"sha256" : "abc"}} }} }} \
+            }}'
+    }
+
+    @run_sub_tests_with_dataset(valid_targets)
+    def test_targets_serialization(self, test_case_data):
+        case_dict = json.loads(test_case_data)
+        targets = Targets.from_dict(copy.deepcopy(case_dict))
+        self.assertDictEqual(case_dict, targets.to_dict())
+
+
+# Run unit test.
+if __name__ == '__main__':
+    utils.configure_test_logging(sys.argv)
+    unittest.main()
