Commit 4dcbbc0

Test new API: serialization tests for invalid arg
A while ago we decided that it's best to research each of the individual attributes one by one and identify what level of validation it needs compared to how we use it: #1366 (comment).

This work is ongoing, and a couple of commits for it have already been merged:
- 6c5d970
- f20664d
- 41afb1e

We want to be able to test attribute validation against known bad values. The way we want to do that is with the table testing we added using decorators for our metadata classes defined in the New API: #1416. This gives us an easy way to add new cases for each attribute without depending on external files.

Signed-off-by: Martin Vrachev <[email protected]>
1 parent c45aac7 commit 4dcbbc0
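The tests below lean on the DataSet alias and the run_sub_tests_with_dataset decorator that the commit message refers to (added with the New API tests in #1416); only the decorator's wrapper signature is visible in a hunk header further down. The following is a minimal sketch of how such a table-testing decorator can be wired up, assuming only what the diff shows; the stand-in table and example test at the bottom are hypothetical and not part of this commit.

import json
import unittest
from typing import Dict

# Each test table maps a human-readable case name to a JSON string.
DataSet = Dict[str, str]


def run_sub_tests_with_dataset(dataset: DataSet):
    """Run the decorated test once per named case, each as a unittest subTest."""

    def real_decorator(function):
        def wrapper(test_cls):
            for case, data in dataset.items():
                with test_cls.subTest(case=case):
                    function(test_cls, data)

        return wrapper

    return real_decorator


class ExampleSerialization(unittest.TestCase):
    # Hypothetical stand-in table; the commit adds real ones such as
    # invalid_signed, invalid_keys, invalid_roles, invalid_metafiles
    # and invalid_targetfiles.
    invalid_numbers: DataSet = {
        "not a number": '"abc"',
        "null": "null",
    }

    @run_sub_tests_with_dataset(invalid_numbers)
    def test_invalid_number(self, test_case_data: str):
        value = json.loads(test_case_data)
        with self.assertRaises(TypeError):
            value + 1  # neither a string nor None supports integer addition


if __name__ == "__main__":
    unittest.main()

Each dictionary key becomes the subTest name, so covering a new bad value is a one-line addition to the relevant table and failures are reported per case.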

File tree

1 file changed: +104 -2 lines changed


tests/test_metadata_serialization.py

Lines changed: 104 additions & 2 deletions
@@ -12,11 +12,13 @@
 import unittest
 import copy

-from typing import Dict, Callable
+from typing import Dict, Callable, Optional, Mapping, Any
+from datetime import datetime

 from tests import utils

 from tuf.api.metadata import (
+    Signed,
     Root,
     Snapshot,
     Timestamp,
@@ -50,18 +52,87 @@ def wrapper(test_cls: "TestSerialization"):

 class TestSerialization(unittest.TestCase):

+    invalid_signed: DataSet = {
+        "no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {"f.txt": {"version": 1}}}',
+        "no spec_version": '{"_type": "signed", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {"f.txt": {"version": 1}}}',
+        "no version": '{"_type": "signed", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {"f.txt": {"version": 1}}}',
+        "no expires": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "meta": {"f.txt": {"version": 1}}}',
+        "empty str _type": '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "empty str spec_version": '{"_type": "signed", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "_type wrong type": '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "version wrong type": '{"_type": "signed", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "invalid spec_version str": '{"_type": "signed", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "two digit spec_version": '{"_type": "signed", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "no digit spec_version": '{"_type": "signed", "spec_version": "a.b.c", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "different major spec_version": '{"_type": "signed", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "version 0": '{"_type": "signed", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "version below 0": '{"_type": "signed", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z", \
+            "meta": {"f.txt": {"version": 1}}}',
+        "wrong datetime string": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "expires": "abc", \
+            "meta": {"f.txt": {"version": 1}}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_signed)
+    def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError, TypeError)):
+            Snapshot.from_dict(copy.deepcopy(case_dict))
+
+
     valid_keys: DataSet = {
         "all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \
             "keyval": {"public": "foo"}}',
     }

     @run_sub_tests_with_dataset(valid_keys)
-    def test_key_serialization(self, test_case_data: str):
+    def test_valid_key_serialization(self, test_case_data: str):
         case_dict = json.loads(test_case_data)
         key = Key.from_dict("id", copy.copy(case_dict))
         self.assertDictEqual(case_dict, key.to_dict())


+    invalid_keys: DataSet = {
+        "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}',
+        "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}',
+        "no keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256"}',
+        "keyid wrong type": '{"keyid": 1, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "keytype wrong type": '{"keyid": "id", "keytype": 1, "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "scheme wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": 1, "keyval": {"public": "abc"}}',
+        "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keys)
+    def test_invalid_key_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_roles: DataSet = {
+        "no threshold": '{"keyids": ["keyid"]}',
+        "no keyids": '{"threshold": 3}',
+        "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}',
+        "threshold below 1": '{"keyids": ["keyid"], "threshold": 0}',
+        "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_roles)
+    def test_invalid_role_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, TypeError, ValueError)):
+            Role.from_dict(copy.deepcopy(case_dict))
+
+
     valid_roles: DataSet = {
         "all": '{"keyids": ["keyid"], "threshold": 3}'
     }
@@ -92,6 +163,23 @@ def test_root_serialization(self, test_case_data: str):
         root = Root.from_dict(copy.deepcopy(case_dict))
         self.assertDictEqual(case_dict, root.to_dict())

+
+    invalid_metafiles: DataSet = {
+        "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}',
+        "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}',
+        "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}',
+        "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}',
+        "hashes wrong type": '{"version": 1, "length": 1, "hashes": 1}',
+        "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_metafiles)
+    def test_invalid_metafile_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, ValueError, AttributeError)):
+            MetaFile.from_dict(copy.deepcopy(case_dict))
+
+
     valid_metafiles: DataSet = {
         "all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}',
         "no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }',
@@ -159,6 +247,20 @@ def test_delegation_serialization(self, test_case_data: str):
         self.assertDictEqual(case_dict, delegation.to_dict())


+    invalid_targetfiles: DataSet = {
+        "no hashes": '{"length": 1}',
+        "no length": '{"hashes": {"sha256": 1}}'
+        # The remaining cases are the same as for invalid_hashes and
+        # invalid_length datasets.
+    }
+
+    @run_sub_tests_with_dataset(invalid_targetfiles)
+    def test_invalid_targetfile_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises(KeyError):
+            TargetFile.from_dict(copy.deepcopy(case_dict))
+
+
     valid_targetfiles: DataSet = {
         "all": '{"length": 12, "hashes": {"sha256" : "abc"}, \
             "custom" : {"foo": "bar"} }',
