
Commit 9c8aa1d

Test new API: serialization tests for invalid arg
A while ago we decided that it's best to research each of the individual attributes one by one and identify what level of validation it needs compared to how we use it: #1366 (comment).

This work is ongoing, and a couple of commits for it have already been merged:
- 6c5d970
- f20664d
- 41afb1e

We want to be able to test attribute validation against known bad values. We do that with the table testing added via decorators for the metadata classes defined in the New API (#1416). This gives us an easy way to add new cases for each attribute without depending on external files.

Signed-off-by: Martin Vrachev <[email protected]>
1 parent 452aad8 commit 9c8aa1d
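
For context on the mechanism the commit message refers to: the table-testing decorator (added in #1416, whose wrapper signature is visible in a hunk header below) drives one test method over every named case in a DataSet. The following is a minimal sketch rather than an exact copy of the project's code; the DataSet alias and the subTest-based loop are assumptions based on the signatures shown in the diff.

    from typing import Callable, Dict

    DataSet = Dict[str, str]

    def run_sub_tests_with_dataset(dataset: DataSet):
        """Decorator: run the wrapped test once per named case in 'dataset'."""
        def real_decorator(function: Callable[["TestSerialization", str], None]):
            def wrapper(test_cls: "TestSerialization"):
                for case, data in dataset.items():
                    # Each case is an independent sub-test, so one failing
                    # JSON fixture does not hide the remaining cases.
                    with test_cls.subTest(case=case):
                        function(test_cls, data)
            return wrapper
        return real_decorator

Each test method in the diff is decorated with its dataset, e.g. @run_sub_tests_with_dataset(invalid_signed), and receives one raw JSON string per sub-test.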

File tree

1 file changed (+107, -4 lines)

tests/test_metadata_serialization.py

Lines changed: 107 additions & 4 deletions
@@ -12,11 +12,13 @@
 import unittest
 import copy
 
-from typing import Dict, Callable
+from typing import Dict, Callable, Optional, Mapping, Any
+from datetime import datetime
 
 from tests import utils
 
 from tuf.api.metadata import (
+    Signed,
     Root,
     Snapshot,
     Timestamp,
@@ -50,6 +52,44 @@ def wrapper(test_cls: "TestSerialization"):
 
 class TestSerialization(unittest.TestCase):
 
+    # Snapshot instances with meta = {} are valid, but for a full valid
+    # repository it's required that meta has at least one element inside it.
+    invalid_signed: DataSet = {
+        "no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "no spec_version": '{"_type": "signed", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "no version": '{"_type": "signed", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "no expires": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "meta": {}}',
+        "empty str _type":
+            '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "empty str spec_version":
+            '{"_type": "signed", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "_type wrong type":
+            '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "version wrong type":
+            '{"_type": "signed", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "invalid spec_version str":
+            '{"_type": "signed", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "two digit spec_version":
+            '{"_type": "signed", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "no digit spec_version":
+            '{"_type": "signed", "spec_version": "a.b.c", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "different major spec_version":
+            '{"_type": "signed", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "version 0":
+            '{"_type": "signed", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "version below 0":
+            '{"_type": "signed", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
+        "wrong datetime string":
+            '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "expires": "abc", "meta": {}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_signed)
+    def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError, TypeError)):
+            Snapshot.from_dict(copy.deepcopy(case_dict))
+
+
     valid_keys: DataSet = {
         "all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \
             "keyval": {"public": "foo"}}',
@@ -60,12 +100,45 @@ class TestSerialization(unittest.TestCase):
     }
 
     @run_sub_tests_with_dataset(valid_keys)
-    def test_key_serialization(self, test_case_data: str):
+    def test_valid_key_serialization(self, test_case_data: str):
         case_dict = json.loads(test_case_data)
         key = Key.from_dict("id", copy.copy(case_dict))
         self.assertDictEqual(case_dict, key.to_dict())
 
 
+    invalid_keys: DataSet = {
+        "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}',
+        "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}',
+        "no keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256"}',
+        "keyid wrong type": '{"keyid": 1, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "keytype wrong type": '{"keyid": "id", "keytype": 1, "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "scheme wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": 1, "keyval": {"public": "abc"}}',
+        "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keys)
+    def test_invalid_key_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_roles: DataSet = {
+        "no threshold": '{"keyids": ["keyid"]}',
+        "no keyids": '{"threshold": 3}',
+        "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}',
+        "threshold below 1": '{"keyids": ["keyid"], "threshold": 0}',
+        "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_roles)
+    def test_invalid_role_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, TypeError, ValueError)):
+            Role.from_dict(copy.deepcopy(case_dict))
+
+
     valid_roles: DataSet = {
         "all": '{"keyids": ["keyid"], "threshold": 3}',
         "unrecognized field": '{"keyids": ["keyid"], "threshold": 3, "foo": "bar"}',
@@ -102,12 +175,28 @@ def test_root_serialization(self, test_case_data: str):
         root = Root.from_dict(copy.deepcopy(case_dict))
         self.assertDictEqual(case_dict, root.to_dict())
 
+
+    invalid_metafiles: DataSet = {
+        "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}',
+        "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}',
+        "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}',
+        "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}',
+        "hashes wrong type": '{"version": 1, "length": 1, "hashes": 1}',
+        "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_metafiles)
+    def test_invalid_metafile_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, ValueError, AttributeError)):
+            MetaFile.from_dict(copy.deepcopy(case_dict))
+
+
     valid_metafiles: DataSet = {
         "all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}',
         "no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }',
         "no hashes": '{"length": 12, "version": 1}',
-        "unrecognized field": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1, \
-            "foo": "bar"}',
+        "unrecognized field": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1, "foo": "bar"}',
     }
 
     @run_sub_tests_with_dataset(valid_metafiles)
@@ -181,6 +270,20 @@ def test_delegation_serialization(self, test_case_data: str):
         self.assertDictEqual(case_dict, delegation.to_dict())
 
 
+    invalid_targetfiles: DataSet = {
+        "no hashes": '{"length": 1}',
+        "no length": '{"hashes": {"sha256": "abc"}}'
+        # The remaining cases are the same as for invalid_hashes and
+        # invalid_length datasets.
+    }
+
+    @run_sub_tests_with_dataset(invalid_targetfiles)
+    def test_invalid_targetfile_serialization(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises(KeyError):
+            TargetFile.from_dict(copy.deepcopy(case_dict))
+
+
     valid_targetfiles: DataSet = {
         "all": '{"length": 12, "hashes": {"sha256" : "abc"}, \
             "custom" : {"foo": "bar"} }',
