import unittest
import copy

-from typing import Dict, Callable
+from typing import Dict, Callable, Optional, Mapping, Any
+from datetime import datetime

from tests import utils

from tuf.api.metadata import (
+    Signed,
    Root,
    Snapshot,
    Timestamp,
@@ -197,6 +199,198 @@ def test_targets_serialization(self, test_case_data):
        self.assertDictEqual(case_dict, targets.to_dict())


+class TestSigned(Signed):
+    """Used for testing the abstract "Signed" class."""
+
+    _signed_type = "signed"
+
+    def __init__(
+        self,
+        version: int,
+        spec_version: str,
+        expires: datetime,
+        unrecognized_fields: Optional[Mapping[str, Any]]
+    ) -> None:
+        super().__init__(
+            version, spec_version, expires, unrecognized_fields
+        )
+
+    @classmethod
+    def from_dict(cls, signed_dict: Dict[str, Any]) -> "Signed":
+        common_args = super()._common_fields_from_dict(signed_dict)
+        # All fields left in the signed_dict are unrecognized.
+        return cls(*common_args, signed_dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Returns the dict representation of self."""
+        return super()._common_fields_to_dict()
+
+
+class TestInvalidSerialization(unittest.TestCase):
+    """Test that deserializing invalid metadata dictionaries raises errors."""
+
+    invalid_type: DataSet = {
+        "no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z"}',
+        "empty str _type": '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "_type wrong type": '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z"}'
+    }
+
+    @run_sub_tests_with_dataset(invalid_type)
+    def test_invalid_type(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError)):
+            TestSigned.from_dict(copy.copy(case_dict))
+
+    invalid_spec_version: DataSet = {
+        "no spec_version": '{"_type": "signed", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "empty str spec_version": '{"_type": "signed", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "invalid spec_version str": '{"_type": "signed", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "one digit spec_version": '{"_type": "signed", "spec_version": "1", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "two digit spec_version": '{"_type": "signed", "spec_version": "1.2", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "no digit spec_version": '{"_type": "signed", "spec_version": "a.b.c", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+        "different major spec_version": '{"_type": "signed", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z"}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_spec_version)
+    def test_invalid_spec_version(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError)):
+            TestSigned.from_dict(copy.copy(case_dict))
+
+    invalid_version: DataSet = {
+        "no version": '{"_type": "signed", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z"}',
+        "version wrong type": '{"_type": "signed", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z"}',
+        "version 0": '{"_type": "signed", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z"}',
+        "version below 0": '{"_type": "signed", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z"}'
+    }
+
+    @run_sub_tests_with_dataset(invalid_version)
+    def test_invalid_version(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, TypeError, ValueError)):
+            TestSigned.from_dict(copy.copy(case_dict))
+
+    invalid_expires: DataSet = {
+        "no expires": '{"_type": "signed", "spec_version": "1.0.0", "version": 1}',
+        "wrong datetime string": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "expires": "abc"}'
+    }
+
+    @run_sub_tests_with_dataset(invalid_expires)
+    def test_invalid_expires(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError)):
+            TestSigned.from_dict(copy.copy(case_dict))
+
+    invalid_keyid: DataSet = {
+        "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+        "keyid wrong type": '{"keyid": 1, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keyid)
+    def test_invalid_keyid(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_keytype: DataSet = {
+        "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}',
+        "keytype wrong type": '{"keyid": "id", "keytype": 1, "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keytype)
+    def test_invalid_keytype(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_scheme: DataSet = {
+        "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}',
+        "scheme wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": 1, "keyval": {"public": "abc"}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_scheme)
+    def test_invalid_scheme(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_keyval: DataSet = {
+        "no keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256"}',
+        "no public in keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {}}',
+        "keyval public wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": 1}}',
+        "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keyval)
+    def test_invalid_keyval(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, KeyError)):
+            keyid = case_dict.pop("keyid")
+            Key.from_dict(keyid, copy.copy(case_dict))
+
+    invalid_threshold: DataSet = {
+        "no threshold": '{"keyids": ["keyid"]}',
+        "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}',
+        "threshold below 1": '{"keyids": ["keyid"], "threshold": 0}'
+    }
+
+    @run_sub_tests_with_dataset(invalid_threshold)
+    def test_invalid_threshold(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, TypeError, ValueError)):
+            Role.from_dict(copy.deepcopy(case_dict))
+
+    invalid_keyids: DataSet = {
+        "no keyids": '{"threshold": 3}',
+        "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_keyids)
+    def test_invalid_keyids(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((KeyError, ValueError)):
+            Role.from_dict(copy.deepcopy(case_dict))
+
+    invalid_length: DataSet = {
+        "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}',
+        "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}',
+        "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_length)
+    def test_invalid_length(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((TypeError, ValueError)):
+            MetaFile.from_dict(copy.deepcopy(case_dict))
+
+    invalid_hashes: DataSet = {
+        "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}',
+        "hashes wrong type": '{"version": 1, "length": 1, "hashes": 1}',
+        # Hash algorithm names (the dict keys) can only be strings per the
+        # JSON specification, so a non-string key case is not possible here.
+        "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}',
+    }
+
+    @run_sub_tests_with_dataset(invalid_hashes)
+    def test_invalid_hashes(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises((ValueError, AttributeError, TypeError)):
+            MetaFile.from_dict(copy.deepcopy(case_dict))
+
+    invalid_targetfile_hashes_length: DataSet = {
+        "no hashes": '{"length": 1}',
+        "no length": '{"hashes": {"sha256": 1}}'
+        # The remaining cases are the same as for the invalid_hashes and
+        # invalid_length datasets.
+    }
+
+    @run_sub_tests_with_dataset(invalid_targetfile_hashes_length)
+    def test_invalid_targetfile_hashes_length(self, test_case_data: Dict[str, str]):
+        case_dict = json.loads(test_case_data)
+        with self.assertRaises(KeyError):
+            TargetFile.from_dict(copy.deepcopy(case_dict))
+
# Run unit test.
if __name__ == '__main__':
    utils.configure_test_logging(sys.argv)
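
Note: the `DataSet` alias and the `run_sub_tests_with_dataset` decorator used throughout these new tests are defined earlier in the test module and are outside this hunk. As a rough, hypothetical sketch (names taken from the diff, implementation assumed, not part of this change), such a helper just reruns the wrapped test once per named case as a subtest:

    # Hypothetical sketch only: the real definitions live earlier in
    # tests/test_metadata_serialization.py and may differ in detail.
    from typing import Any, Callable, Dict

    DataSet = Dict[str, Any]

    def run_sub_tests_with_dataset(dataset: DataSet):
        """Decorator: run the wrapped test once per (case name, data) pair."""
        def real_decorator(function: Callable[[Any, str], None]):
            def wrapper(test_cls, *args, **kwargs):
                for case, data in dataset.items():
                    # subTest keeps each named case reported separately.
                    with test_cls.subTest(case=case):
                        function(test_cls, data)
            return wrapper
        return real_decorator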